summaryrefslogtreecommitdiff
path: root/src/mongo/gotools
diff options
context:
space:
mode:
authorRamon Fernandez <ramon@mongodb.com>2017-01-20 13:02:13 -0500
committerRamon Fernandez <ramon@mongodb.com>2017-01-20 13:02:13 -0500
commit32fab3f75af439dc9ef0e95b6107d5bbf7021de4 (patch)
tree842edf36ec0bce1667a47ebee745bfbe53795ca4 /src/mongo/gotools
parenta25baa98c2ba51b1bd10a39538911b43be4db64f (diff)
downloadmongo-32fab3f75af439dc9ef0e95b6107d5bbf7021de4.tar.gz
Import tools: 8bda55730d30c414a71dfbe6f45f5c54ef97811d from branch master
ref: 3cc9a07766..8bda55730d for: 3.5.2 TOOLS-1498 Stats collection has large playback performance impact TOOLS-1501 Add option to set capture buffer size to avoid packet loss TOOLS-1502 Playback file contains full reply payload TOOLS-1516 mongoreplay: out of bounds error in "shortenreply" during record TOOLS-1534 Running mongodump then mongorestore should restore the _id index with its exact original spec TOOLS-1535 Add test for restoring a collection with a default collation TOOLS-1539 Make mongodump/mongorestore code easier to use outside mongo-tools project TOOLS-1541 support exporting views TOOLS-1548 create qa-tests-3.2 task on the 3.4 branch TOOLS-1549 --gssapiServiceName should not require --gssapiHostName in the tools TOOLS-1553 PreProcessing is failling with "got invalid document size" TOOLS-1558 use mongodb 3.4 "current" tests in master TOOLS-1559 TestCommandOpGetMoreLiveDB is failing after upgrade to 3.4 in evergreen TOOLS-1561 nil pointer dereference in mongoreplay when error on new playback file creation TOOLS-1563 windows tests are failing after `use mongodb 3.4 "current" tests` TOOLS-1569 mongo-replay crashes during playback TOOLS-1576 nil pointer derereference when error occurs TOOLS-1579 Progress bar exceeding 100% TOOLS-1588 Parallelize qa-test to improve test runtime Remove test directory, not needed for server import.
Diffstat (limited to 'src/mongo/gotools')
-rw-r--r--src/mongo/gotools/common.yml148
-rw-r--r--src/mongo/gotools/common/db/kerberos/gssapi.go3
-rw-r--r--src/mongo/gotools/common/db/namespaces.go19
-rw-r--r--src/mongo/gotools/import.data5
-rw-r--r--src/mongo/gotools/mongodump/metadata_dump.go2
-rw-r--r--src/mongo/gotools/mongodump/mongodump.go76
-rw-r--r--src/mongo/gotools/mongodump/mongodump_test.go15
-rw-r--r--src/mongo/gotools/mongodump/oplog_dump.go9
-rw-r--r--src/mongo/gotools/mongodump/prepare.go34
-rw-r--r--src/mongo/gotools/mongoexport/main/mongoexport.go4
-rw-r--r--src/mongo/gotools/mongoexport/mongoexport.go29
-rw-r--r--src/mongo/gotools/mongofiles/main/mongofiles.go2
-rw-r--r--src/mongo/gotools/mongoimport/main/mongoimport.go2
-rw-r--r--src/mongo/gotools/mongoimport/mongoimport.go2
-rw-r--r--src/mongo/gotools/mongooplog/main/mongooplog.go4
-rw-r--r--src/mongo/gotools/mongoreplay/auth_test.go4
-rw-r--r--src/mongo/gotools/mongoreplay/cursors.go12
-rw-r--r--src/mongo/gotools/mongoreplay/execute.go2
-rw-r--r--src/mongo/gotools/mongoreplay/mongoreplay_test.go64
-rw-r--r--src/mongo/gotools/mongoreplay/monitor.go3
-rw-r--r--src/mongo/gotools/mongoreplay/pcap_test.go2
-rw-r--r--src/mongo/gotools/mongoreplay/play.go3
-rw-r--r--src/mongo/gotools/mongoreplay/play_livedb_test.go23
-rw-r--r--src/mongo/gotools/mongoreplay/raw_op.go60
-rw-r--r--src/mongo/gotools/mongoreplay/record.go34
-rw-r--r--src/mongo/gotools/mongoreplay/stat_collector.go22
-rw-r--r--src/mongo/gotools/mongorestore/filepath.go2
-rw-r--r--src/mongo/gotools/mongorestore/main/mongorestore.go2
-rw-r--r--src/mongo/gotools/mongorestore/metadata.go3
-rw-r--r--src/mongo/gotools/mongorestore/mongorestore.go14
-rw-r--r--src/mongo/gotools/mongorestore/mongorestore_test.go2
-rw-r--r--src/mongo/gotools/mongorestore/restore.go24
-rw-r--r--src/mongo/gotools/test/legacy24/buildscripts/buildlogger.py480
-rw-r--r--src/mongo/gotools/test/legacy24/buildscripts/cleanbb.py105
-rwxr-xr-xsrc/mongo/gotools/test/legacy24/buildscripts/smoke.py1314
-rw-r--r--src/mongo/gotools/test/legacy24/buildscripts/utils.py230
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/libs/use_extended_timeout.js12
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/replsets/rslib.js115
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/csv1.js42
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/csvexport1.js47
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/csvexport2.js31
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/csvimport1.js40
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/data/a.tsv2
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/data/csvimport1.csv8
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/data/dumprestore6/foo.bsonbin44 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/data/dumprestore6/system.indexes.bsonbin144 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/dumpauth.js29
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/dumpfilename1.js13
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/dumprestore1.js23
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/dumprestore10.js63
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/dumprestore3.js60
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/dumprestore4.js42
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/dumprestore6.js27
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/dumprestore7.js62
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/dumprestore8.js105
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/dumprestore9.js79
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/dumprestoreWithNoOptions.js117
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/dumprestore_auth.js28
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/dumpsecondary.js38
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/exportimport1.js66
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/exportimport3.js27
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/exportimport4.js56
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/exportimport5.js81
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/files1.js27
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/oplog1.js29
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/oplog_all_ops.js61
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/restorewithauth.js113
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/stat1.js23
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/tool_replset.js89
-rw-r--r--src/mongo/gotools/test/legacy24/jstests/tool/tsv1.js32
-rw-r--r--src/mongo/gotools/test/legacy26/buildscripts/buildlogger.py480
-rw-r--r--src/mongo/gotools/test/legacy26/buildscripts/cleanbb.py105
-rwxr-xr-xsrc/mongo/gotools/test/legacy26/buildscripts/smoke.py1314
-rw-r--r--src/mongo/gotools/test/legacy26/buildscripts/utils.py230
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/authTestsKey1
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/ca.pem17
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/client.pem101
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/client_revoked.pem34
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/cluster-cert.pem101
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/command_line/test_parsed_options.js202
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/config_files/disable_noscripting.ini1
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_auth.json5
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_autosplit.json5
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_httpinterface.json7
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_indexbuildretry.json5
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_journal.json7
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_objcheck.json5
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_paranoia.json5
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_prealloc.json5
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_scripting.json5
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_unixsocket.json7
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/config_files/set_profiling.json5
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/config_files/set_replsetname.json5
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/config_files/set_shardingrole.json5
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/config_files/set_verbosity.json5
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/crl.pem10
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/crl_client_revoked.pem12
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/crl_expired.pem10
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/dur_checksum_bad_first.journalbin32768 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/dur_checksum_bad_last.journalbin32768 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/dur_checksum_good.journalbin32768 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/fts.js18
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/fun.js32
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/geo_near_random.js99
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/grid.js171
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/key11
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/key21
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/localhostnameCN.pem101
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/localhostnameSAN.pem100
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/mockkrb5.conf13
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/mockservice.keytabbin442 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/mockuser.keytabbin340 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/network.js37
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/parallelTester.js259
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/password_protected.pem51
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/server.pem34
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/slow_weekly_util.js20
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/smoke.pem50
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/test_background_ops.js340
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/testconfig4
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/testconfig.json4
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/trace_missing_docs.js90
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/libs/use_extended_timeout.js12
-rwxr-xr-xsrc/mongo/gotools/test/legacy26/jstests/misc/biginsert.js18
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/replsets/rslib.js115
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/csv1.js42
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/csvexport1.js65
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/csvexport2.js31
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/csvimport1.js40
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/data/a.tsv2
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/data/csvimport1.csv8
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/data/dumprestore6/foo.bsonbin44 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/data/dumprestore6/system.indexes.bsonbin144 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/dumpauth.js27
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/dumpfilename1.js14
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/dumprestore1.js23
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/dumprestore10.js63
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/dumprestore3.js60
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/dumprestore4.js42
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/dumprestore6.js27
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/dumprestore7.js66
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/dumprestore8.js105
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/dumprestore9.js79
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/dumprestoreWithNoOptions.js107
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/dumprestore_auth.js35
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/dumprestore_auth2.js96
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/dumprestore_auth3.js199
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/dumpsecondary.js38
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/exportimport1.js66
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/exportimport3.js27
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/exportimport4.js57
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/exportimport5.js82
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/exportimport6.js26
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/exportimport_bigarray.js62
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/exportimport_date.js49
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/files1.js27
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/oplog1.js26
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/oplog_all_ops.js61
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/restorewithauth.js113
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/stat1.js22
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/tool1.js44
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/tool_replset.js89
-rw-r--r--src/mongo/gotools/test/legacy26/jstests/tool/tsv1.js32
-rw-r--r--src/mongo/gotools/test/legacy28/buildscripts/buildlogger.py479
-rw-r--r--src/mongo/gotools/test/legacy28/buildscripts/cleanbb.py105
-rwxr-xr-xsrc/mongo/gotools/test/legacy28/buildscripts/smoke.py1447
-rw-r--r--src/mongo/gotools/test/legacy28/buildscripts/utils.py235
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/analyze_plan.js80
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/authTestsKey1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/badSAN.pem48
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/ca.pem48
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/client.pem48
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/client_revoked.pem48
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/cluster_cert.pem48
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/command_line/test_parsed_options.js214
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_auth.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_dur.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_httpinterface.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_ipv6.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_journal.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_jsonp.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_jsonp.json7
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_moveparanoia.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noauth.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noautosplit.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nodur.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nohttpinterface.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noindexbuildretry.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nojournal.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nomoveparanoia.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noobjcheck.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noprealloc.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noscripting.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nounixsocket.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_objcheck.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_rest_interface.json7
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_auth.json5
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_autosplit.json5
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_httpinterface.json7
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_indexbuildretry.json5
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_journal.json7
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_objcheck.json5
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_paranoia.json5
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_prealloc.json5
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_scripting.json5
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_unixsocket.json7
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/implicitly_enable_dur.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/implicitly_enable_journal.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/implicitly_enable_nodur.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/implicitly_enable_nojournal.ini1
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_component_verbosity.json16
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_profiling.json5
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_replsetname.json5
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_shardingrole.json5
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_verbosity.json5
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/crl.pem38
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/crl_client_revoked.pem41
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/crl_expired.pem38
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/dur_checksum_bad_first.journalbin32768 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/dur_checksum_bad_last.journalbin32768 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/dur_checksum_good.journalbin32768 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/expired.pem48
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/fts.js18
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/geo_near_random.js101
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/host_ipaddr.js38
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/key11
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/key21
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/localhostnameCN.pem48
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/localhostnameSAN.pem49
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/mockkrb5.conf13
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/mockservice.keytabbin442 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/mockuser.keytabbin340 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/not_yet_valid.pem48
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/parallelTester.js259
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/password_protected.pem51
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/server.pem48
-rwxr-xr-xsrc/mongo/gotools/test/legacy28/jstests/libs/servers.js961
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/servers_misc.js357
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/slow_weekly_util.js20
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/smoke.pem48
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/test_background_ops.js340
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/testconfig6
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/testconfig.json4
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/libs/trace_missing_docs.js90
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/replsets/rslib.js115
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/csv1.js43
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/csvexport1.js65
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/csvexport2.js32
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/csvimport1.js41
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/data/a.tsv2
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/data/csvimport1.csv8
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/data/dumprestore6/foo.bsonbin44 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/data/dumprestore6/system.indexes.bsonbin144 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/dumpauth.js29
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/dumpfilename1.js13
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/dumprestore1.js32
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/dumprestore10.js64
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/dumprestore3.js61
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/dumprestore4.js43
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/dumprestore6.js54
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/dumprestore7.js66
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/dumprestore8.js107
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/dumprestore9.js79
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/dumprestoreWithNoOptions.js114
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/dumprestore_auth.js117
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/dumprestore_auth2.js98
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/dumprestore_auth3.js200
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/dumprestore_excludecollections.js112
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/dumpsecondary.js39
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/exportimport1.js67
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/exportimport3.js28
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/exportimport4.js57
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/exportimport5.js82
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/exportimport6.js27
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/exportimport_bigarray.js59
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/exportimport_date.js50
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/exportimport_minkey_maxkey.js38
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/files1.js28
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/oplog1.js29
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/oplog_all_ops.js62
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/restorewithauth.js117
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/stat1.js18
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/tool1.js44
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/tool_replset.js89
-rw-r--r--src/mongo/gotools/test/legacy28/jstests/tool/tsv1.js33
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/__init__.py1
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/buildlogger.py479
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/cleanbb.py105
-rwxr-xr-xsrc/mongo/gotools/test/qa-tests/buildscripts/resmoke.py216
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/__init__.py4
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/__init__.py36
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/buildlogger.yml13
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/console.yml13
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/file.yml19
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/suppress.yml10
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/__init__.py36
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/core.yml27
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/core_ssl.yml38
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/native_cert_ssl.yml14
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/restore_archive.yml23
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/restore_gzip.yml21
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/__init__.py7
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/config.py165
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/__init__.py5
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/network.py114
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/pipe.py87
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/process.py234
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/programs.py311
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/errors.py52
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/__init__.py14
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/buildlogger.py284
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/config.py161
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/flush.py97
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/formatters.py50
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/handlers.py178
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/loggers.py37
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/parser.py368
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/selector.py291
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/__init__.py9
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/executor.py307
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/__init__.py32
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/interface.py128
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/masterslave.py209
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/replicaset.py211
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/shardedcluster.py347
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/standalone.py151
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/hooks.py704
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/job.py195
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/report.py330
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/suite.py140
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/summary.py22
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/testcases.py407
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/testgroup.py132
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/__init__.py88
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/globstar.py202
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/jscomment.py78
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/queue.py52
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/timer.py125
-rwxr-xr-xsrc/mongo/gotools/test/qa-tests/buildscripts/setup_multiversion_mongodb.py291
-rwxr-xr-xsrc/mongo/gotools/test/qa-tests/buildscripts/smoke.py1451
-rw-r--r--src/mongo/gotools/test/qa-tests/buildscripts/utils.py235
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/bson/all_types.js33
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/bson/all_types_json.js29
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/bson/bad_files.js41
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/bson/bsondump_broken_pipe.js18
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/bson/bsondump_options.js57
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/bson/deep_nested.js8
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/bson/output_file.js71
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/bson/testdata/all_types.bsonbin1090 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/bson/testdata/bad_cstring.bsonbin283 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/bson/testdata/bad_type.bsonbin283 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/bson/testdata/broken_array.bsonbin1090 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/bson/testdata/deep_nested.bsonbin5022 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/bson/testdata/invalid_field_name.bsonbin284 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/bson/testdata/partial_file.bsonbin177 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/bson/testdata/random_bytes.bsonbin1024 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/bson/testdata/sample.bsonbin283 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/common/check_version.js47
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/common/topology_helper.js164
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/configs/archive_targets.js32
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/configs/auth_28.config.js38
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/configs/gzip_targets.js36
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/configs/kerberos.config.yml7
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/configs/kerberos.linux.sh5
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/configs/kerberos_28.config.js39
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/configs/kerberos_28_windows.config.js59
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/configs/plain_26.config.js19
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/configs/plain_28.config.js21
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/configs/replset_28.config.js39
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/configs/replset_auth_28.config.js58
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/configs/sharding_28.config.js40
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/configs/ssl_28.config.js26
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/configs/standard_dump_targets.config.js30
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/bad_options.js47
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/basic_data.js60
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/data_types.js70
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/export_broken_pipe.js46
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/field_file.js60
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/fields_csv.js173
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/fields_json.js92
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/force_table_scan.js113
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/json_array.js57
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/limit.js61
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/namespace_validation.js25
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/nested_fields_csv.js65
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/no_data.js21
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/pretty.js33
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/query.js198
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/slave_ok.js63
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/sort_and_skip.js69
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/stdout.js42
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/testdata/query.json1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/testdata/simple_field_file2
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/export/type_case.js115
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_db.js61
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_delete.js47
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_get.js81
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_host.js59
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_invalid.js37
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_list.js96
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_local.js102
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_port.js52
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_prefix.js49
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_put.js108
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_replace.js79
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_search.js110
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_type.js63
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_version.js29
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_write_concern.js54
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_write_concern_mongos.js59
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/testdata/files1.txt1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/testdata/files2.txt1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/testdata/files3.txt1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/files/util/mongofiles_common.js10
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/all_primaries_down_error_code.js65
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/boolean_type.js57
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/collections.js77
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/decimal128.js44
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/drop.js48
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/fields.js107
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/import_document_validation.js110
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/import_types.js75
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/import_write_concern.js72
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/import_write_concern_mongos.js77
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/mode.js147
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/mode_upsert_id_subdoc.js86
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/no_primary_error_code.js65
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/options.js123
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/parse_grace.js113
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/replset.js48
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/stoponerror.js40
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/basic.json36
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/boolean.json19
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/csv_header.csv4
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/csv_noheader.csv3
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/dupes.json34
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/extrafields.csv3
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/fieldfile4
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/parse_grace.csv4
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/tsv_header.tsv4
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/tsv_noheader.tsv3
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_extrafields.csv3
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_extrafields.tsv3
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_header.csv3
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_header.tsv3
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_noheader.csv2
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_noheader.tsv2
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/typedfieldfile5
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/types.json27
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/upsert1.json5
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/upsert2.json5
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/testdata/upsert3.json2000
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/type_case.js98
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/typed_fields.js114
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/import/types.js117
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/.eslintrc.yml3
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/analyze_plan.js76
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/authTestsKey1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/badSAN.pem48
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/ca.pem48
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/client.pem48
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/client_revoked.pem48
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/cluster_cert.pem48
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/command_line/test_parsed_options.js213
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_auth.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_dur.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_httpinterface.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_ipv6.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_journal.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_jsonp.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_jsonp.json7
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_moveparanoia.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noauth.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noautosplit.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nodur.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nohttpinterface.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noindexbuildretry.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nojournal.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nomoveparanoia.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noobjcheck.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noprealloc.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noscripting.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nounixsocket.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_objcheck.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_rest_interface.json7
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_auth.json5
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_autosplit.json5
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_httpinterface.json7
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_indexbuildretry.json5
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_journal.json7
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_objcheck.json5
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_paranoia.json5
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_prealloc.json5
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_scripting.json5
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_unixsocket.json7
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/implicitly_enable_dur.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/implicitly_enable_journal.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/implicitly_enable_nodur.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/implicitly_enable_nojournal.ini1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_component_verbosity.json16
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_profiling.json5
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_replsetname.json5
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_shardingrole.json5
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_verbosity.json5
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/crl.pem38
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/crl_client_revoked.pem41
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/crl_expired.pem38
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/dur_checksum_bad_first.journalbin32768 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/dur_checksum_bad_last.journalbin32768 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/dur_checksum_good.journalbin32768 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/expired.pem48
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/extended_assert.js61
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/fts.js22
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/geo_near_random.js100
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/host_ipaddr.js38
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/key11
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/key21
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/localhostnameCN.pem48
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/localhostnameSAN.pem49
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/mockkrb5.conf13
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/mockservice.keytabbin442 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/mockuser.keytabbin340 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/mongostat.js114
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/not_yet_valid.pem48
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/parallelTester.js268
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/password_protected.pem51
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/server.pem48
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/servers.js1092
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/servers_misc.js379
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/slow_weekly_util.js25
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/smoke.pem48
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/test_background_ops.js334
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/testconfig6
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/testconfig.json4
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/trace_missing_docs.js99
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/trusted-ca.pem22
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/trusted-client.pem49
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/trusted-server.pem49
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/libs/wc_framework.js72
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/oplog/all_ops_tests.js164
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/oplog/apply_ops_tests.js137
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/oplog/asymmetric_ssl_test.js48
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/oplog/deprecated_flags_tests.js31
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/oplog/drop_database_test.js97
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/oplog/informational_flags_test.js28
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/oplog/oplog_server_ko_test.js52
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/oplog/real_oplog_tests.js75
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/oplog/unreachable_host_tests.js46
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/15k_collections.js38
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/24_to_28.js72
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/26_to_28.js67
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/28_to_26.js68
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/archive_stdout.js54
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/bad_options.js54
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/blank_collection_bson.js43
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/blank_db.js29
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/conflicting_auth_schema_version.js138
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/different_collection.js91
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/different_db.js86
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/drop_authenticated_user.js111
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/drop_nonexistent_db.js58
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/drop_one_collection.js92
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/drop_with_data.js77
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/duplicate_keys.js75
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/empty_users_and_roles.js33
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/extended_json_metadata.js42
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/indexes.js98
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/invalid_dump_target.js32
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/invalid_metadata.js22
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/keep_index_version.js90
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/large_bulk.js54
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/malformed_bson.js20
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/malformed_metadata.js22
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/missing_dump.js32
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/multiple_dbs.js82
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/namespaces.js152
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/no_index_restore.js77
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/no_options_restore.js127
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/nonempty_temp_users.js46
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/norestore_profile.js58
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/objcheck_valid_bson.js46
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_and_limit.js78
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_conflict.js33
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_local_main.js60
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_local_rs.js67
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_no_oplog.js19
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_noop.js37
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_priority_oplog.js40
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_size_safety.js71
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_specify_file.js70
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/partial_restore.js83
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/preserve_oplog_structure_order.js25
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/restore_document_validation.js180
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/sharded_fullrestore.js45
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/stop_on_error.js50
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/symlinks.js46
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/blankcoll/blank.bson0
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/blankcoll/blank_metadata.bson0
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/blankcoll/blank_metadata.metadata.json1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/blankdb/README1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_empty/db_empty/coll_empty.bson0
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_extended_json_options/test/changelog.bsonbin429 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_extended_json_options/test/changelog.metadata.json1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_local_oplog/local/oplog.rs.bsonbin9190 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_local_oplog/local/oplog.rs.metadata.json1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_oplog_conflict/oplog.bsonbin525 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_complex_id_oplog/oplog.bsonbin269 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_invalid/dbOne/invalid_metadata.bson0
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_invalid/dbOne/invalid_metadata.metadata.json1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_malformed/dbOne/malformed_coll.bson1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_malformed/dbOne/malformed_metadata.bson0
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_malformed/dbOne/malformed_metadata.metadata.json1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_malformed/dbOne/missing_metadata.bson0
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_noop_in_oplog/oplog.bsonbin370 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_oplog/oplog.bsonbin525 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_oplog/test/data.bsonbin180 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_oplog/test/data.metadata.json1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_oplog/test/system.indexes.bsonbin64 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbOne/data.bsonbin220 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbOne/data.metadata.json1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbOne/system.indexes.bsonbin65 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbTwo/data.bsonbin220 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbTwo/data.metadata.json1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbTwo/system.indexes.bsonbin65 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/not_a_dirbin525 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/extra_oplog.bsonbin525 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/soft_linked_collection.bsonbin220 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/soft_linked_db/data.bsonbin220 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/soft_linked_db/data.metadata.json1
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/testdata/soft_linked_db/system.indexes.bsonbin65 -> 0 bytes
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles.js87
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles_26_to_28_to_26.js145
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles_28_to_26.js159
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles_full_dump.js144
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles_temp_collections.js106
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/write_concern.js66
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/restore/write_concern_mongos.js71
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/ssl/ssl_with_system_ca.js43
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/stat/stat_auth.js31
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/stat/stat_broken_pipe.js34
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/stat/stat_custom_headers.js107
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/stat/stat_discover.js60
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/stat/stat_discover_shard.js14
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/stat/stat_header.js27
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/stat/stat_mixed_storage_engine.js45
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/stat/stat_rowcount.js56
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/top/mongotop_json.js44
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/top/mongotop_reports.js151
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/top/mongotop_sharded.js47
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/top/mongotop_stress.js56
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/top/mongotop_validation.js46
-rw-r--r--src/mongo/gotools/test/qa-tests/jstests/top/util/mongotop_common.js25
-rw-r--r--src/mongo/gotools/vendor/src/github.com/10gen/llmgo/socket.go8
653 files changed, 446 insertions, 43933 deletions
diff --git a/src/mongo/gotools/common.yml b/src/mongo/gotools/common.yml
index f5d14936b6b..b43b70980e1 100644
--- a/src/mongo/gotools/common.yml
+++ b/src/mongo/gotools/common.yml
@@ -21,8 +21,11 @@ mongo_tools_variables:
- name: legacy26
- name: legacy24
- name: qa-tests
- - name: qa-dump-restore-archiving
+ - name: qa-tests-3.2
- name: qa-dump-restore-gzip
+ - name: qa-dump-restore-gzip-3.2
+ - name: qa-dump-restore-archiving
+ - name: qa-dump-restore-archiving-3.2
- name: unit
# disabled until BUILD-2273 is done
# - name: replay-dist
@@ -54,9 +57,12 @@ mongo_tools_variables:
- name: lint-go
- name: lint-js
- name: qa-tests
+ - name: qa-tests-3.2
- name: qa-tests-unstable
- - name: qa-dump-restore-archiving
- name: qa-dump-restore-gzip
+ - name: qa-dump-restore-gzip-3.2
+ - name: qa-dump-restore-archiving
+ - name: qa-dump-restore-archiving-3.2
- name: qa-tests-wt
- name: unit
- name: vet
@@ -146,12 +152,22 @@ mongo_tools_variables:
- name: qa-tests
distros:
- windows-64-vs2013-test
+ - name: qa-tests-3.2
+ distros:
+ - windows-64-vs2013-test
- name: qa-dump-restore-archiving
distros:
- windows-64-vs2013-test
+ - name: qa-dump-restore-archiving-3.2
+ distros:
+ - windows-64-vs2013-test
- name: qa-dump-restore-gzip
distros:
- windows-64-vs2013-test
+ - name: qa-dump-restore-gzip-3.2
+ distros:
+ - windows-64-vs2013-test
+ - name: qa-tests-unstable
- name: unit
windows_64_ssl_task_list: &windows_64_ssl_tasks
- name: dist
@@ -841,7 +857,7 @@ tasks:
value: "${args} -test.types=db"
- func: "download mongod"
vars:
- mongo_version: "3.2"
+ mongo_version: "3.4"
- func: "start mongod"
- func: "wait for mongod to be ready"
- func: "setup integration test"
@@ -925,7 +941,7 @@ tasks:
value: "${args} -test.types=${integration_test_args}"
- func: "download mongod"
vars:
- mongo_version: "3.2"
+ mongo_version: "3.4"
- func: "start mongod"
- func: "wait for mongod to be ready"
- func: "run tool integration tests"
@@ -949,7 +965,7 @@ tasks:
value: "db.createUser({ user: '${auth_username}', pwd: '${auth_password}', roles: [{ role: '__system', db: 'admin' }] });"
- func: "download mongod"
vars:
- mongo_version: "3.2"
+ mongo_version: "3.4"
- func: "start mongod"
- func: "wait for mongod to be ready"
- func: "run tool integration tests"
@@ -1193,7 +1209,7 @@ tasks:
- func: "setup credentials"
- func: "download mongod"
vars:
- mongo_version: "3.2"
+ mongo_version: "3.4"
- func: "fetch tool"
vars:
tool: mongoimport
@@ -1226,6 +1242,48 @@ tasks:
resmoke_suite: "core${resmoke_use_ssl}"
excludes: "requires_unstable,${excludes}"
+- name: qa-tests-3.2
+ depends_on:
+ - name: dist
+ commands:
+ - func: "fetch source"
+ - func: "get buildnumber"
+ - func: "setup credentials"
+ - func: "download mongod"
+ vars:
+ mongo_version: "3.2"
+ - func: "fetch tool"
+ vars:
+ tool: mongoimport
+ - func: "fetch tool"
+ vars:
+ tool: mongoexport
+ - func: "fetch tool"
+ vars:
+ tool: mongodump
+ - func: "fetch tool"
+ vars:
+ tool: mongorestore
+ - func: "fetch tool"
+ vars:
+ tool: mongostat
+ - func: "fetch tool"
+ vars:
+ tool: mongotop
+ - func: "fetch tool"
+ vars:
+ tool: mongooplog
+ - func: "fetch tool"
+ vars:
+ tool: mongofiles
+ - func: "fetch tool"
+ vars:
+ tool: bsondump
+ - func: "run qa-tests"
+ vars:
+ resmoke_suite: "core${resmoke_use_ssl}"
+ excludes: "requires_unstable,requires_mongo_34,${excludes}"
+
- name: native-cert-ssl
depends_on:
- name: dist
@@ -1288,7 +1346,7 @@ tasks:
- func: "setup credentials"
- func: "download mongod"
vars:
- mongo_version: "3.2"
+ mongo_version: "3.4"
- func: "fetch tool"
vars:
tool: mongodump
@@ -1300,7 +1358,7 @@ tasks:
resmoke_suite: "restore_archive"
excludes: "requires_unstable,${excludes}"
-- name: qa-dump-restore-gzip
+- name: qa-dump-restore-archiving-3.2
depends_on:
- name: dist
commands:
@@ -1318,10 +1376,31 @@ tasks:
tool: mongorestore
- func: "run qa-tests"
vars:
+ resmoke_suite: "restore_archive"
+ excludes: "requires_unstable,requires_mongo_34,${excludes}"
+
+- name: qa-dump-restore-gzip
+ depends_on:
+ - name: dist
+ commands:
+ - func: "fetch source"
+ - func: "get buildnumber"
+ - func: "setup credentials"
+ - func: "download mongod"
+ vars:
+ mongo_version: "3.4"
+ - func: "fetch tool"
+ vars:
+ tool: mongodump
+ - func: "fetch tool"
+ vars:
+ tool: mongorestore
+ - func: "run qa-tests"
+ vars:
resmoke_suite: "restore_gzip"
excludes: "requires_unstable,${excludes}"
-- name: qa-tests-wt
+- name: qa-dump-restore-gzip-3.2
depends_on:
- name: dist
commands:
@@ -1333,6 +1412,27 @@ tasks:
mongo_version: "3.2"
- func: "fetch tool"
vars:
+ tool: mongodump
+ - func: "fetch tool"
+ vars:
+ tool: mongorestore
+ - func: "run qa-tests"
+ vars:
+ resmoke_suite: "restore_gzip"
+ excludes: "requires_unstable,requires_mongo_34,${excludes}"
+
+- name: qa-tests-wt
+ depends_on:
+ - name: dist
+ commands:
+ - func: "fetch source"
+ - func: "get buildnumber"
+ - func: "setup credentials"
+ - func: "download mongod"
+ vars:
+ mongo_version: "3.4"
+ - func: "fetch tool"
+ vars:
tool: mongoimport
- func: "fetch tool"
vars:
@@ -1452,7 +1552,7 @@ tasks:
tool: mongoreplay
- func: "download mongod"
vars:
- mongo_version: "3.2"
+ mongo_version: "3.4"
- func: "start mongod"
- func: "wait for mongod to be ready"
- command: shell.exec
@@ -1482,7 +1582,7 @@ tasks:
pcapFname: getmore_single_channel.pcap
- func: "download mongod"
vars:
- mongo_version: "3.2"
+ mongo_version: "3.4"
- func: "start mongod"
- func: "wait for mongod to be ready"
- func: "run go_test"
@@ -1507,7 +1607,7 @@ tasks:
pcapFname: getmore_single_channel.pcap
- func: "download mongod"
vars:
- mongo_version: "3.2"
+ mongo_version: "3.4"
- func: "create sharded_cluster"
- func: "run go_test"
vars:
@@ -1531,7 +1631,7 @@ tasks:
pcapFname: getmore_single_channel.pcap
- func: "download mongod"
vars:
- mongo_version: "3.2"
+ mongo_version: "3.4"
- func: "start mongod"
vars:
additional_args: --auth
@@ -1561,7 +1661,7 @@ tasks:
pcapFname: getmore_single_channel.pcap
- func: "download mongod"
vars:
- mongo_version: "3.2"
+ mongo_version: "3.4"
- func: "create repl_set"
vars:
mongod_port: ${mongod_port}
@@ -1581,7 +1681,7 @@ tasks:
tool: mongoreplay
- func: "download mongod"
vars:
- mongo_version: "3.2"
+ mongo_version: "3.4"
- func: "fetch ftdc"
- command: shell.exec
params:
@@ -1788,21 +1888,6 @@ buildvariants:
#######################################
# Windows Buildvariants #
#######################################
-- name: windows-32
- display_name: Windows 32-bit
- run_on:
- - windows-32
- expansions:
- <<: *mongod_win32_startup_args
- <<: *mongo_default_startup_args
- mongo_target: "windows_i686"
- mongo_arch: "i386"
- extension: .exe
- preproc_gpm: "perl -pi -e 's/\\r\\n/\\n/g' "
- excludes: requires_large_ram
- integration_test_args: "integration"
- tasks: *windows_32_tasks
-
- name: windows-64
display_name: Windows 64-bit
run_on:
@@ -1812,6 +1897,7 @@ buildvariants:
<<: *mongo_default_startup_args
mongo_os: "windows-64"
mongo_target: "windows_x86_64-2008plus"
+ resmoke_args: --jobs $(grep -c ^processor /proc/cpuinfo)
excludes: requires_large_ram
extension: .exe
arch: "win32/x86_64"
@@ -1832,6 +1918,7 @@ buildvariants:
edition: ssl
smoke_use_ssl: --use-ssl
resmoke_use_ssl: _ssl
+ resmoke_args: --jobs $(grep -c ^processor /proc/cpuinfo)
excludes: requires_large_ram,requires_mongo_24
multiversion_override: "2.6"
extension: .exe
@@ -1853,6 +1940,7 @@ buildvariants:
mongo_target: "windows"
build_tags: "sasl ssl"
smoke_use_ssl: --use-ssl
+ resmoke_args: --jobs $(grep -c ^processor /proc/cpuinfo)
resmoke_use_ssl: _ssl
excludes: requires_large_ram,requires_mongo_24
multiversion_override: "2.6"
diff --git a/src/mongo/gotools/common/db/kerberos/gssapi.go b/src/mongo/gotools/common/db/kerberos/gssapi.go
index c2b93ef6fc9..def14954e1c 100644
--- a/src/mongo/gotools/common/db/kerberos/gssapi.go
+++ b/src/mongo/gotools/common/db/kerberos/gssapi.go
@@ -15,8 +15,7 @@ func AddKerberosOpts(opts options.ToolOptions, dialInfo *mgo.DialInfo) {
if dialInfo == nil {
return
}
- if opts.Kerberos == nil || opts.Kerberos.Service == "" ||
- opts.Kerberos.ServiceHost == "" {
+ if opts.Kerberos == nil {
return
}
if opts.Auth == nil || (opts.Auth.Mechanism != authMechanism &&
diff --git a/src/mongo/gotools/common/db/namespaces.go b/src/mongo/gotools/common/db/namespaces.go
index 149400543ef..908687b1c56 100644
--- a/src/mongo/gotools/common/db/namespaces.go
+++ b/src/mongo/gotools/common/db/namespaces.go
@@ -129,6 +129,7 @@ func GetCollectionOptions(coll *mgo.Collection) (*bson.D, error) {
if err != nil {
return nil, err
}
+ defer iter.Close()
comparisonName := coll.Name
if useFullName {
comparisonName = coll.FullName
@@ -143,17 +144,23 @@ func GetCollectionOptions(coll *mgo.Collection) (*bson.D, error) {
if nameStr, ok := name.(string); ok {
if nameStr == comparisonName {
// we've found the collection we're looking for
- return collInfo, nil
+ break
}
} else {
collInfo = nil
continue
}
}
- err = iter.Err()
- if err != nil {
- return nil, err
+
+ if collInfo != nil {
+ optsInterface, _ := bsonutil.FindValueByKey("options", collInfo)
+ if optsInterface != nil {
+ optsD, ok := optsInterface.(bson.D)
+ if !ok {
+ return nil, fmt.Errorf("Cannot unmarshal collection options for collection %v.%v", coll.Database, coll.Name)
+ }
+ return &optsD, nil
+ }
}
- // The given collection was not found, but no error encountered.
- return nil, nil
+ return nil, iter.Err()
}
diff --git a/src/mongo/gotools/import.data b/src/mongo/gotools/import.data
new file mode 100644
index 00000000000..bdf1375db42
--- /dev/null
+++ b/src/mongo/gotools/import.data
@@ -0,0 +1,5 @@
+{
+ "commit": "8bda55730d30c414a71dfbe6f45f5c54ef97811d",
+ "github": "mongodb/mongo-tools.git",
+ "branch": "master"
+}
diff --git a/src/mongo/gotools/mongodump/metadata_dump.go b/src/mongo/gotools/mongodump/metadata_dump.go
index e1c6d594364..a692bf06fda 100644
--- a/src/mongo/gotools/mongodump/metadata_dump.go
+++ b/src/mongo/gotools/mongodump/metadata_dump.go
@@ -52,7 +52,7 @@ func (dump *MongoDump) dumpMetadata(intent *intents.Intent, buffer resettableOut
// that list as the "indexes" field of the metadata document.
log.Logvf(log.DebugHigh, "\treading indexes for `%v`", intent.Namespace())
- session, err := dump.sessionProvider.GetSession()
+ session, err := dump.SessionProvider.GetSession()
if err != nil {
return err
}
diff --git a/src/mongo/gotools/mongodump/mongodump.go b/src/mongo/gotools/mongodump/mongodump.go
index 079ec3c737a..f869d32673d 100644
--- a/src/mongo/gotools/mongodump/mongodump.go
+++ b/src/mongo/gotools/mongodump/mongodump.go
@@ -36,10 +36,13 @@ type MongoDump struct {
InputOptions *InputOptions
OutputOptions *OutputOptions
+ // Skip dumping users and roles, regardless of namespace, when true.
+ SkipUsersAndRoles bool
+
ProgressManager progress.Manager
// useful internals that we don't directly expose as options
- sessionProvider *db.SessionProvider
+ SessionProvider *db.SessionProvider
manager *intents.Manager
query bson.M
oplogCollection string
@@ -51,8 +54,9 @@ type MongoDump struct {
// as well as the signal handler, and allows them to notify
// the intent dumpers that they should shutdown
shutdownIntentsNotifier *notifier
- // the value of stdout gets initizlied to os.Stdout if it's unset
- stdout io.Writer
+ // Writer to take care of BSON output when not writing to the local filesystem.
+ // This is initialized to os.Stdout if unset.
+ OutputWriter io.Writer
readPrefMode mgo.Mode
readPrefTags []bson.D
}
@@ -115,17 +119,17 @@ func (dump *MongoDump) Init() error {
if err != nil {
return fmt.Errorf("bad option: %v", err)
}
- if dump.stdout == nil {
- dump.stdout = os.Stdout
+ if dump.OutputWriter == nil {
+ dump.OutputWriter = os.Stdout
}
- dump.sessionProvider, err = db.NewSessionProvider(*dump.ToolOptions)
+ dump.SessionProvider, err = db.NewSessionProvider(*dump.ToolOptions)
if err != nil {
return fmt.Errorf("can't create session: %v", err)
}
// temporarily allow secondary reads for the isMongos check
- dump.sessionProvider.SetReadPreference(mgo.Nearest)
- dump.isMongos, err = dump.sessionProvider.IsMongos()
+ dump.SessionProvider.SetReadPreference(mgo.Nearest)
+ dump.isMongos, err = dump.SessionProvider.IsMongos()
if err != nil {
return err
}
@@ -148,7 +152,7 @@ func (dump *MongoDump) Init() error {
return fmt.Errorf("error parsing --readPreference : %v", err)
}
if len(tags) > 0 {
- dump.sessionProvider.SetTags(tags)
+ dump.SessionProvider.SetTags(tags)
}
}
@@ -157,9 +161,9 @@ func (dump *MongoDump) Init() error {
log.Logvf(log.Always, db.WarningNonPrimaryMongosConnection)
}
- dump.sessionProvider.SetReadPreference(mode)
- dump.sessionProvider.SetTags(tags)
- dump.sessionProvider.SetFlags(db.DisableSocketTimeout)
+ dump.SessionProvider.SetReadPreference(mode)
+ dump.SessionProvider.SetTags(tags)
+ dump.SessionProvider.SetFlags(db.DisableSocketTimeout)
// return a helpful error message for mongos --repair
if dump.OutputOptions.Repair && dump.isMongos {
@@ -172,7 +176,7 @@ func (dump *MongoDump) Init() error {
// Dump handles some final options checking and executes MongoDump.
func (dump *MongoDump) Dump() (err error) {
- defer dump.sessionProvider.Close()
+ defer dump.SessionProvider.Close()
dump.shutdownIntentsNotifier = newNotifier()
@@ -199,11 +203,11 @@ func (dump *MongoDump) Dump() (err error) {
dump.query = bson.M(asMap)
}
- if dump.OutputOptions.DumpDBUsersAndRoles {
+ if !dump.SkipUsersAndRoles && dump.OutputOptions.DumpDBUsersAndRoles {
// first make sure this is possible with the connected database
- dump.authVersion, err = auth.GetAuthVersion(dump.sessionProvider)
+ dump.authVersion, err = auth.GetAuthVersion(dump.SessionProvider)
if err == nil {
- err = auth.VerifySystemAuthVersion(dump.sessionProvider)
+ err = auth.VerifySystemAuthVersion(dump.SessionProvider)
}
if err != nil {
return fmt.Errorf("error getting auth schema version for dumpDbUsersAndRoles: %v", err)
@@ -267,7 +271,7 @@ func (dump *MongoDump) Dump() (err error) {
}
}
- if dump.OutputOptions.DumpDBUsersAndRoles && dump.ToolOptions.DB != "admin" {
+ if !dump.SkipUsersAndRoles && dump.OutputOptions.DumpDBUsersAndRoles && dump.ToolOptions.DB != "admin" {
err = dump.CreateUsersRolesVersionIntentsForDB(dump.ToolOptions.DB)
if err != nil {
return err
@@ -282,7 +286,7 @@ func (dump *MongoDump) Dump() (err error) {
}
exampleIntent := dump.manager.Peek()
if exampleIntent != nil {
- supported, err := dump.sessionProvider.SupportsRepairCursor(
+ supported, err := dump.SessionProvider.SupportsRepairCursor(
exampleIntent.DB, exampleIntent.C)
if !supported {
return err // no extra context needed
@@ -302,7 +306,7 @@ func (dump *MongoDump) Dump() (err error) {
}
if dump.OutputOptions.Archive != "" {
- session, err := dump.sessionProvider.GetSession()
+ session, err := dump.SessionProvider.GetSession()
if err != nil {
return err
}
@@ -330,20 +334,22 @@ func (dump *MongoDump) Dump() (err error) {
return fmt.Errorf("error dumping system indexes: %v", err)
}
- if dump.ToolOptions.DB == "admin" || dump.ToolOptions.DB == "" {
- err = dump.DumpUsersAndRoles()
- if err != nil {
- return fmt.Errorf("error dumping users and roles: %v", err)
- }
- }
- if dump.OutputOptions.DumpDBUsersAndRoles {
- log.Logvf(log.Always, "dumping users and roles for %v", dump.ToolOptions.DB)
- if dump.ToolOptions.DB == "admin" {
- log.Logvf(log.Always, "skipping users/roles dump, already dumped admin database")
- } else {
- err = dump.DumpUsersAndRolesForDB(dump.ToolOptions.DB)
+ if !dump.SkipUsersAndRoles {
+ if dump.ToolOptions.DB == "admin" || dump.ToolOptions.DB == "" {
+ err = dump.DumpUsersAndRoles()
if err != nil {
- return fmt.Errorf("error dumping users and roles for db: %v", err)
+ return fmt.Errorf("error dumping users and roles: %v", err)
+ }
+ }
+ if dump.OutputOptions.DumpDBUsersAndRoles {
+ log.Logvf(log.Always, "dumping users and roles for %v", dump.ToolOptions.DB)
+ if dump.ToolOptions.DB == "admin" {
+ log.Logvf(log.Always, "skipping users/roles dump, already dumped admin database")
+ } else {
+ err = dump.DumpUsersAndRolesForDB(dump.ToolOptions.DB)
+ if err != nil {
+ return fmt.Errorf("error dumping users and roles: %v", err)
+ }
}
}
}
@@ -504,7 +510,7 @@ func (dump *MongoDump) DumpIntents() error {
// DumpIntent dumps the specified database's collection.
func (dump *MongoDump) DumpIntent(intent *intents.Intent, buffer resettableOutputBuffer) error {
- session, err := dump.sessionProvider.GetSession()
+ session, err := dump.SessionProvider.GetSession()
if err != nil {
return err
}
@@ -686,7 +692,7 @@ func (dump *MongoDump) dumpIterToWriter(
// DumpUsersAndRolesForDB queries and dumps the users and roles tied to the given
// database. Only works with an authentication schema version >= 3.
func (dump *MongoDump) DumpUsersAndRolesForDB(db string) error {
- session, err := dump.sessionProvider.GetSession()
+ session, err := dump.SessionProvider.GetSession()
buffer := dump.getResettableOutputBuffer()
if err != nil {
return err
@@ -782,7 +788,7 @@ func (*nopCloseWriter) Close() error {
func (dump *MongoDump) getArchiveOut() (out io.WriteCloser, err error) {
if dump.OutputOptions.Archive == "-" {
- out = &nopCloseWriter{dump.stdout}
+ out = &nopCloseWriter{dump.OutputWriter}
} else {
targetStat, err := os.Stat(dump.OutputOptions.Archive)
if err == nil && targetStat.IsDir() {
diff --git a/src/mongo/gotools/mongodump/mongodump_test.go b/src/mongo/gotools/mongodump/mongodump_test.go
index 92a209ca5f8..9efa6bd7119 100644
--- a/src/mongo/gotools/mongodump/mongodump_test.go
+++ b/src/mongo/gotools/mongodump/mongodump_test.go
@@ -3,6 +3,13 @@ package mongodump
import (
"bytes"
"fmt"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strings"
+ "testing"
+
"github.com/mongodb/mongo-tools/common/bsonutil"
"github.com/mongodb/mongo-tools/common/db"
"github.com/mongodb/mongo-tools/common/json"
@@ -13,12 +20,6 @@ import (
. "github.com/smartystreets/goconvey/convey"
"gopkg.in/mgo.v2"
"gopkg.in/mgo.v2/bson"
- "io/ioutil"
- "os"
- "path/filepath"
- "regexp"
- "strings"
- "testing"
)
var (
@@ -434,7 +435,7 @@ func TestMongoDumpBSON(t *testing.T) {
Convey("it dumps to standard output", func() {
md.OutputOptions.Out = "-"
stdoutBuf := &bytes.Buffer{}
- md.stdout = stdoutBuf
+ md.OutputWriter = stdoutBuf
err = md.Dump()
So(err, ShouldBeNil)
var count int
diff --git a/src/mongo/gotools/mongodump/oplog_dump.go b/src/mongo/gotools/mongodump/oplog_dump.go
index b0800ff4318..ab7492bafd3 100644
--- a/src/mongo/gotools/mongodump/oplog_dump.go
+++ b/src/mongo/gotools/mongodump/oplog_dump.go
@@ -2,6 +2,7 @@ package mongodump
import (
"fmt"
+
"github.com/mongodb/mongo-tools/common/db"
"github.com/mongodb/mongo-tools/common/log"
"github.com/mongodb/mongo-tools/common/util"
@@ -12,7 +13,7 @@ import (
// the name of the oplog collection in the connected db
func (dump *MongoDump) determineOplogCollectionName() error {
masterDoc := bson.M{}
- err := dump.sessionProvider.Run("isMaster", &masterDoc, "admin")
+ err := dump.SessionProvider.Run("isMaster", &masterDoc, "admin")
if err != nil {
return fmt.Errorf("error running command: %v", err)
}
@@ -38,7 +39,7 @@ func (dump *MongoDump) determineOplogCollectionName() error {
func (dump *MongoDump) getOplogStartTime() (bson.MongoTimestamp, error) {
mostRecentOplogEntry := db.Oplog{}
- err := dump.sessionProvider.FindOne("local", dump.oplogCollection, 0, nil, []string{"-$natural"}, &mostRecentOplogEntry, 0)
+ err := dump.SessionProvider.FindOne("local", dump.oplogCollection, 0, nil, []string{"-$natural"}, &mostRecentOplogEntry, 0)
if err != nil {
return 0, err
}
@@ -51,7 +52,7 @@ func (dump *MongoDump) getOplogStartTime() (bson.MongoTimestamp, error) {
// captured at the start of the dump.
func (dump *MongoDump) checkOplogTimestampExists(ts bson.MongoTimestamp) (bool, error) {
oldestOplogEntry := db.Oplog{}
- err := dump.sessionProvider.FindOne("local", dump.oplogCollection, 0, nil, []string{"+$natural"}, &oldestOplogEntry, 0)
+ err := dump.SessionProvider.FindOne("local", dump.oplogCollection, 0, nil, []string{"+$natural"}, &oldestOplogEntry, 0)
if err != nil {
return false, fmt.Errorf("unable to read entry from oplog: %v", err)
}
@@ -68,7 +69,7 @@ func (dump *MongoDump) checkOplogTimestampExists(ts bson.MongoTimestamp) (bool,
// DumpOplogAfterTimestamp takes a timestamp and writer and dumps all oplog entries after
// the given timestamp to the writer. Returns any errors that occur.
func (dump *MongoDump) DumpOplogAfterTimestamp(ts bson.MongoTimestamp) error {
- session, err := dump.sessionProvider.GetSession()
+ session, err := dump.SessionProvider.GetSession()
if err != nil {
return err
}
diff --git a/src/mongo/gotools/mongodump/prepare.go b/src/mongo/gotools/mongodump/prepare.go
index ff36bdfb02a..81a258e73eb 100644
--- a/src/mongo/gotools/mongodump/prepare.go
+++ b/src/mongo/gotools/mongodump/prepare.go
@@ -3,16 +3,16 @@ package mongodump
import (
"bytes"
"fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+
"github.com/mongodb/mongo-tools/common/archive"
- "github.com/mongodb/mongo-tools/common/bsonutil"
"github.com/mongodb/mongo-tools/common/db"
"github.com/mongodb/mongo-tools/common/intents"
"github.com/mongodb/mongo-tools/common/log"
"gopkg.in/mgo.v2/bson"
- "io"
- "os"
- "path/filepath"
- "strings"
)
type NilPos struct{}
@@ -186,7 +186,7 @@ func (dump *MongoDump) NewIntent(dbName, colName string) (*intents.Intent, error
C: colName,
}
if dump.OutputOptions.Out == "-" {
- intent.BSONFile = &stdoutFile{Writer: dump.stdout}
+ intent.BSONFile = &stdoutFile{Writer: dump.OutputWriter}
} else {
if dump.OutputOptions.Archive != "" {
intent.BSONFile = &archive.MuxIn{Intent: intent, Mux: dump.archive.Mux}
@@ -213,7 +213,7 @@ func (dump *MongoDump) NewIntent(dbName, colName string) (*intents.Intent, error
}
// get a document count for scheduling purposes
- session, err := dump.sessionProvider.GetSession()
+ session, err := dump.SessionProvider.GetSession()
if err != nil {
return nil, err
}
@@ -295,29 +295,17 @@ func (dump *MongoDump) CreateCollectionIntent(dbName, colName string) error {
return err
}
- session, err := dump.sessionProvider.GetSession()
+ session, err := dump.SessionProvider.GetSession()
if err != nil {
return err
}
defer session.Close()
- opts, err := db.GetCollectionOptions(session.DB(dbName).C(colName))
+ intent.Options, err = db.GetCollectionOptions(session.DB(dbName).C(colName))
if err != nil {
return fmt.Errorf("error getting collection options: %v", err)
}
- intent.Options = nil
- if opts != nil {
- optsInterface, _ := bsonutil.FindValueByKey("options", opts)
- if optsInterface != nil {
- if optsD, ok := optsInterface.(bson.D); ok {
- intent.Options = &optsD
- } else {
- return fmt.Errorf("Failed to parse collection options as bson.D")
- }
- }
- }
-
dump.manager.Put(intent)
log.Logvf(log.DebugLow, "enqueued collection '%v'", intent.Namespace())
@@ -360,7 +348,7 @@ func (dump *MongoDump) createIntentFromOptions(dbName string, ci *collectionInfo
func (dump *MongoDump) CreateIntentsForDatabase(dbName string) error {
// we must ensure folders for empty databases are still created, for legacy purposes
- session, err := dump.sessionProvider.GetSession()
+ session, err := dump.SessionProvider.GetSession()
if err != nil {
return err
}
@@ -403,7 +391,7 @@ func (dump *MongoDump) CreateIntentsForDatabase(dbName string) error {
// CreateAllIntents iterates through all dbs and collections and builds
// dump intents for each collection.
func (dump *MongoDump) CreateAllIntents() error {
- dbs, err := dump.sessionProvider.DatabaseNames()
+ dbs, err := dump.SessionProvider.DatabaseNames()
if err != nil {
return fmt.Errorf("error getting database names: %v", err)
}
diff --git a/src/mongo/gotools/mongoexport/main/mongoexport.go b/src/mongo/gotools/mongoexport/main/mongoexport.go
index c2580621393..d7c7dba3e06 100644
--- a/src/mongo/gotools/mongoexport/main/mongoexport.go
+++ b/src/mongo/gotools/mongoexport/main/mongoexport.go
@@ -62,6 +62,10 @@ func main() {
opts.ReplicaSetName = setName
provider, err := db.NewSessionProvider(*opts)
+ if err != nil {
+ log.Logvf(log.Always, "%v", err)
+ os.Exit(util.ExitError)
+ }
defer provider.Close()
// temporarily allow secondary reads for the isMongos check
diff --git a/src/mongo/gotools/mongoexport/mongoexport.go b/src/mongo/gotools/mongoexport/mongoexport.go
index 6b4e96c68f1..4feab4894d2 100644
--- a/src/mongo/gotools/mongoexport/mongoexport.go
+++ b/src/mongo/gotools/mongoexport/mongoexport.go
@@ -233,16 +233,31 @@ func (exp *MongoExport) getCursor() (*mgo.Iter, *mgo.Session, error) {
}
}
- flags := 0
- if len(query) == 0 && exp.InputOpts != nil &&
- exp.InputOpts.ForceTableScan != true && exp.InputOpts.Sort == "" {
- flags = flags | db.Snapshot
+ session, err := exp.SessionProvider.GetSession()
+ if err != nil {
+ return nil, nil, err
}
+ collection := session.DB(exp.ToolOptions.Namespace.DB).C(exp.ToolOptions.Namespace.Collection)
- session, err := exp.SessionProvider.GetSession()
+ // figure out if we're exporting a view
+ isView := false
+ opts, err := db.GetCollectionOptions(collection)
if err != nil {
return nil, nil, err
}
+ if opts != nil {
+ viewOn, _ := bsonutil.FindValueByKey("viewOn", opts)
+ if viewOn != nil {
+ isView = true
+ }
+ }
+
+ flags := 0
+ // don't snapshot if we've been asked not to,
+ // or if we cannot because we are querying, sorting, or if the collection is a view
+ if !exp.InputOpts.ForceTableScan && len(query) == 0 && exp.InputOpts != nil && exp.InputOpts.Sort == "" && !isView {
+ flags = flags | db.Snapshot
+ }
skip := 0
if exp.InputOpts != nil {
@@ -265,9 +280,7 @@ func (exp *MongoExport) getCursor() (*mgo.Iter, *mgo.Session, error) {
}
// build the query
- q := session.DB(exp.ToolOptions.Namespace.DB).
- C(exp.ToolOptions.Namespace.Collection).Find(query).Sort(sortFields...).
- Skip(skip).Limit(limit)
+ q := collection.Find(query).Sort(sortFields...).Skip(skip).Limit(limit)
if len(exp.OutputOpts.Fields) > 0 {
q.Select(makeFieldSelector(exp.OutputOpts.Fields))
diff --git a/src/mongo/gotools/mongofiles/main/mongofiles.go b/src/mongo/gotools/mongofiles/main/mongofiles.go
index 1227fcc2f43..028dc940302 100644
--- a/src/mongo/gotools/mongofiles/main/mongofiles.go
+++ b/src/mongo/gotools/mongofiles/main/mongofiles.go
@@ -50,11 +50,11 @@ func main() {
// create a session provider to connect to the db
provider, err := db.NewSessionProvider(*opts)
- defer provider.Close()
if err != nil {
log.Logvf(log.Always, "error connecting to host: %v", err)
os.Exit(util.ExitError)
}
+ defer provider.Close()
mf := mongofiles.MongoFiles{
ToolOptions: opts,
StorageOptions: storageOpts,
diff --git a/src/mongo/gotools/mongoimport/main/mongoimport.go b/src/mongo/gotools/mongoimport/main/mongoimport.go
index 2a0e92aa5b5..837e6d0ac26 100644
--- a/src/mongo/gotools/mongoimport/main/mongoimport.go
+++ b/src/mongo/gotools/mongoimport/main/mongoimport.go
@@ -50,11 +50,11 @@ func main() {
// create a session provider to connect to the db
sessionProvider, err := db.NewSessionProvider(*opts)
- defer sessionProvider.Close()
if err != nil {
log.Logvf(log.Always, "error connecting to host: %v", err)
os.Exit(util.ExitError)
}
+ defer sessionProvider.Close()
sessionProvider.SetBypassDocumentValidation(ingestOpts.BypassDocumentValidation)
m := mongoimport.MongoImport{
diff --git a/src/mongo/gotools/mongoimport/mongoimport.go b/src/mongo/gotools/mongoimport/mongoimport.go
index eff9770a2de..7fcdf91a990 100644
--- a/src/mongo/gotools/mongoimport/mongoimport.go
+++ b/src/mongo/gotools/mongoimport/mongoimport.go
@@ -288,7 +288,7 @@ type fileSizeProgressor struct {
}
func (fsp *fileSizeProgressor) Progress() (int64, int64) {
- return fsp.max, fsp.sizeTracker.Size()
+ return fsp.sizeTracker.Size(), fsp.max
}
// ImportDocuments is used to write input data to the database. It returns the
diff --git a/src/mongo/gotools/mongooplog/main/mongooplog.go b/src/mongo/gotools/mongooplog/main/mongooplog.go
index d75bd8c8ec8..a9215a64c36 100644
--- a/src/mongo/gotools/mongooplog/main/mongooplog.go
+++ b/src/mongo/gotools/mongooplog/main/mongooplog.go
@@ -63,21 +63,21 @@ func main() {
// create a session provider for the destination server
sessionProviderTo, err := db.NewSessionProvider(*opts)
- defer sessionProviderTo.Close()
if err != nil {
log.Logvf(log.Always, "error connecting to destination host: %v", err)
os.Exit(util.ExitError)
}
+ defer sessionProviderTo.Close()
// create a session provider for the source server
opts.Connection.Host = sourceOpts.From
opts.Connection.Port = ""
sessionProviderFrom, err := db.NewSessionProvider(*opts)
- defer sessionProviderFrom.Close()
if err != nil {
log.Logvf(log.Always, "error connecting to source host: %v", err)
os.Exit(util.ExitError)
}
+ defer sessionProviderFrom.Close()
// initialize mongooplog
oplog := mongooplog.MongoOplog{
diff --git a/src/mongo/gotools/mongoreplay/auth_test.go b/src/mongo/gotools/mongoreplay/auth_test.go
index 8b7a0038b81..58ba964a753 100644
--- a/src/mongo/gotools/mongoreplay/auth_test.go
+++ b/src/mongo/gotools/mongoreplay/auth_test.go
@@ -29,7 +29,7 @@ func TestCommandsAgainstAuthedDBWhenAuthed(t *testing.T) {
t.Error(err)
}
}()
- statCollector, _ := newStatCollector(testCollectorOpts, true, true)
+ statCollector, _ := newStatCollector(testCollectorOpts, "format", true, true)
context := NewExecutionContext(statCollector)
t.Logf("Beginning mongoreplay playback of generated traffic against host: %v\n", urlAuth)
err := Play(context, generator.opChan, testSpeed, urlAuth, 1, 10)
@@ -94,7 +94,7 @@ func TestCommandsAgainstAuthedDBWhenNotAuthed(t *testing.T) {
t.Error(err)
}
}()
- statCollector, _ := newStatCollector(testCollectorOpts, true, true)
+ statCollector, _ := newStatCollector(testCollectorOpts, "format", true, true)
context := NewExecutionContext(statCollector)
err := Play(context, generator.opChan, testSpeed, urlNonAuth, 1, 10)
if err != nil {
diff --git a/src/mongo/gotools/mongoreplay/cursors.go b/src/mongo/gotools/mongoreplay/cursors.go
index a6e58357d3a..2dce931f389 100644
--- a/src/mongo/gotools/mongoreplay/cursors.go
+++ b/src/mongo/gotools/mongoreplay/cursors.go
@@ -175,7 +175,8 @@ func newPreprocessCursorManager(opChan <-chan *RecordedOp) (*preprocessCursorMan
if op.RawOp.Header.OpCode == OpCodeCommand {
commandName, err := getCommandName(&op.RawOp)
if err != nil {
- return nil, err
+ userInfoLogger.Logvf(DebugLow, "preprocessing op no command name: %v", err)
+ continue
}
if commandName != "getMore" && commandName != "getmore" {
continue
@@ -184,7 +185,8 @@ func newPreprocessCursorManager(opChan <-chan *RecordedOp) (*preprocessCursorMan
parsedOp, err := op.RawOp.Parse()
if err != nil {
- return nil, err
+ userInfoLogger.Logvf(DebugLow, "preprocessing op parse error: %v", err)
+ continue
}
switch castOp := parsedOp.(type) {
@@ -194,7 +196,8 @@ func newPreprocessCursorManager(opChan <-chan *RecordedOp) (*preprocessCursorMan
// cursor
cursorIDs, err := castOp.getCursorIDs()
if err != nil {
- return nil, err
+ userInfoLogger.Logvf(DebugLow, "preprocessing op no cursorId: %v", err)
+ continue
}
for _, cursorID := range cursorIDs {
if cursorID == 0 {
@@ -209,7 +212,8 @@ func newPreprocessCursorManager(opChan <-chan *RecordedOp) (*preprocessCursorMan
// cursor id.
cursorID, err := castOp.getCursorID()
if err != nil {
- return nil, err
+ userInfoLogger.Logvf(DebugLow, "preprocessing op no cursorId: %v", err)
+ continue
}
if cursorID == 0 {
continue
diff --git a/src/mongo/gotools/mongoreplay/execute.go b/src/mongo/gotools/mongoreplay/execute.go
index dce06fbb2f6..ff3e58104e4 100644
--- a/src/mongo/gotools/mongoreplay/execute.go
+++ b/src/mongo/gotools/mongoreplay/execute.go
@@ -150,10 +150,10 @@ func (context *ExecutionContext) newExecutionSession(url string, start time.Time
var connected bool
time.Sleep(start.Add(-5 * time.Second).Sub(now)) // Sleep until five seconds before the start time
session, err := mgo.Dial(url)
- defer session.Close()
if err == nil {
userInfoLogger.Logvf(Info, "(Connection %v) New connection CREATED.", connectionNum)
connected = true
+ defer session.Close()
} else {
userInfoLogger.Logvf(Info, "(Connection %v) New Connection FAILED: %v", connectionNum, err)
}
diff --git a/src/mongo/gotools/mongoreplay/mongoreplay_test.go b/src/mongo/gotools/mongoreplay/mongoreplay_test.go
index 5b54ebda216..8df441765e2 100644
--- a/src/mongo/gotools/mongoreplay/mongoreplay_test.go
+++ b/src/mongo/gotools/mongoreplay/mongoreplay_test.go
@@ -559,6 +559,16 @@ func TestShortenLegacyReply(t *testing.T) {
}
}
+type cursorDoc struct {
+ Batch []interface{} `bson:"firstBatch"`
+ Id int64 `bson:"id"`
+ Ns string `bson:"ns"`
+}
+type findReply struct {
+ Cursor cursorDoc `bson:"cursor"`
+ Ok int `bson:"ok"`
+}
+
func TestShortenCommandReply(t *testing.T) {
generator := newRecordedOpGenerator()
@@ -568,11 +578,6 @@ func TestShortenCommandReply(t *testing.T) {
DocumentNumber: 100000,
Success: true,
}
- op.CommandReply = &testDoc{
- Name: "Command Reply",
- DocumentNumber: 200000,
- Success: true,
- }
doc1 := testDoc{
Name: "Op Raw Short Reply Test 1",
@@ -584,14 +589,21 @@ func TestShortenCommandReply(t *testing.T) {
DocumentNumber: 2,
Success: true,
}
- op.OutputDocs = []interface{}{doc1, doc2}
+
+ batch := []interface{}{doc1, doc2}
+
+ cursorDocIn := cursorDoc{
+ batch, 12345678, "test"}
+
+ op.CommandReply = findReply{cursorDocIn, 1}
+ op.OutputDocs = []interface{}{}
result, err := generator.fetchRecordedOpsFromConn(&op.CommandReplyOp)
// reply should be functional and parseable
parsed, err := result.RawOp.Parse()
if err != nil {
- t.Errorf("error parsing op: %v", err)
+ t.Errorf("error parsing op: %#v", err)
}
t.Logf("parsed Op: %v", parsed)
@@ -600,8 +612,23 @@ func TestShortenCommandReply(t *testing.T) {
if !ok {
t.Errorf("parsed op was wrong type")
}
- if !(len(fullReply.OutputDocs) == 2) {
- t.Errorf("parsed reply has wrong number of docs: %d", len(fullReply.OutputDocs))
+
+ commandReplyCheckRaw, ok := fullReply.CommandReply.(*bson.Raw)
+ if !ok {
+ t.Errorf("comamndReply not bson.Raw")
+ }
+
+ commandReplyCheck := &findReply{
+ Cursor: cursorDoc{},
+ }
+ err = bson.Unmarshal(commandReplyCheckRaw.Data, commandReplyCheck)
+ if err != nil {
+ t.Errorf("error unmarshaling commandReply %v", err)
+ }
+
+ // ensure that the reply now has 2 document
+ if !(len(commandReplyCheck.Cursor.Batch) == 2) {
+ t.Errorf("parsed reply has wrong number of docs: %d", len(commandReplyCheck.Cursor.Batch))
}
// shorten the reply
@@ -617,9 +644,22 @@ func TestShortenCommandReply(t *testing.T) {
t.Errorf("parsed op was wrong type")
}
- // ensure that the reply now has only 1 document
- if !(len(fullReply.OutputDocs) == 1) {
- t.Errorf("parsed reply has wrong number of docs: %d", len(fullReply.OutputDocs))
+ commandReplyRaw, ok := fullReply.CommandReply.(*bson.Raw)
+ if !ok {
+ t.Errorf("comamndReply not bson.Raw")
+ }
+
+ commandReplyOut := &findReply{
+ Cursor: cursorDoc{},
+ }
+ err = bson.Unmarshal(commandReplyRaw.Data, commandReplyOut)
+ if err != nil {
+ t.Errorf("error unmarshaling commandReply %v", err)
+ }
+
+ // ensure that the reply now has 0 documents
+ if !(len(commandReplyOut.Cursor.Batch) == 0) {
+ t.Errorf("parsed reply has wrong number of docs: %d", len(commandReplyOut.Cursor.Batch))
}
}
diff --git a/src/mongo/gotools/mongoreplay/monitor.go b/src/mongo/gotools/mongoreplay/monitor.go
index 502dc27ca7e..f79578e70c0 100644
--- a/src/mongo/gotools/mongoreplay/monitor.go
+++ b/src/mongo/gotools/mongoreplay/monitor.go
@@ -14,6 +14,7 @@ type MonitorCommand struct {
GlobalOpts *Options `no-flag:"true"`
StatOptions
OpStreamSettings
+ Collect string `long:"collect" description:"Stat collection format; 'format' option uses the --format string" choice:"json" choice:"format" choice:"none" default:"format"`
PairedMode bool `long:"paired" description:"Output only one line for a request/reply pair"`
Gzip bool `long:"gzip" description:"decompress gzipped input"`
PlaybackFile string `short:"p" description:"path to playback file to read from" long:"playback-file"`
@@ -125,7 +126,7 @@ func (monitor *MonitorCommand) Execute(args []string) error {
ctx.packetHandler.Close()
}()
}
- statColl, err := newStatCollector(monitor.StatOptions, monitor.PairedMode, false)
+ statColl, err := newStatCollector(monitor.StatOptions, monitor.Collect, monitor.PairedMode, false)
if err != nil {
return err
}
diff --git a/src/mongo/gotools/mongoreplay/pcap_test.go b/src/mongo/gotools/mongoreplay/pcap_test.go
index 97334a43989..185e9673f1e 100644
--- a/src/mongo/gotools/mongoreplay/pcap_test.go
+++ b/src/mongo/gotools/mongoreplay/pcap_test.go
@@ -165,7 +165,7 @@ func pcapTestHelper(t *testing.T, pcapFname string, preprocess bool, verifier ve
t.Errorf("error opening playback file to write: %v\n", err)
}
- statCollector, _ := newStatCollector(testCollectorOpts, true, true)
+ statCollector, _ := newStatCollector(testCollectorOpts, "format", true, true)
statRec := statCollector.StatRecorder.(*BufferedStatRecorder)
context := NewExecutionContext(statCollector)
diff --git a/src/mongo/gotools/mongoreplay/play.go b/src/mongo/gotools/mongoreplay/play.go
index 220ee2b6664..baee4db33dd 100644
--- a/src/mongo/gotools/mongoreplay/play.go
+++ b/src/mongo/gotools/mongoreplay/play.go
@@ -21,6 +21,7 @@ type PlayCommand struct {
QueueTime int `long:"queueTime" description:"don't queue ops much further in the future than this number of seconds" default:"15"`
NoPreprocess bool `long:"no-preprocess" description:"don't preprocess the input file to premap data such as mongo cursorIDs"`
Gzip bool `long:"gzip" description:"decompress gzipped input"`
+ Collect string `long:"collect" description:"Stat collection format; 'format' option uses the --format string" choice:"json" choice:"format" choice:"none" default:"none"`
}
const queueGranularity = 1000
@@ -179,7 +180,7 @@ func (play *PlayCommand) Execute(args []string) error {
}
play.GlobalOpts.SetLogging()
- statColl, err := newStatCollector(play.StatOptions, true, true)
+ statColl, err := newStatCollector(play.StatOptions, play.Collect, true, true)
if err != nil {
return err
}
diff --git a/src/mongo/gotools/mongoreplay/play_livedb_test.go b/src/mongo/gotools/mongoreplay/play_livedb_test.go
index 9f5e5c7e9e7..3f1dc3473c8 100644
--- a/src/mongo/gotools/mongoreplay/play_livedb_test.go
+++ b/src/mongo/gotools/mongoreplay/play_livedb_test.go
@@ -29,7 +29,6 @@ var (
authTestServerMode bool
isMongosTestServer bool
testCollectorOpts = StatOptions{
- Collect: "format",
Buffered: true,
}
)
@@ -181,7 +180,7 @@ func TestOpInsertLiveDB(t *testing.T) {
}
}()
- statCollector, _ := newStatCollector(testCollectorOpts, true, true)
+ statCollector, _ := newStatCollector(testCollectorOpts, "format", true, true)
statRec := statCollector.StatRecorder.(*BufferedStatRecorder)
context := NewExecutionContext(statCollector)
@@ -297,7 +296,7 @@ func TestUpdateOpLiveDB(t *testing.T) {
}
}()
- statCollector, _ := newStatCollector(testCollectorOpts, true, true)
+ statCollector, _ := newStatCollector(testCollectorOpts, "format", true, true)
statRec := statCollector.StatRecorder.(*BufferedStatRecorder)
context := NewExecutionContext(statCollector)
@@ -400,7 +399,7 @@ func TestQueryOpLiveDB(t *testing.T) {
}
}()
- statCollector, _ := newStatCollector(testCollectorOpts, true, true)
+ statCollector, _ := newStatCollector(testCollectorOpts, "format", true, true)
statRec := statCollector.StatRecorder.(*BufferedStatRecorder)
context := NewExecutionContext(statCollector)
@@ -488,7 +487,7 @@ func TestOpGetMoreLiveDB(t *testing.T) {
}
}
}()
- statCollector, _ := newStatCollector(testCollectorOpts, true, true)
+ statCollector, _ := newStatCollector(testCollectorOpts, "format", true, true)
statRec := statCollector.StatRecorder.(*BufferedStatRecorder)
context := NewExecutionContext(statCollector)
@@ -589,7 +588,7 @@ func TestOpGetMoreMultiCursorLiveDB(t *testing.T) {
}
}
}()
- statCollector, _ := newStatCollector(testCollectorOpts, true, true)
+ statCollector, _ := newStatCollector(testCollectorOpts, "format", true, true)
statRec := statCollector.StatRecorder.(*BufferedStatRecorder)
context := NewExecutionContext(statCollector)
@@ -710,7 +709,7 @@ func TestOpKillCursorsLiveDB(t *testing.T) {
t.Error(err)
}
}()
- statCollector, _ := newStatCollector(testCollectorOpts, true, true)
+ statCollector, _ := newStatCollector(testCollectorOpts, "format", true, true)
statRec := statCollector.StatRecorder.(*BufferedStatRecorder)
context := NewExecutionContext(statCollector)
@@ -776,7 +775,7 @@ func TestCommandOpInsertLiveDB(t *testing.T) {
}
}()
- statCollector, _ := newStatCollector(testCollectorOpts, true, true)
+ statCollector, _ := newStatCollector(testCollectorOpts, "format", true, true)
statRec := statCollector.StatRecorder.(*BufferedStatRecorder)
context := NewExecutionContext(statCollector)
@@ -876,7 +875,7 @@ func TestCommandOpFindLiveDB(t *testing.T) {
}
}()
- statCollector, _ := newStatCollector(testCollectorOpts, true, true)
+ statCollector, _ := newStatCollector(testCollectorOpts, "format", true, true)
statRec := statCollector.StatRecorder.(*BufferedStatRecorder)
context := NewExecutionContext(statCollector)
@@ -968,7 +967,7 @@ func TestCommandOpGetMoreLiveDB(t *testing.T) {
}
}
}()
- statCollector, _ := newStatCollector(testCollectorOpts, true, true)
+ statCollector, _ := newStatCollector(testCollectorOpts, "format", true, true)
statRec := statCollector.StatRecorder.(*BufferedStatRecorder)
context := NewExecutionContext(statCollector)
@@ -1118,9 +1117,9 @@ func (generator *recordedOpGenerator) generateCommandFind(filter interface{}, li
func (generator *recordedOpGenerator) generateCommandGetMore(cursorID int64, limit int32) error {
var getmoreArgs bson.D
if limit > 0 {
- getmoreArgs = bson.D{{"collection", testCollection}, {"getMore", cursorID}, {"batchSize", limit}}
+ getmoreArgs = bson.D{{"getMore", cursorID}, {"collection", testCollection}, {"batchSize", limit}}
} else {
- getmoreArgs = bson.D{{"collection", testCollection}, {"getMore", cursorID}}
+ getmoreArgs = bson.D{{"getMore", cursorID}, {"collection", testCollection}}
}
return generator.generateCommandOp("getMore", getmoreArgs, 0)
diff --git a/src/mongo/gotools/mongoreplay/raw_op.go b/src/mongo/gotools/mongoreplay/raw_op.go
index bbe6871aa9e..2ab0c890cd0 100644
--- a/src/mongo/gotools/mongoreplay/raw_op.go
+++ b/src/mongo/gotools/mongoreplay/raw_op.go
@@ -6,8 +6,11 @@ import (
"io"
mgo "github.com/10gen/llmgo"
+ "github.com/10gen/llmgo/bson"
)
+const maxBSONSize = 16 * 1024 * 1024 // 16MB - maximum BSON document size
+
// RawOp may be exactly the same as OpUnknown.
type RawOp struct {
Header MsgHeader
@@ -49,6 +52,16 @@ func (op *RawOp) FromReader(r io.Reader) error {
return err
}
+type CommandReplyStruct struct {
+ Cursor struct {
+ Id int64 `bson:"id"`
+ Ns string `bson:"ns"`
+ FirstBatch bson.Raw `bson:"firstBatch,omitempty"`
+ NextBatch bson.Raw `bson:"nextBatch,omitempty"`
+ } `bson:"cursor"`
+ Ok int `bson:"ok"`
+}
+
// ShortReplyFromReader reads an op from the given reader. It only holds on
// to header-related information and the first document.
func (op *RawOp) ShortenReply() error {
@@ -66,18 +79,51 @@ func (op *RawOp) ShortenReply() error {
return nil
}
firstDocSize := getInt32(op.Body, 20+MsgHeaderLen)
+ if 20+MsgHeaderLen+int(firstDocSize) > len(op.Body) || firstDocSize > maxBSONSize {
+ return fmt.Errorf("the size of the first document is greater then the size of the message")
+ }
op.Body = op.Body[0:(20 + MsgHeaderLen + firstDocSize)]
case OpCodeCommandReply:
- commandReplyDocSize := getInt32(op.Body, MsgHeaderLen)
- metadataDocSize := getInt32(op.Body, int(commandReplyDocSize)+MsgHeaderLen)
- if op.Header.MessageLength <= commandReplyDocSize+metadataDocSize+MsgHeaderLen {
- //there are no reply docs
+ // unmarshal the needed fields for replacing into the buffer
+ commandReply := &CommandReplyStruct{}
+
+ err := bson.Unmarshal(op.Body[MsgHeaderLen:], commandReply)
+ if err != nil {
+ return fmt.Errorf("unmarshaling op to shorten: %v", err)
+ }
+ switch {
+ case commandReply.Cursor.FirstBatch.Data != nil:
+ commandReply.Cursor.FirstBatch.Data, _ = bson.Marshal([0]byte{})
+
+ case commandReply.Cursor.NextBatch.Data != nil:
+ commandReply.Cursor.NextBatch.Data, _ = bson.Marshal([0]byte{})
+
+ default:
+ // it's not a findReply so we don't care about it
return nil
}
- firstOutputDocSize := getInt32(op.Body, int(commandReplyDocSize+metadataDocSize)+MsgHeaderLen)
- shortReplySize := commandReplyDocSize + metadataDocSize + firstOutputDocSize + MsgHeaderLen
- op.Body = op.Body[0:shortReplySize]
+
+ out, err := bson.Marshal(commandReply)
+ if err != nil {
+ return err
+ }
+
+ // calculate the new sizes for offsets into the new buffer
+ commandReplySize := getInt32(op.Body, MsgHeaderLen)
+ newCommandReplySize := getInt32(out, 0)
+ sizeDiff := commandReplySize - newCommandReplySize
+ newSize := op.Header.MessageLength - sizeDiff
+ newBody := make([]byte, newSize)
+
+ // copy the new data into a buffer that will replace the old buffer
+ copy(newBody, op.Body[:MsgHeaderLen])
+ copy(newBody[MsgHeaderLen:], out)
+ copy(newBody[MsgHeaderLen+newCommandReplySize:], op.Body[MsgHeaderLen+commandReplySize:])
+ // update the size of this message in the headers
+ SetInt32(newBody, 0, newSize)
+ op.Header.MessageLength = newSize
+ op.Body = newBody
default:
return fmt.Errorf("unexpected op type : %v", op.Header.OpCode)
diff --git a/src/mongo/gotools/mongoreplay/record.go b/src/mongo/gotools/mongoreplay/record.go
index 670239ab3fe..3137488b923 100644
--- a/src/mongo/gotools/mongoreplay/record.go
+++ b/src/mongo/gotools/mongoreplay/record.go
@@ -51,13 +51,13 @@ func getOpstream(cfg OpStreamSettings) (*packetHandlerContext, error) {
}
} else if len(cfg.NetworkInterface) > 0 {
inactive, err := pcap.NewInactiveHandle(cfg.NetworkInterface)
- // This is safe; calling `Activate()` steals the underlying ptr.
- defer inactive.CleanUp()
if err != nil {
return nil, fmt.Errorf("error creating a pcap handle: %v", err)
}
+ // This is safe; calling `Activate()` steals the underlying ptr.
+ defer inactive.CleanUp()
- err = inactive.SetSnapLen(32*1024*1024)
+ err = inactive.SetSnapLen(64 * 1024)
if err != nil {
return nil, fmt.Errorf("error setting snaplen on pcap handle: %v", err)
}
@@ -140,7 +140,7 @@ func (record *RecordCommand) ValidateParams(args []string) error {
}
if record.OpStreamSettings.CaptureBufSize == 0 {
// default capture buffer size to 2 MiB (same as libpcap)
- record.OpStreamSettings.CaptureBufSize = 2*1024
+ record.OpStreamSettings.CaptureBufSize = 2 * 1024
}
return nil
}
@@ -169,10 +169,10 @@ func (record *RecordCommand) Execute(args []string) error {
ctx.packetHandler.Close()
}()
playbackWriter, err := NewPlaybackWriter(record.PlaybackFile, record.Gzip)
- defer playbackWriter.Close()
if err != nil {
return err
}
+ defer playbackWriter.Close()
return Record(ctx, playbackWriter, record.FullReplies)
@@ -186,23 +186,35 @@ func Record(ctx *packetHandlerContext,
ch := make(chan error)
go func() {
defer close(ch)
+ var fail error
for op := range ctx.mongoOpStream.Ops {
+ // since we don't currently have a way to shutdown packetHandler.Handle()
+ // continue to read from ctx.mongoOpStream.Ops even after a fatal error
+ if fail != nil {
+ toolDebugLogger.Logvf(DebugHigh, "not recording op because of record error %v", fail)
+ continue
+ }
if (op.Header.OpCode == OpCodeReply || op.Header.OpCode == OpCodeCommandReply) &&
!noShortenReply {
- op.ShortenReply()
+ err := op.ShortenReply()
+ if err != nil {
+ userInfoLogger.Logvf(DebugLow, "stream %v problem shortening reply: %v", op.SeenConnectionNum, err)
+ continue
+ }
}
bsonBytes, err := bson.Marshal(op)
if err != nil {
- ch <- fmt.Errorf("error marshaling message: %v", err)
- return
+ userInfoLogger.Logvf(DebugLow, "stream %v error marshaling message: %v", op.SeenConnectionNum, err)
+ continue
}
_, err = playbackWriter.Write(bsonBytes)
if err != nil {
- ch <- fmt.Errorf("error writing message: %v", err)
- return
+ fail = fmt.Errorf("error writing message: %v", err)
+ userInfoLogger.Logvf(Always, "%v", err)
+ continue
}
}
- ch <- nil
+ ch <- fail
}()
if err := ctx.packetHandler.Handle(ctx.mongoOpStream, -1); err != nil {
diff --git a/src/mongo/gotools/mongoreplay/stat_collector.go b/src/mongo/gotools/mongoreplay/stat_collector.go
index 94f133c5d20..91009fe060e 100644
--- a/src/mongo/gotools/mongoreplay/stat_collector.go
+++ b/src/mongo/gotools/mongoreplay/stat_collector.go
@@ -18,8 +18,8 @@ const TruncateLength = 350
// StatOptions stores settings for the mongoreplay subcommands which have stat
// output
type StatOptions struct {
- Collect string `long:"collect" description:"Stat collection format; 'format' option uses the --format string" choice:"json" choice:"format" choice:"none" default:"format"`
Buffered bool `hidden:"yes"`
+ BufferSize int `long:"stats-buffer-size" description:"the size (in events) of the stat collector buffer" default:"1024"`
Report string `long:"report" description:"Write report on execution to given output path"`
NoTruncate bool `long:"no-truncate" description:"Disable truncation of large payload data in log output"`
Format string `long:"format" description:"Format for terminal output, %-escaped. Arguments are provided immediately after the escape, surrounded in curly braces. Supported escapes are:\n %n namespace\n%l latency\n%t time (optional arg -- specify date layout, e.g. '%t{3:04PM}')\n%T op type\n%c command\n%o number of connections\n%i request ID\n%q request (optional arg -- dot-delimited field within the JSON structure, e.g. '%q{command_args.documents}')\n%r response (optional arg -- same as %q)\n%Q{<arg>} conditionally show <arg> on presence of request data\n%R{<arg>} conditionally show <arg> on presence of response data\nANSI escape sequences, start/end:\n%B/%b bold\n%U/%u underline\n%S/%s standout\n%F/%f text color (required arg -- word or number, 8-color)\n%K/%k background color (required arg -- same as %F/%f)\n" default:"%F{blue}%t%f %F{cyan}(Connection: %o:%i)%f %F{yellow}%l%f %F{red}%T %c%f %F{white}%n%f %F{green}%Q{Request:}%f%q %F{green}%R{Response:}%f%r"`
@@ -34,6 +34,7 @@ type StatCollector struct {
sync.Once
done chan struct{}
statStream chan *OpStat
+ statStreamSize int
StatGenerator
StatRecorder
noop bool
@@ -50,11 +51,11 @@ func (statColl *StatCollector) Close() error {
return statColl.StatRecorder.Close()
}
-func newStatCollector(opts StatOptions, isPairedMode bool, isComparative bool) (*StatCollector, error) {
+func newStatCollector(opts StatOptions, collectFormat string, isPairedMode bool, isComparative bool) (*StatCollector, error) {
if opts.Buffered {
- opts.Collect = "buffered"
+ collectFormat = "buffered"
}
- if opts.Collect == "none" {
+ if collectFormat == "none" {
return &StatCollector{noop: true}, nil
}
@@ -84,7 +85,7 @@ func newStatCollector(opts StatOptions, isPairedMode bool, isComparative bool) (
}
var statRec StatRecorder
- switch opts.Collect {
+ switch collectFormat {
case "json":
statRec = &JSONStatRecorder{
out: o,
@@ -101,9 +102,14 @@ func newStatCollector(opts StatOptions, isPairedMode bool, isComparative bool) (
}
}
+ if opts.BufferSize < 1 {
+ opts.BufferSize = 1
+ }
+
return &StatCollector{
- StatGenerator: statGen,
- StatRecorder: statRec,
+ StatGenerator: statGen,
+ StatRecorder: statRec,
+ statStreamSize: opts.BufferSize,
}, nil
}
@@ -145,7 +151,7 @@ func (statColl *StatCollector) Collect(op *RecordedOp, replayedOp Op, reply Repl
return
}
statColl.Do(func() {
- statColl.statStream = make(chan *OpStat, 1024)
+ statColl.statStream = make(chan *OpStat, statColl.statStreamSize)
statColl.done = make(chan struct{})
go func() {
for stat := range statColl.statStream {
diff --git a/src/mongo/gotools/mongorestore/filepath.go b/src/mongo/gotools/mongorestore/filepath.go
index c3d434f68a0..af56f0e74e4 100644
--- a/src/mongo/gotools/mongorestore/filepath.go
+++ b/src/mongo/gotools/mongorestore/filepath.go
@@ -467,7 +467,7 @@ func (restore *MongoRestore) CreateStdinIntentForCollection(db string, collectio
C: collection,
Location: "-",
}
- intent.BSONFile = &stdinFile{Reader: restore.stdin}
+ intent.BSONFile = &stdinFile{Reader: restore.InputReader}
restore.manager.Put(intent)
return nil
}
diff --git a/src/mongo/gotools/mongorestore/main/mongorestore.go b/src/mongo/gotools/mongorestore/main/mongorestore.go
index d16e8a3a9a7..fa6b6d84caa 100644
--- a/src/mongo/gotools/mongorestore/main/mongorestore.go
+++ b/src/mongo/gotools/mongorestore/main/mongorestore.go
@@ -67,11 +67,11 @@ func main() {
opts.ReplicaSetName = setName
provider, err := db.NewSessionProvider(*opts)
- defer provider.Close()
if err != nil {
log.Logvf(log.Always, "error connecting to host: %v", err)
os.Exit(util.ExitError)
}
+ defer provider.Close()
provider.SetBypassDocumentValidation(outputOpts.BypassDocumentValidation)
// disable TCP timeouts for restore jobs
diff --git a/src/mongo/gotools/mongorestore/metadata.go b/src/mongo/gotools/mongorestore/metadata.go
index 2c488fc9b35..8a623f7e25e 100644
--- a/src/mongo/gotools/mongorestore/metadata.go
+++ b/src/mongo/gotools/mongorestore/metadata.go
@@ -499,6 +499,9 @@ func (restore *MongoRestore) ValidateAuthVersions() error {
// ShouldRestoreUsersAndRoles returns true if mongorestore should go
// through the process of restoring collections pertaining to authentication.
func (restore *MongoRestore) ShouldRestoreUsersAndRoles() bool {
+ if restore.SkipUsersAndRoles {
+ return false
+ }
// If the user has done anything that would indicate the restoration
// of users and roles (i.e. used --restoreDbUsersAndRoles, -d admin, or
// is doing a full restore), then we check if users or roles BSON files
diff --git a/src/mongo/gotools/mongorestore/mongorestore.go b/src/mongo/gotools/mongorestore/mongorestore.go
index 3e96bba0b0e..ee6cf6a5599 100644
--- a/src/mongo/gotools/mongorestore/mongorestore.go
+++ b/src/mongo/gotools/mongorestore/mongorestore.go
@@ -36,6 +36,9 @@ type MongoRestore struct {
TargetDirectory string
+ // Skip restoring users and roles, regardless of namespace, when true.
+ SkipUsersAndRoles bool
+
// other internal state
manager *intents.Manager
safety *mgo.Safe
@@ -62,8 +65,9 @@ type MongoRestore struct {
// channel on which to notify if/when a termination signal is received
termChan chan struct{}
- // for testing. If set, this value will be used instead of os.Stdin
- stdin io.Reader
+ // Reader to take care of BSON input if not reading from the local filesystem.
+ // This is initialized to os.Stdin if unset.
+ InputReader io.Reader
}
type collectionIndexes map[string][]IndexDocument
@@ -225,8 +229,8 @@ func (restore *MongoRestore) ParseAndValidateOptions() error {
return fmt.Errorf("cannot restore from stdin without a specified collection")
}
}
- if restore.stdin == nil {
- restore.stdin = os.Stdin
+ if restore.InputReader == nil {
+ restore.InputReader = os.Stdin
}
return nil
@@ -488,7 +492,7 @@ func (restore *MongoRestore) Restore() error {
func (restore *MongoRestore) getArchiveReader() (rc io.ReadCloser, err error) {
if restore.InputOptions.Archive == "-" {
- rc = ioutil.NopCloser(restore.stdin)
+ rc = ioutil.NopCloser(restore.InputReader)
} else {
targetStat, err := os.Stat(restore.InputOptions.Archive)
if err != nil {
diff --git a/src/mongo/gotools/mongorestore/mongorestore_test.go b/src/mongo/gotools/mongorestore/mongorestore_test.go
index 2221645c14c..c943dad6bde 100644
--- a/src/mongo/gotools/mongorestore/mongorestore_test.go
+++ b/src/mongo/gotools/mongorestore/mongorestore_test.go
@@ -76,7 +76,7 @@ func TestMongorestore(t *testing.T) {
restore.NSOptions.Collection = "c1"
restore.NSOptions.DB = "db1"
So(err, ShouldBeNil)
- restore.stdin = bsonFile
+ restore.InputReader = bsonFile
restore.TargetDirectory = "-"
err = restore.Restore()
So(err, ShouldBeNil)
diff --git a/src/mongo/gotools/mongorestore/restore.go b/src/mongo/gotools/mongorestore/restore.go
index 81adf69e63a..c97f1844161 100644
--- a/src/mongo/gotools/mongorestore/restore.go
+++ b/src/mongo/gotools/mongorestore/restore.go
@@ -141,6 +141,30 @@ func (restore *MongoRestore) RestoreIntent(intent *intents.Intent) error {
return fmt.Errorf("error parsing metadata from %v: %v", intent.MetadataLocation, err)
}
+ // The only way to specify options on the idIndex is at collection creation time.
+ // This loop pulls out the idIndex from `indexes` and sets it in `options`.
+ for i, index := range indexes {
+ // The index with the name "_id_" will always be the idIndex.
+ if index.Options["name"].(string) == "_id_" {
+ // Remove the index version (to use the default) unless otherwise specified.
+ if !restore.OutputOptions.KeepIndexVersion {
+ delete(index.Options, "v")
+ }
+ index.Options["ns"] = intent.Namespace()
+
+ // If the collection has an idIndex, then we are about to create it, so
+ // ignore the value of autoIndexId.
+ for j, opt := range options {
+ if opt.Name == "autoIndexId" {
+ options = append(options[:j], options[j+1:]...)
+ }
+ }
+ options = append(options, bson.DocElem{"idIndex", index})
+ indexes = append(indexes[:i], indexes[i+1:]...)
+ break
+ }
+ }
+
if restore.OutputOptions.NoOptionsRestore {
log.Logv(log.Info, "not restoring collection options")
logMessageSuffix = "with no collection options"
diff --git a/src/mongo/gotools/test/legacy24/buildscripts/buildlogger.py b/src/mongo/gotools/test/legacy24/buildscripts/buildlogger.py
deleted file mode 100644
index a31b3e2dfa1..00000000000
--- a/src/mongo/gotools/test/legacy24/buildscripts/buildlogger.py
+++ /dev/null
@@ -1,480 +0,0 @@
-"""
-buildlogger.py
-
-Wrap a command (specified on the command line invocation of buildlogger.py)
-and send output in batches to the buildlogs web application via HTTP POST.
-
-The script configures itself from environment variables:
-
- required env vars:
- MONGO_BUILDER_NAME (e.g. "Nightly Linux 64-bit")
- MONGO_BUILD_NUMBER (an integer)
- MONGO_TEST_FILENAME (not required when invoked with -g)
-
- optional env vars:
- MONGO_PHASE (e.g. "core", "slow nightly", etc)
- MONGO_* (any other environment vars are passed to the web app)
- BUILDLOGGER_CREDENTIALS (see below)
-
-This script has two modes: a "test" mode, intended to wrap the invocation of
-an individual test file, and a "global" mode, intended to wrap the mongod
-instances that run throughout the duration of a mongo test phase (the logs
-from "global" invocations are displayed interspersed with the logs of each
-test, in order to let the buildlogs web app display the full output sensibly.)
-
-If the BUILDLOGGER_CREDENTIALS environment variable is set, it should be a
-path to a valid Python file containing "username" and "password" variables,
-which should be valid credentials for authenticating to the buildlogger web
-app. For example:
-
- username = "hello"
- password = "world"
-
-If BUILDLOGGER_CREDENTIALS is a relative path, then the working directory
-and the directories one, two, and three levels up, are searched, in that
-order.
-"""
-
-import functools
-import os
-import os.path
-import re
-import signal
-import socket
-import subprocess
-import sys
-import time
-import traceback
-import urllib2
-import utils
-
-# suppress deprecation warnings that happen when
-# we import the 'buildbot.tac' file below
-import warnings
-warnings.simplefilter('ignore', DeprecationWarning)
-
-try:
- import json
-except:
- try:
- import simplejson as json
- except:
- json = None
-
-# try to load the shared secret from settings.py
-# which will be one, two, or three directories up
-# from this file's location
-credentials_file = os.environ.get('BUILDLOGGER_CREDENTIALS', 'buildbot.tac')
-credentials_loc, credentials_name = os.path.split(credentials_file)
-if not credentials_loc:
- here = os.path.abspath(os.path.dirname(__file__))
- possible_paths = [
- os.path.abspath(os.path.join(here, '..')),
- os.path.abspath(os.path.join(here, '..', '..')),
- os.path.abspath(os.path.join(here, '..', '..', '..')),
- ]
-else:
- possible_paths = [credentials_loc]
-
-username, password = None, None
-for path in possible_paths:
- credentials_path = os.path.join(path, credentials_name)
- if os.path.isfile(credentials_path):
- credentials = {}
- try:
- execfile(credentials_path, credentials, credentials)
- username = credentials.get('slavename', credentials.get('username'))
- password = credentials.get('passwd', credentials.get('password'))
- break
- except:
- pass
-
-
-URL_ROOT = os.environ.get('BUILDLOGGER_URL', 'http://buildlogs.mongodb.org/')
-TIMEOUT_SECONDS = 10
-socket.setdefaulttimeout(TIMEOUT_SECONDS)
-
-digest_handler = urllib2.HTTPDigestAuthHandler()
-digest_handler.add_password(
- realm='buildlogs',
- uri=URL_ROOT,
- user=username,
- passwd=password)
-
-# This version of HTTPErrorProcessor is copied from
-# Python 2.7, and allows REST response codes (e.g.
-# "201 Created") which are treated as errors by
-# older versions.
-class HTTPErrorProcessor(urllib2.HTTPErrorProcessor):
- def http_response(self, request, response):
- code, msg, hdrs = response.code, response.msg, response.info()
-
- # According to RFC 2616, "2xx" code indicates that the client's
- # request was successfully received, understood, and accepted.
- if not (200 <= code < 300):
- response = self.parent.error(
- 'http', request, response, code, msg, hdrs)
-
- return response
-
-url_opener = urllib2.build_opener(digest_handler, HTTPErrorProcessor())
-
-def url(endpoint):
- if not endpoint.endswith('/'):
- endpoint = '%s/' % endpoint
-
- return '%s/%s' % (URL_ROOT.rstrip('/'), endpoint)
-
-def post(endpoint, data, headers=None):
- data = json.dumps(data, encoding='utf-8')
-
- headers = headers or {}
- headers.update({'Content-Type': 'application/json; charset=utf-8'})
-
- req = urllib2.Request(url=url(endpoint), data=data, headers=headers)
- try:
- response = url_opener.open(req)
- except urllib2.URLError:
- import traceback
- traceback.print_exc(file=sys.stderr)
- sys.stderr.flush()
- # indicate that the request did not succeed
- return None
-
- response_headers = dict(response.info())
-
- # eg "Content-Type: application/json; charset=utf-8"
- content_type = response_headers.get('content-type')
- match = re.match(r'(?P<mimetype>[^;]+).*(?:charset=(?P<charset>[^ ]+))?$', content_type)
- if match and match.group('mimetype') == 'application/json':
- encoding = match.group('charset') or 'utf-8'
- return json.load(response, encoding=encoding)
-
- return response.read()
-
-def traceback_to_stderr(func):
- """
- decorator which logs any exceptions encountered to stderr
- and returns none.
- """
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- try:
- return func(*args, **kwargs)
- except urllib2.HTTPError, err:
- sys.stderr.write('error: HTTP code %d\n----\n' % err.code)
- if hasattr(err, 'hdrs'):
- for k, v in err.hdrs.items():
- sys.stderr.write("%s: %s\n" % (k, v))
- sys.stderr.write('\n')
- sys.stderr.write(err.read())
- sys.stderr.write('\n----\n')
- sys.stderr.flush()
- except:
- sys.stderr.write('Traceback from buildlogger:\n')
- traceback.print_exc(file=sys.stderr)
- sys.stderr.flush()
- return None
- return wrapper
-
-
-@traceback_to_stderr
-def get_or_create_build(builder, buildnum, extra={}):
- data = {'builder': builder, 'buildnum': buildnum}
- data.update(extra)
- response = post('build', data)
- if response is None:
- return None
- return response['id']
-
-@traceback_to_stderr
-def create_test(build_id, test_filename, test_command, test_phase):
- response = post('build/%s/test' % build_id, {
- 'test_filename': test_filename,
- 'command': test_command,
- 'phase': test_phase,
- })
- if response is None:
- return None
- return response['id']
-
-@traceback_to_stderr
-def append_test_logs(build_id, test_id, log_lines):
- response = post('build/%s/test/%s' % (build_id, test_id), data=log_lines)
- if response is None:
- return False
- return True
-
-@traceback_to_stderr
-def append_global_logs(build_id, log_lines):
- """
- "global" logs are for the mongod(s) started by smoke.py
- that last the duration of a test phase -- since there
- may be output in here that is important but spans individual
- tests, the buildlogs webapp handles these logs specially.
- """
- response = post('build/%s' % build_id, data=log_lines)
- if response is None:
- return False
- return True
-
-@traceback_to_stderr
-def finish_test(build_id, test_id, failed=False):
- response = post('build/%s/test/%s' % (build_id, test_id), data=[], headers={
- 'X-Sendlogs-Test-Done': 'true',
- 'X-Sendlogs-Test-Failed': failed and 'true' or 'false',
- })
- if response is None:
- return False
- return True
-
-def run_and_echo(command):
- """
- this just calls the command, and returns its return code,
- allowing stdout and stderr to work as normal. it is used
- as a fallback when environment variables or python
- dependencies cannot be configured, or when the logging
- webapp is unavailable, etc
- """
- proc = subprocess.Popen(command)
-
- def handle_sigterm(signum, frame):
- try:
- proc.send_signal(signum)
- except AttributeError:
- os.kill(proc.pid, signum)
- orig_handler = signal.signal(signal.SIGTERM, handle_sigterm)
-
- proc.wait()
-
- signal.signal(signal.SIGTERM, orig_handler)
- return proc.returncode
-
-class LogAppender(object):
- def __init__(self, callback, args, send_after_lines=2000, send_after_seconds=10):
- self.callback = callback
- self.callback_args = args
-
- self.send_after_lines = send_after_lines
- self.send_after_seconds = send_after_seconds
-
- self.buf = []
- self.retrybuf = []
- self.last_sent = time.time()
-
- def __call__(self, line):
- self.buf.append((time.time(), line))
-
- delay = time.time() - self.last_sent
- if len(self.buf) >= self.send_after_lines or delay >= self.send_after_seconds:
- self.submit()
-
- # no return value is expected
-
- def submit(self):
- if len(self.buf) + len(self.retrybuf) == 0:
- return True
-
- args = list(self.callback_args)
- args.append(list(self.buf) + self.retrybuf)
-
- self.last_sent = time.time()
-
- if self.callback(*args):
- self.buf = []
- self.retrybuf = []
- return True
- else:
- self.retrybuf += self.buf
- self.buf = []
- return False
-
-
-def wrap_test(command):
- """
- call the given command, intercept its stdout and stderr,
- and send results in batches of 100 lines or 10s to the
- buildlogger webapp
- """
-
- # get builder name and build number from environment
- builder = os.environ.get('MONGO_BUILDER_NAME')
- buildnum = os.environ.get('MONGO_BUILD_NUMBER')
-
- if builder is None or buildnum is None:
- return run_and_echo(command)
-
- try:
- buildnum = int(buildnum)
- except ValueError:
- sys.stderr.write('buildlogger: build number ("%s") was not an int\n' % buildnum)
- sys.stderr.flush()
- return run_and_echo(command)
-
- # test takes some extra info
- phase = os.environ.get('MONGO_PHASE', 'unknown')
- test_filename = os.environ.get('MONGO_TEST_FILENAME', 'unknown')
-
- build_info = dict((k, v) for k, v in os.environ.items() if k.startswith('MONGO_'))
- build_info.pop('MONGO_BUILDER_NAME', None)
- build_info.pop('MONGO_BUILD_NUMBER', None)
- build_info.pop('MONGO_PHASE', None)
- build_info.pop('MONGO_TEST_FILENAME', None)
-
- build_id = get_or_create_build(builder, buildnum, extra=build_info)
- if not build_id:
- return run_and_echo(command)
-
- test_id = create_test(build_id, test_filename, ' '.join(command), phase)
- if not test_id:
- return run_and_echo(command)
-
- # the peculiar formatting here matches what is printed by
- # smoke.py when starting tests
- output_url = '%s/build/%s/test/%s/' % (URL_ROOT.rstrip('/'), build_id, test_id)
- sys.stdout.write(' (output suppressed; see %s)\n' % output_url)
- sys.stdout.flush()
-
- callback = LogAppender(callback=append_test_logs, args=(build_id, test_id))
- returncode = loop_and_callback(command, callback)
- failed = bool(returncode != 0)
-
- # this will append any remaining unsubmitted logs, or
- # return True if there are none left to submit
- tries = 5
- while not callback.submit() and tries > 0:
- sys.stderr.write('failed to finish sending test logs, retrying in 1s\n')
- sys.stderr.flush()
- time.sleep(1)
- tries -= 1
-
- tries = 5
- while not finish_test(build_id, test_id, failed) and tries > 5:
- sys.stderr.write('failed to mark test finished, retrying in 1s\n')
- sys.stderr.flush()
- time.sleep(1)
- tries -= 1
-
- return returncode
-
-def wrap_global(command):
- """
- call the given command, intercept its stdout and stderr,
- and send results in batches of 100 lines or 10s to the
- buildlogger webapp. see :func:`append_global_logs` for the
- difference between "global" and "test" log output.
- """
-
- # get builder name and build number from environment
- builder = os.environ.get('MONGO_BUILDER_NAME')
- buildnum = os.environ.get('MONGO_BUILD_NUMBER')
-
- if builder is None or buildnum is None:
- return run_and_echo(command)
-
- try:
- buildnum = int(buildnum)
- except ValueError:
- sys.stderr.write('int(os.environ["MONGO_BUILD_NUMBER"]):\n')
- sys.stderr.write(traceback.format_exc())
- sys.stderr.flush()
- return run_and_echo(command)
-
- build_info = dict((k, v) for k, v in os.environ.items() if k.startswith('MONGO_'))
- build_info.pop('MONGO_BUILDER_NAME', None)
- build_info.pop('MONGO_BUILD_NUMBER', None)
-
- build_id = get_or_create_build(builder, buildnum, extra=build_info)
- if not build_id:
- return run_and_echo(command)
-
- callback = LogAppender(callback=append_global_logs, args=(build_id, ))
- returncode = loop_and_callback(command, callback)
-
- # this will append any remaining unsubmitted logs, or
- # return True if there are none left to submit
- tries = 5
- while not callback.submit() and tries > 0:
- sys.stderr.write('failed to finish sending global logs, retrying in 1s\n')
- sys.stderr.flush()
- time.sleep(1)
- tries -= 1
-
- return returncode
-
-def loop_and_callback(command, callback):
- """
- run the given command (a sequence of arguments, ordinarily
- from sys.argv), and call the given callback with each line
- of stdout or stderr encountered. after the command is finished,
- callback is called once more with None instead of a string.
- """
- proc = subprocess.Popen(
- command,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- )
-
- def handle_sigterm(signum, frame):
- try:
- proc.send_signal(signum)
- except AttributeError:
- os.kill(proc.pid, signum)
-
- # register a handler to delegate SIGTERM
- # to the child process
- orig_handler = signal.signal(signal.SIGTERM, handle_sigterm)
-
- while proc.poll() is None:
- try:
- line = proc.stdout.readline().strip('\r\n')
- line = utils.unicode_dammit(line)
- callback(line)
- except IOError:
- # if the signal handler is called while
- # we're waiting for readline() to return,
- # don't show a traceback
- break
-
- # There may be additional buffered output
- for line in proc.stdout.readlines():
- callback(line.strip('\r\n'))
-
- # restore the original signal handler, if any
- signal.signal(signal.SIGTERM, orig_handler)
- return proc.returncode
-
-
-if __name__ == '__main__':
- # argv[0] is 'buildlogger.py'
- del sys.argv[0]
-
- if sys.argv[0] in ('-g', '--global'):
- # then this is wrapping a "global" command, and should
- # submit global logs to the build, not test logs to a
- # test within the build
- del sys.argv[0]
- wrapper = wrap_global
-
- else:
- wrapper = wrap_test
-
- # if we are missing credentials or the json module, then
- # we can't use buildlogger; so just echo output, but also
- # log why we can't work.
- if json is None:
- sys.stderr.write('buildlogger: could not import a json module\n')
- sys.stderr.flush()
- wrapper = run_and_echo
-
- elif username is None or password is None:
- sys.stderr.write('buildlogger: could not find or import %s for authentication\n' % credentials_file)
- sys.stderr.flush()
- wrapper = run_and_echo
-
- # otherwise wrap a test command as normal; the
- # wrapper functions return the return code of
- # the wrapped command, so that should be our
- # exit code as well.
- sys.exit(wrapper(sys.argv))
-
diff --git a/src/mongo/gotools/test/legacy24/buildscripts/cleanbb.py b/src/mongo/gotools/test/legacy24/buildscripts/cleanbb.py
deleted file mode 100644
index fee7efdc0c1..00000000000
--- a/src/mongo/gotools/test/legacy24/buildscripts/cleanbb.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/env python
-
-import re
-import sys
-import os, os.path
-import utils
-import time
-from optparse import OptionParser
-
-def shouldKill( c, root=None ):
-
- if "smoke.py" in c:
- return False
-
- if "emr.py" in c:
- return False
-
- if "java" in c:
- return False
-
- # if root directory is provided, see if command line matches mongod process running
- # with the same data directory
-
- if root and re.compile("(\W|^)mongod(.exe)?\s+.*--dbpath(\s+|=)%s(\s+|$)" % root).search( c ):
- return True
-
- if ( c.find( "buildbot" ) >= 0 or c.find( "slave" ) >= 0 ) and c.find( "/mongo/" ) >= 0:
- return True
-
- if c.find( "xml-data/build-dir" ) >= 0: # for bamboo
- return True
-
- return False
-
-def killprocs( signal="", root=None ):
- killed = 0
-
- if sys.platform == 'win32':
- return killed
-
- l = utils.getprocesslist()
- print( "num procs:" + str( len( l ) ) )
- if len(l) == 0:
- print( "no procs" )
- try:
- print( execsys( "/sbin/ifconfig -a" ) )
- except Exception,e:
- print( "can't get interfaces" + str( e ) )
-
- for x in l:
- x = x.lstrip()
- if not shouldKill( x, root=root ):
- continue
-
- pid = x.split( " " )[0]
- print( "killing: " + x )
- utils.execsys( "/bin/kill " + signal + " " + pid )
- killed = killed + 1
-
- return killed
-
-
-def tryToRemove(path):
- for _ in range(60):
- try:
- os.remove(path)
- return True
- except OSError, e:
- errno = getattr(e, 'winerror', None)
- # check for the access denied and file in use WindowsErrors
- if errno in (5, 32):
- print("os.remove(%s) failed, retrying in one second." % path)
- time.sleep(1)
- else:
- raise e
- return False
-
-
-def cleanup( root , nokill ):
- if nokill:
- print "nokill requested, not killing anybody"
- else:
- if killprocs( root=root ) > 0:
- time.sleep(3)
- killprocs( "-9", root=root )
-
- # delete all regular files, directories can stay
- # NOTE: if we delete directories later, we can't delete diskfulltest
- for ( dirpath , dirnames , filenames ) in os.walk( root , topdown=False ):
- for x in filenames:
- foo = dirpath + "/" + x
- if os.path.exists(foo):
- if not tryToRemove(foo):
- raise Exception("Couldn't remove file '%s' after 60 seconds" % foo)
-
-if __name__ == "__main__":
- parser = OptionParser(usage="read the script")
- parser.add_option("--nokill", dest='nokill', default=False, action='store_true')
- (options, args) = parser.parse_args()
-
- root = "/data/db/"
- if len(args) > 0:
- root = args[0]
-
- cleanup( root , options.nokill )
diff --git a/src/mongo/gotools/test/legacy24/buildscripts/smoke.py b/src/mongo/gotools/test/legacy24/buildscripts/smoke.py
deleted file mode 100755
index 29fe6dbd712..00000000000
--- a/src/mongo/gotools/test/legacy24/buildscripts/smoke.py
+++ /dev/null
@@ -1,1314 +0,0 @@
-#!/usr/bin/env python
-
-# smoke.py: run some mongo tests.
-
-# Bugs, TODOs:
-
-# 0 Some tests hard-code pathnames relative to the mongo repository,
-# so the smoke.py process and all its children must be run with the
-# mongo repo as current working directory. That's kinda icky.
-
-# 1 The tests that are implemented as standalone executables ("test",
-# "perftest"), don't take arguments for the dbpath, but
-# unconditionally use "/tmp/unittest".
-
-# 2 mongod output gets intermingled with mongo output, and it's often
-# hard to find error messages in the slop. Maybe have smoke.py do
-# some fancier wrangling of child process output?
-
-# 3 Some test suites run their own mongods, and so don't need us to
-# run any mongods around their execution. (It's harmless to do so,
-# but adds noise in the output.)
-
-# 4 Running a separate mongo shell for each js file is slower than
-# loading js files into one mongo shell process. Maybe have runTest
-# queue up all filenames ending in ".js" and run them in one mongo
-# shell at the "end" of testing?
-
-# 5 Right now small-oplog implies master/slave replication. Maybe
-# running with replication should be an orthogonal concern. (And
-# maybe test replica set replication, too.)
-
-# 6 We use cleanbb.py to clear out the dbpath, but cleanbb.py kills
-# off all mongods on a box, which means you can't run two smoke.py
-# jobs on the same host at once. So something's gotta change.
-
-from datetime import datetime
-from itertools import izip
-import glob
-from optparse import OptionParser
-import os
-import pprint
-import re
-import shlex
-import socket
-import stat
-from subprocess import (PIPE, Popen, STDOUT)
-import sys
-import time
-
-from pymongo import MongoClient
-from pymongo.errors import OperationFailure
-from pymongo import ReadPreference
-
-import cleanbb
-import utils
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-
-try:
- from hashlib import md5 # new in 2.5
-except ImportError:
- from md5 import md5 # deprecated in 2.5
-
-try:
- import json
-except:
- try:
- import simplejson as json
- except:
- json = None
-
-
-# TODO clean this up so we don't need globals...
-mongo_repo = os.getcwd() #'./'
-failfile = os.path.join(mongo_repo, 'failfile.smoke')
-test_path = None
-mongod_executable = None
-mongod_port = None
-shell_executable = None
-continue_on_failure = None
-file_of_commands_mode = False
-start_mongod = True
-temp_path = None
-clean_every_n_tests = 1
-clean_whole_dbroot = False
-
-tests = []
-winners = []
-losers = {}
-fails = [] # like losers but in format of tests
-
-# For replication hash checking
-replicated_collections = []
-lost_in_slave = []
-lost_in_master = []
-screwy_in_slave = {}
-
-smoke_db_prefix = ''
-small_oplog = False
-small_oplog_rs = False
-
-test_report = { "results": [] }
-report_file = None
-
-# This class just implements the with statement API, for a sneaky
-# purpose below.
-class Nothing(object):
- def __enter__(self):
- return self
- def __exit__(self, type, value, traceback):
- return not isinstance(value, Exception)
-
-def buildlogger(cmd, is_global=False):
- # if the environment variable MONGO_USE_BUILDLOGGER
- # is set to 'true', then wrap the command with a call
- # to buildlogger.py, which sends output to the buidlogger
- # machine; otherwise, return as usual.
- if os.environ.get('MONGO_USE_BUILDLOGGER', '').lower().strip() == 'true':
- if is_global:
- return [utils.find_python(), 'buildscripts/buildlogger.py', '-g'] + cmd
- else:
- return [utils.find_python(), 'buildscripts/buildlogger.py'] + cmd
- return cmd
-
-
-def clean_dbroot(dbroot="", nokill=False):
- # Clean entire /data/db dir if --with-cleanbb, else clean specific database path.
- if clean_whole_dbroot and not small_oplog:
- dbroot = os.path.normpath(smoke_db_prefix + "/data/db")
- if os.path.exists(dbroot):
- print("clean_dbroot: %s" % dbroot)
- cleanbb.cleanup(dbroot, nokill)
-
-
-class mongod(object):
- def __init__(self, **kwargs):
- self.kwargs = kwargs
- self.proc = None
- self.auth = False
-
- def __enter__(self):
- self.start()
- return self
-
- def __exit__(self, type, value, traceback):
- try:
- self.stop()
- except Exception, e:
- print >> sys.stderr, "error shutting down mongod"
- print >> sys.stderr, e
- return not isinstance(value, Exception)
-
- def ensure_test_dirs(self):
- utils.ensureDir(smoke_db_prefix + "/tmp/unittest/")
- utils.ensureDir(smoke_db_prefix + "/data/")
- utils.ensureDir(smoke_db_prefix + "/data/db/")
-
- def check_mongo_port(self, port=27017):
- sock = socket.socket()
- sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
- sock.settimeout(1)
- sock.connect(("localhost", int(port)))
- sock.close()
-
- def is_mongod_up(self, port=mongod_port):
- try:
- self.check_mongo_port(int(port))
- return True
- except Exception,e:
- print >> sys.stderr, e
- return False
-
- def did_mongod_start(self, port=mongod_port, timeout=300):
- while timeout > 0:
- time.sleep(1)
- is_up = self.is_mongod_up(port)
- if is_up:
- return True
- timeout = timeout - 1
- print >> sys.stderr, "timeout starting mongod"
- return False
-
- def start(self):
- global mongod_port
- global mongod
- if self.proc:
- print >> sys.stderr, "probable bug: self.proc already set in start()"
- return
- self.ensure_test_dirs()
- dir_name = smoke_db_prefix + "/data/db/sconsTests/"
- self.port = int(mongod_port)
- self.slave = False
- if 'slave' in self.kwargs:
- dir_name = smoke_db_prefix + '/data/db/sconsTestsSlave/'
- srcport = mongod_port
- self.port += 1
- self.slave = True
-
- clean_dbroot(dbroot=dir_name, nokill=self.slave)
- utils.ensureDir(dir_name)
-
- argv = [mongod_executable, "--port", str(self.port), "--dbpath", dir_name]
- # These parameters are alwas set for tests
- # SERVER-9137 Added httpinterface parameter to keep previous behavior
- argv += ['--setParameter', 'enableTestCommands=1']
- if self.kwargs.get('small_oplog'):
- argv += ["--master", "--oplogSize", "511"]
- params = self.kwargs.get('set_parameters', None)
- if params:
- for p in params.split(','): argv += ['--setParameter', p]
- if self.kwargs.get('small_oplog_rs'):
- argv += ["--replSet", "foo", "--oplogSize", "511"]
- if self.slave:
- argv += ['--slave', '--source', 'localhost:' + str(srcport)]
- if self.kwargs.get('no_journal'):
- argv += ['--nojournal']
- if self.kwargs.get('no_preallocj'):
- argv += ['--nopreallocj']
- if self.kwargs.get('auth'):
- argv += ['--auth', '--setParameter', 'enableLocalhostAuthBypass=false']
- authMechanism = self.kwargs.get('authMechanism', 'MONGODB-CR')
- if authMechanism != 'MONGODB-CR':
- argv += ['--setParameter', 'authenticationMechanisms=' + authMechanism]
- self.auth = True
- if self.kwargs.get('keyFile'):
- argv += ['--keyFile', self.kwargs.get('keyFile')]
- if self.kwargs.get('use_ssl') or self.kwargs.get('use_x509'):
- argv += ['--sslMode', "requireSSL",
- '--sslPEMKeyFile', 'jstests/libs/server.pem',
- '--sslCAFile', 'jstests/libs/ca.pem',
- '--sslWeakCertificateValidation']
- if self.kwargs.get('use_x509'):
- argv += ['--clusterAuthMode','x509'];
- self.auth = True
- print "running " + " ".join(argv)
- self.proc = self._start(buildlogger(argv, is_global=True))
-
- if not self.did_mongod_start(self.port):
- raise Exception("Failed to start mongod")
-
- if self.slave:
- local = MongoClient(port=self.port,
- read_preference=ReadPreference.SECONDARY_PREFERRED).local
- synced = False
- while not synced:
- synced = True
- for source in local.sources.find(fields=["syncedTo"]):
- synced = synced and "syncedTo" in source and source["syncedTo"]
-
- def _start(self, argv):
- """In most cases, just call subprocess.Popen(). On windows,
- add the started process to a new Job Object, so that any
- child processes of this process can be killed with a single
- call to TerminateJobObject (see self.stop()).
- """
-
- if os.sys.platform == "win32":
- # Create a job object with the "kill on job close"
- # flag; this is inherited by child processes (ie
- # the mongod started on our behalf by buildlogger)
- # and lets us terminate the whole tree of processes
- # rather than orphaning the mongod.
- import win32job
-
- # Magic number needed to allow job reassignment in Windows 7
- # see: MSDN - Process Creation Flags - ms684863
- CREATE_BREAKAWAY_FROM_JOB = 0x01000000
-
- proc = Popen(argv, creationflags=CREATE_BREAKAWAY_FROM_JOB)
-
- self.job_object = win32job.CreateJobObject(None, '')
-
- job_info = win32job.QueryInformationJobObject(
- self.job_object, win32job.JobObjectExtendedLimitInformation)
- job_info['BasicLimitInformation']['LimitFlags'] |= win32job.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE
- win32job.SetInformationJobObject(
- self.job_object,
- win32job.JobObjectExtendedLimitInformation,
- job_info)
-
- win32job.AssignProcessToJobObject(self.job_object, proc._handle)
-
- else:
- proc = Popen(argv)
-
- return proc
-
- def stop(self):
- if not self.proc:
- print >> sys.stderr, "probable bug: self.proc unset in stop()"
- return
- try:
- if os.sys.platform == "win32":
- import win32job
- win32job.TerminateJobObject(self.job_object, -1)
- import time
- # Windows doesn't seem to kill the process immediately, so give it some time to die
- time.sleep(5)
- else:
- # This function not available in Python 2.5
- self.proc.terminate()
- except AttributeError:
- from os import kill
- kill(self.proc.pid, 15)
- self.proc.wait()
- sys.stderr.flush()
- sys.stdout.flush()
-
- def wait_for_repl(self):
- print "Awaiting replicated (w:2, wtimeout:5min) insert (port:" + str(self.port) + ")"
- MongoClient(port=self.port).testing.smokeWait.insert({}, w=2, wtimeout=5*60*1000)
- print "Replicated write completed -- done wait_for_repl"
-
-class Bug(Exception):
- def __str__(self):
- return 'bug in smoke.py: ' + super(Bug, self).__str__()
-
-class TestFailure(Exception):
- pass
-
-class TestExitFailure(TestFailure):
- def __init__(self, *args):
- self.path = args[0]
- self.status=args[1]
-
- def __str__(self):
- return "test %s exited with status %d" % (self.path, self.status)
-
-class TestServerFailure(TestFailure):
- def __init__(self, *args):
- self.path = args[0]
- self.status = -1 # this is meaningless as an exit code, but
- # that's the point.
- def __str__(self):
- return 'mongod not running after executing test %s' % self.path
-
-def check_db_hashes(master, slave):
- # Need to pause a bit so a slave might catch up...
- if not slave.slave:
- raise(Bug("slave instance doesn't have slave attribute set"))
-
- print "waiting for slave (%s) to catch up to master (%s)" % (slave.port, master.port)
- master.wait_for_repl()
- print "caught up!"
-
- # FIXME: maybe make this run dbhash on all databases?
- for mongod in [master, slave]:
- client = MongoClient(port=mongod.port, read_preference=ReadPreference.SECONDARY_PREFERRED)
- mongod.dbhash = client.test.command("dbhash")
- mongod.dict = mongod.dbhash["collections"]
-
- global lost_in_slave, lost_in_master, screwy_in_slave, replicated_collections
-
- replicated_collections += master.dict.keys()
-
- for coll in replicated_collections:
- if coll not in slave.dict and coll not in lost_in_slave:
- lost_in_slave.append(coll)
- mhash = master.dict[coll]
- shash = slave.dict[coll]
- if mhash != shash:
- mTestDB = MongoClient(port=master.port).test
- sTestDB = MongoClient(port=slave.port,
- read_preference=ReadPreference.SECONDARY_PREFERRED).test
- mCount = mTestDB[coll].count()
- sCount = sTestDB[coll].count()
- stats = {'hashes': {'master': mhash, 'slave': shash},
- 'counts':{'master': mCount, 'slave': sCount}}
- try:
- mDocs = list(mTestDB[coll].find().sort("_id", 1))
- sDocs = list(sTestDB[coll].find().sort("_id", 1))
- mDiffDocs = list()
- sDiffDocs = list()
- for left, right in izip(mDocs, sDocs):
- if left != right:
- mDiffDocs.append(left)
- sDiffDocs.append(right)
-
- stats["docs"] = {'master': mDiffDocs, 'slave': sDiffDocs }
- except Exception, e:
- stats["error-docs"] = e;
-
- screwy_in_slave[coll] = stats
- if mhash == "no _id _index":
- mOplog = mTestDB.connection.local["oplog.$main"];
- oplog_entries = list(mOplog.find({"$or": [{"ns":mTestDB[coll].full_name}, \
- {"op":"c"}]}).sort("$natural", 1))
- print "oplog for %s" % mTestDB[coll].full_name
- for doc in oplog_entries:
- pprint.pprint(doc, width=200)
-
-
- for db in slave.dict.keys():
- if db not in master.dict and db not in lost_in_master:
- lost_in_master.append(db)
-
-
-def ternary( b , l="true", r="false" ):
- if b:
- return l
- return r
-
-# Blech.
-def skipTest(path):
- basename = os.path.basename(path)
- parentPath = os.path.dirname(path)
- parentDir = os.path.basename(parentPath)
- if small_oplog: # For tests running in parallel
- if basename in ["cursor8.js", "indexh.js", "dropdb.js", "dropdb_race.js",
- "connections_opened.js", "opcounters.js", "dbadmin.js"]:
- return True
- if use_ssl:
- # Skip tests using mongobridge since it does not support SSL
- # TODO: Remove when SERVER-10910 has been resolved.
- if basename in ["gridfs.js", "initial_sync3.js", "majority.js", "no_chaining.js",
- "rollback4.js", "slavedelay3.js", "sync2.js", "tags.js"]:
- return True
- # TODO: For now skip tests using MongodRunner, remove when SERVER-10909 has been resolved
- if basename in ["fastsync.js", "index_retry.js", "ttl_repl_maintenance.js",
- "unix_socket1.js"]:
- return True;
- if auth or keyFile or use_x509: # For tests running with auth
- # Skip any tests that run with auth explicitly
- if parentDir.lower() == "auth" or "auth" in basename.lower():
- return True
- if parentPath == mongo_repo: # Skip client tests
- return True
- if parentDir == "tool": # SERVER-6368
- return True
- if parentDir == "dur": # SERVER-7317
- return True
- if parentDir == "disk": # SERVER-7356
- return True
-
- authTestsToSkip = [("jstests", "drop2.js"), # SERVER-8589,
- ("jstests", "killop.js"), # SERVER-10128
- ("sharding", "sync3.js"), # SERVER-6388 for this and those below
- ("sharding", "sync6.js"),
- ("sharding", "parallel.js"),
- ("jstests", "bench_test1.js"),
- ("jstests", "bench_test2.js"),
- ("jstests", "bench_test3.js"),
- ("core", "bench_test1.js"),
- ("core", "bench_test2.js"),
- ("core", "bench_test3.js"),
- ]
-
- if os.path.join(parentDir,basename) in [ os.path.join(*test) for test in authTestsToSkip ]:
- return True
-
- return False
-
-forceCommandsForDirs = ["aggregation", "auth", "core", "parallel", "replsets"]
-# look for jstests and one of the above suites separated by either posix or windows slashes
-forceCommandsRE = re.compile(r"jstests[/\\](%s)" % ('|'.join(forceCommandsForDirs)))
-def setShellWriteModeForTest(path, argv):
- swm = shell_write_mode
- if swm == "legacy": # change when the default changes to "commands"
- if use_write_commands or forceCommandsRE.search(path):
- swm = "commands"
- argv += ["--writeMode", swm]
-
-def runTest(test, result):
- # result is a map containing test result details, like result["url"]
-
- # test is a tuple of ( filename , usedb<bool> )
- # filename should be a js file to run
- # usedb is true if the test expects a mongod to be running
-
- (path, usedb) = test
- (ignore, ext) = os.path.splitext(path)
- test_mongod = mongod()
- mongod_is_up = test_mongod.is_mongod_up(mongod_port)
- result["mongod_running_at_start"] = mongod_is_up;
-
- if file_of_commands_mode:
- # smoke.py was invoked like "--mode files --from-file foo",
- # so don't try to interpret the test path too much
- if os.sys.platform == "win32":
- argv = [path]
- else:
- argv = shlex.split(path)
- path = argv[0]
- # if the command is a python script, use the script name
- if os.path.basename(path) in ('python', 'python.exe'):
- path = argv[1]
- elif ext == ".js":
- argv = [shell_executable, "--port", mongod_port, '--authenticationMechanism', authMechanism]
-
- #setShellWriteModeForTest(path, argv)
-
- if not usedb:
- argv += ["--nodb"]
- if small_oplog or small_oplog_rs:
- argv += ["--eval", 'testingReplication = true;']
- if use_ssl:
- argv += ["--ssl",
- "--sslPEMKeyFile", "jstests/libs/client.pem",
- "--sslCAFile", "jstests/libs/ca.pem",
- "--sslAllowInvalidCertificates"]
- argv += [path]
- elif ext in ["", ".exe"]:
- # Blech.
- if os.path.basename(path) in ["test", "test.exe", "perftest", "perftest.exe"]:
- argv = [path]
- # default data directory for test and perftest is /tmp/unittest
- if smoke_db_prefix:
- dir_name = smoke_db_prefix + '/unittests'
- argv.extend(["--dbpath", dir_name] )
- # more blech
- elif os.path.basename(path) in ['mongos', 'mongos.exe']:
- argv = [path, "--test"]
- else:
- argv = [test_path and os.path.abspath(os.path.join(test_path, path)) or path,
- "--port", mongod_port]
- else:
- raise Bug("fell off in extension case: %s" % path)
-
- mongo_test_filename = os.path.basename(path)
-
- # sys.stdout.write() is more atomic than print, so using it prevents
- # lines being interrupted by, e.g., child processes
- sys.stdout.write(" *******************************************\n")
- sys.stdout.write(" Test : %s ...\n" % mongo_test_filename)
- sys.stdout.flush()
-
- # FIXME: we don't handle the case where the subprocess
- # hangs... that's bad.
- if ( argv[0].endswith( 'mongo' ) or argv[0].endswith( 'mongo.exe' ) ) and not '--eval' in argv :
- evalString = 'load("jstests/libs/use_extended_timeout.js");' + \
- 'TestData = new Object();' + \
- 'TestData.testPath = "' + path + '";' + \
- 'TestData.testFile = "' + os.path.basename( path ) + '";' + \
- 'TestData.testName = "' + re.sub( ".js$", "", os.path.basename( path ) ) + '";' + \
- 'TestData.setParameters = "' + ternary( set_parameters, set_parameters, "" ) + '";' + \
- 'TestData.setParametersMongos = "' + ternary( set_parameters_mongos, set_parameters_mongos, "" ) + '";' + \
- 'TestData.noJournal = ' + ternary( no_journal ) + ";" + \
- 'TestData.noJournalPrealloc = ' + ternary( no_preallocj ) + ";" + \
- 'TestData.auth = ' + ternary( auth ) + ";" + \
- 'TestData.keyFile = ' + ternary( keyFile , '"' + str(keyFile) + '"' , 'null' ) + ";" + \
- 'TestData.keyFileData = ' + ternary( keyFile , '"' + str(keyFileData) + '"' , 'null' ) + ";" + \
- 'TestData.authMechanism = ' + ternary( authMechanism,
- '"' + str(authMechanism) + '"', 'null') + ";" + \
- 'TestData.useSSL = ' + ternary( use_ssl ) + ";" + \
- 'TestData.useX509 = ' + ternary( use_x509 ) + ";"
- # this updates the default data directory for mongod processes started through shell (src/mongo/shell/servers.js)
- evalString += 'MongoRunner.dataDir = "' + os.path.abspath(smoke_db_prefix + '/data/db') + '";'
- evalString += 'MongoRunner.dataPath = MongoRunner.dataDir + "/";'
- if os.sys.platform == "win32":
- # double quotes in the evalString on windows; this
- # prevents the backslashes from being removed when
- # the shell (i.e. bash) evaluates this string. yuck.
- evalString = evalString.replace('\\', '\\\\')
-
- if auth and usedb:
- evalString += 'jsTest.authenticate(db.getMongo());'
-
- argv = argv + [ '--eval', evalString]
-
- if argv[0].endswith( 'test' ) or argv[0].endswith( 'test.exe' ):
- if no_preallocj :
- argv = argv + [ '--nopreallocj' ]
- if temp_path:
- argv = argv + [ '--tempPath', temp_path ]
-
-
- sys.stdout.write(" Command : %s\n" % ' '.join(argv))
- sys.stdout.write(" Date : %s\n" % datetime.now().ctime())
- sys.stdout.flush()
-
- os.environ['MONGO_TEST_FILENAME'] = mongo_test_filename
- t1 = time.time()
-
- proc = Popen(buildlogger(argv), cwd=test_path, stdout=PIPE, stderr=STDOUT, bufsize=0)
- first_line = proc.stdout.readline() # Get suppressed output URL
- m = re.search(r"\s*\(output suppressed; see (?P<url>.*)\)" + os.linesep, first_line)
- if m:
- result["url"] = m.group("url")
- sys.stdout.write(first_line)
- sys.stdout.flush()
- while True:
- # print until subprocess's stdout closed.
- # Not using "for line in file" since that has unwanted buffering.
- line = proc.stdout.readline()
- if not line:
- break;
-
- sys.stdout.write(line)
- sys.stdout.flush()
-
- proc.wait() # wait if stdout is closed before subprocess exits.
- r = proc.returncode
-
- t2 = time.time()
- del os.environ['MONGO_TEST_FILENAME']
-
- timediff = t2 - t1
- # timediff is seconds by default
- scale = 1
- suffix = "seconds"
- # if timediff is less than 10 seconds use ms
- if timediff < 10:
- scale = 1000
- suffix = "ms"
- # if timediff is more than 60 seconds use minutes
- elif timediff > 60:
- scale = 1.0 / 60.0
- suffix = "minutes"
- sys.stdout.write(" %10.4f %s\n" % ((timediff) * scale, suffix))
- sys.stdout.flush()
-
- result["exit_code"] = r
-
- is_mongod_still_up = test_mongod.is_mongod_up(mongod_port)
- if not is_mongod_still_up:
- print "mongod is not running after test"
- result["mongod_running_at_end"] = is_mongod_still_up;
- if start_mongod:
- raise TestServerFailure(path)
-
- result["mongod_running_at_end"] = is_mongod_still_up;
-
- if r != 0:
- raise TestExitFailure(path, r)
-
- print ""
-
-def run_tests(tests):
- # FIXME: some suites of tests start their own mongod, so don't
- # need this. (So long as there are no conflicts with port,
- # dbpath, etc., and so long as we shut ours down properly,
- # starting this mongod shouldn't break anything, though.)
-
- # The reason we want to use "with" is so that we get __exit__ semantics
- # but "with" is only supported on Python 2.5+
-
- if start_mongod:
- master = mongod(small_oplog_rs=small_oplog_rs,
- small_oplog=small_oplog,
- no_journal=no_journal,
- set_parameters=set_parameters,
- no_preallocj=no_preallocj,
- auth=auth,
- authMechanism=authMechanism,
- keyFile=keyFile,
- use_ssl=use_ssl,
- use_x509=use_x509).__enter__()
- else:
- master = Nothing()
- try:
- if small_oplog:
- slave = mongod(slave=True,
- set_parameters=set_parameters).__enter__()
- elif small_oplog_rs:
- slave = mongod(slave=True,
- small_oplog_rs=small_oplog_rs,
- small_oplog=small_oplog,
- no_journal=no_journal,
- set_parameters=set_parameters,
- no_preallocj=no_preallocj,
- auth=auth,
- authMechanism=authMechanism,
- keyFile=keyFile,
- use_ssl=use_ssl,
- use_x509=use_x509).__enter__()
- primary = MongoClient(port=master.port);
-
- primary.admin.command({'replSetInitiate' : {'_id' : 'foo', 'members' : [
- {'_id': 0, 'host':'localhost:%s' % master.port},
- {'_id': 1, 'host':'localhost:%s' % slave.port,'priority':0}]}})
-
- ismaster = False
- while not ismaster:
- result = primary.admin.command("ismaster");
- ismaster = result["ismaster"]
- time.sleep(1)
- else:
- slave = Nothing()
-
- try:
- if small_oplog or small_oplog_rs:
- master.wait_for_repl()
-
- for tests_run, test in enumerate(tests):
- tests_run += 1 # enumerate from 1, python 2.5 compatible
- test_result = { "start": time.time() }
-
- (test_path, use_db) = test
-
- if test_path.startswith(mongo_repo + os.path.sep):
- test_result["test_file"] = test_path[len(mongo_repo)+1:]
- else:
- # user could specify a file not in repo. leave it alone.
- test_result["test_file"] = test_path
-
- try:
- if skipTest(test_path):
- test_result["status"] = "skip"
-
- print "skipping " + test_path
- else:
- fails.append(test)
- runTest(test, test_result)
- fails.pop()
- winners.append(test)
-
- test_result["status"] = "pass"
-
- test_result["end"] = time.time()
- test_result["elapsed"] = test_result["end"] - test_result["start"]
- test_report["results"].append( test_result )
- if small_oplog or small_oplog_rs:
- master.wait_for_repl()
- # check the db_hashes
- if isinstance(slave, mongod):
- check_db_hashes(master, slave)
- check_and_report_replication_dbhashes()
-
- elif use_db: # reach inside test and see if "usedb" is true
- if clean_every_n_tests and (tests_run % clean_every_n_tests) == 0:
- # Restart mongod periodically to clean accumulated test data
- # clean_dbroot() is invoked by mongod.start()
- master.__exit__(None, None, None)
- master = mongod(small_oplog_rs=small_oplog_rs,
- small_oplog=small_oplog,
- no_journal=no_journal,
- set_parameters=set_parameters,
- no_preallocj=no_preallocj,
- auth=auth,
- authMechanism=authMechanism,
- keyFile=keyFile,
- use_ssl=use_ssl,
- use_x509=use_x509).__enter__()
-
- except TestFailure, f:
- test_result["end"] = time.time()
- test_result["elapsed"] = test_result["end"] - test_result["start"]
- test_result["error"] = str(f)
- test_result["status"] = "fail"
- test_report["results"].append( test_result )
- try:
- print f
- # Record the failing test and re-raise.
- losers[f.path] = f.status
- raise f
- except TestServerFailure, f:
- return 2
- except TestFailure, f:
- if not continue_on_failure:
- return 1
- if isinstance(slave, mongod):
- check_db_hashes(master, slave)
- finally:
- slave.__exit__(None, None, None)
- finally:
- master.__exit__(None, None, None)
- return 0
-
-
-def check_and_report_replication_dbhashes():
- def missing(lst, src, dst):
- if lst:
- print """The following collections were present in the %s but not the %s
-at the end of testing:""" % (src, dst)
- for db in lst:
- print db
-
- missing(lost_in_slave, "master", "slave")
- missing(lost_in_master, "slave", "master")
- if screwy_in_slave:
- print """The following collections has different hashes in master and slave
-at the end of testing:"""
- for coll in screwy_in_slave.keys():
- stats = screwy_in_slave[coll]
- # Counts are "approx" because they are collected after the dbhash runs and may not
- # reflect the states of the collections that were hashed. If the hashes differ, one
- # possibility is that a test exited with writes still in-flight.
- print "collection: %s\t (master/slave) hashes: %s/%s counts (approx): %i/%i" % (coll, stats['hashes']['master'], stats['hashes']['slave'], stats['counts']['master'], stats['counts']['slave'])
- if "docs" in stats:
- if (("master" in stats["docs"] and len(stats["docs"]["master"]) != 0) or
- ("slave" in stats["docs"] and len(stats["docs"]["slave"]) != 0)):
- print "All docs matched!"
- else:
- print "Different Docs"
- print "Master docs:"
- pprint.pprint(stats["docs"]["master"], indent=2)
- print "Slave docs:"
- pprint.pprint(stats["docs"]["slave"], indent=2)
- if "error-docs" in stats:
- print "Error getting docs to diff:"
- pprint.pprint(stats["error-docs"])
- return True
-
- if (small_oplog or small_oplog_rs) and not (lost_in_master or lost_in_slave or screwy_in_slave):
- print "replication ok for %d collections" % (len(replicated_collections))
-
- return False
-
-
-def report():
- print "%d tests succeeded" % len(winners)
- num_missed = len(tests) - (len(winners) + len(losers.keys()))
- if num_missed:
- print "%d tests didn't get run" % num_missed
- if losers:
- print "The following tests failed (with exit code):"
- for loser in losers:
- print "%s\t%d" % (loser, losers[loser])
-
- test_result = { "start": time.time() }
- if check_and_report_replication_dbhashes():
- test_result["end"] = time.time()
- test_result["elapsed"] = test_result["end"] - test_result["start"]
- test_result["test_file"] = "/#dbhash#"
- test_result["error"] = "dbhash mismatch"
- test_result["status"] = "fail"
- test_report["results"].append( test_result )
-
- if report_file:
- f = open( report_file, "wb" )
- f.write( json.dumps( test_report ) )
- f.close()
-
- if losers or lost_in_slave or lost_in_master or screwy_in_slave:
- raise Exception("Test failures")
-
-# Keys are the suite names (passed on the command line to smoke.py)
-# Values are pairs: (filenames, <start mongod before running tests>)
-suiteGlobalConfig = {"js": ("core/*.js", True),
- "quota": ("quota/*.js", True),
- "jsPerf": ("perf/*.js", True),
- "disk": ("disk/*.js", True),
- "noPassthroughWithMongod": ("noPassthroughWithMongod/*.js", True),
- "noPassthrough": ("noPassthrough/*.js", False),
- "parallel": ("parallel/*.js", True),
- "clone": ("clone/*.js", False),
- "repl": ("repl/*.js", False),
- "replSets": ("replsets/*.js", False),
- "dur": ("dur/*.js", False),
- "auth": ("auth/*.js", False),
- "sharding": ("sharding/*.js", False),
- "tool": ("tool/*.js", False),
- "aggregation": ("aggregation/*.js", True),
- "multiVersion": ("multiVersion/*.js", True),
- "failPoint": ("fail_point/*.js", False),
- "ssl": ("ssl/*.js", True),
- "sslSpecial": ("sslSpecial/*.js", True),
- "jsCore": ("core/*.js", True),
- "gle": ("gle/*.js", True),
- "slow1": ("slow1/*.js", True),
- "slow2": ("slow2/*.js", True),
- }
-
-def get_module_suites():
- """Attempts to discover and return information about module test suites
-
- Returns a dictionary of module suites in the format:
-
- {
- "<suite_name>" : "<full_path_to_suite_directory/[!_]*.js>",
- ...
- }
-
- This means the values of this dictionary can be used as "glob"s to match all jstests in the
- suite directory that don't start with an underscore
-
- The module tests should be put in 'src/mongo/db/modules/<module_name>/<suite_name>/*.js'
-
- NOTE: This assumes that if we have more than one module the suite names don't conflict
- """
- modules_directory = 'src/mongo/db/modules'
- test_suites = {}
-
- # Return no suites if we have no modules
- if not os.path.exists(modules_directory) or not os.path.isdir(modules_directory):
- return {}
-
- module_directories = os.listdir(modules_directory)
- for module_directory in module_directories:
-
- test_directory = os.path.join(modules_directory, module_directory, "jstests")
-
- # Skip this module if it has no "jstests" directory
- if not os.path.exists(test_directory) or not os.path.isdir(test_directory):
- continue
-
- # Get all suites for this module
- for test_suite in os.listdir(test_directory):
- test_suites[test_suite] = os.path.join(test_directory, test_suite, "[!_]*.js")
-
- return test_suites
-
-def expand_suites(suites,expandUseDB=True):
- """Takes a list of suites and expands to a list of tests according to a set of rules.
-
- Keyword arguments:
- suites -- list of suites specified by the user
- expandUseDB -- expand globs (such as [!_]*.js) for tests that are run against a database
- (default True)
-
- This function handles expansion of globs (such as [!_]*.js), aliases (such as "client" and
- "all"), detection of suites in the "modules" directory, and enumerating the test files in a
- given suite. It returns a list of tests of the form (path_to_test, usedb), where the second
- part of the tuple specifies whether the test is run against the database (see --nodb in the
- mongo shell)
-
- """
- globstr = None
- tests = []
- module_suites = get_module_suites()
- for suite in suites:
- if suite == 'all':
- return expand_suites(['test',
- 'perf',
- 'jsCore',
- 'jsPerf',
- 'noPassthroughWithMongod',
- 'noPassthrough',
- 'clone',
- 'parallel',
- 'repl',
- 'auth',
- 'sharding',
- 'slow1',
- 'slow2',
- 'tool'],
- expandUseDB=expandUseDB)
- if suite == 'test':
- if os.sys.platform == "win32":
- program = 'test.exe'
- else:
- program = 'test'
- (globstr, usedb) = (program, False)
- elif suite == 'perf':
- if os.sys.platform == "win32":
- program = 'perftest.exe'
- else:
- program = 'perftest'
- (globstr, usedb) = (program, False)
- elif suite == 'mongosTest':
- if os.sys.platform == "win32":
- program = 'mongos.exe'
- else:
- program = 'mongos'
- tests += [(os.path.join(mongo_repo, program), False)]
- elif os.path.exists( suite ):
- usedb = True
- for name in suiteGlobalConfig:
- if suite in glob.glob( "jstests/" + suiteGlobalConfig[name][0] ):
- usedb = suiteGlobalConfig[name][1]
- break
- tests += [ ( os.path.join( mongo_repo , suite ) , usedb ) ]
- elif suite in module_suites:
- # Currently we connect to a database in all module tests since there's no mechanism yet
- # to configure it independently
- usedb = True
- paths = glob.glob(module_suites[suite])
- paths.sort()
- tests += [(path, usedb) for path in paths]
- else:
- try:
- globstr, usedb = suiteGlobalConfig[suite]
- except KeyError:
- raise Exception('unknown test suite %s' % suite)
-
- if globstr:
- if usedb and not expandUseDB:
- tests += [ (suite,False) ]
- else:
- if globstr.endswith('.js'):
- loc = 'jstests/'
- else:
- loc = ''
- globstr = os.path.join(mongo_repo, (os.path.join(loc, globstr)))
- globstr = os.path.normpath(globstr)
- paths = glob.glob(globstr)
- paths.sort()
- tests += [(path, usedb) for path in paths]
-
- return tests
-
-def add_exe(e):
- if os.sys.platform.startswith( "win" ) and not e.endswith( ".exe" ):
- e += ".exe"
- return e
-
-def set_globals(options, tests):
- global mongod_executable, mongod_port, shell_executable, continue_on_failure
- global small_oplog, small_oplog_rs
- global no_journal, set_parameters, set_parameters_mongos, no_preallocj
- global auth, authMechanism, keyFile, keyFileData, smoke_db_prefix, test_path, start_mongod
- global use_ssl, use_x509
- global file_of_commands_mode
- global report_file, shell_write_mode, use_write_commands
- global temp_path
- global clean_every_n_tests
- global clean_whole_dbroot
-
- start_mongod = options.start_mongod
- if hasattr(options, 'use_ssl'):
- use_ssl = options.use_ssl
- if hasattr(options, 'use_x509'):
- use_x509 = options.use_x509
- use_ssl = use_ssl or use_x509
- #Careful, this can be called multiple times
- test_path = options.test_path
-
- mongod_executable = add_exe(options.mongod_executable)
- if not os.path.exists(mongod_executable):
- raise Exception("no mongod found in this directory.")
-
- mongod_port = options.mongod_port
-
- shell_executable = add_exe( options.shell_executable )
- if not os.path.exists(shell_executable):
- raise Exception("no mongo shell found in this directory.")
-
- continue_on_failure = options.continue_on_failure
- smoke_db_prefix = options.smoke_db_prefix
- small_oplog = options.small_oplog
- if hasattr(options, "small_oplog_rs"):
- small_oplog_rs = options.small_oplog_rs
- no_journal = options.no_journal
- set_parameters = options.set_parameters
- set_parameters_mongos = options.set_parameters_mongos
- no_preallocj = options.no_preallocj
- auth = options.auth
- authMechanism = options.authMechanism
- keyFile = options.keyFile
-
- clean_every_n_tests = options.clean_every_n_tests
- clean_whole_dbroot = options.with_cleanbb
-
- if auth and not keyFile:
- # if only --auth was given to smoke.py, load the
- # default keyFile from jstests/libs/authTestsKey
- keyFile = os.path.join(mongo_repo, 'jstests', 'libs', 'authTestsKey')
-
- if keyFile:
- f = open(keyFile, 'r')
- keyFileData = re.sub(r'\s', '', f.read()) # Remove all whitespace
- f.close()
- os.chmod(keyFile, stat.S_IRUSR | stat.S_IWUSR)
- else:
- keyFileData = None
-
- # if smoke.py is running a list of commands read from a
- # file (or stdin) rather than running a suite of js tests
- file_of_commands_mode = options.File and options.mode == 'files'
- # generate json report
- report_file = options.report_file
- temp_path = options.temp_path
-
- use_write_commands = options.use_write_commands
- shell_write_mode = options.shell_write_mode
-
-def file_version():
- return md5(open(__file__, 'r').read()).hexdigest()
-
-def clear_failfile():
- if os.path.exists(failfile):
- os.remove(failfile)
-
-def run_old_fails():
- global tests
-
- try:
- f = open(failfile, 'r')
- state = pickle.load(f)
- f.close()
- except Exception:
- try:
- f.close()
- except:
- pass
- clear_failfile()
- return # This counts as passing so we will run all tests
-
- if ('version' not in state or state['version'] != file_version()):
- print "warning: old version of failfile.smoke detected. skipping recent fails"
- clear_failfile()
- return
-
- testsAndOptions = state['testsAndOptions']
- tests = [x[0] for x in testsAndOptions]
- passed = []
- try:
- for (i, (test, options)) in enumerate(testsAndOptions):
- # SERVER-5102: until we can figure out a better way to manage
- # dependencies of the --only-old-fails build phase, just skip
- # tests which we can't safely run at this point
- path, usedb = test
-
- if not os.path.exists(path):
- passed.append(i)
- winners.append(test)
- continue
-
- filename = os.path.basename(path)
- if filename in ('test', 'test.exe') or filename.endswith('.js'):
- set_globals(options, [filename])
- oldWinners = len(winners)
- run_tests([test])
- if len(winners) != oldWinners: # can't use return value due to continue_on_failure
- passed.append(i)
- finally:
- for offset, i in enumerate(passed):
- testsAndOptions.pop(i - offset)
-
- if testsAndOptions:
- f = open(failfile, 'w')
- state = {'version':file_version(), 'testsAndOptions':testsAndOptions}
- pickle.dump(state, f)
- else:
- clear_failfile()
-
- report() # exits with failure code if there is an error
-
-def add_to_failfile(tests, options):
- try:
- f = open(failfile, 'r')
- testsAndOptions = pickle.load(f)["testsAndOptions"]
- except Exception:
- testsAndOptions = []
-
- for test in tests:
- if (test, options) not in testsAndOptions:
- testsAndOptions.append( (test, options) )
-
- state = {'version':file_version(), 'testsAndOptions':testsAndOptions}
- f = open(failfile, 'w')
- pickle.dump(state, f)
-
-
-
-def main():
- global mongod_executable, mongod_port, shell_executable, continue_on_failure, small_oplog
- global no_journal, set_parameters, set_parameters_mongos, no_preallocj, auth
- global keyFile, smoke_db_prefix, test_path, use_write_commands
-
- parser = OptionParser(usage="usage: smoke.py [OPTIONS] ARGS*")
- parser.add_option('--mode', dest='mode', default='suite',
- help='If "files", ARGS are filenames; if "suite", ARGS are sets of tests (%default)')
- # Some of our tests hard-code pathnames e.g., to execute, so until
- # that changes we don't have the freedom to run from anyplace.
- # parser.add_option('--mongo-repo', dest='mongo_repo', default=None,
- parser.add_option('--test-path', dest='test_path', default=None,
- help="Path to the test executables to run, "
- "currently only used for 'client' (%default)")
- parser.add_option('--mongod', dest='mongod_executable', default=os.path.join(mongo_repo, 'mongod'),
- help='Path to mongod to run (%default)')
- parser.add_option('--port', dest='mongod_port', default="27999",
- help='Port the mongod will bind to (%default)')
- parser.add_option('--mongo', dest='shell_executable', default=os.path.join(mongo_repo, 'mongo'),
- help='Path to mongo, for .js test files (%default)')
- parser.add_option('--continue-on-failure', dest='continue_on_failure',
- action="store_true", default=False,
- help='If supplied, continue testing even after a test fails')
- parser.add_option('--from-file', dest='File',
- help="Run tests/suites named in FILE, one test per line, '-' means stdin")
- parser.add_option('--smoke-db-prefix', dest='smoke_db_prefix', default=smoke_db_prefix,
- help="Prefix to use for the mongods' dbpaths ('%default')")
- parser.add_option('--small-oplog', dest='small_oplog', default=False,
- action="store_true",
- help='Run tests with master/slave replication & use a small oplog')
- parser.add_option('--small-oplog-rs', dest='small_oplog_rs', default=False,
- action="store_true",
- help='Run tests with replica set replication & use a small oplog')
- parser.add_option('--nojournal', dest='no_journal', default=False,
- action="store_true",
- help='Do not turn on journaling in tests')
- parser.add_option('--nopreallocj', dest='no_preallocj', default=False,
- action="store_true",
- help='Do not preallocate journal files in tests')
- parser.add_option('--auth', dest='auth', default=False,
- action="store_true",
- help='Run standalone mongods in tests with authentication enabled')
- parser.add_option('--use-x509', dest='use_x509', default=False,
- action="store_true",
- help='Use x509 auth for internal cluster authentication')
- parser.add_option('--authMechanism', dest='authMechanism', default='MONGODB-CR',
- help='Use the given authentication mechanism, when --auth is used.')
- parser.add_option('--keyFile', dest='keyFile', default=None,
- help='Path to keyFile to use to run replSet and sharding tests with authentication enabled')
- parser.add_option('--ignore', dest='ignore_files', default=None,
- help='Pattern of files to ignore in tests')
- parser.add_option('--only-old-fails', dest='only_old_fails', default=False,
- action="store_true",
- help='Check the failfile and only run all tests that failed last time')
- parser.add_option('--reset-old-fails', dest='reset_old_fails', default=False,
- action="store_true",
- help='Clear the failfile. Do this if all tests pass')
- parser.add_option('--with-cleanbb', dest='with_cleanbb', action="store_true",
- default=False,
- help='Clear database files before first test')
- parser.add_option('--clean-every', dest='clean_every_n_tests', type='int',
- default=20,
- help='Clear database files every N tests [default %default]')
- parser.add_option('--dont-start-mongod', dest='start_mongod', default=True,
- action='store_false',
- help='Do not start mongod before commencing test running')
- parser.add_option('--use-ssl', dest='use_ssl', default=False,
- action='store_true',
- help='Run mongo shell and mongod instances with SSL encryption')
- parser.add_option('--set-parameters', dest='set_parameters', default="",
- help='Adds --setParameter to mongod for each passed in item in the csv list - ex. "param1=1,param2=foo" ')
- parser.add_option('--set-parameters-mongos', dest='set_parameters_mongos', default="",
- help='Adds --setParameter to mongos for each passed in item in the csv list - ex. "param1=1,param2=foo" ')
- parser.add_option('--temp-path', dest='temp_path', default=None,
- help='If present, passed as --tempPath to unittests and dbtests')
- # Buildlogger invocation from command line
- parser.add_option('--buildlogger-builder', dest='buildlogger_builder', default=None,
- action="store", help='Set the "builder name" for buildlogger')
- parser.add_option('--buildlogger-buildnum', dest='buildlogger_buildnum', default=None,
- action="store", help='Set the "build number" for buildlogger')
- parser.add_option('--buildlogger-url', dest='buildlogger_url', default=None,
- action="store", help='Set the url root for the buildlogger service')
- parser.add_option('--buildlogger-credentials', dest='buildlogger_credentials', default=None,
- action="store", help='Path to Python file containing buildlogger credentials')
- parser.add_option('--buildlogger-phase', dest='buildlogger_phase', default=None,
- action="store", help='Set the "phase" for buildlogger (e.g. "core", "auth") for display in the webapp (optional)')
- parser.add_option('--report-file', dest='report_file', default=None,
- action='store',
- help='Path to generate detailed json report containing all test details')
- parser.add_option('--use-write-commands', dest='use_write_commands', default=False,
- action='store_true',
- help='Deprecated(use --shell-write-mode): Sets the shell to use write commands by default')
- parser.add_option('--shell-write-mode', dest='shell_write_mode', default="legacy",
- help='Sets the shell to use a specific write mode: commands/compatibility/legacy (default:legacy)')
-
- global tests
- (options, tests) = parser.parse_args()
-
- set_globals(options, tests)
-
- buildlogger_opts = (options.buildlogger_builder, options.buildlogger_buildnum, options.buildlogger_credentials)
- if all(buildlogger_opts):
- os.environ['MONGO_USE_BUILDLOGGER'] = 'true'
- os.environ['MONGO_BUILDER_NAME'] = options.buildlogger_builder
- os.environ['MONGO_BUILD_NUMBER'] = options.buildlogger_buildnum
- os.environ['BUILDLOGGER_CREDENTIALS'] = options.buildlogger_credentials
- if options.buildlogger_phase:
- os.environ['MONGO_PHASE'] = options.buildlogger_phase
- elif any(buildlogger_opts):
- # some but not all of the required options were sete
- raise Exception("you must set all of --buildlogger-builder, --buildlogger-buildnum, --buildlogger-credentials")
-
- if options.buildlogger_url: #optional; if None, defaults to const in buildlogger.py
- os.environ['BUILDLOGGER_URL'] = options.buildlogger_url
-
- if options.File:
- if options.File == '-':
- tests = sys.stdin.readlines()
- else:
- f = open(options.File)
- tests = f.readlines()
- tests = [t.rstrip('\n') for t in tests]
-
- if options.only_old_fails:
- run_old_fails()
- return
- elif options.reset_old_fails:
- clear_failfile()
- return
-
- # If we're in suite mode, tests is a list of names of sets of tests.
- if options.mode == 'suite':
- tests = expand_suites(tests)
- elif options.mode == 'files':
- tests = [(os.path.abspath(test), start_mongod) for test in tests]
-
- if options.ignore_files != None :
- ignore_patt = re.compile( options.ignore_files )
- print "Ignoring files with pattern: ", ignore_patt
-
- def ignore_test( test ):
- if ignore_patt.search( test[0] ) != None:
- print "Ignoring test ", test[0]
- return False
- else:
- return True
-
- tests = filter( ignore_test, tests )
-
- if not tests:
- print "warning: no tests specified"
- return
-
- if options.with_cleanbb:
- clean_dbroot(nokill=True)
-
- test_report["start"] = time.time()
- test_report["mongod_running_at_start"] = mongod().is_mongod_up(mongod_port)
- try:
- run_tests(tests)
- finally:
- add_to_failfile(fails, options)
-
- test_report["end"] = time.time()
- test_report["elapsed"] = test_report["end"] - test_report["start"]
- test_report["failures"] = len(losers.keys())
- test_report["mongod_running_at_end"] = mongod().is_mongod_up(mongod_port)
- if report_file:
- f = open( report_file, "wb" )
- f.write( json.dumps( test_report, indent=4, separators=(',', ': ')) )
- f.close()
-
- report()
-
-if __name__ == "__main__":
- main()
diff --git a/src/mongo/gotools/test/legacy24/buildscripts/utils.py b/src/mongo/gotools/test/legacy24/buildscripts/utils.py
deleted file mode 100644
index 68273ee69c8..00000000000
--- a/src/mongo/gotools/test/legacy24/buildscripts/utils.py
+++ /dev/null
@@ -1,230 +0,0 @@
-
-import codecs
-import re
-import socket
-import time
-import os
-import os.path
-import itertools
-import subprocess
-import sys
-import hashlib
-
-# various utilities that are handy
-
-def getAllSourceFiles( arr=None , prefix="." ):
- if arr is None:
- arr = []
-
- if not os.path.isdir( prefix ):
- # assume a file
- arr.append( prefix )
- return arr
-
- for x in os.listdir( prefix ):
- if x.startswith( "." ) or x.startswith( "pcre-" ) or x.startswith( "32bit" ) or x.startswith( "mongodb-" ) or x.startswith("debian") or x.startswith( "mongo-cxx-driver" ):
- continue
- full = prefix + "/" + x
- if os.path.isdir( full ) and not os.path.islink( full ):
- getAllSourceFiles( arr , full )
- else:
- if full.endswith( ".cpp" ) or full.endswith( ".h" ) or full.endswith( ".c" ):
- full = full.replace( "//" , "/" )
- arr.append( full )
-
- return arr
-
-
-def getGitBranch():
- if not os.path.exists( ".git" ) or not os.path.isdir(".git"):
- return None
-
- version = open( ".git/HEAD" ,'r' ).read().strip()
- if not version.startswith( "ref: " ):
- return version
- version = version.split( "/" )
- version = version[len(version)-1]
- return version
-
-def getGitBranchString( prefix="" , postfix="" ):
- t = re.compile( '[/\\\]' ).split( os.getcwd() )
- if len(t) > 2 and t[len(t)-1] == "mongo":
- par = t[len(t)-2]
- m = re.compile( ".*_([vV]\d+\.\d+)$" ).match( par )
- if m is not None:
- return prefix + m.group(1).lower() + postfix
- if par.find("Nightly") > 0:
- return ""
-
-
- b = getGitBranch()
- if b == None or b == "master":
- return ""
- return prefix + b + postfix
-
-def getGitVersion():
- if not os.path.exists( ".git" ) or not os.path.isdir(".git"):
- return "nogitversion"
-
- version = open( ".git/HEAD" ,'r' ).read().strip()
- if not version.startswith( "ref: " ):
- return version
- version = version[5:]
- f = ".git/" + version
- if not os.path.exists( f ):
- return version
- return open( f , 'r' ).read().strip()
-
-def execsys( args ):
- import subprocess
- if isinstance( args , str ):
- r = re.compile( "\s+" )
- args = r.split( args )
- p = subprocess.Popen( args , stdout=subprocess.PIPE , stderr=subprocess.PIPE )
- r = p.communicate()
- return r;
-
-def getprocesslist():
- raw = ""
- try:
- raw = execsys( "/bin/ps axww" )[0]
- except Exception,e:
- print( "can't get processlist: " + str( e ) )
-
- r = re.compile( "[\r\n]+" )
- return r.split( raw )
-
-def removeIfInList( lst , thing ):
- if thing in lst:
- lst.remove( thing )
-
-def findVersion( root , choices ):
- for c in choices:
- if ( os.path.exists( root + c ) ):
- return root + c
- raise "can't find a version of [" + root + "] choices: " + choices
-
-def choosePathExist( choices , default=None):
- for c in choices:
- if c != None and os.path.exists( c ):
- return c
- return default
-
-def filterExists(paths):
- return filter(os.path.exists, paths)
-
-def ensureDir( name ):
- d = os.path.dirname( name )
- if not os.path.exists( d ):
- print( "Creating dir: " + name );
- os.makedirs( d )
- if not os.path.exists( d ):
- raise "Failed to create dir: " + name
-
-
-def distinctAsString( arr ):
- s = set()
- for x in arr:
- s.add( str(x) )
- return list(s)
-
-def checkMongoPort( port=27017 ):
- sock = socket.socket()
- sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
- sock.settimeout(1)
- sock.connect(("localhost", port))
- sock.close()
-
-
-def didMongodStart( port=27017 , timeout=20 ):
- while timeout > 0:
- time.sleep( 1 )
- try:
- checkMongoPort( port )
- return True
- except Exception,e:
- print( e )
- timeout = timeout - 1
- return False
-
-def which(executable):
- if sys.platform == 'win32':
- paths = os.environ.get('Path', '').split(';')
- else:
- paths = os.environ.get('PATH', '').split(':')
-
- for path in paths:
- path = os.path.expandvars(path)
- path = os.path.expanduser(path)
- path = os.path.abspath(path)
- executable_path = os.path.join(path, executable)
- if os.path.exists(executable_path):
- return executable_path
-
- return executable
-
-def md5sum( file ):
- #TODO error handling, etc..
- return execsys( "md5sum " + file )[0].partition(" ")[0]
-
-def md5string( a_string ):
- return hashlib.md5(a_string).hexdigest()
-
-def find_python(min_version=(2, 5)):
- try:
- if sys.version_info >= min_version:
- return sys.executable
- except AttributeError:
- # In case the version of Python is somehow missing sys.version_info or sys.executable.
- pass
-
- version = re.compile(r'[Pp]ython ([\d\.]+)', re.MULTILINE)
- binaries = ('python27', 'python2.7', 'python26', 'python2.6', 'python25', 'python2.5', 'python')
- for binary in binaries:
- try:
- out, err = subprocess.Popen([binary, '-V'], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
- for stream in (out, err):
- match = version.search(stream)
- if match:
- versiontuple = tuple(map(int, match.group(1).split('.')))
- if versiontuple >= min_version:
- return which(binary)
- except:
- pass
-
- raise Exception('could not find suitable Python (version >= %s)' % '.'.join(str(v) for v in min_version))
-
-def smoke_command(*args):
- # return a list of arguments that comprises a complete
- # invocation of smoke.py
- here = os.path.dirname(__file__)
- smoke_py = os.path.abspath(os.path.join(here, 'smoke.py'))
- # the --with-cleanbb argument causes smoke.py to run
- # buildscripts/cleanbb.py before each test phase; this
- # prevents us from running out of disk space on slaves
- return [find_python(), smoke_py, '--with-cleanbb'] + list(args)
-
-def run_smoke_command(*args):
- # to run a command line script from a scons Alias (or any
- # Action), the command sequence must be enclosed in a list,
- # otherwise SCons treats it as a list of dependencies.
- return [smoke_command(*args)]
-
-# unicode is a pain. some strings cannot be unicode()'d
-# but we want to just preserve the bytes in a human-readable
-# fashion. this codec error handler will substitute the
-# repr() of the offending bytes into the decoded string
-# at the position they occurred
-def replace_with_repr(unicode_error):
- offender = unicode_error.object[unicode_error.start:unicode_error.end]
- return (unicode(repr(offender).strip("'").strip('"')), unicode_error.end)
-
-codecs.register_error('repr', replace_with_repr)
-
-def unicode_dammit(string, encoding='utf8'):
- # convert a string to a unicode, using the Python
- # representation of non-ascii bytes when necessary
- #
- # name inpsired by BeautifulSoup's "UnicodeDammit"
- return string.decode(encoding, 'repr')
-
diff --git a/src/mongo/gotools/test/legacy24/jstests/libs/use_extended_timeout.js b/src/mongo/gotools/test/legacy24/jstests/libs/use_extended_timeout.js
deleted file mode 100644
index 7f770249214..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/libs/use_extended_timeout.js
+++ /dev/null
@@ -1,12 +0,0 @@
-var _orig_runMongoProgram = runMongoProgram;
-runMongoProgram = function() {
- var args = [];
- for (var i in arguments) {
- args[i] = arguments[i];
- }
- var progName = args[0];
- if (progName !== "bsondump" && args.indexOf("--dialTimeout") === -1) {
- args.push("--dialTimeout", "30");
- }
- return _orig_runMongoProgram.apply(null, args);
-};
diff --git a/src/mongo/gotools/test/legacy24/jstests/replsets/rslib.js b/src/mongo/gotools/test/legacy24/jstests/replsets/rslib.js
deleted file mode 100644
index fb561cc90e1..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/replsets/rslib.js
+++ /dev/null
@@ -1,115 +0,0 @@
-
-var count = 0;
-var w = 0;
-
-var wait = function(f,msg) {
- w++;
- var n = 0;
- while (!f()) {
- if( n % 4 == 0 )
- print("waiting " + w);
- if (++n == 4) {
- print("" + f);
- }
- assert(n < 200, 'tried 200 times, giving up on ' + msg );
- sleep(1000);
- }
-};
-
-/**
- * Use this to do something once every 4 iterations.
- *
- * <pre>
- * for (i=0; i<1000; i++) {
- * occasionally(function() { print("4 more iterations"); });
- * }
- * </pre>
- */
-var occasionally = function(f, n) {
- var interval = n || 4;
- if (count % interval == 0) {
- f();
- }
- count++;
-};
-
-var reconnect = function(a) {
- wait(function() {
- try {
- // make this work with either dbs or connections
- if (typeof(a.getDB) == "function") {
- db = a.getDB('foo');
- }
- else {
- db = a;
- }
- db.bar.stats();
- if (jsTest.options().keyFile) { // SERVER-4241: Shell connections don't re-authenticate on reconnect
- return jsTest.authenticate(db.getMongo());
- }
- return true;
- } catch(e) {
- print(e);
- return false;
- }
- });
-};
-
-
-var getLatestOp = function(server) {
- server.getDB("admin").getMongo().setSlaveOk();
- var log = server.getDB("local")['oplog.rs'];
- var cursor = log.find({}).sort({'$natural': -1}).limit(1);
- if (cursor.hasNext()) {
- return cursor.next();
- }
- return null;
-};
-
-
-var waitForAllMembers = function(master, timeout) {
- var failCount = 0;
-
- assert.soon( function() {
- var state = null
- try {
- state = master.getSisterDB("admin").runCommand({replSetGetStatus:1});
- failCount = 0;
- } catch ( e ) {
- // Connection can get reset on replica set failover causing a socket exception
- print( "Calling replSetGetStatus failed" );
- print( e );
- return false;
- }
- occasionally(function() { printjson(state); }, 10);
-
- for (var m in state.members) {
- if (state.members[m].state != 1 && // PRIMARY
- state.members[m].state != 2 && // SECONDARY
- state.members[m].state != 7) { // ARBITER
- return false;
- }
- }
- printjson( state );
- return true;
- }, "not all members ready", timeout || 60000);
-
- print( "All members are now in state PRIMARY, SECONDARY, or ARBITER" );
-};
-
-var reconfig = function(rs, config) {
- var admin = rs.getPrimary().getDB("admin");
-
- try {
- var ok = admin.runCommand({replSetReconfig : config});
- assert.eq(ok.ok,1);
- }
- catch(e) {
- print(e);
- }
-
- master = rs.getPrimary().getDB("admin");
- waitForAllMembers(master);
-
- return master;
-};
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/csv1.js b/src/mongo/gotools/test/legacy24/jstests/tool/csv1.js
deleted file mode 100644
index 5eb7ab0249a..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/csv1.js
+++ /dev/null
@@ -1,42 +0,0 @@
-// csv1.js
-
-t = new ToolTest( "csv1" )
-
-c = t.startDB( "foo" );
-
-base = { a : 1 , b : "foo,bar\"baz,qux" , c: 5, 'd d': -6 , e: '-', f : "."};
-
-assert.eq( 0 , c.count() , "setup1" );
-c.insert( base );
-delete base._id
-assert.eq( 1 , c.count() , "setup2" );
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv" , "-f" , "a,b,c,d d,e,f" )
-
-c.drop()
-assert.eq( 0 , c.count() , "after drop" )
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "-f" , "a,b,c,d d,e,f" );
-assert.soon( "2 == c.count()" , "restore 2" );
-
-a = c.find().sort( { a : 1 } ).toArray();
-delete a[0]._id
-delete a[1]._id
-assert.eq( tojson( { a : "a" , b : "b" , c : "c" , 'd d': "d d", e: 'e', f : "f"} ) , tojson( a[1] ) , "csv parse 1" );
-assert.eq( tojson( base ) , tojson(a[0]) , "csv parse 0" )
-
-c.drop()
-assert.eq( 0 , c.count() , "after drop 2" )
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" )
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-
-x = c.findOne()
-delete x._id;
-assert.eq( tojson( base ) , tojson(x) , "csv parse 2" )
-
-
-
-
-t.stop()
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/csvexport1.js b/src/mongo/gotools/test/legacy24/jstests/tool/csvexport1.js
deleted file mode 100644
index 2ae85e37401..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/csvexport1.js
+++ /dev/null
@@ -1,47 +0,0 @@
-// csvexport1.js
-
-t = new ToolTest( "csvexport1" )
-
-c = t.startDB( "foo" );
-
-assert.eq( 0 , c.count() , "setup1" );
-
-objId = ObjectId()
-
-c.insert({ a : new NumberInt(1) , b : objId , c: [1.0, 2.0, 3.0], d : {a : "hello", b : "world"} , e: '-'})
-c.insert({ a : -2.0, c : MinKey, d : "Then he said, \"Hello World!\"", e : new NumberLong(3)})
-c.insert({ a : new BinData(0, "1234"), b : ISODate("2009-08-27"), c : new Timestamp(1234, 9876), d : /foo*\"bar\"/i, e : function foo() { print("Hello World!"); }})
-
-assert.eq( 3 , c.count() , "setup2" );
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv", "-f", "a,b,c,d,e")
-
-
-c.drop()
-
-assert.eq( 0 , c.count() , "after drop" )
-
-t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--type", "csv", "--headerline");
-
-assert.soon ( 3 + " == c.count()", "after import");
-
-// Note: Exporting and Importing to/from CSV is not designed to be round-trippable
-expected = []
-expected.push({ a : 1, b : "ObjectId(" + objId.valueOf() + ")", c : "[1.0,2.0,3.0]", d : "{\"a\":\"hello\",\"b\":\"world\"}", e : "-"})
-expected.push({ a : -2.0, b : "", c : "$MinKey", d : "Then he said, \"Hello World!\"", e : 3})
-// "t" should be 1234, but the shell interprets the first field of timestamps as milliseconds while
-// they are stored as seconds. See SERVER-7718.
-expected.push({ a : "D76DF8", b : "2009-08-27T00:00:00.000Z", c : "{ \"$timestamp\": { \"t\": 1234, \"i\": 9876 } }", d : "/foo*\\\"bar\\\"/i", e : tojson(function foo() { print("Hello World!"); })})
-
-actual = []
-actual.push(c.find({a : 1}).toArray()[0]);
-actual.push(c.find({a : -2.0}).toArray()[0]);
-actual.push(c.find({a : "D76DF8"}).toArray()[0]);
-
-for (i = 0; i < expected.length; i++) {
- delete actual[i]._id
- assert.eq( expected[i], actual[i], "CSV export " + i);
-}
-
-
-t.stop()
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/csvexport2.js b/src/mongo/gotools/test/legacy24/jstests/tool/csvexport2.js
deleted file mode 100644
index 3e0dd2c6829..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/csvexport2.js
+++ /dev/null
@@ -1,31 +0,0 @@
-// csvexport2.js
-
-t = new ToolTest( "csvexport2" )
-
-c = t.startDB( "foo" );
-
-// This test is designed to test exporting of a CodeWithScope object.
-// However, due to SERVER-3391, it is not possible to create a CodeWithScope object in the mongo shell,
-// therefore this test does not work. Once SERVER-3391 is resolved, this test should be un-commented out
-
-//assert.eq( 0 , c.count() , "setup1" );
-
-//c.insert({ a : 1 , b : Code("print(\"Hello \" + x);", {"x" : "World!"})})
-//assert.eq( 1 , c.count() , "setup2" );
-//t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv", "-f", "a,b")
-
-
-//c.drop()
-
-//assert.eq( 0 , c.count() , "after drop" )
-//t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--type", "csv", "--headerline");
-//assert.soon ( 1 + " == c.count()", "after import");
-
-//expected = { a : 1, b : "\"{ \"$code\" : print(\"Hello \" + x); , \"$scope\" : { \"x\" : \"World!\" } }"};
-//actual = c.findOne()
-
-//delete actual._id;
-//assert.eq( expected, actual );
-
-
-t.stop() \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/csvimport1.js b/src/mongo/gotools/test/legacy24/jstests/tool/csvimport1.js
deleted file mode 100644
index 3bff1110cbe..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/csvimport1.js
+++ /dev/null
@@ -1,40 +0,0 @@
-// csvimport1.js
-
-t = new ToolTest( "csvimport1" )
-
-c = t.startDB( "foo" );
-
-base = []
-base.push({ a : 1, b : "this is some text.\nThis text spans multiple lines, and just for fun\ncontains a comma", "c" : "This has leading and trailing whitespace!" })
-base.push({a : 2, b : "When someone says something you \"put it in quotes\"", "c" : "I like embedded quotes/slashes\\backslashes" })
-base.push({a : 3, b : " This line contains the empty string and has leading and trailing whitespace inside the quotes! ", "c" : "" })
-base.push({a : 4, b : "", "c" : "How are empty entries handled?" })
-base.push({a : 5, b : "\"\"", c : "\"This string is in quotes and contains empty quotes (\"\")\""})
-base.push({ a : "a" , b : "b" , c : "c"})
-
-assert.eq( 0 , c.count() , "setup" );
-
-t.runTool( "import" , "--file" , "jstests/tool/data/csvimport1.csv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "-f" , "a,b,c" );
-assert.soon( base.length + " == c.count()" , "after import 1 " );
-
-a = c.find().sort( { a : 1 } ).toArray();
-for (i = 0; i < base.length; i++ ) {
- delete a[i]._id
- assert.eq( tojson(base[i]), tojson(a[i]), "csv parse " + i)
-}
-
-c.drop()
-assert.eq( 0 , c.count() , "after drop" )
-
-t.runTool( "import" , "--file" , "jstests/tool/data/csvimport1.csv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" )
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( base.length - 1 , c.count() , "after import 2" );
-
-x = c.find().sort( { a : 1 } ).toArray();
-for (i = 0; i < base.length - 1; i++ ) {
- delete x[i]._id
- assert.eq( tojson(base[i]), tojson(x[i]), "csv parse with headerline " + i)
-}
-
-
-t.stop()
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/data/a.tsv b/src/mongo/gotools/test/legacy24/jstests/tool/data/a.tsv
deleted file mode 100644
index 1e094179a63..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/data/a.tsv
+++ /dev/null
@@ -1,2 +0,0 @@
-a b c d e
- 1 foobar 5 -6
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/data/csvimport1.csv b/src/mongo/gotools/test/legacy24/jstests/tool/data/csvimport1.csv
deleted file mode 100644
index 256d40a9184..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/data/csvimport1.csv
+++ /dev/null
@@ -1,8 +0,0 @@
-a,b,c
-1,"this is some text.
-This text spans multiple lines, and just for fun
-contains a comma", "This has leading and trailing whitespace!"
-2, "When someone says something you ""put it in quotes""", I like embedded quotes/slashes\backslashes
- 3 , " This line contains the empty string and has leading and trailing whitespace inside the quotes! ", ""
- "4" ,, How are empty entries handled?
-"5","""""", """This string is in quotes and contains empty quotes ("""")"""
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/data/dumprestore6/foo.bson b/src/mongo/gotools/test/legacy24/jstests/tool/data/dumprestore6/foo.bson
deleted file mode 100644
index b8f8f99e6bf..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/data/dumprestore6/foo.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/data/dumprestore6/system.indexes.bson b/src/mongo/gotools/test/legacy24/jstests/tool/data/dumprestore6/system.indexes.bson
deleted file mode 100644
index dde25da302a..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/data/dumprestore6/system.indexes.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/dumpauth.js b/src/mongo/gotools/test/legacy24/jstests/tool/dumpauth.js
deleted file mode 100644
index 6050fc6b79b..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/dumpauth.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// dumpauth.js
-// test mongodump with authentication
-port = allocatePorts( 1 )[ 0 ];
-baseName = "tool_dumpauth";
-
-m = startMongod( "--auth", "--port", port, "--dbpath", "/data/db/" + baseName, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
-db = m.getDB( "admin" );
-
-t = db[ baseName ];
-t.drop();
-
-for(var i = 0; i < 100; i++) {
- t["testcol"].save({ "x": i });
-}
-
-users = db.getCollection( "system.users" );
-
-db.addUser( "testuser" , "testuser" );
-
-assert( db.auth( "testuser" , "testuser" ) , "auth failed" );
-
-x = runMongoProgram( "mongodump",
- "--db", baseName,
- "--authenticationDatabase=admin",
- "-u", "testuser",
- "-p", "testuser",
- "-h", "127.0.0.1:"+port,
- "--collection", "testcol" );
-assert.eq(x, 0, "mongodump should succeed with authentication");
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/dumpfilename1.js b/src/mongo/gotools/test/legacy24/jstests/tool/dumpfilename1.js
deleted file mode 100644
index cb0255afefc..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/dumpfilename1.js
+++ /dev/null
@@ -1,13 +0,0 @@
-//dumpfilename1.js
-
-//Test designed to make sure error that dumping a collection with "/" fails
-
-t = new ToolTest( "dumpfilename1" );
-
-t.startDB( "foo" );
-
-c = t.db;
-c.getCollection("df/").insert({ a: 3 });
-assert(t.runTool( "dump" , "--out" , t.ext ) != 0, "dump should fail with non-zero return code")
-t.stop();
-
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore1.js b/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore1.js
deleted file mode 100644
index fd1e8789ea6..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore1.js
+++ /dev/null
@@ -1,23 +0,0 @@
-// dumprestore1.js
-
-t = new ToolTest( "dumprestore1" );
-
-c = t.startDB( "foo" );
-assert.eq( 0 , c.count() , "setup1" );
-c.save( { a : 22 } );
-assert.eq( 1 , c.count() , "setup2" );
-
-t.runTool( "dump" , "--out" , t.ext );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" );
-
-t.runTool( "restore" , "--dir" , t.ext );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-assert.eq( 22 , c.findOne().a , "after restore 2" );
-
-// ensure that --collection is used with --db. See SERVER-7721
-var ret = t.runTool( "dump" , "--collection" , "col" );
-assert.neq( ret, 0, "mongodump should return failure code" );
-t.stop();
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore10.js b/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore10.js
deleted file mode 100644
index 49f008ea591..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore10.js
+++ /dev/null
@@ -1,63 +0,0 @@
-// simple test to ensure write concern functions as expected
-
-var name = "dumprestore10";
-
-function step(msg) {
- msg = msg || "";
- this.x = (this.x || 0) + 1;
- print('\n' + name + ".js step " + this.x + ' ' + msg);
-}
-
-step();
-
-var replTest = new ReplSetTest( {name: name, nodes: 2} );
-var nodes = replTest.startSet();
-replTest.initiate();
-var master = replTest.getPrimary();
-var total = 1000;
-
-{
- step("store data");
- var foo = master.getDB("foo");
- for (i = 0; i < total; i++) {
- foo.bar.insert({ x: i, y: "abc" });
- }
-}
-
-{
- step("wait");
- replTest.awaitReplication();
-}
-
-step("mongodump from replset");
-
-var data = MongoRunner.dataDir + "/dumprestore10-dump1/";
-
-runMongoProgram( "mongodump", "--host", "127.0.0.1:"+replTest.ports[0], "--out", data );
-
-
-{
- step("remove data after dumping");
- master.getDB("foo").getCollection("bar").drop();
-}
-
-{
- step("wait");
- replTest.awaitReplication();
-}
-
-step("try mongorestore with write concern");
-
-runMongoProgram( "mongorestore", "--writeConcern", "2", "--host", "127.0.0.1:"+replTest.ports[0], "--dir", data );
-
-var x = 0;
-
-// no waiting for replication
-x = master.getDB("foo").getCollection("bar").count();
-
-assert.eq(x, total, "mongorestore should have successfully restored the collection");
-
-step("stopSet");
-replTest.stopSet();
-
-step("SUCCESS");
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore3.js b/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore3.js
deleted file mode 100644
index fe9f54d704c..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore3.js
+++ /dev/null
@@ -1,60 +0,0 @@
-// dumprestore3.js
-
-var name = "dumprestore3";
-
-function step(msg) {
- msg = msg || "";
- this.x = (this.x || 0) + 1;
- print('\n' + name + ".js step " + this.x + ' ' + msg);
-}
-
-step();
-
-var replTest = new ReplSetTest( {name: name, nodes: 2} );
-var nodes = replTest.startSet();
-replTest.initiate();
-var master = replTest.getPrimary();
-
-{
- step("populate master");
- var foo = master.getDB("foo");
- for (i = 0; i < 20; i++) {
- foo.bar.insert({ x: i, y: "abc" });
- }
-}
-
-{
- step("wait for slaves");
- replTest.awaitReplication();
-}
-
-{
- step("dump & restore a db into a slave");
- var port = 30020;
- var conn = startMongodTest(port, name + "-other");
- var c = conn.getDB("foo").bar;
- c.save({ a: 22 });
- assert.eq(1, c.count(), "setup2");
-}
-
-step("try mongorestore to slave");
-
-var data = MongoRunner.dataDir + "/dumprestore3-other1/";
-resetDbpath(data);
-runMongoProgram( "mongodump", "--host", "127.0.0.1:"+port, "--out", data );
-
-var x = runMongoProgram( "mongorestore", "--host", "127.0.0.1:"+replTest.ports[1], "--dir", data );
-assert.eq(x, 1, "mongorestore should exit w/ 1 on slave");
-
-step("try mongoimport to slave");
-
-dataFile = MongoRunner.dataDir + "/dumprestore3-other2.json";
-runMongoProgram( "mongoexport", "--host", "127.0.0.1:"+port, "--out", dataFile, "--db", "foo", "--collection", "bar" );
-
-x = runMongoProgram( "mongoimport", "--host", "127.0.0.1:"+replTest.ports[1], "--file", dataFile );
-assert.eq(x, 1, "mongoreimport should exit w/ 1 on slave"); // windows return is signed
-
-step("stopSet");
-replTest.stopSet();
-
-step("SUCCESS");
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore4.js b/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore4.js
deleted file mode 100644
index 568e196061f..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore4.js
+++ /dev/null
@@ -1,42 +0,0 @@
-// dumprestore4.js -- see SERVER-2186
-
-// The point of this test is to ensure that mongorestore successfully
-// constructs indexes when the database being restored into has a
-// different name than the database dumped from. There are 2
-// issues here: (1) if you dumped from database "A" and restore into
-// database "B", B should have exactly the right indexes; (2) if for
-// some reason you have another database called "A" at the time of the
-// restore, mongorestore shouldn't touch it.
-
-t = new ToolTest( "dumprestore4" );
-
-c = t.startDB( "dumprestore4" );
-
-db=t.db
-
-dbname = db.getName();
-dbname2 = "NOT_"+dbname;
-
-db2=db.getSisterDB( dbname2 );
-
-db.dropDatabase(); // make sure it's empty
-db2.dropDatabase(); // make sure everybody's empty
-
-assert.eq( 0 , db.system.indexes.count() , "setup1" );
-c.ensureIndex({ x : 1} );
-assert.eq( 2 , db.system.indexes.count() , "setup2" ); // _id and x_1
-
-assert.eq( 0, t.runTool( "dump" , "-d" , dbname, "--out", t.ext ), "dump")
-
-// to ensure issue (2), we have to clear out the first db.
-// By inspection, db.dropIndexes() doesn't get rid of the _id index on c,
-// so we have to drop the collection.
-c.drop();
-assert.eq( 0, t.runTool( "restore" , "--dir" , t.ext + "/" + dbname, "-d", dbname2 ), "restore" );
-
-// issue (1)
-assert.eq( 2 , db2.system.indexes.count() , "after restore 1" );
-// issue (2)
-assert.eq( 0 , db.system.indexes.count() , "after restore 2" );
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore6.js b/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore6.js
deleted file mode 100644
index d8b349e9589..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore6.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// dumprestore6.js
-// Test restoring from a dump with an old index version
-
-t = new ToolTest( "dumprestore6" );
-
-c = t.startDB( "foo" );
-db = t.db
-assert.eq( 0 , c.count() , "setup1" );
-
-t.runTool("restore", "--dir", "jstests/tool/data/dumprestore6", "--db", "jstests_tool_dumprestore6")
-
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore" );
-assert.eq( 1 , db.system.indexes.findOne({name:'a_1'}).v, "index version wasn't updated")
-assert.eq( 1, c.count({v:0}), "dropped the 'v' field from a non-index collection")
-
-db.dropDatabase()
-assert.eq( 0 , c.count() , "after drop" );
-
-t.runTool("restore", "--dir", "jstests/tool/data/dumprestore6", "--db", "jstests_tool_dumprestore6", "--keepIndexVersion")
-
-assert.soon( "c.findOne()" , "no data after sleep2" );
-assert.eq( 1 , c.count() , "after restore2" );
-assert.eq( 0 , db.system.indexes.findOne({name:'a_1'}).v, "index version wasn't maintained")
-assert.eq( 1, c.count({v:0}), "dropped the 'v' field from a non-index collection")
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore7.js b/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore7.js
deleted file mode 100644
index b28a056422d..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore7.js
+++ /dev/null
@@ -1,62 +0,0 @@
-var name = "dumprestore7";
-
-function step(msg) {
- msg = msg || "";
- this.x = (this.x || 0) + 1;
- print('\n' + name + ".js step " + this.x + ' ' + msg);
-}
-
-step();
-
-var replTest = new ReplSetTest( {name: name, nodes: 1} );
-var nodes = replTest.startSet();
-replTest.initiate();
-var master = replTest.getPrimary();
-
-{
- step("first chunk of data");
- var foo = master.getDB("foo");
- for (i = 0; i < 20; i++) {
- foo.bar.insert({ x: i, y: "abc" });
- }
-}
-
-{
- step("wait");
- replTest.awaitReplication();
- var time = replTest.getPrimary().getDB("local").getCollection("oplog.rs").find().limit(1).sort({$natural:-1}).next();
- step(time.ts.t);
-}
-
-{
- step("second chunk of data");
- var foo = master.getDB("foo");
- for (i = 30; i < 50; i++) {
- foo.bar.insert({ x: i, y: "abc" });
- }
-}
-{
- var port = 30020;
- var conn = startMongodTest(port, name + "-other");
-}
-
-step("try mongodump with $timestamp");
-
-var data = MongoRunner.dataDir + "/data/db/dumprestore7-dump1/";
-var query = "{\"ts\":{\"$gt\":{\"$timestamp\":{\"t\":"+ time.ts.t + ",\"i\":" + time.ts.i +"}}}}";
-
-runMongoProgram( "mongodump", "--host", "127.0.0.1:"+replTest.ports[0], "--db", "local", "--collection", "oplog.rs", "--query", query, "--out", data );
-
-step("try mongorestore from $timestamp");
-
-runMongoProgram( "mongorestore", "--host", "127.0.0.1:"+port, "--dir", data );
-var x = 9;
-x = conn.getDB("local").getCollection("oplog.rs").count();
-
-assert.eq(x, 20, "mongorestore should only have the latter 20 entries");
-
-step("stopSet");
-replTest.stopSet();
-
-step("SUCCESS");
-
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore8.js b/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore8.js
deleted file mode 100644
index 4e6591738d6..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore8.js
+++ /dev/null
@@ -1,105 +0,0 @@
-// dumprestore8.js
-
-// This file tests that indexes and capped collection options get properly dumped and restored.
-// It checks that this works both when doing a full database dump/restore and when doing it just for a single db or collection
-
-t = new ToolTest( "dumprestore8" );
-
-t.startDB( "foo" );
-db = t.db;
-
-dbname = db.getName();
-dbname2 = "NOT_"+dbname;
-
-db.dropDatabase();
-
-assert.eq( 0 , db.foo.count() , "setup1" );
-db.foo.save( { a : 1, b : 1 } );
-db.foo.ensureIndex({a:1});
-db.foo.ensureIndex({b:1, _id:-1});
-assert.eq( 1 , db.foo.count() , "setup2" );
-
-
-assert.eq( 0 , db.bar.count() , "setup3" );
-db.createCollection("bar", {capped:true, size:1000});
-
-for (var i = 0; i < 1000; i++) {
- db.bar.save( { x : i } );
-}
-db.bar.ensureIndex({x:1});
-
-barDocCount = db.bar.count();
-assert.gt( barDocCount, 0 , "No documents inserted" );
-assert.lt( db.bar.count(), 1000 , "Capped collection didn't evict documents" );
-assert.eq( 5 , db.system.indexes.count() , "Indexes weren't created right" );
-
-
-// Full dump/restore
-
-t.runTool( "dump" , "--out" , t.ext );
-
-db.dropDatabase();
-assert.eq( 0 , db.foo.count() , "foo not dropped" );
-assert.eq( 0 , db.bar.count() , "bar not dropped" );
-assert.eq( 0 , db.system.indexes.count() , "indexes not dropped" );
-
-t.runTool( "restore" , "--dir" , t.ext );
-
-assert.soon( "db.foo.findOne()" , "no data after sleep" );
-assert.eq( 1 , db.foo.count() , "wrong number of docs restored to foo" );
-assert.eq( barDocCount, db.bar.count(), "wrong number of docs restored to bar" );
-for (var i = 0; i < 10; i++) {
- db.bar.save({x:i});
-}
-assert.eq( barDocCount, db.bar.count(), "Capped collection didn't evict documents after restore." );
-assert.eq( 5 , db.system.indexes.count() , "Indexes weren't created correctly by restore" );
-
-
-// Dump/restore single DB
-
-dumppath = t.ext + "singledbdump/";
-mkdir(dumppath);
-t.runTool( "dump" , "-d", dbname, "--out" , dumppath );
-
-db.dropDatabase();
-assert.eq( 0 , db.foo.count() , "foo not dropped2" );
-assert.eq( 0 , db.bar.count() , "bar not dropped2" );
-assert.eq( 0 , db.system.indexes.count() , "indexes not dropped2" );
-
-t.runTool( "restore" , "-d", dbname2, "--dir" , dumppath + dbname );
-
-db = db.getSiblingDB(dbname2);
-
-assert.soon( "db.foo.findOne()" , "no data after sleep 2" );
-assert.eq( 1 , db.foo.count() , "wrong number of docs restored to foo 2" );
-assert.eq( barDocCount, db.bar.count(), "wrong number of docs restored to bar 2" );
-for (var i = 0; i < 10; i++) {
- db.bar.save({x:i});
-}
-assert.eq( barDocCount, db.bar.count(), "Capped collection didn't evict documents after restore 2." );
-assert.eq( 5 , db.system.indexes.count() , "Indexes weren't created correctly by restore 2" );
-
-
-// Dump/restore single collection
-
-dumppath = t.ext + "singlecolldump/";
-mkdir(dumppath);
-t.runTool( "dump" , "-d", dbname2, "-c", "bar", "--out" , dumppath );
-
-db.dropDatabase();
-assert.eq( 0 , db.bar.count() , "bar not dropped3" );
-assert.eq( 0 , db.system.indexes.count() , "indexes not dropped3" );
-
-t.runTool( "restore" , "-d", dbname, "-c", "baz", "--dir" , dumppath + dbname2 + "/bar.bson" );
-
-db = db.getSiblingDB(dbname);
-
-assert.soon( "db.baz.findOne()" , "no data after sleep 2" );
-assert.eq( barDocCount, db.baz.count(), "wrong number of docs restored to bar 2" );
-for (var i = 0; i < 10; i++) {
- db.baz.save({x:i});
-}
-assert.eq( barDocCount, db.baz.count(), "Capped collection didn't evict documents after restore 3." );
-assert.eq( 2 , db.system.indexes.count() , "Indexes weren't created correctly by restore 3" );
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore9.js b/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore9.js
deleted file mode 100644
index a4a98e8e430..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore9.js
+++ /dev/null
@@ -1,79 +0,0 @@
-if (0) { // Test disabled until SERVER-3853 is finished.
-var name = "dumprestore9";
-function step(msg) {
- msg = msg || "";
- this.x = (this.x || 0) + 1;
- print('\n' + name + ".js step " + this.x + ' ' + msg);
-}
-
-s = new ShardingTest( "dumprestore9a", 2, 0, 3, {chunksize:1} );
-
-step("Shard collection");
-
-s.adminCommand( { enablesharding : "aaa" } ); // Make this db alphabetically before 'config' so it gets restored first
-s.adminCommand( { shardcollection : "aaa.foo" , key : { x : 1 } } );
-
-db = s.getDB( "aaa" );
-coll = db.foo;
-
-step("insert data");
-
-str = 'a';
-while (str.length < 1024*512) {
- str += str;
-}
-
-numDocs = 20;
-for (var i = 0; i < numDocs; i++) {
- coll.insert({x:i, str:str});
-}
-
-step("Wait for balancing");
-
-assert.soon( function(){ var x = s.chunkDiff( "foo" , "aaa" ); print( "chunk diff: " + x ); return x < 2; } , "no balance happened" , 8 * 60 * 1000 , 2000 );
-
-assert.eq(numDocs, coll.count(), "Documents weren't inserted correctly");
-
-step("dump cluster");
-
-dumpdir = "/data/db/dumprestore9-dump1/";
-resetDbpath(dumpdir);
-runMongoProgram( "mongodump", "--host", s._mongos[0].host, "--out", dumpdir );
-
-step("Shutting down cluster");
-
-s.stop();
-
-step("Starting up clean cluster");
-s = new ShardingTest( "dumprestore9b", 2, 0, 3, {chunksize:1} );
-
-db = s.getDB( "aaa" );
-coll = db.foo;
-
-assert.eq(0, coll.count(), "Data wasn't cleaned up by restarting sharding test");
-
-step("Restore data and config");
-
-runMongoProgram( "mongorestore", dumpdir, "--host", s._mongos[1].host, "--restoreShardingConfig", "--forceConfigRestore");
-
-config = s.getDB("config");
-assert(config.databases.findOne({_id:'aaa'}).partitioned, "Config data wasn't restored properly");
-
-assert( s.chunkDiff( "foo" , "aaa" ) < 2, "Chunk data wasn't restored properly");
-
-assert.eq(numDocs, coll.count(), "Didn't restore all documents properly2");
-assert.eq(numDocs, coll.find().itcount(), "Didn't restore all documents properly");
-
-for (var i = 0; i < numDocs; i++) {
- doc = coll.findOne({x:i});
- assert.eq(i, doc.x, "Doc missing from the shard it should be on");
-}
-
-for (var i = 0; i < s._connections.length; i++) {
- assert(s._connections[i].getDB("aaa").foo.count() > 0, "No data on shard: " + s._connections[i].host);
-}
-
-step("Stop cluster");
-s.stop();
-step("SUCCESS");
-} \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestoreWithNoOptions.js b/src/mongo/gotools/test/legacy24/jstests/tool/dumprestoreWithNoOptions.js
deleted file mode 100644
index dd3300ad4f9..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestoreWithNoOptions.js
+++ /dev/null
@@ -1,117 +0,0 @@
-// SERVER-6366
-// relates to SERVER-808
-//
-// This file tests that options are not restored upon
-// mongorestore with --noOptionsRestore
-//
-// It checks that this works both when doing a full
-// database dump/restore and when doing it just for a
-// single db or collection.
-
-t = new ToolTest( "dumprestoreWithNoOptions" );
-
-t.startDB( "foo" );
-db = t.db;
-
-dbname = db.getName();
-dbname2 = "NOT_"+dbname;
-
-db.dropDatabase();
-
-var options = { capped: true, size: 1000, autoIndexId: true };
-db.createCollection('capped', options);
-assert.eq( 1, db.system.indexes.count(), "auto index not created" );
-var cappedOptions = db.capped.exists().options;
-for ( var opt in options ) {
- assert.eq(options[opt], cappedOptions[opt], 'invalid option')
-}
-db.capped.insert({ x: 1 });
-db.getLastError()
-
-// Full dump/restore
-
-t.runTool( "dump" , "--out" , t.ext );
-
-db.dropDatabase();
-assert.eq( 0, db.capped.count(), "capped not dropped");
-assert.eq( 0, db.system.indexes.count(), "indexes not dropped" );
-
-t.runTool( "restore" , "--dir" , t.ext , "--noOptionsRestore", "-vvv");
-
-assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
-assert(true !== db.capped.stats().capped, "restore options were not ignored");
-
-// Note: The 2.4 server seems to have a bug where part of the "create" command
-// itself ends up being set in the collection options object. So as a workaround,
-// we will just delete that key and make sure there are no other keys set in
-// the options object.
-opts = db.capped.exists().options;
-delete opts.create;
-assert.eq(Object.keys(opts).length, 0, "restore options not ignored");
-
-// Dump/restore single DB
-
-db.dropDatabase();
-var options = { capped: true, size: 1000, autoIndexId: true };
-db.createCollection('capped', options);
-assert.eq( 1, db.system.indexes.count(), "auto index not created" );
-var cappedOptions = db.capped.exists().options;
-for ( var opt in options ) {
- assert.eq(options[opt], cappedOptions[opt], 'invalid option')
-}
-db.capped.insert({ x: 1 });
-db.getLastError()
-
-dumppath = t.ext + "noOptionsSingleDump/";
-mkdir(dumppath);
-t.runTool( "dump" , "-d", dbname, "--out" , dumppath );
-
-db.dropDatabase();
-assert.eq( 0, db.capped.count(), "capped not dropped");
-assert.eq( 0, db.system.indexes.count(), "indexes not dropped" );
-
-t.runTool( "restore" , "-d", dbname2, "--dir" , dumppath + dbname, "--noOptionsRestore");
-
-db = db.getSiblingDB(dbname2);
-
-assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
-assert(true !== db.capped.stats().capped, "restore options were not ignored");
-
-opts = db.capped.exists().options;
-delete opts.create;
-assert.eq(Object.keys(opts).length, 0, "restore options not ignored");
-
-// Dump/restore single collection
-
-db.dropDatabase();
-var options = { capped: true, size: 1000, autoIndexId: true };
-db.createCollection('capped', options);
-assert.eq( 1, db.system.indexes.count(), "auto index not created" );
-var cappedOptions = db.capped.exists().options;
-for ( var opt in options ) {
- assert.eq(options[opt], cappedOptions[opt], 'invalid option')
-}
-db.capped.insert({ x: 1 });
-db.getLastError()
-
-dumppath = t.ext + "noOptionsSingleColDump/";
-mkdir(dumppath);
-dbname = db.getName();
-t.runTool( "dump" , "-d", dbname, "-c", "capped", "--out" , dumppath );
-
-db.dropDatabase();
-
-assert.eq( 0, db.capped.count(), "capped not dropped");
-assert.eq( 0, db.system.indexes.count(), "indexes not dropped" );
-
-t.runTool( "restore", "-d", dbname, "--drop", "--noOptionsRestore", "--dir", dumppath + dbname );
-
-db = db.getSiblingDB(dbname);
-
-assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
-assert( true !== db.capped.stats().capped, "restore options were not ignored" );
-opts = db.capped.exists().options;
-delete opts.create;
-assert.eq(Object.keys(opts).length, 0, "restore options not ignored");
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore_auth.js b/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore_auth.js
deleted file mode 100644
index 6f0e6c0a05c..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/dumprestore_auth.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// dumprestore_auth.js
-
-t = new ToolTest("dumprestore_auth", { auth : "" });
-
-c = t.startDB("foo");
-
-adminDB = c.getDB().getSiblingDB('admin');
-adminDB.addUser('admin', 'password');
-adminDB.auth('admin','password');
-
-assert.eq(0 , c.count() , "setup1");
-c.save({ a : 22 });
-assert.eq(1 , c.count() , "setup2");
-
-t.runTool("dump" , "--out" , t.ext, "--username", "admin", "--password", "password");
-
-c.drop();
-assert.eq(0 , c.count() , "after drop");
-
-t.runTool("restore" , "--dir" , t.ext); // Should fail
-assert.eq(0 , c.count() , "after restore without auth");
-
-t.runTool("restore" , "--dir" , t.ext, "--username", "admin", "--password", "password");
-assert.soon("c.findOne()" , "no data after sleep");
-assert.eq(1 , c.count() , "after restore 2");
-assert.eq(22 , c.findOne().a , "after restore 2");
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/dumpsecondary.js b/src/mongo/gotools/test/legacy24/jstests/tool/dumpsecondary.js
deleted file mode 100644
index 7a641542498..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/dumpsecondary.js
+++ /dev/null
@@ -1,38 +0,0 @@
-var replTest = new ReplSetTest( {name: 'testSet', nodes: 2} );
-
-var nodes = replTest.startSet();
-replTest.initiate();
-
-var master = replTest.getPrimary();
-db = master.getDB("foo")
-db.foo.save({a: 1000});
-replTest.awaitReplication();
-replTest.awaitSecondaryNodes();
-
-assert.eq( 1 , db.foo.count() , "setup" );
-
-var slaves = replTest.liveNodes.slaves;
-assert( slaves.length == 1, "Expected 1 slave but length was " + slaves.length );
-slave = slaves[0];
-
-var args = ['mongodump', '-h', slave.host, '--out', MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external/'];
-var authargs = ['--username', jsTest.options().authUser, '--password', jsTest.options().authPassword];
-if (jsTest.options().keyFile) {
- args = args.concat(authargs);
-}
-runMongoProgram.apply(null, args);
-db.foo.drop()
-
-assert.eq( 0 , db.foo.count() , "after drop" );
-args = ['mongorestore', '-h', master.host, MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external/'];
-if (jsTest.options().keyFile) {
- args = args.concat(authargs);
-}
-runMongoProgram.apply(null, args)
-assert.soon( "db.foo.findOne()" , "no data after sleep" );
-assert.eq( 1 , db.foo.count() , "after restore" );
-assert.eq( 1000 , db.foo.findOne().a , "after restore 2" );
-
-resetDbpath(MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external')
-
-replTest.stopSet(15)
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/exportimport1.js b/src/mongo/gotools/test/legacy24/jstests/tool/exportimport1.js
deleted file mode 100644
index a7a7bcee90c..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/exportimport1.js
+++ /dev/null
@@ -1,66 +0,0 @@
-// exportimport1.js
-
-t = new ToolTest( "exportimport1" );
-
-c = t.startDB( "foo" );
-assert.eq( 0 , c.count() , "setup1" );
-var arr = ["x", undefined, "y", undefined];
-c.save( { a : 22 , b : arr} );
-assert.eq( 1 , c.count() , "setup2" );
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-var doc = c.findOne();
-assert.eq( 22 , doc.a , "after restore 2" );
-for (var i=0; i<arr.length; i++) {
- if (typeof arr[i] == 'undefined') {
- // null should be { "$undefined" : true }, but this is a workaround for SERVER-6102
- assert.eq( null, doc.b[i] , "after restore array: "+i );
- } else {
- assert.eq( arr[i], doc.b[i] , "after restore array: "+i );
- }
-}
-
-// now with --jsonArray
-
-t.runTool( "export" , "--jsonArray" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
-
-t.runTool( "import" , "--jsonArray" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-assert.eq( 22 , c.findOne().a , "after restore 2" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-arr = ["a", undefined, "c"];
-c.save({a : arr});
-assert.eq( 1 , c.count() , "setup2" );
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-var doc = c.findOne();
-for (var i=0; i<arr.length; i++) {
- if (typeof arr[i] == 'undefined') {
- // null should be { "$undefined" : true }, but this is a workaround for SERVER-6102
- assert.eq( null, doc.a[i] , "after restore array: "+i );
- } else {
- assert.eq( arr[i], doc.a[i] , "after restore array: "+i );
- }
-}
-
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/exportimport3.js b/src/mongo/gotools/test/legacy24/jstests/tool/exportimport3.js
deleted file mode 100644
index f18ba6cbd4b..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/exportimport3.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// exportimport3.js
-
-t = new ToolTest( "exportimport3" );
-
-c = t.startDB( "foo" );
-assert.eq( 0 , c.count() , "setup1" );
-c.save({a:1})
-c.save({a:2})
-c.save({a:3})
-c.save({a:4})
-c.save({a:5})
-
-assert.eq( 5 , c.count() , "setup2" );
-
-
-t.runTool( "export" , "--jsonArray" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
-
-t.runTool( "import" , "--jsonArray" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 5 , c.count() , "after restore 2" );
-
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/exportimport4.js b/src/mongo/gotools/test/legacy24/jstests/tool/exportimport4.js
deleted file mode 100644
index 605e21b7337..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/exportimport4.js
+++ /dev/null
@@ -1,56 +0,0 @@
-// exportimport4.js
-
-t = new ToolTest( "exportimport4" );
-c = t.startDB( "foo" );
-
-install_test_data = function() {
- c.drop();
-
- assert.eq( 0 , c.count() , "setup1" );
-
- c.save( { a : [1, 2, 3, NaN, 4, null, 5] } );
- c.save( { a : [1, 2, 3, 4, 5] } );
- c.save( { a : [ NaN ] } );
- c.save( { a : [1, 2, 3, 4, NaN, NaN, 5, NaN] } );
- c.save( { a : [1, 2, 3, 4, null, null, 5, null] } );
-
- assert.eq( 5 , c.count() , "setup2" );
-};
-
-// attempt to export fields without NaN
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[NaN]}}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 2 , c.count() , "after restore 1" );
-
-// attempt to export fields with NaN
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:NaN}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 3 , c.count() , "after restore 2" );
-
-// attempt to export everything
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 5 , c.count() , "after restore 3" );
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/exportimport5.js b/src/mongo/gotools/test/legacy24/jstests/tool/exportimport5.js
deleted file mode 100644
index 427b03f0232..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/exportimport5.js
+++ /dev/null
@@ -1,81 +0,0 @@
-// exportimport4.js
-
-t = new ToolTest( "exportimport5" );
-c = t.startDB( "foo" );
-
-install_test_data = function() {
- c.drop();
-
- assert.eq( 0 , c.count() , "setup1" );
-
- c.save( { a : [1, 2, 3, Infinity, 4, null, 5] } );
- c.save( { a : [1, 2, 3, 4, 5] } );
- c.save( { a : [ Infinity ] } );
- c.save( { a : [1, 2, 3, 4, Infinity, Infinity, 5, -Infinity] } );
- c.save( { a : [1, 2, 3, 4, null, null, 5, null] } );
- c.save( { a : [ -Infinity ] } );
-
- assert.eq( 6 , c.count() , "setup2" );
-};
-
-// attempt to export fields without Infinity
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[Infinity]}}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 3 , c.count() , "after restore 1" );
-
-// attempt to export fields with Infinity
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:Infinity}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 3 , c.count() , "after restore 2" );
-
-// attempt to export fields without -Infinity
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[-Infinity]}}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 4 , c.count() , "after restore 3" );
-
-// attempt to export fields with -Infinity
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:-Infinity}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 2 , c.count() , "after restore 4" );
-
-// attempt to export everything
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 6 , c.count() , "after restore 5" );
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/files1.js b/src/mongo/gotools/test/legacy24/jstests/tool/files1.js
deleted file mode 100644
index acfcc16dcc3..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/files1.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// files1.js
-
-t = new ToolTest( "files1" )
-
-db = t.startDB();
-
-filename = 'mongod'
-if ( _isWindows() )
- filename += '.exe'
-
-t.runTool( "files" , "-d" , t.baseName , "put" , filename );
-md5 = md5sumFile(filename);
-
-file_obj = db.fs.files.findOne()
-assert( file_obj , "A 0" );
-md5_stored = file_obj.md5;
-md5_computed = db.runCommand({filemd5: file_obj._id}).md5;
-assert.eq( md5 , md5_stored , "A 1" );
-assert.eq( md5 , md5_computed, "A 2" );
-
-mkdir(t.ext);
-
-t.runTool( "files" , "-d" , t.baseName , "get" , filename , '-l' , t.extFile );
-md5 = md5sumFile(t.extFile);
-assert.eq( md5 , md5_stored , "B" );
-
-t.stop()
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/oplog1.js b/src/mongo/gotools/test/legacy24/jstests/tool/oplog1.js
deleted file mode 100644
index 0429e6e3416..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/oplog1.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// oplog1.js
-
-// very basic test for mongooplog
-// need a lot more, but test that it functions at all
-
-t = new ToolTest( "oplog1" );
-
-db = t.startDB();
-
-output = db.output
-
-doc = { x : 17, _id: 5 };
-
-db.oplog.insert( { ts : new Timestamp() , "op" : "i" , "ns" : output.getFullName() , "o" : doc } );
-
-assert.eq( 0 , output.count() , "before" )
-
-t.runTool( "oplog" , "--oplogns" , db.getName() + ".oplog" , "--from" , "127.0.0.1:" + t.port , "-vv" );
-
-assert.eq( 1 , output.count() , "after" );
-
-var res = output.findOne()
-assert.eq( doc["x"], res["x"], "have same val for x after check" )
-assert.eq( doc["_id"], res["_id"], "have same val for _id after check" )
-assert.eq( Object.keys(doc).length, Object.keys(res).length, "have same amount of keys after check" )
-
-t.stop();
-
-
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/oplog_all_ops.js b/src/mongo/gotools/test/legacy24/jstests/tool/oplog_all_ops.js
deleted file mode 100644
index 8f231cb233d..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/oplog_all_ops.js
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * Performs a simple test on mongooplog by doing different types of operations
- * that will show up in the oplog then replaying it on another replica set.
- * Correctness is verified using the dbhash command.
- */
-
-var repl1 = new ReplSetTest({ name: 'rs1', nodes: [{ nopreallocj: '' },
- { arbiter: true }, { arbiter: true }]});
-
-repl1.startSet({ oplogSize: 10 });
-repl1.initiate();
-repl1.awaitSecondaryNodes();
-
-var repl1Conn = new Mongo(repl1.getURL());
-var testDB = repl1Conn.getDB('test');
-var testColl = testDB.user;
-
-// op i
-testColl.insert({ x: 1 });
-testColl.insert({ x: 2 });
-
-// op c
-testDB.dropDatabase();
-
-testColl.insert({ y: 1 });
-testColl.insert({ y: 2 });
-testColl.insert({ y: 3 });
-
-// op u
-testColl.update({}, { $inc: { z: 1 }}, true, true);
-
-// op d
-testColl.remove({ y: 2 });
-
-// op n
-var oplogColl = repl1Conn.getCollection('local.oplog.rs');
-oplogColl.insert({ ts: new Timestamp(), op: 'n', ns: testColl.getFullName(), 'o': { x: 'noop' }});
-
-var repl2 = new ReplSetTest({ name: 'rs2', startPort: 31100, nodes: [{ nopreallocj: '' },
- { arbiter: true }, { arbiter: true }]});
-
-repl2.startSet({ oplogSize: 10 });
-repl2.initiate();
-repl2.awaitSecondaryNodes();
-
-var srcConn = repl1.getPrimary();
-runMongoProgram('mongooplog', '--from', repl1.getPrimary().host,
- '--host', repl2.getPrimary().host);
-
-var repl1Hash = testDB.runCommand({ dbhash: 1 });
-
-var repl2Conn = new Mongo(repl2.getURL());
-var testDB2 = repl2Conn.getDB(testDB.getName());
-var repl2Hash = testDB2.runCommand({ dbhash: 1 });
-
-assert(repl1Hash.md5);
-assert.eq(repl1Hash.md5, repl2Hash.md5);
-
-repl1.stopSet();
-repl2.stopSet();
-
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/restorewithauth.js b/src/mongo/gotools/test/legacy24/jstests/tool/restorewithauth.js
deleted file mode 100644
index a759ccf038c..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/restorewithauth.js
+++ /dev/null
@@ -1,113 +0,0 @@
-/* SERVER-4972
- * Test for mongorestore on server with --auth allows restore without credentials of colls
- * with no index
- */
-/*
- * 1) Start mongo without auth.
- * 2) Write to collection
- * 3) Take dump of the collection using mongodump.
- * 4) Drop the collection.
- * 5) Stop mongod from step 1.
- * 6) Restart mongod with auth.
- * 7) Add admin user to kick authentication
- * 8) Try restore without auth credentials. The restore should fail
- * 9) Try restore with correct auth credentials. The restore should succeed this time.
- */
-
-var port = allocatePorts(1)[0];
-baseName = "jstests_restorewithauth";
-var conn = startMongod( "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface",
- "--nojournal", "--bind_ip", "127.0.0.1" );
-
-// write to ns foo.bar
-var foo = conn.getDB( "foo" );
-for( var i = 0; i < 4; i++ ) {
- foo["bar"].save( { "x": i } );
- foo["baz"].save({"x": i});
-}
-
-// make sure the collection exists
-assert.eq( foo.system.namespaces.count({name: "foo.bar"}), 1 )
-
-//make sure it has no index except _id
-assert.eq(foo.system.indexes.count(), 2);
-
-foo.bar.createIndex({x:1});
-assert.eq(foo.system.indexes.count(), 3);
-
-// get data dump
-var dumpdir = MongoRunner.dataDir + "/restorewithauth-dump1/";
-resetDbpath( dumpdir );
-x = runMongoProgram("mongodump", "--db", "foo", "-h", "127.0.0.1:"+port, "--out", dumpdir);
-
-// now drop the db
-foo.dropDatabase();
-
-// stop mongod
-stopMongod( port );
-
-// start mongod with --auth
-conn = startMongod( "--auth", "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface",
- "--nojournal", "--bind_ip", "127.0.0.1" );
-
-// admin user
-var admin = conn.getDB( "admin" )
-admin.addUser( "admin" , "admin" );
-admin.auth( "admin" , "admin" );
-
-var foo = conn.getDB( "foo" )
-
-// make sure no collection with the same name exists
-assert.eq(foo.system.namespaces.count( {name: "foo.bar"}), 0);
-assert.eq(foo.system.namespaces.count( {name: "foo.baz"}), 0);
-
-// now try to restore dump
-x = runMongoProgram( "mongorestore", "-h", "127.0.0.1:" + port, "--dir" , dumpdir, "-vvvvv" );
-
-// make sure that the collection isn't restored
-assert.eq(foo.system.namespaces.count({name: "foo.bar"}), 0);
-assert.eq(foo.system.namespaces.count({name: "foo.baz"}), 0);
-
-// now try to restore dump with correct credentials
-x = runMongoProgram( "mongorestore",
- "-h", "127.0.0.1:" + port,
- "-d", "foo",
- "--authenticationDatabase=admin",
- "-u", "admin",
- "-p", "admin",
- "--dir", dumpdir + "foo/",
- "-vvvvv");
-
-// make sure that the collection was restored
-assert.eq(foo.system.namespaces.count({name: "foo.bar"}), 1);
-assert.eq(foo.system.namespaces.count({name: "foo.baz"}), 1);
-
-// make sure the collection has 4 documents
-assert.eq(foo.bar.count(), 4);
-assert.eq(foo.baz.count(), 4);
-
-foo.dropDatabase();
-
-// make sure that the collection is empty
-assert.eq(foo.system.namespaces.count({name: "foo.bar"}), 0);
-assert.eq(foo.system.namespaces.count({name: "foo.baz"}), 0);
-
-foo.addUser('user', 'password');
-
-// now try to restore dump with foo database credentials
-x = runMongoProgram("mongorestore",
- "-h", "127.0.0.1:" + port,
- "-d", "foo",
- "-u", "user",
- "-p", "password",
- "--dir", dumpdir + "foo/",
- "-vvvvv");
-
-// make sure that the collection was restored
-assert.eq(foo.system.namespaces.count({name: "foo.bar"}), 1);
-assert.eq(foo.system.namespaces.count({name: "foo.baz"}), 1);
-assert.eq(foo.bar.count(), 4);
-assert.eq(foo.baz.count(), 4);
-assert.eq(foo.system.indexes.count(), 5); // _id on foo, _id on bar, x on foo, _id + 1 on system.users
-
-stopMongod( port );
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/stat1.js b/src/mongo/gotools/test/legacy24/jstests/tool/stat1.js
deleted file mode 100644
index d2e00756e75..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/stat1.js
+++ /dev/null
@@ -1,23 +0,0 @@
-// stat1.js
-// test mongostat with authentication SERVER-3875
-port = allocatePorts( 1 )[ 0 ];
-baseName = "tool_stat1";
-
-m = startMongod( "--auth", "--port", port, "--dbpath", "/data/db/" + baseName, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
-db = m.getDB( "admin" );
-
-t = db[ baseName ];
-t.drop();
-
-users = db.getCollection( "system.users" );
-users.remove( {} );
-
-db.addUser( "eliot" , "eliot" );
-
-assert( db.auth( "eliot" , "eliot" ) , "auth failed" );
-
-x = runMongoProgram( "mongostat", "--host", "127.0.0.1:"+port, "--username", "eliot", "--password", "eliot", "--rowcount", "1", "--authenticationDatabase", "admin");
-assert.eq(x, 0, "mongostat should exit successfully with eliot:eliot");
-
-x = runMongoProgram( "mongostat", "--host", "127.0.0.1:"+port, "--username", "eliot", "--password", "wrong", "--rowcount", "1", "--authenticationDatabase", "admin");
-assert.eq(x, 1, "mongostat should exit with -1 with eliot:wrong");
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/tool_replset.js b/src/mongo/gotools/test/legacy24/jstests/tool/tool_replset.js
deleted file mode 100644
index bc50a0fd7d4..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/tool_replset.js
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Test to ensure that (dump/restore/export/import/oplog) works with a replica set connection string
- * 1. Start a replica set.
- * 2. Add data to a collection.
- * 3. Take a dump of the database.
- * 4. Drop the db.
- * 5. Restore the db.
- * 6. Export a collection.
- * 7. Drop the collection.
- * 8. Import the collection.
- * 9. Add data to the oplog.rs collection.
- * 10. Ensure that the document doesn't exist yet.
- * 11. Now play the mongooplog tool.
- * 12. Make sure that the oplog was played
-*/
-
-// Load utility methods for replica set tests
-load("jstests/replsets/rslib.js");
-
-print("starting the replica set")
-
-var replTest = new ReplSetTest({ name: 'tool_replset', nodes: 2, oplogSize: 5 });
-var nodes = replTest.startSet();
-replTest.initiate();
-var master = replTest.getPrimary();
-for (var i = 0; i < 100; i++) {
- master.getDB("foo").bar.insert({ a: i });
-}
-replTest.awaitReplication();
-
-var replSetConnString = "tool_replset/127.0.0.1:" + replTest.ports[0] +
- ",127.0.0.1:" + replTest.ports[1];
-
-// Test with mongodump/mongorestore
-print("dump the db");
-var data = MongoRunner.dataDir + "/tool_replset-dump1/";
-runMongoProgram("mongodump", "--host", replSetConnString, "--out", data);
-
-print("db successfully dumped, dropping now");
-master.getDB("foo").dropDatabase();
-replTest.awaitReplication();
-
-print("restore the db");
-runMongoProgram("mongorestore", "--host", replSetConnString, "--dir", data);
-
-print("db successfully restored, checking count")
-var x = master.getDB("foo").getCollection("bar").count();
-assert.eq(x, 100, "mongorestore should have successfully restored the collection");
-
-replTest.awaitReplication();
-
-// Test with mongoexport/mongoimport
-print("export the collection");
-var extFile = MongoRunner.dataDir + "/tool_replset/export";
-runMongoProgram("mongoexport", "--host", replSetConnString, "--out", extFile,
- "-d", "foo", "-c", "bar");
-
-print("collection successfully exported, dropping now");
-master.getDB("foo").getCollection("bar").drop();
-replTest.awaitReplication();
-
-print("import the collection");
-runMongoProgram("mongoimport", "--host", replSetConnString, "--file", extFile,
- "-d", "foo", "-c", "bar");
-
-var x = master.getDB("foo").getCollection("bar").count();
-assert.eq(x, 100, "mongoimport should have successfully imported the collection");
-
-// Test with mongooplog
-var doc = { _id : 5, x : 17 };
-master.getDB("local").oplog.rs.insert({ ts : new Timestamp(), "op" : "i", "ns" : "foo.bar",
- "o" : doc });
-
-assert.eq(100, master.getDB("foo").getCollection("bar").count(), "count before running mongooplog " +
- "was not 100 as expected");
-
-runMongoProgram("mongooplog" , "--from", "127.0.0.1:" + replTest.ports[0],
- "--host", replSetConnString);
-
-print("running mongooplog to replay the oplog")
-
-assert.eq(101, master.getDB("foo").getCollection("bar").count(), "count after running mongooplog " +
- "was not 101 as expected")
-
-print("all tests successful, stopping replica set")
-
-replTest.stopSet();
-
-print("replica set stopped, test complete")
diff --git a/src/mongo/gotools/test/legacy24/jstests/tool/tsv1.js b/src/mongo/gotools/test/legacy24/jstests/tool/tsv1.js
deleted file mode 100644
index 1b0ddbb7c9e..00000000000
--- a/src/mongo/gotools/test/legacy24/jstests/tool/tsv1.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// tsv1.js
-
-t = new ToolTest( "tsv1" )
-
-c = t.startDB( "foo" );
-
-base = { a : "", b : 1 , c : "foobar" , d: 5, e: -6 };
-
-t.runTool( "import" , "--file" , "jstests/tool/data/a.tsv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "tsv" , "-f" , "a,b,c,d,e" );
-assert.soon( "2 == c.count()" , "restore 2" );
-
-a = c.find().sort( { a : 1 } ).toArray();
-delete a[0]._id
-delete a[1]._id
-
-assert.eq( { a : "a" , b : "b" , c : "c" , d: "d", e: "e"} , a[1] , "tsv parse 1" );
-assert.eq( base , a[0] , "tsv parse 0" )
-
-c.drop()
-assert.eq( 0 , c.count() , "after drop 2" )
-
-t.runTool( "import" , "--file" , "jstests/tool/data/a.tsv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "tsv" , "--headerline" )
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-
-x = c.findOne()
-delete x._id;
-assert.eq( base , x , "tsv parse 2" )
-
-
-
-t.stop()
diff --git a/src/mongo/gotools/test/legacy26/buildscripts/buildlogger.py b/src/mongo/gotools/test/legacy26/buildscripts/buildlogger.py
deleted file mode 100644
index a31b3e2dfa1..00000000000
--- a/src/mongo/gotools/test/legacy26/buildscripts/buildlogger.py
+++ /dev/null
@@ -1,480 +0,0 @@
-"""
-buildlogger.py
-
-Wrap a command (specified on the command line invocation of buildlogger.py)
-and send output in batches to the buildlogs web application via HTTP POST.
-
-The script configures itself from environment variables:
-
- required env vars:
- MONGO_BUILDER_NAME (e.g. "Nightly Linux 64-bit")
- MONGO_BUILD_NUMBER (an integer)
- MONGO_TEST_FILENAME (not required when invoked with -g)
-
- optional env vars:
- MONGO_PHASE (e.g. "core", "slow nightly", etc)
- MONGO_* (any other environment vars are passed to the web app)
- BUILDLOGGER_CREDENTIALS (see below)
-
-This script has two modes: a "test" mode, intended to wrap the invocation of
-an individual test file, and a "global" mode, intended to wrap the mongod
-instances that run throughout the duration of a mongo test phase (the logs
-from "global" invocations are displayed interspersed with the logs of each
-test, in order to let the buildlogs web app display the full output sensibly.)
-
-If the BUILDLOGGER_CREDENTIALS environment variable is set, it should be a
-path to a valid Python file containing "username" and "password" variables,
-which should be valid credentials for authenticating to the buildlogger web
-app. For example:
-
- username = "hello"
- password = "world"
-
-If BUILDLOGGER_CREDENTIALS is a relative path, then the working directory
-and the directories one, two, and three levels up, are searched, in that
-order.
-"""
-
-import functools
-import os
-import os.path
-import re
-import signal
-import socket
-import subprocess
-import sys
-import time
-import traceback
-import urllib2
-import utils
-
-# suppress deprecation warnings that happen when
-# we import the 'buildbot.tac' file below
-import warnings
-warnings.simplefilter('ignore', DeprecationWarning)
-
-try:
- import json
-except:
- try:
- import simplejson as json
- except:
- json = None
-
-# try to load the shared secret from settings.py
-# which will be one, two, or three directories up
-# from this file's location
-credentials_file = os.environ.get('BUILDLOGGER_CREDENTIALS', 'buildbot.tac')
-credentials_loc, credentials_name = os.path.split(credentials_file)
-if not credentials_loc:
- here = os.path.abspath(os.path.dirname(__file__))
- possible_paths = [
- os.path.abspath(os.path.join(here, '..')),
- os.path.abspath(os.path.join(here, '..', '..')),
- os.path.abspath(os.path.join(here, '..', '..', '..')),
- ]
-else:
- possible_paths = [credentials_loc]
-
-username, password = None, None
-for path in possible_paths:
- credentials_path = os.path.join(path, credentials_name)
- if os.path.isfile(credentials_path):
- credentials = {}
- try:
- execfile(credentials_path, credentials, credentials)
- username = credentials.get('slavename', credentials.get('username'))
- password = credentials.get('passwd', credentials.get('password'))
- break
- except:
- pass
-
-
-URL_ROOT = os.environ.get('BUILDLOGGER_URL', 'http://buildlogs.mongodb.org/')
-TIMEOUT_SECONDS = 10
-socket.setdefaulttimeout(TIMEOUT_SECONDS)
-
-digest_handler = urllib2.HTTPDigestAuthHandler()
-digest_handler.add_password(
- realm='buildlogs',
- uri=URL_ROOT,
- user=username,
- passwd=password)
-
-# This version of HTTPErrorProcessor is copied from
-# Python 2.7, and allows REST response codes (e.g.
-# "201 Created") which are treated as errors by
-# older versions.
-class HTTPErrorProcessor(urllib2.HTTPErrorProcessor):
- def http_response(self, request, response):
- code, msg, hdrs = response.code, response.msg, response.info()
-
- # According to RFC 2616, "2xx" code indicates that the client's
- # request was successfully received, understood, and accepted.
- if not (200 <= code < 300):
- response = self.parent.error(
- 'http', request, response, code, msg, hdrs)
-
- return response
-
-url_opener = urllib2.build_opener(digest_handler, HTTPErrorProcessor())
-
-def url(endpoint):
- if not endpoint.endswith('/'):
- endpoint = '%s/' % endpoint
-
- return '%s/%s' % (URL_ROOT.rstrip('/'), endpoint)
-
-def post(endpoint, data, headers=None):
- data = json.dumps(data, encoding='utf-8')
-
- headers = headers or {}
- headers.update({'Content-Type': 'application/json; charset=utf-8'})
-
- req = urllib2.Request(url=url(endpoint), data=data, headers=headers)
- try:
- response = url_opener.open(req)
- except urllib2.URLError:
- import traceback
- traceback.print_exc(file=sys.stderr)
- sys.stderr.flush()
- # indicate that the request did not succeed
- return None
-
- response_headers = dict(response.info())
-
- # eg "Content-Type: application/json; charset=utf-8"
- content_type = response_headers.get('content-type')
- match = re.match(r'(?P<mimetype>[^;]+).*(?:charset=(?P<charset>[^ ]+))?$', content_type)
- if match and match.group('mimetype') == 'application/json':
- encoding = match.group('charset') or 'utf-8'
- return json.load(response, encoding=encoding)
-
- return response.read()
-
-def traceback_to_stderr(func):
- """
- decorator which logs any exceptions encountered to stderr
- and returns none.
- """
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- try:
- return func(*args, **kwargs)
- except urllib2.HTTPError, err:
- sys.stderr.write('error: HTTP code %d\n----\n' % err.code)
- if hasattr(err, 'hdrs'):
- for k, v in err.hdrs.items():
- sys.stderr.write("%s: %s\n" % (k, v))
- sys.stderr.write('\n')
- sys.stderr.write(err.read())
- sys.stderr.write('\n----\n')
- sys.stderr.flush()
- except:
- sys.stderr.write('Traceback from buildlogger:\n')
- traceback.print_exc(file=sys.stderr)
- sys.stderr.flush()
- return None
- return wrapper
-
-
-@traceback_to_stderr
-def get_or_create_build(builder, buildnum, extra={}):
- data = {'builder': builder, 'buildnum': buildnum}
- data.update(extra)
- response = post('build', data)
- if response is None:
- return None
- return response['id']
-
-@traceback_to_stderr
-def create_test(build_id, test_filename, test_command, test_phase):
- response = post('build/%s/test' % build_id, {
- 'test_filename': test_filename,
- 'command': test_command,
- 'phase': test_phase,
- })
- if response is None:
- return None
- return response['id']
-
-@traceback_to_stderr
-def append_test_logs(build_id, test_id, log_lines):
- response = post('build/%s/test/%s' % (build_id, test_id), data=log_lines)
- if response is None:
- return False
- return True
-
-@traceback_to_stderr
-def append_global_logs(build_id, log_lines):
- """
- "global" logs are for the mongod(s) started by smoke.py
- that last the duration of a test phase -- since there
- may be output in here that is important but spans individual
- tests, the buildlogs webapp handles these logs specially.
- """
- response = post('build/%s' % build_id, data=log_lines)
- if response is None:
- return False
- return True
-
-@traceback_to_stderr
-def finish_test(build_id, test_id, failed=False):
- response = post('build/%s/test/%s' % (build_id, test_id), data=[], headers={
- 'X-Sendlogs-Test-Done': 'true',
- 'X-Sendlogs-Test-Failed': failed and 'true' or 'false',
- })
- if response is None:
- return False
- return True
-
-def run_and_echo(command):
- """
- this just calls the command, and returns its return code,
- allowing stdout and stderr to work as normal. it is used
- as a fallback when environment variables or python
- dependencies cannot be configured, or when the logging
- webapp is unavailable, etc
- """
- proc = subprocess.Popen(command)
-
- def handle_sigterm(signum, frame):
- try:
- proc.send_signal(signum)
- except AttributeError:
- os.kill(proc.pid, signum)
- orig_handler = signal.signal(signal.SIGTERM, handle_sigterm)
-
- proc.wait()
-
- signal.signal(signal.SIGTERM, orig_handler)
- return proc.returncode
-
-class LogAppender(object):
- def __init__(self, callback, args, send_after_lines=2000, send_after_seconds=10):
- self.callback = callback
- self.callback_args = args
-
- self.send_after_lines = send_after_lines
- self.send_after_seconds = send_after_seconds
-
- self.buf = []
- self.retrybuf = []
- self.last_sent = time.time()
-
- def __call__(self, line):
- self.buf.append((time.time(), line))
-
- delay = time.time() - self.last_sent
- if len(self.buf) >= self.send_after_lines or delay >= self.send_after_seconds:
- self.submit()
-
- # no return value is expected
-
- def submit(self):
- if len(self.buf) + len(self.retrybuf) == 0:
- return True
-
- args = list(self.callback_args)
- args.append(list(self.buf) + self.retrybuf)
-
- self.last_sent = time.time()
-
- if self.callback(*args):
- self.buf = []
- self.retrybuf = []
- return True
- else:
- self.retrybuf += self.buf
- self.buf = []
- return False
-
-
-def wrap_test(command):
- """
- call the given command, intercept its stdout and stderr,
- and send results in batches of 100 lines or 10s to the
- buildlogger webapp
- """
-
- # get builder name and build number from environment
- builder = os.environ.get('MONGO_BUILDER_NAME')
- buildnum = os.environ.get('MONGO_BUILD_NUMBER')
-
- if builder is None or buildnum is None:
- return run_and_echo(command)
-
- try:
- buildnum = int(buildnum)
- except ValueError:
- sys.stderr.write('buildlogger: build number ("%s") was not an int\n' % buildnum)
- sys.stderr.flush()
- return run_and_echo(command)
-
- # test takes some extra info
- phase = os.environ.get('MONGO_PHASE', 'unknown')
- test_filename = os.environ.get('MONGO_TEST_FILENAME', 'unknown')
-
- build_info = dict((k, v) for k, v in os.environ.items() if k.startswith('MONGO_'))
- build_info.pop('MONGO_BUILDER_NAME', None)
- build_info.pop('MONGO_BUILD_NUMBER', None)
- build_info.pop('MONGO_PHASE', None)
- build_info.pop('MONGO_TEST_FILENAME', None)
-
- build_id = get_or_create_build(builder, buildnum, extra=build_info)
- if not build_id:
- return run_and_echo(command)
-
- test_id = create_test(build_id, test_filename, ' '.join(command), phase)
- if not test_id:
- return run_and_echo(command)
-
- # the peculiar formatting here matches what is printed by
- # smoke.py when starting tests
- output_url = '%s/build/%s/test/%s/' % (URL_ROOT.rstrip('/'), build_id, test_id)
- sys.stdout.write(' (output suppressed; see %s)\n' % output_url)
- sys.stdout.flush()
-
- callback = LogAppender(callback=append_test_logs, args=(build_id, test_id))
- returncode = loop_and_callback(command, callback)
- failed = bool(returncode != 0)
-
- # this will append any remaining unsubmitted logs, or
- # return True if there are none left to submit
- tries = 5
- while not callback.submit() and tries > 0:
- sys.stderr.write('failed to finish sending test logs, retrying in 1s\n')
- sys.stderr.flush()
- time.sleep(1)
- tries -= 1
-
- tries = 5
- while not finish_test(build_id, test_id, failed) and tries > 5:
- sys.stderr.write('failed to mark test finished, retrying in 1s\n')
- sys.stderr.flush()
- time.sleep(1)
- tries -= 1
-
- return returncode
-
-def wrap_global(command):
- """
- call the given command, intercept its stdout and stderr,
- and send results in batches of 100 lines or 10s to the
- buildlogger webapp. see :func:`append_global_logs` for the
- difference between "global" and "test" log output.
- """
-
- # get builder name and build number from environment
- builder = os.environ.get('MONGO_BUILDER_NAME')
- buildnum = os.environ.get('MONGO_BUILD_NUMBER')
-
- if builder is None or buildnum is None:
- return run_and_echo(command)
-
- try:
- buildnum = int(buildnum)
- except ValueError:
- sys.stderr.write('int(os.environ["MONGO_BUILD_NUMBER"]):\n')
- sys.stderr.write(traceback.format_exc())
- sys.stderr.flush()
- return run_and_echo(command)
-
- build_info = dict((k, v) for k, v in os.environ.items() if k.startswith('MONGO_'))
- build_info.pop('MONGO_BUILDER_NAME', None)
- build_info.pop('MONGO_BUILD_NUMBER', None)
-
- build_id = get_or_create_build(builder, buildnum, extra=build_info)
- if not build_id:
- return run_and_echo(command)
-
- callback = LogAppender(callback=append_global_logs, args=(build_id, ))
- returncode = loop_and_callback(command, callback)
-
- # this will append any remaining unsubmitted logs, or
- # return True if there are none left to submit
- tries = 5
- while not callback.submit() and tries > 0:
- sys.stderr.write('failed to finish sending global logs, retrying in 1s\n')
- sys.stderr.flush()
- time.sleep(1)
- tries -= 1
-
- return returncode
-
-def loop_and_callback(command, callback):
- """
- run the given command (a sequence of arguments, ordinarily
- from sys.argv), and call the given callback with each line
- of stdout or stderr encountered. after the command is finished,
- callback is called once more with None instead of a string.
- """
- proc = subprocess.Popen(
- command,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- )
-
- def handle_sigterm(signum, frame):
- try:
- proc.send_signal(signum)
- except AttributeError:
- os.kill(proc.pid, signum)
-
- # register a handler to delegate SIGTERM
- # to the child process
- orig_handler = signal.signal(signal.SIGTERM, handle_sigterm)
-
- while proc.poll() is None:
- try:
- line = proc.stdout.readline().strip('\r\n')
- line = utils.unicode_dammit(line)
- callback(line)
- except IOError:
- # if the signal handler is called while
- # we're waiting for readline() to return,
- # don't show a traceback
- break
-
- # There may be additional buffered output
- for line in proc.stdout.readlines():
- callback(line.strip('\r\n'))
-
- # restore the original signal handler, if any
- signal.signal(signal.SIGTERM, orig_handler)
- return proc.returncode
-
-
-if __name__ == '__main__':
- # argv[0] is 'buildlogger.py'
- del sys.argv[0]
-
- if sys.argv[0] in ('-g', '--global'):
- # then this is wrapping a "global" command, and should
- # submit global logs to the build, not test logs to a
- # test within the build
- del sys.argv[0]
- wrapper = wrap_global
-
- else:
- wrapper = wrap_test
-
- # if we are missing credentials or the json module, then
- # we can't use buildlogger; so just echo output, but also
- # log why we can't work.
- if json is None:
- sys.stderr.write('buildlogger: could not import a json module\n')
- sys.stderr.flush()
- wrapper = run_and_echo
-
- elif username is None or password is None:
- sys.stderr.write('buildlogger: could not find or import %s for authentication\n' % credentials_file)
- sys.stderr.flush()
- wrapper = run_and_echo
-
- # otherwise wrap a test command as normal; the
- # wrapper functions return the return code of
- # the wrapped command, so that should be our
- # exit code as well.
- sys.exit(wrapper(sys.argv))
-
diff --git a/src/mongo/gotools/test/legacy26/buildscripts/cleanbb.py b/src/mongo/gotools/test/legacy26/buildscripts/cleanbb.py
deleted file mode 100644
index fee7efdc0c1..00000000000
--- a/src/mongo/gotools/test/legacy26/buildscripts/cleanbb.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/env python
-
-import re
-import sys
-import os, os.path
-import utils
-import time
-from optparse import OptionParser
-
-def shouldKill( c, root=None ):
-
- if "smoke.py" in c:
- return False
-
- if "emr.py" in c:
- return False
-
- if "java" in c:
- return False
-
- # if root directory is provided, see if command line matches mongod process running
- # with the same data directory
-
- if root and re.compile("(\W|^)mongod(.exe)?\s+.*--dbpath(\s+|=)%s(\s+|$)" % root).search( c ):
- return True
-
- if ( c.find( "buildbot" ) >= 0 or c.find( "slave" ) >= 0 ) and c.find( "/mongo/" ) >= 0:
- return True
-
- if c.find( "xml-data/build-dir" ) >= 0: # for bamboo
- return True
-
- return False
-
-def killprocs( signal="", root=None ):
- killed = 0
-
- if sys.platform == 'win32':
- return killed
-
- l = utils.getprocesslist()
- print( "num procs:" + str( len( l ) ) )
- if len(l) == 0:
- print( "no procs" )
- try:
- print( execsys( "/sbin/ifconfig -a" ) )
- except Exception,e:
- print( "can't get interfaces" + str( e ) )
-
- for x in l:
- x = x.lstrip()
- if not shouldKill( x, root=root ):
- continue
-
- pid = x.split( " " )[0]
- print( "killing: " + x )
- utils.execsys( "/bin/kill " + signal + " " + pid )
- killed = killed + 1
-
- return killed
-
-
-def tryToRemove(path):
- for _ in range(60):
- try:
- os.remove(path)
- return True
- except OSError, e:
- errno = getattr(e, 'winerror', None)
- # check for the access denied and file in use WindowsErrors
- if errno in (5, 32):
- print("os.remove(%s) failed, retrying in one second." % path)
- time.sleep(1)
- else:
- raise e
- return False
-
-
-def cleanup( root , nokill ):
- if nokill:
- print "nokill requested, not killing anybody"
- else:
- if killprocs( root=root ) > 0:
- time.sleep(3)
- killprocs( "-9", root=root )
-
- # delete all regular files, directories can stay
- # NOTE: if we delete directories later, we can't delete diskfulltest
- for ( dirpath , dirnames , filenames ) in os.walk( root , topdown=False ):
- for x in filenames:
- foo = dirpath + "/" + x
- if os.path.exists(foo):
- if not tryToRemove(foo):
- raise Exception("Couldn't remove file '%s' after 60 seconds" % foo)
-
-if __name__ == "__main__":
- parser = OptionParser(usage="read the script")
- parser.add_option("--nokill", dest='nokill', default=False, action='store_true')
- (options, args) = parser.parse_args()
-
- root = "/data/db/"
- if len(args) > 0:
- root = args[0]
-
- cleanup( root , options.nokill )
diff --git a/src/mongo/gotools/test/legacy26/buildscripts/smoke.py b/src/mongo/gotools/test/legacy26/buildscripts/smoke.py
deleted file mode 100755
index 7c8da1108f9..00000000000
--- a/src/mongo/gotools/test/legacy26/buildscripts/smoke.py
+++ /dev/null
@@ -1,1314 +0,0 @@
-#!/usr/bin/env python
-
-# smoke.py: run some mongo tests.
-
-# Bugs, TODOs:
-
-# 0 Some tests hard-code pathnames relative to the mongo repository,
-# so the smoke.py process and all its children must be run with the
-# mongo repo as current working directory. That's kinda icky.
-
-# 1 The tests that are implemented as standalone executables ("test",
-# "perftest"), don't take arguments for the dbpath, but
-# unconditionally use "/tmp/unittest".
-
-# 2 mongod output gets intermingled with mongo output, and it's often
-# hard to find error messages in the slop. Maybe have smoke.py do
-# some fancier wrangling of child process output?
-
-# 3 Some test suites run their own mongods, and so don't need us to
-# run any mongods around their execution. (It's harmless to do so,
-# but adds noise in the output.)
-
-# 4 Running a separate mongo shell for each js file is slower than
-# loading js files into one mongo shell process. Maybe have runTest
-# queue up all filenames ending in ".js" and run them in one mongo
-# shell at the "end" of testing?
-
-# 5 Right now small-oplog implies master/slave replication. Maybe
-# running with replication should be an orthogonal concern. (And
-# maybe test replica set replication, too.)
-
-# 6 We use cleanbb.py to clear out the dbpath, but cleanbb.py kills
-# off all mongods on a box, which means you can't run two smoke.py
-# jobs on the same host at once. So something's gotta change.
-
-from datetime import datetime
-from itertools import izip
-import glob
-from optparse import OptionParser
-import os
-import pprint
-import re
-import shlex
-import socket
-import stat
-from subprocess import (PIPE, Popen, STDOUT)
-import sys
-import time
-
-from pymongo import MongoClient
-from pymongo.errors import OperationFailure
-from pymongo import ReadPreference
-
-import cleanbb
-import utils
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-
-try:
- from hashlib import md5 # new in 2.5
-except ImportError:
- from md5 import md5 # deprecated in 2.5
-
-try:
- import json
-except:
- try:
- import simplejson as json
- except:
- json = None
-
-
-# TODO clean this up so we don't need globals...
-mongo_repo = os.getcwd() #'./'
-failfile = os.path.join(mongo_repo, 'failfile.smoke')
-test_path = None
-mongod_executable = None
-mongod_port = None
-shell_executable = None
-continue_on_failure = None
-file_of_commands_mode = False
-start_mongod = True
-temp_path = None
-clean_every_n_tests = 1
-clean_whole_dbroot = False
-
-tests = []
-winners = []
-losers = {}
-fails = [] # like losers but in format of tests
-
-# For replication hash checking
-replicated_collections = []
-lost_in_slave = []
-lost_in_master = []
-screwy_in_slave = {}
-
-smoke_db_prefix = ''
-small_oplog = False
-small_oplog_rs = False
-
-test_report = { "results": [] }
-report_file = None
-
-# This class just implements the with statement API, for a sneaky
-# purpose below.
-class Nothing(object):
- def __enter__(self):
- return self
- def __exit__(self, type, value, traceback):
- return not isinstance(value, Exception)
-
-def buildlogger(cmd, is_global=False):
- # if the environment variable MONGO_USE_BUILDLOGGER
- # is set to 'true', then wrap the command with a call
- # to buildlogger.py, which sends output to the buidlogger
- # machine; otherwise, return as usual.
- if os.environ.get('MONGO_USE_BUILDLOGGER', '').lower().strip() == 'true':
- if is_global:
- return [utils.find_python(), 'buildscripts/buildlogger.py', '-g'] + cmd
- else:
- return [utils.find_python(), 'buildscripts/buildlogger.py'] + cmd
- return cmd
-
-
-def clean_dbroot(dbroot="", nokill=False):
- # Clean entire /data/db dir if --with-cleanbb, else clean specific database path.
- if clean_whole_dbroot and not small_oplog:
- dbroot = os.path.normpath(smoke_db_prefix + "/data/db")
- if os.path.exists(dbroot):
- print("clean_dbroot: %s" % dbroot)
- cleanbb.cleanup(dbroot, nokill)
-
-
-class mongod(object):
- def __init__(self, **kwargs):
- self.kwargs = kwargs
- self.proc = None
- self.auth = False
-
- def __enter__(self):
- self.start()
- return self
-
- def __exit__(self, type, value, traceback):
- try:
- self.stop()
- except Exception, e:
- print >> sys.stderr, "error shutting down mongod"
- print >> sys.stderr, e
- return not isinstance(value, Exception)
-
- def ensure_test_dirs(self):
- utils.ensureDir(smoke_db_prefix + "/tmp/unittest/")
- utils.ensureDir(smoke_db_prefix + "/data/")
- utils.ensureDir(smoke_db_prefix + "/data/db/")
-
- def check_mongo_port(self, port=27017):
- sock = socket.socket()
- sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
- sock.settimeout(1)
- sock.connect(("localhost", int(port)))
- sock.close()
-
- def is_mongod_up(self, port=mongod_port):
- try:
- self.check_mongo_port(int(port))
- return True
- except Exception,e:
- print >> sys.stderr, e
- return False
-
- def did_mongod_start(self, port=mongod_port, timeout=300):
- while timeout > 0:
- time.sleep(1)
- is_up = self.is_mongod_up(port)
- if is_up:
- return True
- timeout = timeout - 1
- print >> sys.stderr, "timeout starting mongod"
- return False
-
- def start(self):
- global mongod_port
- global mongod
- if self.proc:
- print >> sys.stderr, "probable bug: self.proc already set in start()"
- return
- self.ensure_test_dirs()
- dir_name = smoke_db_prefix + "/data/db/sconsTests/"
- self.port = int(mongod_port)
- self.slave = False
- if 'slave' in self.kwargs:
- dir_name = smoke_db_prefix + '/data/db/sconsTestsSlave/'
- srcport = mongod_port
- self.port += 1
- self.slave = True
-
- clean_dbroot(dbroot=dir_name, nokill=self.slave)
- utils.ensureDir(dir_name)
-
- argv = [mongod_executable, "--port", str(self.port), "--dbpath", dir_name]
- # These parameters are alwas set for tests
- # SERVER-9137 Added httpinterface parameter to keep previous behavior
- argv += ['--setParameter', 'enableTestCommands=1', '--httpinterface']
- if self.kwargs.get('small_oplog'):
- argv += ["--master", "--oplogSize", "511"]
- params = self.kwargs.get('set_parameters', None)
- if params:
- for p in params.split(','): argv += ['--setParameter', p]
- if self.kwargs.get('small_oplog_rs'):
- argv += ["--replSet", "foo", "--oplogSize", "511"]
- if self.slave:
- argv += ['--slave', '--source', 'localhost:' + str(srcport)]
- if self.kwargs.get('no_journal'):
- argv += ['--nojournal']
- if self.kwargs.get('no_preallocj'):
- argv += ['--nopreallocj']
- if self.kwargs.get('auth'):
- argv += ['--auth', '--setParameter', 'enableLocalhostAuthBypass=false']
- authMechanism = self.kwargs.get('authMechanism', 'MONGODB-CR')
- if authMechanism != 'MONGODB-CR':
- argv += ['--setParameter', 'authenticationMechanisms=' + authMechanism]
- self.auth = True
- if self.kwargs.get('keyFile'):
- argv += ['--keyFile', self.kwargs.get('keyFile')]
- if self.kwargs.get('use_ssl') or self.kwargs.get('use_x509'):
- argv += ['--sslMode', "requireSSL",
- '--sslPEMKeyFile', 'jstests/libs/server.pem',
- '--sslCAFile', 'jstests/libs/ca.pem',
- '--sslWeakCertificateValidation']
- if self.kwargs.get('use_x509'):
- argv += ['--clusterAuthMode','x509'];
- self.auth = True
- print "running " + " ".join(argv)
- self.proc = self._start(buildlogger(argv, is_global=True))
-
- if not self.did_mongod_start(self.port):
- raise Exception("Failed to start mongod")
-
- if self.slave:
- local = MongoClient(port=self.port,
- read_preference=ReadPreference.SECONDARY_PREFERRED).local
- synced = False
- while not synced:
- synced = True
- for source in local.sources.find(fields=["syncedTo"]):
- synced = synced and "syncedTo" in source and source["syncedTo"]
-
- def _start(self, argv):
- """In most cases, just call subprocess.Popen(). On windows,
- add the started process to a new Job Object, so that any
- child processes of this process can be killed with a single
- call to TerminateJobObject (see self.stop()).
- """
-
- if os.sys.platform == "win32":
- # Create a job object with the "kill on job close"
- # flag; this is inherited by child processes (ie
- # the mongod started on our behalf by buildlogger)
- # and lets us terminate the whole tree of processes
- # rather than orphaning the mongod.
- import win32job
-
- # Magic number needed to allow job reassignment in Windows 7
- # see: MSDN - Process Creation Flags - ms684863
- CREATE_BREAKAWAY_FROM_JOB = 0x01000000
-
- proc = Popen(argv, creationflags=CREATE_BREAKAWAY_FROM_JOB)
-
- self.job_object = win32job.CreateJobObject(None, '')
-
- job_info = win32job.QueryInformationJobObject(
- self.job_object, win32job.JobObjectExtendedLimitInformation)
- job_info['BasicLimitInformation']['LimitFlags'] |= win32job.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE
- win32job.SetInformationJobObject(
- self.job_object,
- win32job.JobObjectExtendedLimitInformation,
- job_info)
-
- win32job.AssignProcessToJobObject(self.job_object, proc._handle)
-
- else:
- proc = Popen(argv)
-
- return proc
-
- def stop(self):
- if not self.proc:
- print >> sys.stderr, "probable bug: self.proc unset in stop()"
- return
- try:
- if os.sys.platform == "win32":
- import win32job
- win32job.TerminateJobObject(self.job_object, -1)
- import time
- # Windows doesn't seem to kill the process immediately, so give it some time to die
- time.sleep(5)
- else:
- # This function not available in Python 2.5
- self.proc.terminate()
- except AttributeError:
- from os import kill
- kill(self.proc.pid, 15)
- self.proc.wait()
- sys.stderr.flush()
- sys.stdout.flush()
-
- def wait_for_repl(self):
- print "Awaiting replicated (w:2, wtimeout:5min) insert (port:" + str(self.port) + ")"
- MongoClient(port=self.port).testing.smokeWait.insert({}, w=2, wtimeout=5*60*1000)
- print "Replicated write completed -- done wait_for_repl"
-
-class Bug(Exception):
- def __str__(self):
- return 'bug in smoke.py: ' + super(Bug, self).__str__()
-
-class TestFailure(Exception):
- pass
-
-class TestExitFailure(TestFailure):
- def __init__(self, *args):
- self.path = args[0]
- self.status=args[1]
-
- def __str__(self):
- return "test %s exited with status %d" % (self.path, self.status)
-
-class TestServerFailure(TestFailure):
- def __init__(self, *args):
- self.path = args[0]
- self.status = -1 # this is meaningless as an exit code, but
- # that's the point.
- def __str__(self):
- return 'mongod not running after executing test %s' % self.path
-
-def check_db_hashes(master, slave):
- # Need to pause a bit so a slave might catch up...
- if not slave.slave:
- raise(Bug("slave instance doesn't have slave attribute set"))
-
- print "waiting for slave (%s) to catch up to master (%s)" % (slave.port, master.port)
- master.wait_for_repl()
- print "caught up!"
-
- # FIXME: maybe make this run dbhash on all databases?
- for mongod in [master, slave]:
- client = MongoClient(port=mongod.port, read_preference=ReadPreference.SECONDARY_PREFERRED)
- mongod.dbhash = client.test.command("dbhash")
- mongod.dict = mongod.dbhash["collections"]
-
- global lost_in_slave, lost_in_master, screwy_in_slave, replicated_collections
-
- replicated_collections += master.dict.keys()
-
- for coll in replicated_collections:
- if coll not in slave.dict and coll not in lost_in_slave:
- lost_in_slave.append(coll)
- mhash = master.dict[coll]
- shash = slave.dict[coll]
- if mhash != shash:
- mTestDB = MongoClient(port=master.port).test
- sTestDB = MongoClient(port=slave.port,
- read_preference=ReadPreference.SECONDARY_PREFERRED).test
- mCount = mTestDB[coll].count()
- sCount = sTestDB[coll].count()
- stats = {'hashes': {'master': mhash, 'slave': shash},
- 'counts':{'master': mCount, 'slave': sCount}}
- try:
- mDocs = list(mTestDB[coll].find().sort("_id", 1))
- sDocs = list(sTestDB[coll].find().sort("_id", 1))
- mDiffDocs = list()
- sDiffDocs = list()
- for left, right in izip(mDocs, sDocs):
- if left != right:
- mDiffDocs.append(left)
- sDiffDocs.append(right)
-
- stats["docs"] = {'master': mDiffDocs, 'slave': sDiffDocs }
- except Exception, e:
- stats["error-docs"] = e;
-
- screwy_in_slave[coll] = stats
- if mhash == "no _id _index":
- mOplog = mTestDB.connection.local["oplog.$main"];
- oplog_entries = list(mOplog.find({"$or": [{"ns":mTestDB[coll].full_name}, \
- {"op":"c"}]}).sort("$natural", 1))
- print "oplog for %s" % mTestDB[coll].full_name
- for doc in oplog_entries:
- pprint.pprint(doc, width=200)
-
-
- for db in slave.dict.keys():
- if db not in master.dict and db not in lost_in_master:
- lost_in_master.append(db)
-
-
-def ternary( b , l="true", r="false" ):
- if b:
- return l
- return r
-
-# Blech.
-def skipTest(path):
- basename = os.path.basename(path)
- parentPath = os.path.dirname(path)
- parentDir = os.path.basename(parentPath)
- if small_oplog: # For tests running in parallel
- if basename in ["cursor8.js", "indexh.js", "dropdb.js", "dropdb_race.js",
- "connections_opened.js", "opcounters.js", "dbadmin.js"]:
- return True
- if use_ssl:
- # Skip tests using mongobridge since it does not support SSL
- # TODO: Remove when SERVER-10910 has been resolved.
- if basename in ["gridfs.js", "initial_sync3.js", "majority.js", "no_chaining.js",
- "rollback4.js", "slavedelay3.js", "sync2.js", "tags.js"]:
- return True
- # TODO: For now skip tests using MongodRunner, remove when SERVER-10909 has been resolved
- if basename in ["fastsync.js", "index_retry.js", "ttl_repl_maintenance.js",
- "unix_socket1.js"]:
- return True;
- if auth or keyFile or use_x509: # For tests running with auth
- # Skip any tests that run with auth explicitly
- if parentDir.lower() == "auth" or "auth" in basename.lower():
- return True
- if parentPath == mongo_repo: # Skip client tests
- return True
- if parentDir == "tool": # SERVER-6368
- return True
- if parentDir == "dur": # SERVER-7317
- return True
- if parentDir == "disk": # SERVER-7356
- return True
-
- authTestsToSkip = [("jstests", "drop2.js"), # SERVER-8589,
- ("jstests", "killop.js"), # SERVER-10128
- ("sharding", "sync3.js"), # SERVER-6388 for this and those below
- ("sharding", "sync6.js"),
- ("sharding", "parallel.js"),
- ("jstests", "bench_test1.js"),
- ("jstests", "bench_test2.js"),
- ("jstests", "bench_test3.js"),
- ("core", "bench_test1.js"),
- ("core", "bench_test2.js"),
- ("core", "bench_test3.js"),
- ]
-
- if os.path.join(parentDir,basename) in [ os.path.join(*test) for test in authTestsToSkip ]:
- return True
-
- return False
-
-forceCommandsForDirs = ["aggregation", "auth", "core", "parallel", "replsets"]
-# look for jstests and one of the above suites separated by either posix or windows slashes
-forceCommandsRE = re.compile(r"jstests[/\\](%s)" % ('|'.join(forceCommandsForDirs)))
-def setShellWriteModeForTest(path, argv):
- swm = shell_write_mode
- if swm == "legacy": # change when the default changes to "commands"
- if use_write_commands or forceCommandsRE.search(path):
- swm = "commands"
- argv += ["--writeMode", swm]
-
-def runTest(test, result):
- # result is a map containing test result details, like result["url"]
-
- # test is a tuple of ( filename , usedb<bool> )
- # filename should be a js file to run
- # usedb is true if the test expects a mongod to be running
-
- (path, usedb) = test
- (ignore, ext) = os.path.splitext(path)
- test_mongod = mongod()
- mongod_is_up = test_mongod.is_mongod_up(mongod_port)
- result["mongod_running_at_start"] = mongod_is_up;
-
- if file_of_commands_mode:
- # smoke.py was invoked like "--mode files --from-file foo",
- # so don't try to interpret the test path too much
- if os.sys.platform == "win32":
- argv = [path]
- else:
- argv = shlex.split(path)
- path = argv[0]
- # if the command is a python script, use the script name
- if os.path.basename(path) in ('python', 'python.exe'):
- path = argv[1]
- elif ext == ".js":
- argv = [shell_executable, "--port", mongod_port, '--authenticationMechanism', authMechanism]
-
- setShellWriteModeForTest(path, argv)
-
- if not usedb:
- argv += ["--nodb"]
- if small_oplog or small_oplog_rs:
- argv += ["--eval", 'testingReplication = true;']
- if use_ssl:
- argv += ["--ssl",
- "--sslPEMKeyFile", "jstests/libs/client.pem",
- "--sslCAFile", "jstests/libs/ca.pem",
- "--sslAllowInvalidCertificates"]
- argv += [path]
- elif ext in ["", ".exe"]:
- # Blech.
- if os.path.basename(path) in ["test", "test.exe", "perftest", "perftest.exe"]:
- argv = [path]
- # default data directory for test and perftest is /tmp/unittest
- if smoke_db_prefix:
- dir_name = smoke_db_prefix + '/unittests'
- argv.extend(["--dbpath", dir_name] )
- # more blech
- elif os.path.basename(path) in ['mongos', 'mongos.exe']:
- argv = [path, "--test"]
- else:
- argv = [test_path and os.path.abspath(os.path.join(test_path, path)) or path,
- "--port", mongod_port]
- else:
- raise Bug("fell off in extension case: %s" % path)
-
- mongo_test_filename = os.path.basename(path)
-
- # sys.stdout.write() is more atomic than print, so using it prevents
- # lines being interrupted by, e.g., child processes
- sys.stdout.write(" *******************************************\n")
- sys.stdout.write(" Test : %s ...\n" % mongo_test_filename)
- sys.stdout.flush()
-
- # FIXME: we don't handle the case where the subprocess
- # hangs... that's bad.
- if ( argv[0].endswith( 'mongo' ) or argv[0].endswith( 'mongo.exe' ) ) and not '--eval' in argv :
- evalString = 'load("jstests/libs/use_extended_timeout.js");' + \
- 'TestData = new Object();' + \
- 'TestData.testPath = "' + path + '";' + \
- 'TestData.testFile = "' + os.path.basename( path ) + '";' + \
- 'TestData.testName = "' + re.sub( ".js$", "", os.path.basename( path ) ) + '";' + \
- 'TestData.setParameters = "' + ternary( set_parameters, set_parameters, "" ) + '";' + \
- 'TestData.setParametersMongos = "' + ternary( set_parameters_mongos, set_parameters_mongos, "" ) + '";' + \
- 'TestData.noJournal = ' + ternary( no_journal ) + ";" + \
- 'TestData.noJournalPrealloc = ' + ternary( no_preallocj ) + ";" + \
- 'TestData.auth = ' + ternary( auth ) + ";" + \
- 'TestData.keyFile = ' + ternary( keyFile , '"' + str(keyFile) + '"' , 'null' ) + ";" + \
- 'TestData.keyFileData = ' + ternary( keyFile , '"' + str(keyFileData) + '"' , 'null' ) + ";" + \
- 'TestData.authMechanism = ' + ternary( authMechanism,
- '"' + str(authMechanism) + '"', 'null') + ";" + \
- 'TestData.useSSL = ' + ternary( use_ssl ) + ";" + \
- 'TestData.useX509 = ' + ternary( use_x509 ) + ";"
- # this updates the default data directory for mongod processes started through shell (src/mongo/shell/servers.js)
- evalString += 'MongoRunner.dataDir = "' + os.path.abspath(smoke_db_prefix + '/data/db') + '";'
- evalString += 'MongoRunner.dataPath = MongoRunner.dataDir + "/";'
- if os.sys.platform == "win32":
- # double quotes in the evalString on windows; this
- # prevents the backslashes from being removed when
- # the shell (i.e. bash) evaluates this string. yuck.
- evalString = evalString.replace('\\', '\\\\')
-
- if auth and usedb:
- evalString += 'jsTest.authenticate(db.getMongo());'
-
- argv = argv + [ '--eval', evalString]
-
- if argv[0].endswith( 'test' ) or argv[0].endswith( 'test.exe' ):
- if no_preallocj :
- argv = argv + [ '--nopreallocj' ]
- if temp_path:
- argv = argv + [ '--tempPath', temp_path ]
-
-
- sys.stdout.write(" Command : %s\n" % ' '.join(argv))
- sys.stdout.write(" Date : %s\n" % datetime.now().ctime())
- sys.stdout.flush()
-
- os.environ['MONGO_TEST_FILENAME'] = mongo_test_filename
- t1 = time.time()
-
- proc = Popen(buildlogger(argv), cwd=test_path, stdout=PIPE, stderr=STDOUT, bufsize=0)
- first_line = proc.stdout.readline() # Get suppressed output URL
- m = re.search(r"\s*\(output suppressed; see (?P<url>.*)\)" + os.linesep, first_line)
- if m:
- result["url"] = m.group("url")
- sys.stdout.write(first_line)
- sys.stdout.flush()
- while True:
- # print until subprocess's stdout closed.
- # Not using "for line in file" since that has unwanted buffering.
- line = proc.stdout.readline()
- if not line:
- break;
-
- sys.stdout.write(line)
- sys.stdout.flush()
-
- proc.wait() # wait if stdout is closed before subprocess exits.
- r = proc.returncode
-
- t2 = time.time()
- del os.environ['MONGO_TEST_FILENAME']
-
- timediff = t2 - t1
- # timediff is seconds by default
- scale = 1
- suffix = "seconds"
- # if timediff is less than 10 seconds use ms
- if timediff < 10:
- scale = 1000
- suffix = "ms"
- # if timediff is more than 60 seconds use minutes
- elif timediff > 60:
- scale = 1.0 / 60.0
- suffix = "minutes"
- sys.stdout.write(" %10.4f %s\n" % ((timediff) * scale, suffix))
- sys.stdout.flush()
-
- result["exit_code"] = r
-
- is_mongod_still_up = test_mongod.is_mongod_up(mongod_port)
- if not is_mongod_still_up:
- print "mongod is not running after test"
- result["mongod_running_at_end"] = is_mongod_still_up;
- if start_mongod:
- raise TestServerFailure(path)
-
- result["mongod_running_at_end"] = is_mongod_still_up;
-
- if r != 0:
- raise TestExitFailure(path, r)
-
- print ""
-
-def run_tests(tests):
- # FIXME: some suites of tests start their own mongod, so don't
- # need this. (So long as there are no conflicts with port,
- # dbpath, etc., and so long as we shut ours down properly,
- # starting this mongod shouldn't break anything, though.)
-
- # The reason we want to use "with" is so that we get __exit__ semantics
- # but "with" is only supported on Python 2.5+
-
- if start_mongod:
- master = mongod(small_oplog_rs=small_oplog_rs,
- small_oplog=small_oplog,
- no_journal=no_journal,
- set_parameters=set_parameters,
- no_preallocj=no_preallocj,
- auth=auth,
- authMechanism=authMechanism,
- keyFile=keyFile,
- use_ssl=use_ssl,
- use_x509=use_x509).__enter__()
- else:
- master = Nothing()
- try:
- if small_oplog:
- slave = mongod(slave=True,
- set_parameters=set_parameters).__enter__()
- elif small_oplog_rs:
- slave = mongod(slave=True,
- small_oplog_rs=small_oplog_rs,
- small_oplog=small_oplog,
- no_journal=no_journal,
- set_parameters=set_parameters,
- no_preallocj=no_preallocj,
- auth=auth,
- authMechanism=authMechanism,
- keyFile=keyFile,
- use_ssl=use_ssl,
- use_x509=use_x509).__enter__()
- primary = MongoClient(port=master.port);
-
- primary.admin.command({'replSetInitiate' : {'_id' : 'foo', 'members' : [
- {'_id': 0, 'host':'localhost:%s' % master.port},
- {'_id': 1, 'host':'localhost:%s' % slave.port,'priority':0}]}})
-
- ismaster = False
- while not ismaster:
- result = primary.admin.command("ismaster");
- ismaster = result["ismaster"]
- time.sleep(1)
- else:
- slave = Nothing()
-
- try:
- if small_oplog or small_oplog_rs:
- master.wait_for_repl()
-
- for tests_run, test in enumerate(tests):
- tests_run += 1 # enumerate from 1, python 2.5 compatible
- test_result = { "start": time.time() }
-
- (test_path, use_db) = test
-
- if test_path.startswith(mongo_repo + os.path.sep):
- test_result["test_file"] = test_path[len(mongo_repo)+1:]
- else:
- # user could specify a file not in repo. leave it alone.
- test_result["test_file"] = test_path
-
- try:
- if skipTest(test_path):
- test_result["status"] = "skip"
-
- print "skipping " + test_path
- else:
- fails.append(test)
- runTest(test, test_result)
- fails.pop()
- winners.append(test)
-
- test_result["status"] = "pass"
-
- test_result["end"] = time.time()
- test_result["elapsed"] = test_result["end"] - test_result["start"]
- test_report["results"].append( test_result )
- if small_oplog or small_oplog_rs:
- master.wait_for_repl()
- # check the db_hashes
- if isinstance(slave, mongod):
- check_db_hashes(master, slave)
- check_and_report_replication_dbhashes()
-
- elif use_db: # reach inside test and see if "usedb" is true
- if clean_every_n_tests and (tests_run % clean_every_n_tests) == 0:
- # Restart mongod periodically to clean accumulated test data
- # clean_dbroot() is invoked by mongod.start()
- master.__exit__(None, None, None)
- master = mongod(small_oplog_rs=small_oplog_rs,
- small_oplog=small_oplog,
- no_journal=no_journal,
- set_parameters=set_parameters,
- no_preallocj=no_preallocj,
- auth=auth,
- authMechanism=authMechanism,
- keyFile=keyFile,
- use_ssl=use_ssl,
- use_x509=use_x509).__enter__()
-
- except TestFailure, f:
- test_result["end"] = time.time()
- test_result["elapsed"] = test_result["end"] - test_result["start"]
- test_result["error"] = str(f)
- test_result["status"] = "fail"
- test_report["results"].append( test_result )
- try:
- print f
- # Record the failing test and re-raise.
- losers[f.path] = f.status
- raise f
- except TestServerFailure, f:
- return 2
- except TestFailure, f:
- if not continue_on_failure:
- return 1
- if isinstance(slave, mongod):
- check_db_hashes(master, slave)
- finally:
- slave.__exit__(None, None, None)
- finally:
- master.__exit__(None, None, None)
- return 0
-
-
-def check_and_report_replication_dbhashes():
- def missing(lst, src, dst):
- if lst:
- print """The following collections were present in the %s but not the %s
-at the end of testing:""" % (src, dst)
- for db in lst:
- print db
-
- missing(lost_in_slave, "master", "slave")
- missing(lost_in_master, "slave", "master")
- if screwy_in_slave:
- print """The following collections has different hashes in master and slave
-at the end of testing:"""
- for coll in screwy_in_slave.keys():
- stats = screwy_in_slave[coll]
- # Counts are "approx" because they are collected after the dbhash runs and may not
- # reflect the states of the collections that were hashed. If the hashes differ, one
- # possibility is that a test exited with writes still in-flight.
- print "collection: %s\t (master/slave) hashes: %s/%s counts (approx): %i/%i" % (coll, stats['hashes']['master'], stats['hashes']['slave'], stats['counts']['master'], stats['counts']['slave'])
- if "docs" in stats:
- if (("master" in stats["docs"] and len(stats["docs"]["master"]) != 0) or
- ("slave" in stats["docs"] and len(stats["docs"]["slave"]) != 0)):
- print "All docs matched!"
- else:
- print "Different Docs"
- print "Master docs:"
- pprint.pprint(stats["docs"]["master"], indent=2)
- print "Slave docs:"
- pprint.pprint(stats["docs"]["slave"], indent=2)
- if "error-docs" in stats:
- print "Error getting docs to diff:"
- pprint.pprint(stats["error-docs"])
- return True
-
- if (small_oplog or small_oplog_rs) and not (lost_in_master or lost_in_slave or screwy_in_slave):
- print "replication ok for %d collections" % (len(replicated_collections))
-
- return False
-
-
-def report():
- print "%d tests succeeded" % len(winners)
- num_missed = len(tests) - (len(winners) + len(losers.keys()))
- if num_missed:
- print "%d tests didn't get run" % num_missed
- if losers:
- print "The following tests failed (with exit code):"
- for loser in losers:
- print "%s\t%d" % (loser, losers[loser])
-
- test_result = { "start": time.time() }
- if check_and_report_replication_dbhashes():
- test_result["end"] = time.time()
- test_result["elapsed"] = test_result["end"] - test_result["start"]
- test_result["test_file"] = "/#dbhash#"
- test_result["error"] = "dbhash mismatch"
- test_result["status"] = "fail"
- test_report["results"].append( test_result )
-
- if report_file:
- f = open( report_file, "wb" )
- f.write( json.dumps( test_report ) )
- f.close()
-
- if losers or lost_in_slave or lost_in_master or screwy_in_slave:
- raise Exception("Test failures")
-
-# Keys are the suite names (passed on the command line to smoke.py)
-# Values are pairs: (filenames, <start mongod before running tests>)
-suiteGlobalConfig = {"js": ("core/*.js", True),
- "quota": ("quota/*.js", True),
- "jsPerf": ("perf/*.js", True),
- "disk": ("disk/*.js", True),
- "noPassthroughWithMongod": ("noPassthroughWithMongod/*.js", True),
- "noPassthrough": ("noPassthrough/*.js", False),
- "parallel": ("parallel/*.js", True),
- "clone": ("clone/*.js", False),
- "repl": ("repl/*.js", False),
- "replSets": ("replsets/*.js", False),
- "dur": ("dur/*.js", False),
- "auth": ("auth/*.js", False),
- "sharding": ("sharding/*.js", False),
- "tool": ("tool/*.js", False),
- "aggregation": ("aggregation/*.js", True),
- "multiVersion": ("multiVersion/*.js", True),
- "failPoint": ("fail_point/*.js", False),
- "ssl": ("ssl/*.js", True),
- "sslSpecial": ("sslSpecial/*.js", True),
- "jsCore": ("core/*.js", True),
- "gle": ("gle/*.js", True),
- "slow1": ("slow1/*.js", True),
- "slow2": ("slow2/*.js", True),
- }
-
-def get_module_suites():
- """Attempts to discover and return information about module test suites
-
- Returns a dictionary of module suites in the format:
-
- {
- "<suite_name>" : "<full_path_to_suite_directory/[!_]*.js>",
- ...
- }
-
- This means the values of this dictionary can be used as "glob"s to match all jstests in the
- suite directory that don't start with an underscore
-
- The module tests should be put in 'src/mongo/db/modules/<module_name>/<suite_name>/*.js'
-
- NOTE: This assumes that if we have more than one module the suite names don't conflict
- """
- modules_directory = 'src/mongo/db/modules'
- test_suites = {}
-
- # Return no suites if we have no modules
- if not os.path.exists(modules_directory) or not os.path.isdir(modules_directory):
- return {}
-
- module_directories = os.listdir(modules_directory)
- for module_directory in module_directories:
-
- test_directory = os.path.join(modules_directory, module_directory, "jstests")
-
- # Skip this module if it has no "jstests" directory
- if not os.path.exists(test_directory) or not os.path.isdir(test_directory):
- continue
-
- # Get all suites for this module
- for test_suite in os.listdir(test_directory):
- test_suites[test_suite] = os.path.join(test_directory, test_suite, "[!_]*.js")
-
- return test_suites
-
-def expand_suites(suites,expandUseDB=True):
- """Takes a list of suites and expands to a list of tests according to a set of rules.
-
- Keyword arguments:
- suites -- list of suites specified by the user
- expandUseDB -- expand globs (such as [!_]*.js) for tests that are run against a database
- (default True)
-
- This function handles expansion of globs (such as [!_]*.js), aliases (such as "client" and
- "all"), detection of suites in the "modules" directory, and enumerating the test files in a
- given suite. It returns a list of tests of the form (path_to_test, usedb), where the second
- part of the tuple specifies whether the test is run against the database (see --nodb in the
- mongo shell)
-
- """
- globstr = None
- tests = []
- module_suites = get_module_suites()
- for suite in suites:
- if suite == 'all':
- return expand_suites(['test',
- 'perf',
- 'jsCore',
- 'jsPerf',
- 'noPassthroughWithMongod',
- 'noPassthrough',
- 'clone',
- 'parallel',
- 'repl',
- 'auth',
- 'sharding',
- 'slow1',
- 'slow2',
- 'tool'],
- expandUseDB=expandUseDB)
- if suite == 'test':
- if os.sys.platform == "win32":
- program = 'test.exe'
- else:
- program = 'test'
- (globstr, usedb) = (program, False)
- elif suite == 'perf':
- if os.sys.platform == "win32":
- program = 'perftest.exe'
- else:
- program = 'perftest'
- (globstr, usedb) = (program, False)
- elif suite == 'mongosTest':
- if os.sys.platform == "win32":
- program = 'mongos.exe'
- else:
- program = 'mongos'
- tests += [(os.path.join(mongo_repo, program), False)]
- elif os.path.exists( suite ):
- usedb = True
- for name in suiteGlobalConfig:
- if suite in glob.glob( "jstests/" + suiteGlobalConfig[name][0] ):
- usedb = suiteGlobalConfig[name][1]
- break
- tests += [ ( os.path.join( mongo_repo , suite ) , usedb ) ]
- elif suite in module_suites:
- # Currently we connect to a database in all module tests since there's no mechanism yet
- # to configure it independently
- usedb = True
- paths = glob.glob(module_suites[suite])
- paths.sort()
- tests += [(path, usedb) for path in paths]
- else:
- try:
- globstr, usedb = suiteGlobalConfig[suite]
- except KeyError:
- raise Exception('unknown test suite %s' % suite)
-
- if globstr:
- if usedb and not expandUseDB:
- tests += [ (suite,False) ]
- else:
- if globstr.endswith('.js'):
- loc = 'jstests/'
- else:
- loc = ''
- globstr = os.path.join(mongo_repo, (os.path.join(loc, globstr)))
- globstr = os.path.normpath(globstr)
- paths = glob.glob(globstr)
- paths.sort()
- tests += [(path, usedb) for path in paths]
-
- return tests
-
-def add_exe(e):
- if os.sys.platform.startswith( "win" ) and not e.endswith( ".exe" ):
- e += ".exe"
- return e
-
-def set_globals(options, tests):
- global mongod_executable, mongod_port, shell_executable, continue_on_failure
- global small_oplog, small_oplog_rs
- global no_journal, set_parameters, set_parameters_mongos, no_preallocj
- global auth, authMechanism, keyFile, keyFileData, smoke_db_prefix, test_path, start_mongod
- global use_ssl, use_x509
- global file_of_commands_mode
- global report_file, shell_write_mode, use_write_commands
- global temp_path
- global clean_every_n_tests
- global clean_whole_dbroot
-
- start_mongod = options.start_mongod
- if hasattr(options, 'use_ssl'):
- use_ssl = options.use_ssl
- if hasattr(options, 'use_x509'):
- use_x509 = options.use_x509
- use_ssl = use_ssl or use_x509
- #Careful, this can be called multiple times
- test_path = options.test_path
-
- mongod_executable = add_exe(options.mongod_executable)
- if not os.path.exists(mongod_executable):
- raise Exception("no mongod found in this directory.")
-
- mongod_port = options.mongod_port
-
- shell_executable = add_exe( options.shell_executable )
- if not os.path.exists(shell_executable):
- raise Exception("no mongo shell found in this directory.")
-
- continue_on_failure = options.continue_on_failure
- smoke_db_prefix = options.smoke_db_prefix
- small_oplog = options.small_oplog
- if hasattr(options, "small_oplog_rs"):
- small_oplog_rs = options.small_oplog_rs
- no_journal = options.no_journal
- set_parameters = options.set_parameters
- set_parameters_mongos = options.set_parameters_mongos
- no_preallocj = options.no_preallocj
- auth = options.auth
- authMechanism = options.authMechanism
- keyFile = options.keyFile
-
- clean_every_n_tests = options.clean_every_n_tests
- clean_whole_dbroot = options.with_cleanbb
-
- if auth and not keyFile:
- # if only --auth was given to smoke.py, load the
- # default keyFile from jstests/libs/authTestsKey
- keyFile = os.path.join(mongo_repo, 'jstests', 'libs', 'authTestsKey')
-
- if keyFile:
- f = open(keyFile, 'r')
- keyFileData = re.sub(r'\s', '', f.read()) # Remove all whitespace
- f.close()
- os.chmod(keyFile, stat.S_IRUSR | stat.S_IWUSR)
- else:
- keyFileData = None
-
- # if smoke.py is running a list of commands read from a
- # file (or stdin) rather than running a suite of js tests
- file_of_commands_mode = options.File and options.mode == 'files'
- # generate json report
- report_file = options.report_file
- temp_path = options.temp_path
-
- use_write_commands = options.use_write_commands
- shell_write_mode = options.shell_write_mode
-
-def file_version():
- return md5(open(__file__, 'r').read()).hexdigest()
-
-def clear_failfile():
- if os.path.exists(failfile):
- os.remove(failfile)
-
-def run_old_fails():
- global tests
-
- try:
- f = open(failfile, 'r')
- state = pickle.load(f)
- f.close()
- except Exception:
- try:
- f.close()
- except:
- pass
- clear_failfile()
- return # This counts as passing so we will run all tests
-
- if ('version' not in state or state['version'] != file_version()):
- print "warning: old version of failfile.smoke detected. skipping recent fails"
- clear_failfile()
- return
-
- testsAndOptions = state['testsAndOptions']
- tests = [x[0] for x in testsAndOptions]
- passed = []
- try:
- for (i, (test, options)) in enumerate(testsAndOptions):
- # SERVER-5102: until we can figure out a better way to manage
- # dependencies of the --only-old-fails build phase, just skip
- # tests which we can't safely run at this point
- path, usedb = test
-
- if not os.path.exists(path):
- passed.append(i)
- winners.append(test)
- continue
-
- filename = os.path.basename(path)
- if filename in ('test', 'test.exe') or filename.endswith('.js'):
- set_globals(options, [filename])
- oldWinners = len(winners)
- run_tests([test])
- if len(winners) != oldWinners: # can't use return value due to continue_on_failure
- passed.append(i)
- finally:
- for offset, i in enumerate(passed):
- testsAndOptions.pop(i - offset)
-
- if testsAndOptions:
- f = open(failfile, 'w')
- state = {'version':file_version(), 'testsAndOptions':testsAndOptions}
- pickle.dump(state, f)
- else:
- clear_failfile()
-
- report() # exits with failure code if there is an error
-
-def add_to_failfile(tests, options):
- try:
- f = open(failfile, 'r')
- testsAndOptions = pickle.load(f)["testsAndOptions"]
- except Exception:
- testsAndOptions = []
-
- for test in tests:
- if (test, options) not in testsAndOptions:
- testsAndOptions.append( (test, options) )
-
- state = {'version':file_version(), 'testsAndOptions':testsAndOptions}
- f = open(failfile, 'w')
- pickle.dump(state, f)
-
-
-
-def main():
- global mongod_executable, mongod_port, shell_executable, continue_on_failure, small_oplog
- global no_journal, set_parameters, set_parameters_mongos, no_preallocj, auth
- global keyFile, smoke_db_prefix, test_path, use_write_commands
-
- parser = OptionParser(usage="usage: smoke.py [OPTIONS] ARGS*")
- parser.add_option('--mode', dest='mode', default='suite',
- help='If "files", ARGS are filenames; if "suite", ARGS are sets of tests (%default)')
- # Some of our tests hard-code pathnames e.g., to execute, so until
- # that changes we don't have the freedom to run from anyplace.
- # parser.add_option('--mongo-repo', dest='mongo_repo', default=None,
- parser.add_option('--test-path', dest='test_path', default=None,
- help="Path to the test executables to run, "
- "currently only used for 'client' (%default)")
- parser.add_option('--mongod', dest='mongod_executable', default=os.path.join(mongo_repo, 'mongod'),
- help='Path to mongod to run (%default)')
- parser.add_option('--port', dest='mongod_port', default="27999",
- help='Port the mongod will bind to (%default)')
- parser.add_option('--mongo', dest='shell_executable', default=os.path.join(mongo_repo, 'mongo'),
- help='Path to mongo, for .js test files (%default)')
- parser.add_option('--continue-on-failure', dest='continue_on_failure',
- action="store_true", default=False,
- help='If supplied, continue testing even after a test fails')
- parser.add_option('--from-file', dest='File',
- help="Run tests/suites named in FILE, one test per line, '-' means stdin")
- parser.add_option('--smoke-db-prefix', dest='smoke_db_prefix', default=smoke_db_prefix,
- help="Prefix to use for the mongods' dbpaths ('%default')")
- parser.add_option('--small-oplog', dest='small_oplog', default=False,
- action="store_true",
- help='Run tests with master/slave replication & use a small oplog')
- parser.add_option('--small-oplog-rs', dest='small_oplog_rs', default=False,
- action="store_true",
- help='Run tests with replica set replication & use a small oplog')
- parser.add_option('--nojournal', dest='no_journal', default=False,
- action="store_true",
- help='Do not turn on journaling in tests')
- parser.add_option('--nopreallocj', dest='no_preallocj', default=False,
- action="store_true",
- help='Do not preallocate journal files in tests')
- parser.add_option('--auth', dest='auth', default=False,
- action="store_true",
- help='Run standalone mongods in tests with authentication enabled')
- parser.add_option('--use-x509', dest='use_x509', default=False,
- action="store_true",
- help='Use x509 auth for internal cluster authentication')
- parser.add_option('--authMechanism', dest='authMechanism', default='MONGODB-CR',
- help='Use the given authentication mechanism, when --auth is used.')
- parser.add_option('--keyFile', dest='keyFile', default=None,
- help='Path to keyFile to use to run replSet and sharding tests with authentication enabled')
- parser.add_option('--ignore', dest='ignore_files', default=None,
- help='Pattern of files to ignore in tests')
- parser.add_option('--only-old-fails', dest='only_old_fails', default=False,
- action="store_true",
- help='Check the failfile and only run all tests that failed last time')
- parser.add_option('--reset-old-fails', dest='reset_old_fails', default=False,
- action="store_true",
- help='Clear the failfile. Do this if all tests pass')
- parser.add_option('--with-cleanbb', dest='with_cleanbb', action="store_true",
- default=False,
- help='Clear database files before first test')
- parser.add_option('--clean-every', dest='clean_every_n_tests', type='int',
- default=20,
- help='Clear database files every N tests [default %default]')
- parser.add_option('--dont-start-mongod', dest='start_mongod', default=True,
- action='store_false',
- help='Do not start mongod before commencing test running')
- parser.add_option('--use-ssl', dest='use_ssl', default=False,
- action='store_true',
- help='Run mongo shell and mongod instances with SSL encryption')
- parser.add_option('--set-parameters', dest='set_parameters', default="",
- help='Adds --setParameter to mongod for each passed in item in the csv list - ex. "param1=1,param2=foo" ')
- parser.add_option('--set-parameters-mongos', dest='set_parameters_mongos', default="",
- help='Adds --setParameter to mongos for each passed in item in the csv list - ex. "param1=1,param2=foo" ')
- parser.add_option('--temp-path', dest='temp_path', default=None,
- help='If present, passed as --tempPath to unittests and dbtests')
- # Buildlogger invocation from command line
- parser.add_option('--buildlogger-builder', dest='buildlogger_builder', default=None,
- action="store", help='Set the "builder name" for buildlogger')
- parser.add_option('--buildlogger-buildnum', dest='buildlogger_buildnum', default=None,
- action="store", help='Set the "build number" for buildlogger')
- parser.add_option('--buildlogger-url', dest='buildlogger_url', default=None,
- action="store", help='Set the url root for the buildlogger service')
- parser.add_option('--buildlogger-credentials', dest='buildlogger_credentials', default=None,
- action="store", help='Path to Python file containing buildlogger credentials')
- parser.add_option('--buildlogger-phase', dest='buildlogger_phase', default=None,
- action="store", help='Set the "phase" for buildlogger (e.g. "core", "auth") for display in the webapp (optional)')
- parser.add_option('--report-file', dest='report_file', default=None,
- action='store',
- help='Path to generate detailed json report containing all test details')
- parser.add_option('--use-write-commands', dest='use_write_commands', default=False,
- action='store_true',
- help='Deprecated(use --shell-write-mode): Sets the shell to use write commands by default')
- parser.add_option('--shell-write-mode', dest='shell_write_mode', default="legacy",
- help='Sets the shell to use a specific write mode: commands/compatibility/legacy (default:legacy)')
-
- global tests
- (options, tests) = parser.parse_args()
-
- set_globals(options, tests)
-
- buildlogger_opts = (options.buildlogger_builder, options.buildlogger_buildnum, options.buildlogger_credentials)
- if all(buildlogger_opts):
- os.environ['MONGO_USE_BUILDLOGGER'] = 'true'
- os.environ['MONGO_BUILDER_NAME'] = options.buildlogger_builder
- os.environ['MONGO_BUILD_NUMBER'] = options.buildlogger_buildnum
- os.environ['BUILDLOGGER_CREDENTIALS'] = options.buildlogger_credentials
- if options.buildlogger_phase:
- os.environ['MONGO_PHASE'] = options.buildlogger_phase
- elif any(buildlogger_opts):
- # some but not all of the required options were sete
- raise Exception("you must set all of --buildlogger-builder, --buildlogger-buildnum, --buildlogger-credentials")
-
- if options.buildlogger_url: #optional; if None, defaults to const in buildlogger.py
- os.environ['BUILDLOGGER_URL'] = options.buildlogger_url
-
- if options.File:
- if options.File == '-':
- tests = sys.stdin.readlines()
- else:
- f = open(options.File)
- tests = f.readlines()
- tests = [t.rstrip('\n') for t in tests]
-
- if options.only_old_fails:
- run_old_fails()
- return
- elif options.reset_old_fails:
- clear_failfile()
- return
-
- # If we're in suite mode, tests is a list of names of sets of tests.
- if options.mode == 'suite':
- tests = expand_suites(tests)
- elif options.mode == 'files':
- tests = [(os.path.abspath(test), start_mongod) for test in tests]
-
- if options.ignore_files != None :
- ignore_patt = re.compile( options.ignore_files )
- print "Ignoring files with pattern: ", ignore_patt
-
- def ignore_test( test ):
- if ignore_patt.search( test[0] ) != None:
- print "Ignoring test ", test[0]
- return False
- else:
- return True
-
- tests = filter( ignore_test, tests )
-
- if not tests:
- print "warning: no tests specified"
- return
-
- if options.with_cleanbb:
- clean_dbroot(nokill=True)
-
- test_report["start"] = time.time()
- test_report["mongod_running_at_start"] = mongod().is_mongod_up(mongod_port)
- try:
- run_tests(tests)
- finally:
- add_to_failfile(fails, options)
-
- test_report["end"] = time.time()
- test_report["elapsed"] = test_report["end"] - test_report["start"]
- test_report["failures"] = len(losers.keys())
- test_report["mongod_running_at_end"] = mongod().is_mongod_up(mongod_port)
- if report_file:
- f = open( report_file, "wb" )
- f.write( json.dumps( test_report, indent=4, separators=(',', ': ')) )
- f.close()
-
- report()
-
-if __name__ == "__main__":
- main()
diff --git a/src/mongo/gotools/test/legacy26/buildscripts/utils.py b/src/mongo/gotools/test/legacy26/buildscripts/utils.py
deleted file mode 100644
index 68273ee69c8..00000000000
--- a/src/mongo/gotools/test/legacy26/buildscripts/utils.py
+++ /dev/null
@@ -1,230 +0,0 @@
-
-import codecs
-import re
-import socket
-import time
-import os
-import os.path
-import itertools
-import subprocess
-import sys
-import hashlib
-
-# various utilities that are handy
-
-def getAllSourceFiles( arr=None , prefix="." ):
- if arr is None:
- arr = []
-
- if not os.path.isdir( prefix ):
- # assume a file
- arr.append( prefix )
- return arr
-
- for x in os.listdir( prefix ):
- if x.startswith( "." ) or x.startswith( "pcre-" ) or x.startswith( "32bit" ) or x.startswith( "mongodb-" ) or x.startswith("debian") or x.startswith( "mongo-cxx-driver" ):
- continue
- full = prefix + "/" + x
- if os.path.isdir( full ) and not os.path.islink( full ):
- getAllSourceFiles( arr , full )
- else:
- if full.endswith( ".cpp" ) or full.endswith( ".h" ) or full.endswith( ".c" ):
- full = full.replace( "//" , "/" )
- arr.append( full )
-
- return arr
-
-
-def getGitBranch():
- if not os.path.exists( ".git" ) or not os.path.isdir(".git"):
- return None
-
- version = open( ".git/HEAD" ,'r' ).read().strip()
- if not version.startswith( "ref: " ):
- return version
- version = version.split( "/" )
- version = version[len(version)-1]
- return version
-
-def getGitBranchString( prefix="" , postfix="" ):
- t = re.compile( '[/\\\]' ).split( os.getcwd() )
- if len(t) > 2 and t[len(t)-1] == "mongo":
- par = t[len(t)-2]
- m = re.compile( ".*_([vV]\d+\.\d+)$" ).match( par )
- if m is not None:
- return prefix + m.group(1).lower() + postfix
- if par.find("Nightly") > 0:
- return ""
-
-
- b = getGitBranch()
- if b == None or b == "master":
- return ""
- return prefix + b + postfix
-
-def getGitVersion():
- if not os.path.exists( ".git" ) or not os.path.isdir(".git"):
- return "nogitversion"
-
- version = open( ".git/HEAD" ,'r' ).read().strip()
- if not version.startswith( "ref: " ):
- return version
- version = version[5:]
- f = ".git/" + version
- if not os.path.exists( f ):
- return version
- return open( f , 'r' ).read().strip()
-
-def execsys( args ):
- import subprocess
- if isinstance( args , str ):
- r = re.compile( "\s+" )
- args = r.split( args )
- p = subprocess.Popen( args , stdout=subprocess.PIPE , stderr=subprocess.PIPE )
- r = p.communicate()
- return r;
-
-def getprocesslist():
- raw = ""
- try:
- raw = execsys( "/bin/ps axww" )[0]
- except Exception,e:
- print( "can't get processlist: " + str( e ) )
-
- r = re.compile( "[\r\n]+" )
- return r.split( raw )
-
-def removeIfInList( lst , thing ):
- if thing in lst:
- lst.remove( thing )
-
-def findVersion( root , choices ):
- for c in choices:
- if ( os.path.exists( root + c ) ):
- return root + c
- raise "can't find a version of [" + root + "] choices: " + choices
-
-def choosePathExist( choices , default=None):
- for c in choices:
- if c != None and os.path.exists( c ):
- return c
- return default
-
-def filterExists(paths):
- return filter(os.path.exists, paths)
-
-def ensureDir( name ):
- d = os.path.dirname( name )
- if not os.path.exists( d ):
- print( "Creating dir: " + name );
- os.makedirs( d )
- if not os.path.exists( d ):
- raise "Failed to create dir: " + name
-
-
-def distinctAsString( arr ):
- s = set()
- for x in arr:
- s.add( str(x) )
- return list(s)
-
-def checkMongoPort( port=27017 ):
- sock = socket.socket()
- sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
- sock.settimeout(1)
- sock.connect(("localhost", port))
- sock.close()
-
-
-def didMongodStart( port=27017 , timeout=20 ):
- while timeout > 0:
- time.sleep( 1 )
- try:
- checkMongoPort( port )
- return True
- except Exception,e:
- print( e )
- timeout = timeout - 1
- return False
-
-def which(executable):
- if sys.platform == 'win32':
- paths = os.environ.get('Path', '').split(';')
- else:
- paths = os.environ.get('PATH', '').split(':')
-
- for path in paths:
- path = os.path.expandvars(path)
- path = os.path.expanduser(path)
- path = os.path.abspath(path)
- executable_path = os.path.join(path, executable)
- if os.path.exists(executable_path):
- return executable_path
-
- return executable
-
-def md5sum( file ):
- #TODO error handling, etc..
- return execsys( "md5sum " + file )[0].partition(" ")[0]
-
-def md5string( a_string ):
- return hashlib.md5(a_string).hexdigest()
-
-def find_python(min_version=(2, 5)):
- try:
- if sys.version_info >= min_version:
- return sys.executable
- except AttributeError:
- # In case the version of Python is somehow missing sys.version_info or sys.executable.
- pass
-
- version = re.compile(r'[Pp]ython ([\d\.]+)', re.MULTILINE)
- binaries = ('python27', 'python2.7', 'python26', 'python2.6', 'python25', 'python2.5', 'python')
- for binary in binaries:
- try:
- out, err = subprocess.Popen([binary, '-V'], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
- for stream in (out, err):
- match = version.search(stream)
- if match:
- versiontuple = tuple(map(int, match.group(1).split('.')))
- if versiontuple >= min_version:
- return which(binary)
- except:
- pass
-
- raise Exception('could not find suitable Python (version >= %s)' % '.'.join(str(v) for v in min_version))
-
-def smoke_command(*args):
- # return a list of arguments that comprises a complete
- # invocation of smoke.py
- here = os.path.dirname(__file__)
- smoke_py = os.path.abspath(os.path.join(here, 'smoke.py'))
- # the --with-cleanbb argument causes smoke.py to run
- # buildscripts/cleanbb.py before each test phase; this
- # prevents us from running out of disk space on slaves
- return [find_python(), smoke_py, '--with-cleanbb'] + list(args)
-
-def run_smoke_command(*args):
- # to run a command line script from a scons Alias (or any
- # Action), the command sequence must be enclosed in a list,
- # otherwise SCons treats it as a list of dependencies.
- return [smoke_command(*args)]
-
-# unicode is a pain. some strings cannot be unicode()'d
-# but we want to just preserve the bytes in a human-readable
-# fashion. this codec error handler will substitute the
-# repr() of the offending bytes into the decoded string
-# at the position they occurred
-def replace_with_repr(unicode_error):
- offender = unicode_error.object[unicode_error.start:unicode_error.end]
- return (unicode(repr(offender).strip("'").strip('"')), unicode_error.end)
-
-codecs.register_error('repr', replace_with_repr)
-
-def unicode_dammit(string, encoding='utf8'):
- # convert a string to a unicode, using the Python
- # representation of non-ascii bytes when necessary
- #
- # name inpsired by BeautifulSoup's "UnicodeDammit"
- return string.decode(encoding, 'repr')
-
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/authTestsKey b/src/mongo/gotools/test/legacy26/jstests/libs/authTestsKey
deleted file mode 100644
index 573898a4f05..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/authTestsKey
+++ /dev/null
@@ -1 +0,0 @@
-This key is only for running the suite with authentication dont use it in any tests directly
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/ca.pem b/src/mongo/gotools/test/legacy26/jstests/libs/ca.pem
deleted file mode 100644
index f739ef0627b..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/ca.pem
+++ /dev/null
@@ -1,17 +0,0 @@
------BEGIN CERTIFICATE-----
-MIICnTCCAgYCCQD4+RCKzwZr/zANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMC
-VVMxETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4w
-DAYDVQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0
-IEF1dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMTEz
-MDAyMzU0OVoXDTIzMTEyODAyMzU0OVowgZIxCzAJBgNVBAYTAlVTMREwDwYDVQQI
-DAhOZXcgWW9yazEWMBQGA1UEBwwNTmV3IFlvcmsgQ2l0eTEOMAwGA1UECgwFMTBH
-ZW4xDzANBgNVBAsMBktlcm5lbDEaMBgGA1UEAwwRTXkgQ2VydCBBdXRob3JpdHkx
-GzAZBgkqhkiG9w0BCQEWDHJvb3RAbGF6YXJ1czCBnzANBgkqhkiG9w0BAQEFAAOB
-jQAwgYkCgYEA1xymeY+U/evUuQvxpun9moe4GopN80c1ptmaAHM/1Onwaq54Wt27
-nl1wUVme3dh4DdWviYY7mJ333HVEnp/QhVcT4kQhICZqdgPKPdCseQW3H+8x6Gwz
-hrNRBdz0NkSoFxDlIymfy2Q2xoQpbCGAg+EnRYUTKlHMXNpUDLFhGjcCAwEAATAN
-BgkqhkiG9w0BAQUFAAOBgQDRQB3c/9osTexEzMPHyMGTzG5nGwy8Wv77GgW3BETM
-hECoGqueXLa5ZgvealJrnMHNKdj6vrCGgBDzE0K0VdXc4dLtLmx3DRntDOAWKJdB
-2XPMvdC7Ec//Fwep/9emz0gDiJrTiEpL4p74+h+sp4Xy8cBokQ3Ss5S9NmnPXT7E
-qQ==
------END CERTIFICATE-----
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/client.pem b/src/mongo/gotools/test/legacy26/jstests/libs/client.pem
deleted file mode 100644
index 85ace4fd40b..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/client.pem
+++ /dev/null
@@ -1,101 +0,0 @@
-Certificate:
- Data:
- Version: 3 (0x2)
- Serial Number: 7 (0x7)
- Signature Algorithm: sha1WithRSAEncryption
- Issuer: C=US, ST=New York, L=New York City, O=10Gen, OU=Kernel, CN=My Cert Authority/emailAddress=root@lazarus
- Validity
- Not Before: Aug 23 14:55:32 2013 GMT
- Not After : Jan 7 14:55:32 2041 GMT
- Subject: C=US, ST=New York, L=New York City, O=10Gen, OU=kerneluser, CN=client
- Subject Public Key Info:
- Public Key Algorithm: rsaEncryption
- Public-Key: (2048 bit)
- Modulus:
- 00:ba:16:42:d4:8b:3d:5e:8a:67:9e:a7:c0:cd:4a:
- 9c:9c:fd:95:b9:83:bf:f4:cf:03:8c:2e:db:a9:c1:
- 35:58:80:f6:e2:e9:87:28:84:e3:d0:9b:68:60:51:
- 0e:42:84:d8:6f:e8:34:cc:18:97:79:d3:8d:d8:2f:
- 23:11:25:6f:69:7a:38:bb:8c:b2:29:e9:91:be:79:
- 8c:cc:1b:56:98:98:d3:83:2a:c5:f9:9c:86:0c:2c:
- 24:0e:5c:46:3b:a9:95:44:6c:c5:e0:7c:9d:03:ae:
- 0d:23:99:49:a4:48:dd:0e:35:a2:e5:b4:8b:86:bd:
- c0:c8:ce:d5:ac:c4:36:f3:9e:5f:17:00:23:8d:53:
- a1:43:1b:a3:61:96:36:80:4d:35:50:b5:8b:69:31:
- 39:b4:63:8b:96:59:5c:d1:ea:92:eb:eb:fa:1b:35:
- 64:44:b3:f6:f3:a6:9d:49:3a:59:e5:e1:c2:cb:98:
- be:29:b3:22:dd:33:97:d7:50:4f:db:c2:58:64:18:
- b5:8c:3c:6b:2d:21:f6:bd:8d:e5:d2:da:8d:79:fe:
- a7:80:75:a8:15:b9:ee:79:7f:01:31:1d:e5:e7:15:
- 76:53:65:f6:fe:f0:93:7d:20:3d:cc:ff:9b:ca:b2:
- 50:2c:1b:3a:69:d5:e6:70:cf:ac:be:7e:5c:33:c4:
- 6e:a7
- Exponent: 65537 (0x10001)
- X509v3 extensions:
- X509v3 Basic Constraints:
- CA:FALSE
- Netscape Comment:
- OpenSSL Generated Certificate
- X509v3 Subject Key Identifier:
- 4A:8B:EE:22:42:E6:F8:62:4C:86:38:8D:C5:78:95:98:C1:10:05:7C
- X509v3 Authority Key Identifier:
- keyid:07:41:19:3A:9F:7E:C5:B7:22:4E:B7:BC:D5:DF:E4:FC:09:B8:64:16
-
- Signature Algorithm: sha1WithRSAEncryption
- 13:13:a8:f0:de:78:c6:b1:e0:85:cc:27:e6:04:28:44:93:1d:
- f1:ff:5e:81:69:33:1f:f3:76:e0:49:ca:d9:ad:aa:db:f5:a5:
- f8:a6:50:bb:a1:a7:40:14:e4:2f:8d:b8:21:7f:35:04:60:db:
- af:f0:9e:dd:a1:ca:0b:7f:03:2e:2f:19:1e:32:6e:1e:2d:87:
- 68:e3:37:47:a8:5b:93:d1:88:41:73:da:88:21:59:27:d4:35:
- 1c:6a:27:b5:c0:c6:17:ba:f3:87:c8:e1:f4:8f:43:12:bc:fa:
- 8d:90:d5:86:83:df:51:a5:c9:e0:92:f0:66:d0:37:61:6f:85:
- 24:18
------BEGIN CERTIFICATE-----
-MIIDdjCCAt+gAwIBAgIBBzANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMx
-ETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYD
-VQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1
-dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMDgyMzE0
-NTUzMloXDTQxMDEwNzE0NTUzMlowbjELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5l
-dyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUxMEdlbjET
-MBEGA1UECwwKa2VybmVsdXNlcjEPMA0GA1UEAwwGY2xpZW50MIIBIjANBgkqhkiG
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuhZC1Is9XopnnqfAzUqcnP2VuYO/9M8DjC7b
-qcE1WID24umHKITj0JtoYFEOQoTYb+g0zBiXedON2C8jESVvaXo4u4yyKemRvnmM
-zBtWmJjTgyrF+ZyGDCwkDlxGO6mVRGzF4HydA64NI5lJpEjdDjWi5bSLhr3AyM7V
-rMQ2855fFwAjjVOhQxujYZY2gE01ULWLaTE5tGOLlllc0eqS6+v6GzVkRLP286ad
-STpZ5eHCy5i+KbMi3TOX11BP28JYZBi1jDxrLSH2vY3l0tqNef6ngHWoFbnueX8B
-MR3l5xV2U2X2/vCTfSA9zP+byrJQLBs6adXmcM+svn5cM8RupwIDAQABo3sweTAJ
-BgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0
-aWZpY2F0ZTAdBgNVHQ4EFgQUSovuIkLm+GJMhjiNxXiVmMEQBXwwHwYDVR0jBBgw
-FoAUB0EZOp9+xbciTre81d/k/Am4ZBYwDQYJKoZIhvcNAQEFBQADgYEAExOo8N54
-xrHghcwn5gQoRJMd8f9egWkzH/N24EnK2a2q2/Wl+KZQu6GnQBTkL424IX81BGDb
-r/Ce3aHKC38DLi8ZHjJuHi2HaOM3R6hbk9GIQXPaiCFZJ9Q1HGontcDGF7rzh8jh
-9I9DErz6jZDVhoPfUaXJ4JLwZtA3YW+FJBg=
------END CERTIFICATE-----
------BEGIN PRIVATE KEY-----
-MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC6FkLUiz1eimee
-p8DNSpyc/ZW5g7/0zwOMLtupwTVYgPbi6YcohOPQm2hgUQ5ChNhv6DTMGJd5043Y
-LyMRJW9peji7jLIp6ZG+eYzMG1aYmNODKsX5nIYMLCQOXEY7qZVEbMXgfJ0Drg0j
-mUmkSN0ONaLltIuGvcDIztWsxDbznl8XACONU6FDG6NhljaATTVQtYtpMTm0Y4uW
-WVzR6pLr6/obNWREs/bzpp1JOlnl4cLLmL4psyLdM5fXUE/bwlhkGLWMPGstIfa9
-jeXS2o15/qeAdagVue55fwExHeXnFXZTZfb+8JN9ID3M/5vKslAsGzpp1eZwz6y+
-flwzxG6nAgMBAAECggEBALYw92urjAFVFxCiA8W7aEzYhtAkaztft4R3mD/C19z4
-H0CZDeig+3+RuIactY5xDIu8WHz/EseHVlg0BmxSL5ugu4z8uq8IbNaFoVFw7r7m
-2ieRKFY0ZpXiXcbllynw5iEhMjeRKhWhQmH5Qb2kTTINV5j4xKa+f9Lblx7Y2Uh4
-tsaOtlMwb98D2/KYJdTv5Nj1nyuSqRVhECsd00Cb6JUBGQBx8Ja0wFy9gEygq6kU
-w3s1XNOSnYNEo4FaVZwp5KZyCyBENcKpNUq4nXt/7ncEfVYdJck0Li3wN4Jr2J9S
-eHqRzh8QkHxc1Ro8ktcXaUSs9kFuwvVvb4rcGUpOMWkCgYEA9xxp8yDtFVgzMtc/
-vS8xgM1Wj4SrgKKYhE2wS05BJh/41oFMzfH1FpZ1GCM983r4QgYWoT71XsBgiOMC
-yN2p2IbV4V44bMGKJqaVMkB91CVCUWI6piaCQb/1CJTwaXE7zPim6dlUSxxBBnRn
-LP50NTscRLFcCZELD3Yl7jR8XFUCgYEAwMfkNFmGtBKAwlHZ3Y3XOwPWg+jCll7s
-9nhv8TU2IB9pcCRGqyOT7k1YymvYkDT2Je4JUPWEBs4cW7yD61LrQ8w8+DrE9dGo
-czzGPyjOAANSX0asG74UjkNIQThmyEOltVHIxYMaSqowjHRSPdA+R4Od9EdcDdfS
-q5SfSVFxmwsCgYBtl1thqUOcCL7EGHQ7KdfxgJ+YDMWmyfWMD4xVCYKZLurD7xop
-59nDR7zslIygE/RQC7Uzk+FsQTNO4ibVAIGX9syaI5gwm3DyjURzwehMEq4ju8W4
-9DEmicRZJvysNrzHvasA4RKiMQihnTQ43yyYgvuZd3MTBxF5rPNLfll89QKBgQC9
-SsmiOZIR+OUjaTmS2bbQBNm7Fm8TNcxZyzKn1wb5jb57VbNqUfnskVgxEqpIFyjn
-X48YRqtH/1RLI5UpGXdXUBFB8Hr7oM1VsgQ7ejakPp7AXOWcLA2FDz3AhMAvvnTU
-0KRihHPpgqk/EOy8M2Ej2XHcrcEO+q+quLmbRXRWtwKBgHacQiwci/2J+v0e9i52
-re/2AJHKP5MwNHFe1e01iNc5EEN0G+/Ut8XW19DWf6bsxqie0ChC+xN8TUst8alT
-F+tXTsHHmt/lRcjTROjT5XVuoqjtU2Q0QeVeGLgvObso+fZy3ZNeQuSJjWukdMZ3
-57rGT6p0OuM8qbrTzpv3JMrm
------END PRIVATE KEY-----
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/client_revoked.pem b/src/mongo/gotools/test/legacy26/jstests/libs/client_revoked.pem
deleted file mode 100644
index 276e62644b6..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/client_revoked.pem
+++ /dev/null
@@ -1,34 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIC7jCCAlegAwIBAgIBDDANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMx
-ETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYD
-VQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1
-dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMTIwNjE1
-MjUzMVoXDTQxMDQyMjE1MjUzMVowajELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5l
-dyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUxMEdlbjEP
-MA0GA1UECwwGS2VybmVsMQ8wDQYDVQQDDAZjbGllbnQwgZ8wDQYJKoZIhvcNAQEB
-BQADgY0AMIGJAoGBALX6DqSWRJBEJJRIRqG5X3cFHzse5jGIdV8fTqikaVitvuhs
-15z1njzfqBQZMJBCEvNb4eaenXJRMBDkEOcbfy6ah+ZLLqGFy7b6OxTROfx++3fT
-gsCAjBaIWvtGKNkwdcdM7PQ2jE5bL8vN/ufbH2sX451nVd+j6oAz0dTz7RvhAgMB
-AAGjezB5MAkGA1UdEwQCMAAwLAYJYIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJh
-dGVkIENlcnRpZmljYXRlMB0GA1UdDgQWBBTjciYidtPfd5ILsm7c2yYGV99vwjAf
-BgNVHSMEGDAWgBQHQRk6n37FtyJOt7zV3+T8CbhkFjANBgkqhkiG9w0BAQUFAAOB
-gQCgs74YrlZ6nivONRO8tNWi+gJ1TcWbQV+5yfF7Ispxo1TFxpa6GTWeZA3X4CwK
-PHmCdhb+oZoi59Qny0KECxtBj6zwdYIKLN0gIFYygaGX5J+YrRVatTjCJUHz9fco
-hZwApLEUkYg2Ldvbg+FncDwiVhi74OW685SkThNIulmPcQ==
------END CERTIFICATE-----
------BEGIN PRIVATE KEY-----
-MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBALX6DqSWRJBEJJRI
-RqG5X3cFHzse5jGIdV8fTqikaVitvuhs15z1njzfqBQZMJBCEvNb4eaenXJRMBDk
-EOcbfy6ah+ZLLqGFy7b6OxTROfx++3fTgsCAjBaIWvtGKNkwdcdM7PQ2jE5bL8vN
-/ufbH2sX451nVd+j6oAz0dTz7RvhAgMBAAECgYEAmHRy+g5uSJLeNmBK1EiSIwtm
-e8hKP+s7scJvyrdbDpEZJG2zQWtA82zIynXECsdgSwOKQQRXkaNU6oG3a3bM19uY
-0CqFRb9EwOLIStp+CM5zLRGmUr73u/+JrBPUWWFJkJvINvTXt18CMnCmosTvygWB
-IBZqsuEXQ6JcejxzQ6UCQQDdVUNdE2JgHp1qrr5l8563dztcrfCxuVFtgsj6qnhd
-UrBAa388B9kn4yVAe2i55xFmtHsO9Bz3ViiDFO163SafAkEA0nq8PeZtcIlZ2c7+
-6/Vdw1uLE5APVG2H9VEZdaVvkwIIXo8WQfMwWo5MQyPjVyBhUGlDwnKa46AcuplJ
-2XMtfwJBAIDrMfKb4Ng13OEP6Yz+yvr4MxZ3plQOqlRMMn53HubUzB6pvpGbzKwE
-DWWyvDxUT/lvtKHwJJMYlz5KyUygVecCQHr50RBNmLW+2muDILiWlOD2lIyqh/pp
-QJ2Zc8mkDkuTTXaKHZQM1byjFXXI+yRFu/Xyeu+abFsAiqiPtXFCdVsCQHai+Ykv
-H3y0mUJmwBVP2fBE3GiTGlaadM0auZKu7/ad+yo7Hv8Kibacwibzrj9PjT3mFSSF
-vujX1oWOaxAMVbE=
------END PRIVATE KEY-----
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/cluster-cert.pem b/src/mongo/gotools/test/legacy26/jstests/libs/cluster-cert.pem
deleted file mode 100644
index 74dc9845e3d..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/cluster-cert.pem
+++ /dev/null
@@ -1,101 +0,0 @@
-Certificate:
- Data:
- Version: 3 (0x2)
- Serial Number: 5 (0x5)
- Signature Algorithm: sha1WithRSAEncryption
- Issuer: C=US, ST=New York, L=New York City, O=10Gen, OU=Kernel, CN=My Cert Authority/emailAddress=root@lazarus
- Validity
- Not Before: Aug 7 17:19:17 2013 GMT
- Not After : Dec 22 17:19:17 2040 GMT
- Subject: C=US, ST=New York, L=New York City, O=10Gen, OU=Kernel, CN=clustertest
- Subject Public Key Info:
- Public Key Algorithm: rsaEncryption
- Public-Key: (2048 bit)
- Modulus:
- 00:98:ec:01:6e:f4:ae:8e:16:c8:87:a2:44:86:a0:
- 45:5c:ca:82:56:ba:0d:a9:60:bf:07:40:da:db:70:
- 33:a6:c2:ec:9d:e1:f0:da:fe:b9:f9:ac:23:33:64:
- e6:63:71:cc:a2:0d:eb:86:bc:31:32:aa:30:e6:1d:
- 5d:6d:fd:45:f4:2f:dc:72:93:bc:92:27:f7:6a:5a:
- 18:04:f7:64:d0:6a:3c:a9:14:f6:9e:9d:58:26:f4:
- 16:93:7e:3d:2e:3c:9e:54:41:4d:1a:e1:bd:b4:cf:
- d0:05:4c:4d:15:fb:5c:70:1e:0c:32:6d:d7:67:5b:
- ec:b2:61:83:e3:f0:b1:78:aa:30:45:86:f9:6d:f5:
- 48:1f:f1:90:06:25:db:71:ed:af:d7:0d:65:65:70:
- 89:d4:c8:c8:23:a0:67:22:de:d9:6e:1d:44:38:cf:
- 0f:eb:2c:fe:79:01:d7:98:15:5f:22:42:3f:ee:c9:
- 16:eb:b9:25:08:9a:2a:11:74:47:e0:51:75:8c:ae:
- eb:8d:b5:30:fe:48:98:0a:9e:ba:6e:a4:60:08:81:
- c6:05:a0:97:38:70:c0:1f:b4:27:96:8e:c3:d2:c1:
- 14:5f:34:16:91:7d:ad:4c:e9:23:07:f0:42:86:78:
- 11:a1:1e:9d:f3:d0:41:09:06:7d:5c:89:ef:d2:0d:
- 6c:d5
- Exponent: 65537 (0x10001)
- X509v3 extensions:
- X509v3 Basic Constraints:
- CA:FALSE
- Netscape Comment:
- OpenSSL Generated Certificate
- X509v3 Subject Key Identifier:
- C9:00:3A:28:CC:6A:75:57:82:81:00:A6:25:48:6C:CE:0A:A0:4A:59
- X509v3 Authority Key Identifier:
- keyid:07:41:19:3A:9F:7E:C5:B7:22:4E:B7:BC:D5:DF:E4:FC:09:B8:64:16
-
- Signature Algorithm: sha1WithRSAEncryption
- d1:55:e3:5c:43:8c:4f:d3:29:8d:74:4a:1d:23:50:17:27:b3:
- 30:6f:c6:d7:4c:6c:96:7e:52:a0:2f:91:92:b3:f5:4c:a1:ca:
- 88:62:31:e4:d6:64:ac:40:17:47:00:24:e8:0d:3b:7b:c7:d4:
- 7f:3a:76:45:27:fd:9b:ae:9d:44:71:8f:ab:62:60:e5:9d:e8:
- 59:dd:0e:25:17:14:f8:83:b0:b6:fc:5f:27:8b:69:a2:dc:31:
- b9:17:a1:27:92:96:c1:73:bf:a3:f0:b8:97:b9:e2:fb:97:6d:
- 44:01:b0:68:68:47:4b:84:56:3b:19:66:f8:0b:6c:1b:f5:44:
- a9:ae
------BEGIN CERTIFICATE-----
-MIIDdzCCAuCgAwIBAgIBBTANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMx
-ETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYD
-VQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1
-dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMDgwNzE3
-MTkxN1oXDTQwMTIyMjE3MTkxN1owbzELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5l
-dyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUxMEdlbjEP
-MA0GA1UECwwGS2VybmVsMRQwEgYDVQQDDAtjbHVzdGVydGVzdDCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAJjsAW70ro4WyIeiRIagRVzKgla6DalgvwdA
-2ttwM6bC7J3h8Nr+ufmsIzNk5mNxzKIN64a8MTKqMOYdXW39RfQv3HKTvJIn92pa
-GAT3ZNBqPKkU9p6dWCb0FpN+PS48nlRBTRrhvbTP0AVMTRX7XHAeDDJt12db7LJh
-g+PwsXiqMEWG+W31SB/xkAYl23Htr9cNZWVwidTIyCOgZyLe2W4dRDjPD+ss/nkB
-15gVXyJCP+7JFuu5JQiaKhF0R+BRdYyu6421MP5ImAqeum6kYAiBxgWglzhwwB+0
-J5aOw9LBFF80FpF9rUzpIwfwQoZ4EaEenfPQQQkGfVyJ79INbNUCAwEAAaN7MHkw
-CQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYdT3BlblNTTCBHZW5lcmF0ZWQgQ2Vy
-dGlmaWNhdGUwHQYDVR0OBBYEFMkAOijManVXgoEApiVIbM4KoEpZMB8GA1UdIwQY
-MBaAFAdBGTqffsW3Ik63vNXf5PwJuGQWMA0GCSqGSIb3DQEBBQUAA4GBANFV41xD
-jE/TKY10Sh0jUBcnszBvxtdMbJZ+UqAvkZKz9UyhyohiMeTWZKxAF0cAJOgNO3vH
-1H86dkUn/ZuunURxj6tiYOWd6FndDiUXFPiDsLb8XyeLaaLcMbkXoSeSlsFzv6Pw
-uJe54vuXbUQBsGhoR0uEVjsZZvgLbBv1RKmu
------END CERTIFICATE-----
------BEGIN PRIVATE KEY-----
-MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCY7AFu9K6OFsiH
-okSGoEVcyoJWug2pYL8HQNrbcDOmwuyd4fDa/rn5rCMzZOZjccyiDeuGvDEyqjDm
-HV1t/UX0L9xyk7ySJ/dqWhgE92TQajypFPaenVgm9BaTfj0uPJ5UQU0a4b20z9AF
-TE0V+1xwHgwybddnW+yyYYPj8LF4qjBFhvlt9Ugf8ZAGJdtx7a/XDWVlcInUyMgj
-oGci3tluHUQ4zw/rLP55AdeYFV8iQj/uyRbruSUImioRdEfgUXWMruuNtTD+SJgK
-nrpupGAIgcYFoJc4cMAftCeWjsPSwRRfNBaRfa1M6SMH8EKGeBGhHp3z0EEJBn1c
-ie/SDWzVAgMBAAECggEAfogRK5Dz+gfqByiCEO7+VagOrtolwbeWeNb2AEpXwq1Z
-Ac5Y76uDkI4ZVkYvx6r6ykBAWOzQvH5MFavIieDeiA0uF/QcPMcrFmnTpBBb74No
-C/OXmGjS7vBa2dHDp8VqsIaT2SFeSgUFt8yJoB2rP+3s47E1YYWTVYoQioO3JQJN
-f0mSuvTnvJO9lbTWiW+yWGVkQvIciCCnHkCEwU0fHht8IoFBGNFlpWZcGiMeietr
-16GdRcmAq95q8TTCeQxkgmmL+0ZJ1BrF7llG2pGYdacawXj1eVRqOHQaFIlcKe05
-RITpuXVYOWBpBpfbQsBZaCGLe7WxHJedrFxdbqm0ZQKBgQDLUQrmIl2wz43t3sI+
-WjW6y1GwMPG9EjXUT1Boq6PNHKgw04/32QNn5IMmz4cp2Mgyz7Hc0ABDU/ZATujd
-yCkxVErPbKRDKSxSl6nLXtLpLbHFmVPfKPbNKIuyFMBsOFOtoFoVbo33wI5dI7aO
-i7sTGB3ngbq4pzCJ9dVt/t81QwKBgQDAjAtBXS8WB69l9w35tx+MgYG0LJ+ykAug
-d91pwiWqSt02fZ0nr/S/76G6B4C8eqeOnYh1RzF5isLD246rLD2Y+uuFrgasvSiS
-4qSKbpG2kk02R/DRTAglAyXI0rhYIDrYKCQPWqNMWpawT/FQQwbFjTuhmz10FyXS
-hmVztZWoBwKBgQCBdnptLibghllGxViEoaai6gJ7Ib9ceHMEXPjDnb+wxPWoGZ8L
-4AjWJ+EHXpAfqmVYTX5hL6VrOdSNAHIxftoUCiuUxwYVqesKMH6y/A9q4WjYfRi1
-+fyliJLjc2lPv9IwtfGGwh3uS5ObZTlCrWES+IFaP/YozHUQ9BPSdb+lxwKBgB35
-Lv9b3CqXw6why2EmKpkax/AeSjXnyoeOYT9HY8mgodMLtt0ovPbr/McSx+2PQmon
-B8kJ7h+3hB4tHYZz+prH5MYIky1svNYwxeBu2ewL1k0u4cQTC+mHFeivNNczHTXs
-+cASIf2O1IpZx3zxEirKk4/StLxPpimhlkVu7P8dAoGBAJVw2U70+PagVBPtvheu
-ZDEvxSEzrn90ivIh7Y6ZIwdSOSLW04sOVL2JAzO155u4g77jdmcxV3urr1vD9LbF
-qkBGLXx7FFC/Mn/H42qerxr16Bt6RtvVpms71UIQLYxA7caab9cqoyt0wkgqJFKX
-fj0TVODnIf+zPMDCu+frpLbA
------END PRIVATE KEY-----
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/command_line/test_parsed_options.js b/src/mongo/gotools/test/legacy26/jstests/libs/command_line/test_parsed_options.js
deleted file mode 100644
index e2ca646b63a..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/command_line/test_parsed_options.js
+++ /dev/null
@@ -1,202 +0,0 @@
-// Merge the two options objects. Used as a helper when we are trying to actually compare options
-// despite the fact that our test framework adds extra stuff to it. Anything set in the second
-// options object overrides the first options object. The two objects must have the same structure.
-function mergeOptions(obj1, obj2) {
- var obj3 = {};
- for (var attrname in obj1) {
- if (typeof obj1[attrname] === "object" &&
- typeof obj2[attrname] !== "undefined") {
- if (typeof obj2[attrname] !== "object") {
- throw "Objects being merged must have the same structure";
- }
- obj3[attrname] = mergeOptions(obj1[attrname], obj2[attrname]);
- }
- else {
- obj3[attrname] = obj1[attrname];
- }
- }
- for (var attrname in obj2) {
- if (typeof obj2[attrname] === "object" &&
- typeof obj1[attrname] !== "undefined") {
- if (typeof obj1[attrname] !== "object") {
- throw "Objects being merged must have the same structure";
- }
- // Already handled above
- }
- else {
- obj3[attrname] = obj2[attrname];
- }
- }
- return obj3;
-}
-
-// Test that the parsed result of setting certain command line options has the correct format in
-// mongod. See SERVER-13379.
-//
-// Arguments:
-// mongoRunnerConfig - Configuration object to pass to the mongo runner
-// expectedResult - Object formatted the same way as the result of running the "getCmdLineOpts"
-// command, but with only the fields that should be set by the options implied by the first
-// argument set.
-//
-// Example:
-//
-// testGetCmdLineOptsMongod({ port : 10000 }, { "parsed" : { "net" : { "port" : 10000 } } });
-//
-var getCmdLineOptsBaseMongod;
-function testGetCmdLineOptsMongod(mongoRunnerConfig, expectedResult) {
-
- // Get the options object returned by "getCmdLineOpts" when we spawn a mongod using our test
- // framework without passing any additional options. We need this because the framework adds
- // options of its own, and we only want to compare against the options we care about.
- function getBaseOptsObject() {
-
- // Start mongod with no options
- var baseMongod = MongoRunner.runMongod();
-
- // Get base command line opts. Needed because the framework adds its own options
- var getCmdLineOptsBaseMongod = baseMongod.adminCommand("getCmdLineOpts");
-
- // Stop the mongod we used to get the options
- MongoRunner.stopMongod(baseMongod.port);
-
- return getCmdLineOptsBaseMongod;
- }
-
- if (typeof getCmdLineOptsBaseMongod === "undefined") {
- getCmdLineOptsBaseMongod = getBaseOptsObject();
- }
-
- // Get base command line opts. Needed because the framework adds its own options
- var getCmdLineOptsExpected = getCmdLineOptsBaseMongod;
-
- // Delete port and dbPath if we are not explicitly setting them, since they will change on
- // multiple runs of the test framework and cause false failures.
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.net === "undefined" ||
- typeof expectedResult.parsed.net.port === "undefined") {
- delete getCmdLineOptsExpected.parsed.net.port;
- }
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.storage === "undefined" ||
- typeof expectedResult.parsed.storage.dbPath === "undefined") {
- delete getCmdLineOptsExpected.parsed.storage.dbPath;
- }
-
- // Merge with the result that we expect
- expectedResult = mergeOptions(getCmdLineOptsExpected, expectedResult);
-
- // Start mongod with options
- var mongod = MongoRunner.runMongod(mongoRunnerConfig);
-
- // Get the parsed options
- var getCmdLineOptsResult = mongod.adminCommand("getCmdLineOpts");
-
- // Delete port and dbPath if we are not explicitly setting them, since they will change on
- // multiple runs of the test framework and cause false failures.
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.net === "undefined" ||
- typeof expectedResult.parsed.net.port === "undefined") {
- delete getCmdLineOptsResult.parsed.net.port;
- }
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.storage === "undefined" ||
- typeof expectedResult.parsed.storage.dbPath === "undefined") {
- delete getCmdLineOptsResult.parsed.storage.dbPath;
- }
-
- // Make sure the options are equal to what we expect
- assert.docEq(getCmdLineOptsResult.parsed, expectedResult.parsed);
-
- // Cleanup
- MongoRunner.stopMongod(mongod.port);
-}
-
-// Test that the parsed result of setting certain command line options has the correct format in
-// mongos. See SERVER-13379.
-//
-// Arguments:
-// mongoRunnerConfig - Configuration object to pass to the mongo runner
-// expectedResult - Object formatted the same way as the result of running the "getCmdLineOpts"
-// command, but with only the fields that should be set by the options implied by the first
-// argument set.
-//
-// Example:
-//
-// testGetCmdLineOptsMongos({ port : 10000 }, { "parsed" : { "net" : { "port" : 10000 } } });
-//
-var getCmdLineOptsBaseMongos;
-function testGetCmdLineOptsMongos(mongoRunnerConfig, expectedResult) {
-
- // Get the options object returned by "getCmdLineOpts" when we spawn a mongos using our test
- // framework without passing any additional options. We need this because the framework adds
- // options of its own, and we only want to compare against the options we care about.
- function getBaseOptsObject() {
-
- // Start mongod with no options
- var baseMongod = MongoRunner.runMongod();
-
- // Start mongos with only the configdb option
- var baseMongos = MongoRunner.runMongos({ configdb : baseMongod.host });
-
- // Get base command line opts. Needed because the framework adds its own options
- var getCmdLineOptsBaseMongos = baseMongos.adminCommand("getCmdLineOpts");
-
- // Remove the configdb option
- delete getCmdLineOptsBaseMongos.parsed.sharding.configDB;
-
- // Stop the mongod and mongos we used to get the options
- MongoRunner.stopMongos(baseMongos.port);
- MongoRunner.stopMongod(baseMongod.port);
-
- return getCmdLineOptsBaseMongos;
- }
-
- if (typeof getCmdLineOptsBaseMongos === "undefined") {
- getCmdLineOptsBaseMongos = getBaseOptsObject();
- }
-
- // Get base command line opts. Needed because the framework adds its own options
- var getCmdLineOptsExpected = getCmdLineOptsBaseMongos;
-
- // Delete port if we are not explicitly setting it, since it will change on multiple runs of the
- // test framework and cause false failures.
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.net === "undefined" ||
- typeof expectedResult.parsed.net.port === "undefined") {
- delete getCmdLineOptsExpected.parsed.net.port;
- }
-
- // Merge with the result that we expect
- expectedResult = mergeOptions(getCmdLineOptsExpected, expectedResult);
-
- // Start mongod with no options
- var mongod = MongoRunner.runMongod();
-
- // Add configdb option
- mongoRunnerConfig['configdb'] = mongod.host;
-
- // Start mongos connected to mongod
- var mongos = MongoRunner.runMongos(mongoRunnerConfig);
-
- // Get the parsed options
- var getCmdLineOptsResult = mongos.adminCommand("getCmdLineOpts");
-
- // Delete port if we are not explicitly setting it, since it will change on multiple runs of the
- // test framework and cause false failures.
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.net === "undefined" ||
- typeof expectedResult.parsed.net.port === "undefined") {
- delete getCmdLineOptsResult.parsed.net.port;
- }
-
- // Remove the configdb option
- delete getCmdLineOptsResult.parsed.sharding.configDB;
-
- // Make sure the options are equal to what we expect
- assert.docEq(getCmdLineOptsResult.parsed, expectedResult.parsed);
-
- // Cleanup
- MongoRunner.stopMongos(mongos.port);
- MongoRunner.stopMongod(mongod.port);
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/disable_noscripting.ini b/src/mongo/gotools/test/legacy26/jstests/libs/config_files/disable_noscripting.ini
deleted file mode 100644
index 4cfaf3395f6..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/disable_noscripting.ini
+++ /dev/null
@@ -1 +0,0 @@
-noscripting=false
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_auth.json b/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_auth.json
deleted file mode 100644
index 9f9cc84d107..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_auth.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "security" : {
- "authorization" : "enabled"
- }
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_autosplit.json b/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_autosplit.json
deleted file mode 100644
index a0d4f8af1be..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_autosplit.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "sharding" : {
- "autoSplit" : true
- }
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_httpinterface.json b/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_httpinterface.json
deleted file mode 100644
index c87dabe125d..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_httpinterface.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "net" : {
- "http" : {
- "enabled" : true
- }
- }
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_indexbuildretry.json b/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_indexbuildretry.json
deleted file mode 100644
index 362db08edd3..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_indexbuildretry.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "storage" : {
- "indexBuildRetry" : true
- }
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_journal.json b/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_journal.json
deleted file mode 100644
index d75b94ccbc7..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_journal.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "storage" : {
- "journal" : {
- "enabled" : false
- }
- }
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_objcheck.json b/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_objcheck.json
deleted file mode 100644
index b52be7382ed..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_objcheck.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "net" : {
- "wireObjectCheck" : true
- }
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_paranoia.json b/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_paranoia.json
deleted file mode 100644
index 218646b1662..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_paranoia.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "sharding" : {
- "archiveMovedChunks" : true
- }
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_prealloc.json b/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_prealloc.json
deleted file mode 100644
index 15ecefbb546..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_prealloc.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "storage" : {
- "preallocDataFiles" : true
- }
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_scripting.json b/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_scripting.json
deleted file mode 100644
index e8f32f2c23c..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_scripting.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "security" : {
- "javascriptEnabled" : true
- }
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_unixsocket.json b/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_unixsocket.json
deleted file mode 100644
index 660d21eb17f..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/enable_unixsocket.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "net" : {
- "unixDomainSocket" : {
- "enabled" : true
- }
- }
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/set_profiling.json b/src/mongo/gotools/test/legacy26/jstests/libs/config_files/set_profiling.json
deleted file mode 100644
index 944f0de1575..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/set_profiling.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "operationProfiling" : {
- "mode" : "all"
- }
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/set_replsetname.json b/src/mongo/gotools/test/legacy26/jstests/libs/config_files/set_replsetname.json
deleted file mode 100644
index 522ca2b766f..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/set_replsetname.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "replication" : {
- "replSetName" : "myconfigname"
- }
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/set_shardingrole.json b/src/mongo/gotools/test/legacy26/jstests/libs/config_files/set_shardingrole.json
deleted file mode 100644
index 71f92f122db..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/set_shardingrole.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "sharding" : {
- "clusterRole" : "configsvr"
- }
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/set_verbosity.json b/src/mongo/gotools/test/legacy26/jstests/libs/config_files/set_verbosity.json
deleted file mode 100644
index 47a1cce1b03..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/config_files/set_verbosity.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "systemLog" : {
- "verbosity" : 5
- }
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/crl.pem b/src/mongo/gotools/test/legacy26/jstests/libs/crl.pem
deleted file mode 100644
index dce0a0fb3f1..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/crl.pem
+++ /dev/null
@@ -1,10 +0,0 @@
------BEGIN X509 CRL-----
-MIIBazCB1QIBATANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMxETAPBgNV
-BAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUx
-MEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1dGhvcml0
-eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzFw0xMjEyMTIxODQ3NDFaFw00
-MDA0MjgxODQ3NDFaoA4wDDAKBgNVHRQEAwIBCzANBgkqhkiG9w0BAQUFAAOBgQAu
-PlPDGei2q6kdkoHe8vmDuts7Hm/o9LFbBmn0XUcfHisCJCPsJTyGCsgnfIiBcXJY
-1LMKsQFnYGv28rE2ZPpFg2qNxL+6qUEzCvqaHLX9q1V0F+f8hHDxucNYu52oo/h0
-uNZxB1KPFI2PReG5d3oUYqJ2+EctKkrGtxSPzbN0gg==
------END X509 CRL-----
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/crl_client_revoked.pem b/src/mongo/gotools/test/legacy26/jstests/libs/crl_client_revoked.pem
deleted file mode 100644
index 85eeaff5543..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/crl_client_revoked.pem
+++ /dev/null
@@ -1,12 +0,0 @@
------BEGIN X509 CRL-----
-MIIBujCCASMCAQEwDQYJKoZIhvcNAQEFBQAwgZIxCzAJBgNVBAYTAlVTMREwDwYD
-VQQIDAhOZXcgWW9yazEWMBQGA1UEBwwNTmV3IFlvcmsgQ2l0eTEOMAwGA1UECgwF
-MTBHZW4xDzANBgNVBAsMBktlcm5lbDEaMBgGA1UEAwwRTXkgQ2VydCBBdXRob3Jp
-dHkxGzAZBgkqhkiG9w0BCQEWDHJvb3RAbGF6YXJ1cxcNMTMxMjA2MTUzMzUwWhcN
-MTQwMTA1MTUzMzUwWjBMMBICAQwXDTEzMTIwNjE1MjczMFowGgIJAJGUg/wuW1KD
-Fw0xMjEyMTIxODQ4MjJaMBoCCQCRlIP8LltShRcNMTIxMjEyMTg0ODUyWqAOMAww
-CgYDVR0UBAMCAQ4wDQYJKoZIhvcNAQEFBQADgYEAERPfPdQnIafo1lYbFEx2ojrb
-eYqvWN9ykTyUGq2bKv+STYiuaKUz6daGVjELjn/safn5wHkYr9+C/kRRoCor5HYw
-N3uxHnkMpl6Xn7kgXL2b0jbdvfa44faOXdH2gbhzd8bFsOMra4QJHT6CgpYb3ei1
-+ePhAd1KS7tS/dyyP4c=
------END X509 CRL-----
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/crl_expired.pem b/src/mongo/gotools/test/legacy26/jstests/libs/crl_expired.pem
deleted file mode 100644
index 88307503240..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/crl_expired.pem
+++ /dev/null
@@ -1,10 +0,0 @@
------BEGIN X509 CRL-----
-MIIBazCB1QIBATANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMxETAPBgNV
-BAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUx
-MEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1dGhvcml0
-eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzFw0xMjEyMTIxODQwNTBaFw0x
-MzAxMTExODQwNTBaoA4wDDAKBgNVHRQEAwIBAzANBgkqhkiG9w0BAQUFAAOBgQBs
-jyvEdX8o0+PfRJsEv5oLwgp5y+YmKjRlXg2oj/ETxBDKNYtBY7B9Uu9q0chFtwTu
-XMXeEFWuxnKG+4Ovp6JmNcCKkttUwsWQuR6dGpClW6ttTk0putAWtDnqukTPlEQ2
-XU3wco7ZgrTphvuGpaIQLM1sQg9x8SfW3q6/hxYm3A==
------END X509 CRL-----
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/dur_checksum_bad_first.journal b/src/mongo/gotools/test/legacy26/jstests/libs/dur_checksum_bad_first.journal
deleted file mode 100644
index 687317844a7..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/dur_checksum_bad_first.journal
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/dur_checksum_bad_last.journal b/src/mongo/gotools/test/legacy26/jstests/libs/dur_checksum_bad_last.journal
deleted file mode 100644
index 7dd98e2c97b..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/dur_checksum_bad_last.journal
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/dur_checksum_good.journal b/src/mongo/gotools/test/legacy26/jstests/libs/dur_checksum_good.journal
deleted file mode 100644
index d76790d2451..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/dur_checksum_good.journal
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/fts.js b/src/mongo/gotools/test/legacy26/jstests/libs/fts.js
deleted file mode 100644
index 73b7d339ba5..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/fts.js
+++ /dev/null
@@ -1,18 +0,0 @@
-
-function queryIDS( coll, search, filter, extra ){
- var cmd = { search : search }
- if ( filter )
- cmd.filter = filter;
- if ( extra )
- Object.extend( cmd, extra );
- lastCommadResult = coll.runCommand( "text" , cmd);
-
- return getIDS( lastCommadResult );
-}
-
-function getIDS( commandResult ){
- if ( ! ( commandResult && commandResult.results ) )
- return []
-
- return commandResult.results.map( function(z){ return z.obj._id; } )
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/fun.js b/src/mongo/gotools/test/legacy26/jstests/libs/fun.js
deleted file mode 100644
index 276f32a8f40..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/fun.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// General high-order functions
-
-function forEach (action, array) {
- for (var i = 0; i < array.length; i++)
- action (array[i]);
-}
-
-function foldl (combine, base, array) {
- for (var i = 0; i < array.length; i++)
- base = combine (base, array[i]);
- return base
-}
-
-function foldr (combine, base, array) {
- for (var i = array.length - 1; i >= 0; i--)
- base = combine (array[i], base);
- return base
-}
-
-function map (func, array) {
- var result = [];
- for (var i = 0; i < array.length; i++)
- result.push (func (array[i]));
- return result
-}
-
-function filter (pred, array) {
- var result = []
- for (var i = 0; i < array.length; i++)
- if (pred (array[i])) result.push (array[i]);
- return result
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/geo_near_random.js b/src/mongo/gotools/test/legacy26/jstests/libs/geo_near_random.js
deleted file mode 100644
index 60cb7733f5d..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/geo_near_random.js
+++ /dev/null
@@ -1,99 +0,0 @@
-GeoNearRandomTest = function(name) {
- this.name = name;
- this.t = db[name];
- this.nPts = 0;
-
- // reset state
- this.t.drop();
- Random.srand(1234);
-
- print("starting test: " + name);
-}
-
-
-GeoNearRandomTest.prototype.mkPt = function mkPt(scale, indexBounds){
- if(!indexBounds){
- scale = scale || 1; // scale is good for staying away from edges
- return [((Random.rand() * 359.8) - 179.9) * scale, ((Random.rand() * 180) - 90) * scale];
- }
- else{
- var range = indexBounds.max - indexBounds.min;
- var eps = Math.pow(2, -40);
- // Go very close to the borders but not quite there.
- return [( Random.rand() * (range - eps) + eps) + indexBounds.min, ( Random.rand() * (range - eps) + eps ) + indexBounds.min];
- }
-
-}
-
-GeoNearRandomTest.prototype.insertPts = function(nPts, indexBounds, scale) {
- assert.eq(this.nPts, 0, "insertPoints already called");
- this.nPts = nPts;
-
- for (var i=0; i<nPts; i++){
- this.t.insert({_id: i, loc: this.mkPt(scale, indexBounds)});
- }
-
- if(!indexBounds)
- this.t.ensureIndex({loc: '2d'});
- else
- this.t.ensureIndex({loc: '2d'}, indexBounds)
-}
-
-GeoNearRandomTest.prototype.assertIsPrefix = function(short, long) {
- for (var i=0; i < short.length; i++){
-
- var xS = short[i].obj ? short[i].obj.loc[0] : short[i].loc[0]
- var yS = short[i].obj ? short[i].obj.loc[1] : short[i].loc[1]
- var dS = short[i].obj ? short[i].dis : 1
-
- var xL = long[i].obj ? long[i].obj.loc[0] : long[i].loc[0]
- var yL = long[i].obj ? long[i].obj.loc[1] : long[i].loc[1]
- var dL = long[i].obj ? long[i].dis : 1
-
- assert.eq([xS, yS, dS], [xL, yL, dL]);
- }
-}
-
-GeoNearRandomTest.prototype.testPt = function(pt, opts) {
- assert.neq(this.nPts, 0, "insertPoints not yet called");
-
- opts = opts || {};
- opts['sphere'] = opts['sphere'] || 0;
- opts['nToTest'] = opts['nToTest'] || this.nPts; // be careful, test is O( N^2 )
-
- print("testing point: " + tojson(pt) + " opts: " + tojson(opts));
-
-
- var cmd = {geoNear:this.t.getName(), near: pt, num: 1, spherical:opts.sphere};
-
- var last = db.runCommand(cmd).results;
- for (var i=2; i <= opts.nToTest; i++){
- //print(i); // uncomment to watch status
- cmd.num = i
- var ret = db.runCommand(cmd).results;
-
- try {
- this.assertIsPrefix(last, ret);
- } catch (e) {
- print("*** failed while compairing " + (i-1) + " and " + i);
- printjson(cmd);
- throw e; // rethrow
- }
-
- last = ret;
- }
-
-
- if (!opts.sharded){
- last = last.map(function(x){return x.obj});
-
- var query = {loc:{}};
- query.loc[ opts.sphere ? '$nearSphere' : '$near' ] = pt;
- var near = this.t.find(query).limit(opts.nToTest).toArray();
-
- this.assertIsPrefix(last, near);
- assert.eq(last, near);
- }
-}
-
-
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/grid.js b/src/mongo/gotools/test/legacy26/jstests/libs/grid.js
deleted file mode 100644
index 3a1253d83cd..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/grid.js
+++ /dev/null
@@ -1,171 +0,0 @@
-// Grid infrastructure: Servers, ReplicaSets, ConfigSets, Shards, Routers (mongos). Convenient objects and functions on top of those in shell/servers.js -Tony
-
-load('jstests/libs/fun.js')
-load('jstests/libs/network.js')
-
-// New servers and routers take and increment port number from this.
-// A comment containing FreshPorts monad implies reading and incrementing this, IO may also read/increment this.
-var nextPort = 31000
-
-/*** Server is the spec of a mongod, ie. all its command line options.
- To start a server call 'begin' ***/
-// new Server :: String -> FreshPorts Server
-function Server (name) {
- this.addr = '127.0.0.1';
- this.dirname = name + nextPort;
- this.args = { port : nextPort++,
- noprealloc : '',
- smallfiles : '',
- rest : '',
- oplogSize : 8 }
-}
-
-// Server -> String <addr:port>
-Server.prototype.host = function() {
- return this.addr + ':' + this.args.port
-}
-
-// Start a new server with this spec and return connection to it
-// Server -> IO Connection
-Server.prototype.begin = function() {
- return startMongodTest(this.args.port, this.dirname, false, this.args);
-}
-
-// Stop server and remove db directory
-// Server -> IO ()
-Server.prototype.end = function() {
- print('Stopping mongod on port ' + this.args.port)
- stopMongod (this.args.port)
- resetDbpath (MongoRunner.dataPath + this.dirname)
-}
-
-// Cut server from network so it is unreachable (but still alive)
-// Requires sudo access and ipfw program (Mac OS X and BSD Unix). TODO: use iptables on Linux.
-function cutServer (conn) {
- var addrport = parseHost (conn.host)
- cutNetwork (addrport.port)
-}
-
-// Ensure server is connected to network (undo cutServer)
-// Requires sudo access and ipfw program (Mac OS X and BSD Unix). TODO: use iptables on Linux.
-function uncutServer (conn) {
- var iport = parseHost (conn.host)
- restoreNetwork (iport.port)
-}
-
-// Kill server process at other end of this connection
-function killServer (conn, _signal) {
- var signal = _signal || 15
- var iport = parseHost (conn.host)
- stopMongod (iport.port, signal)
-}
-
-/*** ReplicaSet is the spec of a replica set, ie. options given to ReplicaSetTest.
- To start a replica set call 'begin' ***/
-// new ReplicaSet :: String -> Int -> FreshPorts ReplicaSet
-function ReplicaSet (name, numServers) {
- this.name = name
- this.host = '127.0.0.1'
- this.nodes = numServers
- this.startPort = nextPort
- this.oplogSize = 40
- nextPort += numServers
-}
-
-// Start a replica set with this spec and return ReplSetTest, which hold connections to the servers including the master server. Call ReplicaSetTest.stopSet() to end all servers
-// ReplicaSet -> IO ReplicaSetTest
-ReplicaSet.prototype.begin = function() {
- var rs = new ReplSetTest(this)
- rs.startSet()
- rs.initiate()
- rs.awaitReplication()
- return rs
-}
-
-// Create a new server and add it to replica set
-// ReplicaSetTest -> IO Connection
-ReplSetTest.prototype.addServer = function() {
- var conn = this.add()
- nextPort++
- this.reInitiate()
- this.awaitReplication(60000)
- assert.soon(function() {
- var doc = conn.getDB('admin').isMaster()
- return doc['ismaster'] || doc['secondary']
- })
- return conn
-}
-
-/*** ConfigSet is a set of specs (Servers) for sharding config servers.
- Supply either the servers or the number of servers desired.
- To start the config servers call 'begin' ***/
-// new ConfigSet :: [Server] or Int -> FreshPorts ConfigSet
-function ConfigSet (configSvrsOrNumSvrs) {
- if (typeof configSvrsOrNumSvrs == 'number') {
- this.configSvrs = []
- for (var i = 0; i < configSvrsOrNumSvrs; i++)
- this.configSvrs.push (new Server ('config'))
- } else
- this.configSvrs = configSvrs
-}
-
-// Start config servers, return list of connections to them
-// ConfigSet -> IO [Connection]
-ConfigSet.prototype.begin = function() {
- return map (function(s) {return s.begin()}, this.configSvrs)
-}
-
-// Stop config servers
-// ConfigSet -> IO ()
-ConfigSet.prototype.end = function() {
- return map (function(s) {return s.end()}, this.configSvrs)
-}
-
-/*** Router is the spec for a mongos, ie, its command line options.
- To start a router (mongos) call 'begin' ***/
-// new Router :: ConfigSet -> FreshPorts Router
-function Router (configSet) {
- this.args = { port : nextPort++,
- v : 0,
- configdb : map (function(s) {return s.host()}, configSet.configSvrs) .join(','),
- chunkSize : 1}
-}
-
-// Start router (mongos) with this spec and return connection to it.
-// Router -> IO Connection
-Router.prototype.begin = function() {
- return startMongos (this.args);
-}
-
-// Stop router
-// Router -> IO ()
-Router.prototype.end = function() {
- return stopMongoProgram (this.args.port)
-}
-
-// Add shard to config via router (mongos) connection. Shard is either a replSet name (replSet.getURL()) or single server (server.host)
-// Connection -> String -> IO ()
-function addShard (routerConn, repSetOrHostName) {
- var ack = routerConn.getDB('admin').runCommand ({addshard: repSetOrHostName})
- assert (ack['ok'], tojson(ack))
-}
-
-// Connection -> String -> IO ()
-function enableSharding (routerConn, dbName) {
- var ack = routerConn.getDB('admin').runCommand ({enablesharding: dbName})
- assert (ack['ok'], tojson(ack))
-}
-
-// Connection -> String -> String -> String -> IO ()
-function shardCollection (routerConn, dbName, collName, shardKey) {
- var ack = routerConn.getDB('admin').runCommand ({shardcollection: dbName + '.' + collName, key: shardKey})
- assert (ack['ok'], tojson(ack))
-}
-
-// Move db from its current primary shard to given shard. Shard is either a replSet name (replSet.getURL()) or single server (server.host)
-// Connection -> String -> String -> IO ()
-function moveDB (routerConn, dbname, repSetOrHostName) {
- var ack = routerConn.getDB('admin').runCommand ({moveprimary: dbname, to: repSetOrHostName})
- printjson(ack)
- assert (ack['ok'], tojson(ack))
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/key1 b/src/mongo/gotools/test/legacy26/jstests/libs/key1
deleted file mode 100644
index b5c19e4092f..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/key1
+++ /dev/null
@@ -1 +0,0 @@
-foop de doop
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/key2 b/src/mongo/gotools/test/legacy26/jstests/libs/key2
deleted file mode 100644
index cbde8212841..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/key2
+++ /dev/null
@@ -1 +0,0 @@
-other key
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/localhostnameCN.pem b/src/mongo/gotools/test/legacy26/jstests/libs/localhostnameCN.pem
deleted file mode 100644
index e181139b5d9..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/localhostnameCN.pem
+++ /dev/null
@@ -1,101 +0,0 @@
-Certificate:
- Data:
- Version: 3 (0x2)
- Serial Number: 8 (0x8)
- Signature Algorithm: sha1WithRSAEncryption
- Issuer: C=US, ST=New York, L=New York City, O=10Gen, OU=Kernel, CN=My Cert Authority/emailAddress=root@lazarus
- Validity
- Not Before: Nov 6 14:31:58 2013 GMT
- Not After : Mar 23 14:31:58 2041 GMT
- Subject: C=US, ST=New York, L=New York City, O=10Gen, CN=127.0.0.1
- Subject Public Key Info:
- Public Key Algorithm: rsaEncryption
- Public-Key: (2048 bit)
- Modulus:
- 00:aa:e1:a0:6c:09:dc:fd:d0:9f:0f:b6:77:40:60:
- f9:01:f9:9e:55:20:fe:88:04:93:c9:ab:96:93:3a:
- ed:7e:7d:ad:e4:eb:a7:e9:07:35:ef:6e:14:64:dd:
- 31:9b:e5:24:06:18:bb:60:67:e3:c5:49:8e:79:b6:
- 78:07:c1:64:3f:de:c1:7d:1b:a9:96:35:d5:f9:b8:
- b4:5e:2a:34:b7:d0:19:ad:f6:8a:00:ef:8e:b0:d5:
- 36:1f:66:a0:7a:7d:cf:f0:98:3c:ee:0f:be:67:d2:
- de:c3:e6:b8:79:2f:64:40:0c:39:15:97:8c:13:da:
- 1b:db:5c:bb:a3:43:0b:74:c7:46:55:9b:ea:d7:93:
- d5:15:2f:d1:34:ac:a9:99:3b:01:f0:c1:d7:42:89:
- 24:bb:ab:60:99:c1:4d:9f:bf:9a:a3:92:3a:58:05:
- e2:47:a6:8e:71:b2:0a:32:b0:c5:cc:a0:58:40:bf:
- 09:a7:76:f5:37:ce:90:71:e0:75:89:17:ea:fb:80:
- 24:a1:9d:6e:1b:7e:e3:44:52:d3:fe:e3:de:80:9a:
- 8e:c3:4f:8c:bb:b4:8c:d2:a9:a9:aa:af:90:ac:b4:
- ee:6b:d2:c5:71:1e:08:7f:4c:b6:2a:5f:13:7a:e3:
- 29:f7:2e:bb:f7:c5:48:0a:4e:2e:1e:d4:2c:40:b3:
- 4c:19
- Exponent: 65537 (0x10001)
- X509v3 extensions:
- X509v3 Basic Constraints:
- CA:FALSE
- Netscape Comment:
- OpenSSL Generated Certificate
- X509v3 Subject Key Identifier:
- 0E:3F:54:C4:77:85:FF:93:58:A7:24:23:32:35:73:B0:BE:8C:C3:BB
- X509v3 Authority Key Identifier:
- keyid:07:41:19:3A:9F:7E:C5:B7:22:4E:B7:BC:D5:DF:E4:FC:09:B8:64:16
-
- Signature Algorithm: sha1WithRSAEncryption
- 4c:9d:31:81:b5:e9:6a:64:4c:1e:eb:91:7f:f1:66:74:46:13:
- 19:cb:f2:3b:9a:41:f2:83:67:32:53:a6:cd:33:37:4c:92:a6:
- 36:d4:f3:0b:56:a2:2b:66:f1:09:a7:06:36:b8:83:b7:31:70:
- fe:bf:af:b5:3d:59:f3:f2:18:48:c7:6c:b0:90:8c:24:47:30:
- 53:8d:c5:3e:7c:7b:33:53:15:ec:bd:8a:83:ed:05:e8:8b:21:
- d7:65:39:69:95:c8:58:7d:4f:1b:32:51:85:2d:4d:8b:be:00:
- 60:17:83:9b:2b:13:43:05:78:db:a4:2e:a2:cb:31:34:7e:b9:
- 8a:72
------BEGIN CERTIFICATE-----
-MIIDZDCCAs2gAwIBAgIBCDANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMx
-ETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYD
-VQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1
-dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMTEwNjE0
-MzE1OFoXDTQxMDMyMzE0MzE1OFowXDELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5l
-dyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUxMEdlbjES
-MBAGA1UEAwwJMTI3LjAuMC4xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEAquGgbAnc/dCfD7Z3QGD5AfmeVSD+iASTyauWkzrtfn2t5Oun6Qc1724UZN0x
-m+UkBhi7YGfjxUmOebZ4B8FkP97BfRupljXV+bi0Xio0t9AZrfaKAO+OsNU2H2ag
-en3P8Jg87g++Z9Lew+a4eS9kQAw5FZeME9ob21y7o0MLdMdGVZvq15PVFS/RNKyp
-mTsB8MHXQokku6tgmcFNn7+ao5I6WAXiR6aOcbIKMrDFzKBYQL8Jp3b1N86QceB1
-iRfq+4AkoZ1uG37jRFLT/uPegJqOw0+Mu7SM0qmpqq+QrLTua9LFcR4If0y2Kl8T
-euMp9y6798VICk4uHtQsQLNMGQIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG
-+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQU
-Dj9UxHeF/5NYpyQjMjVzsL6Mw7swHwYDVR0jBBgwFoAUB0EZOp9+xbciTre81d/k
-/Am4ZBYwDQYJKoZIhvcNAQEFBQADgYEATJ0xgbXpamRMHuuRf/FmdEYTGcvyO5pB
-8oNnMlOmzTM3TJKmNtTzC1aiK2bxCacGNriDtzFw/r+vtT1Z8/IYSMdssJCMJEcw
-U43FPnx7M1MV7L2Kg+0F6Ish12U5aZXIWH1PGzJRhS1Ni74AYBeDmysTQwV426Qu
-ossxNH65inI=
------END CERTIFICATE-----
------BEGIN PRIVATE KEY-----
-MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCq4aBsCdz90J8P
-tndAYPkB+Z5VIP6IBJPJq5aTOu1+fa3k66fpBzXvbhRk3TGb5SQGGLtgZ+PFSY55
-tngHwWQ/3sF9G6mWNdX5uLReKjS30Bmt9ooA746w1TYfZqB6fc/wmDzuD75n0t7D
-5rh5L2RADDkVl4wT2hvbXLujQwt0x0ZVm+rXk9UVL9E0rKmZOwHwwddCiSS7q2CZ
-wU2fv5qjkjpYBeJHpo5xsgoysMXMoFhAvwmndvU3zpBx4HWJF+r7gCShnW4bfuNE
-UtP+496Amo7DT4y7tIzSqamqr5CstO5r0sVxHgh/TLYqXxN64yn3Lrv3xUgKTi4e
-1CxAs0wZAgMBAAECggEADtdh04BXzUOdTQQP/2tstRs1ATfIY4/iNhXNEiSAFAhe
-Xg+Jmdeie5UX+FqtwFh6dH0ZaRoc0jm9Qhzy99l4F4QFUhRg+kbausGsCLGpun08
-fbt36PTlc75Q4RFMxta+hKr0P8jmRKYv6tvTEdNn5ZgqLRHofKDo4nh/Y4KjMBUq
-VIMUu+VO9Ol2GPlZVRBaJec0E1+HUyzaK5JVUIFh4atcrHyXxae+rY9o6G57BBEj
-ZzlahfMI5aYj9HhXnB8RuhVBuIZBNSA41nxHmOs6JBQsatVML51RFIV4KPU+AyDR
-bdYXHJehRIUF8RL92aHjGYsvXdSxVhuUBqMIQhOwAQKBgQDUtj+p+7SHpLyQIZpU
-EQFK+42LDc6zF4uJVjq1d8fC2Hrmz8PLs0KcH36VWNbo48B3iFiPWIMID5xwLuIb
-FkLOzJ8QrbILn0zcu/hplrCiy6PZas3rpLJ+X406wLQeCikOLhQkz+cuKuQmvWkK
-eyqwBIIxg8t5dTtTAmu3w/DDgQKBgQDNqByxKduTgEND1+isUOt+L/ipR3SzXQ4m
-ZsOKiSxyXxge0/CUxPxO6WeEVGQ7bGAr5yQD9ukvJnCo3phYcuRRj+RTMrTL73Kz
-p/cyOUx2NMUIgURTsO+s3D0lC4+NmoDge0roeEDX+/lFNjqgRKJ+1LUimqbo5uNE
-EupkyTh0mQKBgGw/81ZGSjFdnLic4TU3Ejlem0HQ3Qg3S0OxJl+DfZ2jHaiowzO/
-Hn7laD4I4BXVEfXC5Y7NtKE9kJdmxJqUUZt8dta+DoXro+oRnvHdRjcS+2eB+xmY
-z12QswbbWs6OzSXyPT4er7/HBCTS78nttGOvZ7JbKAm/p1kvOjJi/PwBAoGAE7Tw
-Sum/6Lp5t56Q5TI73rOqGE6ImEdqe7ONOVE7uRnzrcCRZTAbHVSwXrXXhPo1nP9h
-LCAU6De+w+/QmWkpB8fKEU7ilEg1rZGC1oU3FnyoBNCeQ4bI8L+J/GrHLsKHZvtp
-ii07yXaTxFYV+BWbnJu1X8OCCv9U98j4PQArMMECgYEAm6uLN647vb+ZhzNBMtsX
-1wnMSgzbgGpgjhWwk6dNmw8YJNKg9CFa8sQ8N7yKXWBEF/RkU0kfzZL8iddHEb/k
-Ti1BlwrEzFfIQLlBfv47tYWOj8ZxN0ujlzUoN2VAC25LZhjcQCo3ftBk2lkrmllu
-MxjxBfRk/teUdRl80oi5R0w=
------END PRIVATE KEY-----
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/localhostnameSAN.pem b/src/mongo/gotools/test/legacy26/jstests/libs/localhostnameSAN.pem
deleted file mode 100644
index beb0bb91b61..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/localhostnameSAN.pem
+++ /dev/null
@@ -1,100 +0,0 @@
-Certificate:
- Data:
- Version: 3 (0x2)
- Serial Number: 9 (0x9)
- Signature Algorithm: sha1WithRSAEncryption
- Issuer: C=US, ST=New York, L=New York City, O=10Gen, OU=Kernel, CN=My Cert Authority/emailAddress=root@lazarus
- Validity
- Not Before: Nov 6 14:45:13 2013 GMT
- Not After : Mar 23 14:45:13 2041 GMT
- Subject: C=US, ST=New York, L=New York City, O=10Gen, CN=santesthostname.com
- Subject Public Key Info:
- Public Key Algorithm: rsaEncryption
- Public-Key: (2048 bit)
- Modulus:
- 00:c9:83:7a:75:42:cf:35:a4:95:c7:c8:d8:4d:19:
- 0e:89:87:d5:bd:f9:2f:ee:20:2c:4c:ca:6d:0b:c1:
- 10:5b:06:1b:c4:a1:26:12:25:06:7a:1e:d1:e6:d0:
- 91:2b:a3:c8:74:de:95:10:d9:ff:20:03:ec:84:db:
- 49:d9:a4:e9:c2:93:f0:d2:32:01:a6:55:db:14:bf:
- 16:fe:88:e0:e4:46:0f:6a:bd:27:95:45:2e:8d:13:
- e2:99:09:74:e4:2b:32:c3:6d:61:0c:86:85:eb:12:
- f5:dc:9e:7b:d3:00:a3:ce:f4:8a:4b:51:7f:a2:c6:
- 0b:52:a4:f1:41:d5:01:53:88:99:b9:3b:29:f8:43:
- 5e:a4:c7:41:d9:d3:34:43:f2:c7:a6:8d:22:1c:f9:
- b2:63:cb:df:83:9c:6f:ec:e3:b0:63:af:0b:51:c9:
- 20:ca:c2:59:c1:2c:ec:de:37:18:76:3d:73:85:82:
- 12:11:cd:b6:ef:2f:7b:64:cd:a3:2d:f6:7a:54:7f:
- b3:4f:c9:38:f4:62:b6:da:00:f0:59:df:e1:d3:15:
- ca:4b:73:6c:22:c1:9a:c1:51:c4:28:59:0f:71:2a:
- 39:e9:17:08:9d:b0:88:61:a7:53:67:da:dc:fb:6e:
- 38:f7:a8:cd:cd:88:ed:d9:4c:88:f4:a4:75:5e:3f:
- 8b:ff
- Exponent: 65537 (0x10001)
- X509v3 extensions:
- X509v3 Basic Constraints:
- CA:FALSE
- Netscape Cert Type:
- SSL Server
- Netscape Comment:
- OpenSSL Certificate for SSL Server
- X509v3 Subject Alternative Name:
- DNS:*.example.com, DNS:127.0.0.1, DNS:morefun!, IP Address:154.2.2.3, email:user@host.com
- Signature Algorithm: sha1WithRSAEncryption
- 0b:82:c6:7d:e0:ba:71:24:d6:a8:f4:cb:6f:0f:f6:69:28:32:
- 98:81:e6:14:49:81:07:ff:92:dd:0a:a4:68:3c:92:00:e5:8c:
- 43:d1:29:04:4a:5e:f2:b1:db:d2:ca:5d:7d:fc:fe:7b:f5:01:
- 65:87:25:cd:4c:68:09:16:bd:c7:b0:a4:d2:89:5e:dd:92:44:
- 6c:6e:7a:fe:7e:05:e2:2b:56:96:96:16:44:4a:01:87:8f:0c:
- df:35:88:97:3e:e5:21:23:a2:af:87:ad:ee:f7:9e:05:36:f7:
- 96:88:c8:fa:92:33:c2:60:2e:14:d9:ea:34:ab:04:a6:78:04:
- be:da
------BEGIN CERTIFICATE-----
-MIIDjDCCAvWgAwIBAgIBCTANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMx
-ETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYD
-VQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1
-dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMTEwNjE0
-NDUxM1oXDTQxMDMyMzE0NDUxM1owZjELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5l
-dyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUxMEdlbjEc
-MBoGA1UEAwwTc2FudGVzdGhvc3RuYW1lLmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD
-ggEPADCCAQoCggEBAMmDenVCzzWklcfI2E0ZDomH1b35L+4gLEzKbQvBEFsGG8Sh
-JhIlBnoe0ebQkSujyHTelRDZ/yAD7ITbSdmk6cKT8NIyAaZV2xS/Fv6I4ORGD2q9
-J5VFLo0T4pkJdOQrMsNtYQyGhesS9dyee9MAo870iktRf6LGC1Kk8UHVAVOImbk7
-KfhDXqTHQdnTNEPyx6aNIhz5smPL34Ocb+zjsGOvC1HJIMrCWcEs7N43GHY9c4WC
-EhHNtu8ve2TNoy32elR/s0/JOPRittoA8Fnf4dMVyktzbCLBmsFRxChZD3EqOekX
-CJ2wiGGnU2fa3PtuOPeozc2I7dlMiPSkdV4/i/8CAwEAAaOBmDCBlTAJBgNVHRME
-AjAAMBEGCWCGSAGG+EIBAQQEAwIGQDAxBglghkgBhvhCAQ0EJBYiT3BlblNTTCBD
-ZXJ0aWZpY2F0ZSBmb3IgU1NMIFNlcnZlcjBCBgNVHREEOzA5gg0qLmV4YW1wbGUu
-Y29tggkxMjcuMC4wLjGCCG1vcmVmdW4hhwSaAgIDgQ11c2VyQGhvc3QuY29tMA0G
-CSqGSIb3DQEBBQUAA4GBAAuCxn3gunEk1qj0y28P9mkoMpiB5hRJgQf/kt0KpGg8
-kgDljEPRKQRKXvKx29LKXX38/nv1AWWHJc1MaAkWvcewpNKJXt2SRGxuev5+BeIr
-VpaWFkRKAYePDN81iJc+5SEjoq+Hre73ngU295aIyPqSM8JgLhTZ6jSrBKZ4BL7a
------END CERTIFICATE-----
------BEGIN PRIVATE KEY-----
-MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDJg3p1Qs81pJXH
-yNhNGQ6Jh9W9+S/uICxMym0LwRBbBhvEoSYSJQZ6HtHm0JEro8h03pUQ2f8gA+yE
-20nZpOnCk/DSMgGmVdsUvxb+iODkRg9qvSeVRS6NE+KZCXTkKzLDbWEMhoXrEvXc
-nnvTAKPO9IpLUX+ixgtSpPFB1QFTiJm5Oyn4Q16kx0HZ0zRD8semjSIc+bJjy9+D
-nG/s47BjrwtRySDKwlnBLOzeNxh2PXOFghIRzbbvL3tkzaMt9npUf7NPyTj0Yrba
-APBZ3+HTFcpLc2wiwZrBUcQoWQ9xKjnpFwidsIhhp1Nn2tz7bjj3qM3NiO3ZTIj0
-pHVeP4v/AgMBAAECggEAbaQ12ttQ9rToMd2bosdBW58mssiERaIHuHhjQIP5LC10
-qlWr6y9uCMAAIP/WHNJuXPhGTvbtkzPPWrIdymeqMI5h91vx/di07OLT1gYPpuRf
-uwnUIamUnHn3TqEQkpzWb/JxXWlMMA0O7MzmPnYYqp/vJu/e7Geo/Xx1MAZ/RD0U
-YUvrjAyHcor01VVa/eV69jL+6x9ExFNmRYRbmjmK/f10R4o86nIfqhXbM8qKsT6x
-1U/S2I4oModm0x12PgiMDMDzVD+cNE/h8lSnFtBTNEY3xRe7CZnhMV4nBVGjWi9D
-XjcIBA0kGd4G10ploiF+37J/PQbyodLA/Y30BIYCkQKBgQD6XvEzd4DbBa08pcCa
-CYZd5pyAHur1GzJ4rTQNqB84hzuyG6dKkk0rPXjExrj/GAtGWg2ohggmC5OPInKM
-WdpMC56Q0aZYMId3Be/Wg4kRgFO0YOsrx0dRVi5nwbRXkMjXbfewSopwbzP5hIo1
-7rfOhdhbjXx6W269FPE4Epmj1QKBgQDOC1QjGeEzwEgSq3LuojRLHFo31pWYr7UU
-sxhpoWMB6ImPMVjXaEsRKfc7Gulpee1KVQLVmzbkqrHArVNXEpuG4egRwZ10UJ0L
-v4PqrElyHKxgAvllflkkMSX4rx791T+AZMq6W5VX1fKiojfvSLzmEFaI6VmS43GZ
-KCz9RFbegwKBgHSE4vP01b8YsTrcWPpXHHVu8b6epPJVKfQHh4YjjAQey6VkQULv
-O4K4JRBO+6GcawLeviSD3B74nD+s5Gp1Fqb1cWIsb6HzU9gMp0XKCWxfsJTt1gSV
-xZcQ6J/ZAjkOZKn9v5wH1M3msuWYzUm0Q06V888H1bqL+sl8iZZy8ZXRAoGBALf6
-GZh2BUYGTNSOzkMSBouCt3PgYRdC3PesqwG2nwcXMazwLRm6AD1FMYJPF1edDSow
-GiXNQAiR+cHHggDflourr2IbdZJkYLYavZmPWM1RmQDp5vKfDM1qLTOOeqe//8GP
-Pg2EtScG3G4nVraMRk9PC1WYtuiXudk9rF5A5SgtAoGBAL1oVSnQpi5tzBNJqhzM
-mQIF7ct5WNj2b1lKqqsXUTd2pcgMCRrryatqH+gLz1rAjtbVfx2FAYkutH5TFgqP
-c4uomUH3so1EjEA8GtFS9SSkLn5nIr4TnVy4+Qsr1svOo8mhtztORXz+xOTxR6ud
-p7rd/YEbc5GhNSXlcW+apZW+
------END PRIVATE KEY-----
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/mockkrb5.conf b/src/mongo/gotools/test/legacy26/jstests/libs/mockkrb5.conf
deleted file mode 100644
index 0f004f2de8a..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/mockkrb5.conf
+++ /dev/null
@@ -1,13 +0,0 @@
-[libdefaults]
- default_realm = 10GEN.ME
-
-[realms]
- 10GEN.ME = {
- kdc = kdc.10gen.me
- admin_server = kdc.10gen.me
- default_domain = 10gen.me
- }
-
-[domain_realm]
- .10gen.me = 10GEN.ME
- 10gen.me = 10GEN.ME
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/mockservice.keytab b/src/mongo/gotools/test/legacy26/jstests/libs/mockservice.keytab
deleted file mode 100644
index 3529d5fcbc6..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/mockservice.keytab
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/mockuser.keytab b/src/mongo/gotools/test/legacy26/jstests/libs/mockuser.keytab
deleted file mode 100644
index 35fd2ff06e7..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/mockuser.keytab
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/network.js b/src/mongo/gotools/test/legacy26/jstests/libs/network.js
deleted file mode 100644
index e5b33f3219e..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/network.js
+++ /dev/null
@@ -1,37 +0,0 @@
-
-// Parse "127.0.0.1:300" into {addr: "127.0.0.1", port: 300},
-// and "127.0.0.1" into {addr: "127.0.0.1", port: undefined}
-function parseHost (hostString) {
- var items = hostString.match(/(\d+.\d+.\d+.\d+)(:(\d+))?/)
- return {addr: items[1], port: parseInt(items[3])}
-}
-
-
-/* Network traffic shaping (packet dropping) to simulate network problems
- Currently works on BSD Unix and Mac OS X only (using ipfw).
- Requires sudo access.
- TODO: make it work on Linux too (using iptables). */
-
-var nextRuleNum = 100 // this grows indefinitely but can't exceed 65534, so can't call routines below indefinitely
-var portRuleNum = {}
-
-// Cut network connection to local port by dropping packets using iptables
-function cutNetwork (port) {
- portRuleNum[port] = nextRuleNum
- runProgram ('sudo', 'ipfw', 'add ' + nextRuleNum++ + ' deny tcp from any to any ' + port)
- runProgram ('sudo', 'ipfw', 'add ' + nextRuleNum++ + ' deny tcp from any ' + port + ' to any')
- //TODO: confirm it worked (since sudo may not work)
- runProgram ('sudo', 'ipfw', 'show')
-}
-
-// Restore network connection to local port by not dropping packets using iptables
-function restoreNetwork (port) {
- var ruleNum = portRuleNum[port]
- if (ruleNum) {
- runProgram ('sudo', 'ipfw', 'delete ' + ruleNum++)
- runProgram ('sudo', 'ipfw', 'delete ' + ruleNum)
- delete portRuleNum[port]
- }
- //TODO: confirm it worked (since sudo may not work)
- runProgram ('sudo', 'ipfw', 'show')
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/parallelTester.js b/src/mongo/gotools/test/legacy26/jstests/libs/parallelTester.js
deleted file mode 100644
index d5cb5346abe..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/parallelTester.js
+++ /dev/null
@@ -1,259 +0,0 @@
-/**
- * The ParallelTester class is used to test more than one test concurrently
- */
-
-
-if ( typeof _threadInject != "undefined" ){
- //print( "fork() available!" );
-
- Thread = function(){
- this.init.apply( this, arguments );
- }
- _threadInject( Thread.prototype );
-
- ScopedThread = function() {
- this.init.apply( this, arguments );
- }
- ScopedThread.prototype = new Thread( function() {} );
- _scopedThreadInject( ScopedThread.prototype );
-
- fork = function() {
- var t = new Thread( function() {} );
- Thread.apply( t, arguments );
- return t;
- }
-
- // Helper class to generate a list of events which may be executed by a ParallelTester
- EventGenerator = function( me, collectionName, mean, host ) {
- this.mean = mean;
- if (host == undefined) host = db.getMongo().host;
- this.events = new Array( me, collectionName, host );
- }
-
- EventGenerator.prototype._add = function( action ) {
- this.events.push( [ Random.genExp( this.mean ), action ] );
- }
-
- EventGenerator.prototype.addInsert = function( obj ) {
- this._add( "t.insert( " + tojson( obj ) + " )" );
- }
-
- EventGenerator.prototype.addRemove = function( obj ) {
- this._add( "t.remove( " + tojson( obj ) + " )" );
- }
-
- EventGenerator.prototype.addUpdate = function( objOld, objNew ) {
- this._add( "t.update( " + tojson( objOld ) + ", " + tojson( objNew ) + " )" );
- }
-
- EventGenerator.prototype.addCheckCount = function( count, query, shouldPrint, checkQuery ) {
- query = query || {};
- shouldPrint = shouldPrint || false;
- checkQuery = checkQuery || false;
- var action = "assert.eq( " + count + ", t.count( " + tojson( query ) + " ) );"
- if ( checkQuery ) {
- action += " assert.eq( " + count + ", t.find( " + tojson( query ) + " ).toArray().length );"
- }
- if ( shouldPrint ) {
- action += " print( me + ' ' + " + count + " );";
- }
- this._add( action );
- }
-
- EventGenerator.prototype.getEvents = function() {
- return this.events;
- }
-
- EventGenerator.dispatch = function() {
- var args = argumentsToArray( arguments );
- var me = args.shift();
- var collectionName = args.shift();
- var host = args.shift();
- var m = new Mongo( host );
- var t = m.getDB( "test" )[ collectionName ];
- for( var i in args ) {
- sleep( args[ i ][ 0 ] );
- eval( args[ i ][ 1 ] );
- }
- }
-
- // Helper class for running tests in parallel. It assembles a set of tests
- // and then calls assert.parallelests to run them.
- ParallelTester = function() {
- assert.neq(db.getMongo().writeMode(), "legacy", "wrong shell write mode")
- this.params = new Array();
- }
-
- ParallelTester.prototype.add = function( fun, args ) {
- args = args || [];
- args.unshift( fun );
- this.params.push( args );
- }
-
- ParallelTester.prototype.run = function( msg, newScopes ) {
- newScopes = newScopes || false;
- assert.parallelTests( this.params, msg, newScopes );
- }
-
- // creates lists of tests from jstests dir in a format suitable for use by
- // ParallelTester.fileTester. The lists will be in random order.
- // n: number of lists to split these tests into
- ParallelTester.createJstestsLists = function( n ) {
- var params = new Array();
- for( var i = 0; i < n; ++i ) {
- params.push( [] );
- }
-
- var makeKeys = function( a ) {
- var ret = {};
- for( var i in a ) {
- ret[ a[ i ] ] = 1;
- }
- return ret;
- }
-
- // some tests can't run in parallel with most others
- var skipTests = makeKeys([ "dbadmin.js",
- "repair.js",
- "cursor8.js",
- "recstore.js",
- "extent.js",
- "indexb.js",
-
- // tests turn on profiling
- "profile1.js",
- "profile3.js",
- "profile4.js",
- "profile5.js",
-
- "mr_drop.js",
- "mr3.js",
- "indexh.js",
- "apitest_db.js",
- "evalb.js",
- "evald.js",
- "evalf.js",
- "killop.js",
- "run_program1.js",
- "notablescan.js",
- "drop2.js",
- "dropdb_race.js",
- "fsync2.js", // May be placed in serialTestsArr once SERVER-4243 is fixed.
- "bench_test1.js",
- "padding.js",
- "queryoptimizera.js",
- "loglong.js",// log might overflow before
- // this has a chance to see the message
- "connections_opened.js", // counts connections, globally
- "opcounters.js",
- "currentop.js", // SERVER-8673, plus rwlock yielding issues
- "set_param1.js", // changes global state
- "geo_update_btree2.js", // SERVER-11132 test disables table scans
- "update_setOnInsert.js", // SERVER-9982
- ] );
-
- var parallelFilesDir = "jstests/core";
-
- // some tests can't be run in parallel with each other
- var serialTestsArr = [ parallelFilesDir + "/fsync.js",
- parallelFilesDir + "/auth1.js",
-
- // These tests expect the profiler to be on or off at specific points
- // during the test run.
- parallelFilesDir + "/cursor6.js",
- parallelFilesDir + "/profile2.js",
- parallelFilesDir + "/updatee.js"
- ];
- var serialTests = makeKeys( serialTestsArr );
-
- // prefix the first thread with the serialTests
- // (which we will exclude from the rest of the threads below)
- params[ 0 ] = serialTestsArr;
- var files = listFiles( parallelFilesDir );
- files = Array.shuffle( files );
-
- var i = 0;
- files.forEach(
- function(x) {
- if ( ( /[\/\\]_/.test(x.name) ) ||
- ( ! /\.js$/.test(x.name) ) ||
- ( x.name.match(parallelFilesDir + "/(.*\.js)")[1] in skipTests ) || //
- ( x.name in serialTests )) {
- print(" >>>>>>>>>>>>>>> skipping " + x.name);
- return;
- }
- // add the test to run in one of the threads.
- params[ i % n ].push( x.name );
- ++i;
- }
- );
-
- // randomize ordering of the serialTests
- params[ 0 ] = Array.shuffle( params[ 0 ] );
-
- for( var i in params ) {
- params[ i ].unshift( i );
- }
-
- return params;
- }
-
- // runs a set of test files
- // first argument is an identifier for this tester, remaining arguments are file names
- ParallelTester.fileTester = function() {
- var args = argumentsToArray( arguments );
- var suite = args.shift();
- args.forEach(
- function( x ) {
- print(" S" + suite + " Test : " + x + " ...");
- var time = Date.timeFunc( function() { load(x); }, 1);
- print(" S" + suite + " Test : " + x + " " + time + "ms" );
- }
- );
- }
-
- // params: array of arrays, each element of which consists of a function followed
- // by zero or more arguments to that function. Each function and its arguments will
- // be called in a separate thread.
- // msg: failure message
- // newScopes: if true, each thread starts in a fresh scope
- assert.parallelTests = function( params, msg, newScopes ) {
- newScopes = newScopes || false;
- var wrapper = function( fun, argv ) {
- eval (
- "var z = function() {" +
- "var __parallelTests__fun = " + fun.toString() + ";" +
- "var __parallelTests__argv = " + tojson( argv ) + ";" +
- "var __parallelTests__passed = false;" +
- "try {" +
- "__parallelTests__fun.apply( 0, __parallelTests__argv );" +
- "__parallelTests__passed = true;" +
- "} catch ( e ) {" +
- "print('');" +
- "print( '********** Parallel Test FAILED: ' + tojson(e) );" +
- "print('');" +
- "}" +
- "return __parallelTests__passed;" +
- "}"
- );
- return z;
- }
- var runners = new Array();
- for( var i in params ) {
- var param = params[ i ];
- var test = param.shift();
- var t;
- if ( newScopes )
- t = new ScopedThread( wrapper( test, param ) );
- else
- t = new Thread( wrapper( test, param ) );
- runners.push( t );
- }
-
- runners.forEach( function( x ) { x.start(); } );
- var nFailed = 0;
- // v8 doesn't like it if we exit before all threads are joined (SERVER-529)
- runners.forEach( function( x ) { if( !x.returnData() ) { ++nFailed; } } );
- assert.eq( 0, nFailed, msg );
- }
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/password_protected.pem b/src/mongo/gotools/test/legacy26/jstests/libs/password_protected.pem
deleted file mode 100644
index 87976e7a574..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/password_protected.pem
+++ /dev/null
@@ -1,51 +0,0 @@
------BEGIN ENCRYPTED PRIVATE KEY-----
-MIIFDjBABgkqhkiG9w0BBQ0wMzAbBgkqhkiG9w0BBQwwDgQIgWTIkEmBBfoCAggA
-MBQGCCqGSIb3DQMHBAjzL6xrCrEygwSCBMihG8kg3nTnTtWAbB+d1D+HJxriqm37
-7rwjkfa+T5w5ZBRGpsTt3QB5ep0maX72H55ns6ukkeMoDBSadhDWrGWcLQ2IOGt3
-E14KU6vMFe3gQkfF1fupp7F+3ma58/VNUKa4X5pzZ7OCf8inlLWejp8BRqbrPWqw
-Errgw1kNN3gWfQMr7JtIt1yI1xIMEB2Z976Jn0gaGnJAtzIW4thqjkDdb8b33S9f
-cb7N1Fq4cly22f9HdqNcLgVTi1zIlPXc/f/6mtsGTsJv/rMPthJ7c3Smvh3Fce2G
-w8e+ypfey+9QG3fk7RslaFRe8ShgqfdR8CAalp2UzwNbX91Agyuim3TA6s4jM8N9
-cF6CXlqEaA4sKhiOJmw69DfTC7QRee/gi2A8bz17pX85nKrGiLYn+Od8CEhTFxVk
-lNgBLv4+RcYHVqxWlbJMdDliMN53E+hYbh0y+GDLjteEXbrxRo1aSgd/9PGiSl97
-KY4F7b/OwRzRZh1F+cXY+uP5ZQMbx5EMMkhzuj3Hiy/AVlQrW2B1lXtcf11YFFJj
-xWq6YcpmEjL+xRq1PgoU7ahl6K0A3ScedQA5b1rLdPE8+bkRAfoN+0r8HVkIL7M+
-PorrwuWnvUmovZ0yDvm153HVvRnKZKHcelklphuUWfXvcRNITG/Rx6ssj+MVjqjb
-Xy7t7wgIrk10TFWNEcunGjSSjPDkjYPazJ2dasI0rODzhlQzrnlWM+El9P5zSu2z
-1Bvet44nmAKi2WLMda5YKbJcLSNbpBFB+rTwDt/D+dfwsJeC0sjpzzatKGXNJLJQ
-7x9BZfAbBn0QrIZYGMkaxWvcpJcaVUbCKiST4DK5ze584ptrlH+Bqw4u4xLcVrdk
-hu/8IBNybLrl4zahIz7bRRNmw5wo9zUVXPXEtuYak+MK+gmD3TzJ12OUKAlAj3Go
-Fj3NFQoxBJJjuXM3zZRvHp+/AAOUANBYIyV2WssF6C+SH4o+jKyxWC/GawPFvx/B
-gy55kdEt+ORdcOfV8L5Q2xI8Qpck6E3odmaHCvjz1bUVUWqhJcTuoewHRBfWiWgc
-UCXBS/YgendUQroBOPyYIwTtk4XY9fhhKGI4LhWcx4LfzntBnM9FGmDOwhu3HqEd
-HOs8p+HhB8LPjGRot63m7gkJ1T6AswSi9hTeZeSgXuSgL23zqwPGbGTwO3AmFs/M
-8luXQ4My9bk74K3d9lFdJPaxeTpeeWNodnBItbioT5aImptU+pkKWLTVmXi4V+JE
-1ootg+DSbz+bKp4A/LLOBO4Rsx5FCGAbBMnKc/n8lF86LjKq2PLRfgdPCaVfBrcd
-TnOkBZYU0HwJAc++4AZQJvA/KRB4UPUzMe2atjVxcrr6r6vL8G04+7TBFoynpzJ+
-4KZPCJz0Avb4wYKu/IHkdKL7UY8WEGz1mMDbAu4/xCriLg49D2f1eY3FTEjBotBI
-J9hE4ccmwqlxtl4qCVRezh0C+viJ6q2tCji2SPQviaVMNWiis9cZ52J+F9TC2p9R
-PdatJg0rjuVzfoPFE8Rq8V6+zf818b19vQ4F31J+VXTz7sF8it9IO0w/3MbtfBNE
-pKmMZ9h5RdSw1kXRWXbROR9XItS7gE1wkXAxw11z7jqNSNvhotkJXH/A5qGpTFBl
-Z8A=
------END ENCRYPTED PRIVATE KEY-----
------BEGIN CERTIFICATE-----
-MIIDczCCAtygAwIBAgIBCzANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMx
-ETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYD
-VQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1
-dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMTIwNjE1
-MTgxMFoXDTQxMDQyMjE1MTgxMFowazELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5l
-dyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUxMEdlbjEP
-MA0GA1UECwwGS2VybmVsMRAwDgYDVQQDDAdsYXphcnVzMIIBIjANBgkqhkiG9w0B
-AQEFAAOCAQ8AMIIBCgKCAQEA0+uq+UcogTSS+BLNTwwsBU7/HnNNhNgLKnk8pdUC
-UFOzAjXnXlXEravmbhWeIj5TsCElc5FPE66OvmiixFU6l27Z5P8gopjokxll7e1B
-ujeJOXgy5h+K76xdeQ90JmQX4OO0K5rLXvNH3ufuhGr2NObrBz6kbF5Wdr3urPl6
-pFSLH02zPLqPHhhUvO8jcbUD3RrS/5ZGHqE++F+QRMuYeCXTjECA8iLDvQsiqvT6
-qK1y04V/8K0BYJd/yE31H3cvRLUu7mRAkN87lY1Aj0i3dKM/l2RAa3tsy2/kSDH3
-VeUaqjoPN8PTfJaoMZz7xV7C+Zha+JZh3E7pq6viMR6bkwIDAQABo3sweTAJBgNV
-HRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZp
-Y2F0ZTAdBgNVHQ4EFgQUbw3OWXLJpkDMpGnLWM4vxSbwUSAwHwYDVR0jBBgwFoAU
-B0EZOp9+xbciTre81d/k/Am4ZBYwDQYJKoZIhvcNAQEFBQADgYEAL+OC9x0P7Ql+
-8NbONrIeOIoJD++K5rUM0vI+u9RDAxTm9TO6cP7Cl6H4zzvlzJ3w9DL66c2r+ZTy
-BxzFO1wtDKUo5RJKneC0tMz0rJQIWTqo45fDLs8UIDB5t4xp6zed34nvct+wIRaV
-hCjHBaVmILlBWb6OF9/kl1JhLtElyDs=
------END CERTIFICATE-----
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/server.pem b/src/mongo/gotools/test/legacy26/jstests/libs/server.pem
deleted file mode 100644
index e5980d4856e..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/server.pem
+++ /dev/null
@@ -1,34 +0,0 @@
------BEGIN PRIVATE KEY-----
-MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAK53miP9GczBWXnq
-NxHwQkgVqsDuesjwJbWilMK4gf3fjnf2PN3qDpnGbZbPD0ij8975pIKtSPoDycFm
-A8Mogip0yU2Lv2lL56CWthSBftOFDL2CWIsmuuURFXZPiVLtLytfI9oLASZFlywW
-Cs83qEDTvdW8VoVhVsxV1JFDnpXLAgMBAAECgYBoGBgxrMt97UazhNkCrPT/CV5t
-6lv8E7yMGMrlOyzkCkR4ssQyK3o2qbutJTGbR6czvIM5LKbD9Qqlh3ZrNHokWmTR
-VQQpJxt8HwP5boQvwRHg9+KSGr4JvRko1qxFs9C7Bzjt4r9VxdjhwZPdy0McGI/z
-yPXyQHjqBayrHV1EwQJBANorfCKeIxLhH3LAeUZuRS8ACldJ2N1kL6Ov43/v+0S/
-OprQeBTODuTds3sv7FCT1aYDTOe6JLNOwN2i4YVOMBsCQQDMuCozrwqftD17D06P
-9+lRXUekY5kFBs5j28Xnl8t8jnuxsXtQUTru660LD0QrmDNSauhpEmlpJknicnGt
-hmwRAkEA12MI6bBPlir0/jgxQqxI1w7mJqj8Vg27zpEuO7dzzLoyJHddpcSNBbwu
-npaAakiZK42klj26T9+XHvjYRuAbMwJBAJ5WnwWEkGH/pUHGEAyYQdSVojDKe/MA
-Vae0tzguFswK5C8GyArSGRPsItYYA7D4MlG/sGx8Oh2C6MiFndkJzBECQDcP1y4r
-Qsek151t1zArLKH4gG5dQAeZ0Lc2VeC4nLMUqVwrHcZDdd1RzLlSaH3j1MekFVfT
-6v6rrcNLEVbeuk4=
------END PRIVATE KEY-----
------BEGIN CERTIFICATE-----
-MIIC7jCCAlegAwIBAgIBCjANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMx
-ETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYD
-VQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1
-dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMTIwNTEz
-MjU0MFoXDTQxMDQyMTEzMjU0MFowajELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5l
-dyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUxMEdlbjEP
-MA0GA1UECwwGS2VybmVsMQ8wDQYDVQQDDAZzZXJ2ZXIwgZ8wDQYJKoZIhvcNAQEB
-BQADgY0AMIGJAoGBAK53miP9GczBWXnqNxHwQkgVqsDuesjwJbWilMK4gf3fjnf2
-PN3qDpnGbZbPD0ij8975pIKtSPoDycFmA8Mogip0yU2Lv2lL56CWthSBftOFDL2C
-WIsmuuURFXZPiVLtLytfI9oLASZFlywWCs83qEDTvdW8VoVhVsxV1JFDnpXLAgMB
-AAGjezB5MAkGA1UdEwQCMAAwLAYJYIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJh
-dGVkIENlcnRpZmljYXRlMB0GA1UdDgQWBBQgCkKiZhUV9/Zo7RwYYwm2cNK6tzAf
-BgNVHSMEGDAWgBQHQRk6n37FtyJOt7zV3+T8CbhkFjANBgkqhkiG9w0BAQUFAAOB
-gQCbsfr+Q4pty4Fy38lSxoCgnbB4pX6+Ex3xyw5zxDYR3xUlb/uHBiNZ1dBrXBxU
-ekU8dEvf+hx4iRDSW/C5N6BGnBBhCHcrPabo2bEEWKVsbUC3xchTB5rNGkvnMt9t
-G9ol7vanuzjL3S8/2PB33OshkBH570CxqqPflQbdjwt9dg==
------END CERTIFICATE-----
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/slow_weekly_util.js b/src/mongo/gotools/test/legacy26/jstests/libs/slow_weekly_util.js
deleted file mode 100644
index 1e2c7391cb1..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/slow_weekly_util.js
+++ /dev/null
@@ -1,20 +0,0 @@
-
-SlowWeeklyMongod = function( name ) {
- this.name = name;
- this.port = 30201;
-
- this.start = new Date();
-
- this.conn = startMongodEmpty("--port", this.port, "--dbpath", MongoRunner.dataPath + this.name , "--smallfiles", "--nojournal" );
-};
-
-SlowWeeklyMongod.prototype.getDB = function( name ) {
- return this.conn.getDB( name );
-}
-
-SlowWeeklyMongod.prototype.stop = function(){
- stopMongod( this.port );
- var end = new Date();
- print( "slowWeekly test: " + this.name + " completed succesfully in " + ( ( end.getTime() - this.start.getTime() ) / 1000 ) + " seconds" );
-};
-
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/smoke.pem b/src/mongo/gotools/test/legacy26/jstests/libs/smoke.pem
deleted file mode 100644
index 0f6deb368c5..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/smoke.pem
+++ /dev/null
@@ -1,50 +0,0 @@
------BEGIN PRIVATE KEY-----
-MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDLSU04xAL7eZ/Y
-J3euMDP/Uq7+a65zEIk7wzD2K5Htosbdysn67l8OzVlF2/IcB0/2SLuHHyC7+4pv
-O2+ndtvi6hr9zF4S8Bz0In/UUb+WzhFHuZd0YLl2arhnYMoDUkyLheVqEcDbECgi
-a6i5SNpAff2eUy29FVGwsaUl7+iEHqYxS9Ibmw1CeQYLEOGyhkTI9BjfO/3HwQyW
-FmOJp/IAJUFRCXTgluaMHptaonX5GmRK64wlF8Reu+uyQRdWM0cK9b3AxbBWAAyT
-SLQto+PW1J7QQ95Kn+aJ8nH1Jj80iUAjx2yAGchl1wfSHf5yAAo4OJNXgKUrQHIs
-dofsw/KTAgMBAAECggEBAItF+SX/BJwNw7lvsMsiMz2mBEZCuA4VMjBDlnPRffT1
-JJInsSG91lppzdPS0JjrWZk+U1xLsz2XJEz4x5JQGG3qPfvL3FfVMcEBMdrg9wX2
-wFgHiwAslGPQ0e3hngWQiOi+H2MALsTm2NhcMghfJUgyCWRDUH7O8FzCGIdZSk/Z
-Bx4CvBad+k+OFvUt03gwGtoCn7XneMRVGt04EU/srg0h6C3810k7+OLC1xZc8jaE
-5UAZwKO4pqJn/w0s9T2eAC+b+1YNuUTLvMTdhfH6ZkANxgcfQHWok14iGxCyXMeQ
-dBHeyNTIYKnfpwjFz85LgEvl4gsUTaa/IM0DfGPDOkECgYEA5z8Px0Sh0DSRr6PW
-3Ki9sDtJP5f+x0ARaebOfkscOJ5YvDejIxVNVBi5PYRtfCyLT78AKpRfxtBDQtW1
-w02xqkh/RR/GZm8hLyh/KzroTA3+GQvMqnE1irkJCKEOWwUjZNAFt+kgZIQWCfbn
-V1CjeK9xnEt00Icn7sh1CKubvakCgYEA4QwKZ2zj10i90NqlAAJlj6NTK/h+bHHw
-6VkUUO93GJZ1cC++dVZRhPTqBRdACJSey4nCMFdO3PLwy2gBG9LwU4rcN0Euo2bm
-J2uBBJVoXySE1250vem9I7KAramtTzQuHtIEvYhB3DHY+oYv4Eg6NSB4zAdtDKiV
-iiP23IN0+9sCgYA0KHconQRab+EEWtIVx0GxxE2LOH9Q9dR3rIWa2tossxqUqX/0
-Y9OjSkhN5dbEEVAC1rP05q6Lq2Hga0+qE5YlMGD0eGxJons7pci5OXo33VgY0h6B
-uzM2bPHqrlkMkqYfEQSZLM4PnfNSoAwiF6Anknrvo91fQ3zwUOqE4CAqsQKBgGX2
-a5xShKRcy8ud1JY9f8BlkmBgtP7zXOCMwJyu8nnMaacLqrJFCqg/wuvNjfCVTaEQ
-aFA4rn2DAMBX/fCaUNK5Hm9WdAgKrgp8Nbda7i/1Ps7Qt8n35f8PeCe2sdQp4x+J
-riYlXxmh6BoRxA1NDDpX3QMr9id/FknBY66jTNRzAoGBALab2GqBYInkmPj1nGDA
-f9+VQWFzl98k0PbLQcvKgbWuxLDf/Pz9lBi9tPzhNuTRt9RLuCMc5ZbpPbHPNWI0
-6+zofHTHoW0+prDdtZqpEE/TKmr8emjYMf4CBIKwW3CwbBRLr9C8G01ClTaan2Ge
-LMUhIseBsaQhmkL8n1AyauGL
------END PRIVATE KEY-----
------BEGIN CERTIFICATE-----
-MIIDlzCCAn+gAwIBAgIJAJDxQ4ilLvoVMA0GCSqGSIb3DQEBBQUAMGIxCzAJBgNV
-BAYTAlVTMREwDwYDVQQIDAhOZXcgWW9yazERMA8GA1UEBwwITmV3IFlvcmsxDjAM
-BgNVBAoMBTEwZ2VuMR0wGwYJKoZIhvcNAQkBFg50ZXN0QDEwZ2VuLmNvbTAeFw0x
-MjEyMDQxNTA0MDJaFw0xODA1MjcxNTA0MDJaMGIxCzAJBgNVBAYTAlVTMREwDwYD
-VQQIDAhOZXcgWW9yazERMA8GA1UEBwwITmV3IFlvcmsxDjAMBgNVBAoMBTEwZ2Vu
-MR0wGwYJKoZIhvcNAQkBFg50ZXN0QDEwZ2VuLmNvbTCCASIwDQYJKoZIhvcNAQEB
-BQADggEPADCCAQoCggEBAMtJTTjEAvt5n9gnd64wM/9Srv5rrnMQiTvDMPYrke2i
-xt3KyfruXw7NWUXb8hwHT/ZIu4cfILv7im87b6d22+LqGv3MXhLwHPQif9RRv5bO
-EUe5l3RguXZquGdgygNSTIuF5WoRwNsQKCJrqLlI2kB9/Z5TLb0VUbCxpSXv6IQe
-pjFL0hubDUJ5BgsQ4bKGRMj0GN87/cfBDJYWY4mn8gAlQVEJdOCW5owem1qidfka
-ZErrjCUXxF6767JBF1YzRwr1vcDFsFYADJNItC2j49bUntBD3kqf5onycfUmPzSJ
-QCPHbIAZyGXXB9Id/nIACjg4k1eApStAcix2h+zD8pMCAwEAAaNQME4wHQYDVR0O
-BBYEFO6qoBUb1CN4lCkGhaatcjUBKwWmMB8GA1UdIwQYMBaAFO6qoBUb1CN4lCkG
-haatcjUBKwWmMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAGcJdsiI
-JdhJDPkZksOhHZUMMRHLHfWubMGAvuml6hs+SL850DRc+vRP43eF/yz+WbEydkFz
-3qXkQQSG8A2bLOtg0c6Gyi5snUOX0CKcOl3jitgwVkHcdX/v6vbiwALk+r8kJExv
-vpiWIp3nxgLtYVJP/XPoEomEwmu5zWaw28MWXM4XrEjPYmK5ZL16VXXD+lfO0cnT
-2vjkbNK8g7fKaIYYX+cr8GLZi19kO+jUYfhtxQbn8nxUfSjHseAy9BbOLUbGTdAV
-MbGRQveOnFW0eDLjiZffwqCtn91EtYy+vBuYHT/C7Ws4hNwd9lTvmg0SHAm01vi1
-b4fBFFjNvg1wCrU=
------END CERTIFICATE-----
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/test_background_ops.js b/src/mongo/gotools/test/legacy26/jstests/libs/test_background_ops.js
deleted file mode 100644
index 91f50aaa362..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/test_background_ops.js
+++ /dev/null
@@ -1,340 +0,0 @@
-//
-// Utilities related to background operations while other operations are working
-//
-
-/**
- * Allows synchronization between background ops and the test operations
- */
-var waitForLock = function( mongo, name ){
-
- var ts = new ObjectId()
- var lockColl = mongo.getCollection( "config.testLocks" )
-
- lockColl.update({ _id : name, state : 0 }, { $set : { state : 0 } }, true)
-
- //
- // Wait until we can set the state to 1 with our id
- //
-
- var startTime = new Date().getTime()
-
- assert.soon( function() {
- lockColl.update({ _id : name, state : 0 }, { $set : { ts : ts, state : 1 } })
- var gleObj = lockColl.getDB().getLastErrorObj()
-
- if( new Date().getTime() - startTime > 20 * 1000 ){
- print( "Waiting for..." )
- printjson( gleObj )
- printjson( lockColl.findOne() )
- printjson( ts )
- }
-
- return gleObj.n == 1 || gleObj.updatedExisting
- }, "could not acquire lock", 30 * 1000, 100 )
-
- print( "Acquired lock " + tojson( { _id : name, ts : ts } ) + " curr : " +
- tojson( lockColl.findOne({ _id : name }) ) )
-
- // Set the state back to 0
- var unlock = function(){
- print( "Releasing lock " + tojson( { _id : name, ts : ts } ) + " curr : " +
- tojson( lockColl.findOne({ _id : name }) ) )
- lockColl.update({ _id : name, ts : ts }, { $set : { state : 0 } })
- }
-
- // Return an object we can invoke unlock on
- return { unlock : unlock }
-}
-
-/**
- * Allows a test or background op to say it's finished
- */
-var setFinished = function( mongo, name, finished ){
- if( finished || finished == undefined )
- mongo.getCollection( "config.testFinished" ).update({ _id : name }, { _id : name }, true )
- else
- mongo.getCollection( "config.testFinished" ).remove({ _id : name })
-}
-
-/**
- * Checks whether a test or background op is finished
- */
-var isFinished = function( mongo, name ){
- return mongo.getCollection( "config.testFinished" ).findOne({ _id : name }) != null
-}
-
-/**
- * Sets the result of a background op
- */
-var setResult = function( mongo, name, result, err ){
- mongo.getCollection( "config.testResult" ).update({ _id : name }, { _id : name, result : result, err : err }, true )
-}
-
-/**
- * Gets the result for a background op
- */
-var getResult = function( mongo, name ){
- return mongo.getCollection( "config.testResult" ).findOne({ _id : name })
-}
-
-/**
- * Overrides the parallel shell code in mongo
- */
-function startParallelShell( jsCode, port ){
-
- var x;
- if ( port ) {
- x = startMongoProgramNoConnect( "mongo" , "--port" , port , "--eval" , jsCode );
- } else {
- x = startMongoProgramNoConnect( "mongo" , "--eval" , jsCode , db ? db.getMongo().host : null );
- }
-
- return function(){
- jsTestLog( "Waiting for shell " + x + "..." )
- waitProgram( x );
- jsTestLog( "Shell " + x + " finished." )
- };
-}
-
-startParallelOps = function( mongo, proc, args, context ){
-
- var procName = proc.name + "-" + new ObjectId()
- var seed = new ObjectId( new ObjectId().valueOf().split("").reverse().join("") )
- .getTimestamp().getTime()
-
- // Make sure we aren't finished before we start
- setFinished( mongo, procName, false )
- setResult( mongo, procName, undefined, undefined )
-
- // TODO: Make this a context of its own
- var procContext = { procName : procName,
- seed : seed,
- waitForLock : waitForLock,
- setFinished : setFinished,
- isFinished : isFinished,
- setResult : setResult,
-
- setup : function( context, stored ){
-
- waitForLock = function(){
- return context.waitForLock( db.getMongo(), context.procName )
- }
- setFinished = function( finished ){
- return context.setFinished( db.getMongo(), context.procName, finished )
- }
- isFinished = function(){
- return context.isFinished( db.getMongo(), context.procName )
- }
- setResult = function( result, err ){
- return context.setResult( db.getMongo(), context.procName, result, err )
- }
- }}
-
- var bootstrapper = function( stored ){
-
- var procContext = stored.procContext
- procContext.setup( procContext, stored )
-
- var contexts = stored.contexts
- eval( "contexts = " + contexts )
-
- for( var i = 0; i < contexts.length; i++ ){
- if( typeof( contexts[i] ) != "undefined" ){
- // Evaluate all contexts
- contexts[i]( procContext )
- }
- }
-
- var operation = stored.operation
- eval( "operation = " + operation )
-
- var args = stored.args
- eval( "args = " + args )
-
- result = undefined
- err = undefined
-
- try{
- result = operation.apply( null, args )
- }
- catch( e ){
- err = e
- }
-
- setResult( result, err )
- }
-
- var contexts = [ RandomFunctionContext, context ]
-
- var testDataColl = mongo.getCollection( "config.parallelTest" )
-
- testDataColl.insert({ _id : procName,
- bootstrapper : tojson( bootstrapper ),
- operation : tojson( proc ),
- args : tojson( args ),
- procContext : procContext,
- contexts : tojson( contexts ) })
-
- assert.eq( null, testDataColl.getDB().getLastError() )
-
- var bootstrapStartup =
- "{ var procName = '" + procName + "'; " +
- "var stored = db.getMongo().getCollection( '" + testDataColl + "' )" +
- ".findOne({ _id : procName }); " +
- "var bootstrapper = stored.bootstrapper; " +
- "eval( 'bootstrapper = ' + bootstrapper ); " +
- "bootstrapper( stored ); " +
- "}"
-
-
- var oldDB = db
- db = mongo.getDB( "test" )
-
- jsTest.log( "Starting " + proc.name + " operations..." )
-
- var rawJoin = startParallelShell( bootstrapStartup )
-
- db = oldDB
-
-
- var join = function(){
- setFinished( mongo, procName, true )
-
- rawJoin();
- result = getResult( mongo, procName )
-
- assert.neq( result, null )
-
- if( result.err ) throw "Error in parallel ops " + procName + " : "
- + tojson( result.err )
-
- else return result.result
- }
-
- join.isFinished = function(){
- return isFinished( mongo, procName )
- }
-
- join.setFinished = function( finished ){
- return setFinished( mongo, procName, finished )
- }
-
- join.waitForLock = function( name ){
- return waitForLock( mongo, name )
- }
-
- return join
-}
-
-var RandomFunctionContext = function( context ){
-
- Random.srand( context.seed );
-
- Random.randBool = function(){ return Random.rand() > 0.5 }
-
- Random.randInt = function( min, max ){
-
- if( max == undefined ){
- max = min
- min = 0
- }
-
- return min + Math.floor( Random.rand() * max )
- }
-
- Random.randShardKey = function(){
-
- var numFields = 2 //Random.randInt(1, 3)
-
- var key = {}
- for( var i = 0; i < numFields; i++ ){
- var field = String.fromCharCode( "a".charCodeAt() + i )
- key[ field ] = 1
- }
-
- return key
- }
-
- Random.randShardKeyValue = function( shardKey ){
-
- var keyValue = {}
- for( field in shardKey ){
- keyValue[ field ] = Random.randInt(1, 100)
- }
-
- return keyValue
- }
-
- Random.randCluster = function(){
-
- var numShards = 2 //Random.randInt( 1, 10 )
- var rs = false //Random.randBool()
- var st = new ShardingTest({ shards : numShards,
- mongos : 4,
- other : { separateConfig : true, rs : rs } })
-
- return st
- }
-}
-
-
-//
-// Some utility operations
-//
-
-function moveOps( collName, options ){
-
- options = options || {}
-
- var admin = db.getMongo().getDB( "admin" )
- var config = db.getMongo().getDB( "config" )
- var shards = config.shards.find().toArray()
- var shardKey = config.collections.findOne({ _id : collName }).key
-
- while( ! isFinished() ){
-
- var findKey = Random.randShardKeyValue( shardKey )
- var toShard = shards[ Random.randInt( shards.length ) ]._id
-
- try {
- printjson( admin.runCommand({ moveChunk : collName,
- find : findKey,
- to : toShard }) )
- }
- catch( e ){
- printjson( e )
- }
-
- sleep( 1000 )
- }
-
- jsTest.log( "Stopping moveOps..." )
-}
-
-function splitOps( collName, options ){
-
- options = options || {}
-
- var admin = db.getMongo().getDB( "admin" )
- var config = db.getMongo().getDB( "config" )
- var shards = config.shards.find().toArray()
- var shardKey = config.collections.findOne({ _id : collName }).key
-
- while( ! isFinished() ){
-
- var middleKey = Random.randShardKeyValue( shardKey )
-
- try {
- printjson( admin.runCommand({ split : collName,
- middle : middleKey }) )
- }
- catch( e ){
- printjson( e )
- }
-
- sleep( 1000 )
- }
-
- jsTest.log( "Stopping splitOps..." )
-}
-
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/testconfig b/src/mongo/gotools/test/legacy26/jstests/libs/testconfig
deleted file mode 100644
index 0c1fc871d61..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/testconfig
+++ /dev/null
@@ -1,4 +0,0 @@
-fastsync = true
-#comment line
-#commentedflagwithan = false
-version = false
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/testconfig.json b/src/mongo/gotools/test/legacy26/jstests/libs/testconfig.json
deleted file mode 100644
index 5af32aad7d3..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/testconfig.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "fastsync" : true,
- "version" : false
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/trace_missing_docs.js b/src/mongo/gotools/test/legacy26/jstests/libs/trace_missing_docs.js
deleted file mode 100644
index 3faf50b4606..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/trace_missing_docs.js
+++ /dev/null
@@ -1,90 +0,0 @@
-
-//
-// On error inserting documents, traces back and shows where the document was dropped
-//
-
-function traceMissingDoc( coll, doc, mongos ) {
-
- if (mongos) coll = mongos.getCollection(coll + "");
- else mongos = coll.getMongo();
-
- var config = mongos.getDB( "config" );
- var shards = config.shards.find().toArray();
- for ( var i = 0; i < shards.length; i++ ) {
- shards[i].conn = new Mongo( shards[i].host );
- }
-
- var shardKeyPatt = config.collections.findOne({ _id : coll + "" }).key;
-
- // Project out the shard key
- var shardKey = {};
- for ( var k in shardKeyPatt ) {
- if ( doc[k] == undefined ) {
- jsTest.log( "Shard key " + tojson( shardKey ) +
- " not found in doc " + tojson( doc ) +
- ", falling back to _id search..." );
- shardKeyPatt = { _id : 1 };
- shardKey = { _id : doc['_id'] };
- break;
- }
- shardKey[k] = doc[k];
- }
-
- if ( doc['_id'] == undefined ) {
- jsTest.log( "Id not found in doc " + tojson( doc ) + " cannot trace oplog entries." );
- return;
- }
-
- jsTest.log( "Using shard key : " + tojson( shardKey ) );
-
- var allOps = [];
- for ( var i = 0; i < shards.length; i++ ) {
-
- var oplog = shards[i].conn.getCollection( "local.oplog.rs" );
- if ( !oplog.findOne() ) {
- oplog = shards[i].conn.getCollection( "local.oplog.$main" );
- }
-
- if ( !oplog.findOne() ) {
- jsTest.log( "No oplog was found on shard " + shards[i]._id );
- continue;
- }
-
- var addKeyQuery = function( query, prefix ) {
- for ( var k in shardKey ) {
- query[prefix + '.' + k] = shardKey[k];
- }
- return query;
- };
-
- var addToOps = function( cursor ) {
- cursor.forEach( function( doc ) {
- doc.shard = shards[i]._id;
- doc.realTime = new Date( doc.ts.getTime() * 1000 );
- allOps.push( doc );
- });
- };
-
- // Find ops
- addToOps( oplog.find( addKeyQuery( { op : 'i' }, 'o' ) ) );
- var updateQuery = { $or : [ addKeyQuery( { op : 'u' }, 'o2' ),
- { op : 'u', 'o2._id' : doc['_id'] } ] };
- addToOps( oplog.find( updateQuery ) );
- addToOps( oplog.find({ op : 'd', 'o._id' : doc['_id'] }) );
- }
-
- var compareOps = function( opA, opB ) {
- if ( opA.ts < opB.ts ) return -1;
- if ( opB.ts < opA.ts ) return 1;
- else return 0;
- }
-
- allOps.sort( compareOps );
-
- print( "Ops found for doc " + tojson( doc ) + " on each shard:\n" );
- for ( var i = 0; i < allOps.length; i++ ) {
- printjson( allOps[i] );
- }
-
- return allOps;
-} \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy26/jstests/libs/use_extended_timeout.js b/src/mongo/gotools/test/legacy26/jstests/libs/use_extended_timeout.js
deleted file mode 100644
index 7f770249214..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/libs/use_extended_timeout.js
+++ /dev/null
@@ -1,12 +0,0 @@
-var _orig_runMongoProgram = runMongoProgram;
-runMongoProgram = function() {
- var args = [];
- for (var i in arguments) {
- args[i] = arguments[i];
- }
- var progName = args[0];
- if (progName !== "bsondump" && args.indexOf("--dialTimeout") === -1) {
- args.push("--dialTimeout", "30");
- }
- return _orig_runMongoProgram.apply(null, args);
-};
diff --git a/src/mongo/gotools/test/legacy26/jstests/misc/biginsert.js b/src/mongo/gotools/test/legacy26/jstests/misc/biginsert.js
deleted file mode 100755
index ebbdc18ba3e..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/misc/biginsert.js
+++ /dev/null
@@ -1,18 +0,0 @@
-o = "xxxxxxxxxxxxxxxxxxx";
-o = o + o;
-o + o;
-o = o + o;
-o = o + o;
-o = o + o;
-
-var B = 40000;
-var last = new Date();
-for (i = 0; i < 30000000; i++) {
- db.foo.insert({ o: o });
- if (i % B == 0) {
- var n = new Date();
- print(i);
- print("per sec: " + B*1000 / (n - last));
- last = n;
- }
-}
diff --git a/src/mongo/gotools/test/legacy26/jstests/replsets/rslib.js b/src/mongo/gotools/test/legacy26/jstests/replsets/rslib.js
deleted file mode 100644
index 6a16db232e4..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/replsets/rslib.js
+++ /dev/null
@@ -1,115 +0,0 @@
-
-var count = 0;
-var w = 0;
-
-var wait = function(f,msg) {
- w++;
- var n = 0;
- while (!f()) {
- if( n % 4 == 0 )
- print("waiting " + w);
- if (++n == 4) {
- print("" + f);
- }
- assert(n < 200, 'tried 200 times, giving up on ' + msg );
- sleep(1000);
- }
-};
-
-/**
- * Use this to do something once every 4 iterations.
- *
- * <pre>
- * for (i=0; i<1000; i++) {
- * occasionally(function() { print("4 more iterations"); });
- * }
- * </pre>
- */
-var occasionally = function(f, n) {
- var interval = n || 4;
- if (count % interval == 0) {
- f();
- }
- count++;
-};
-
-var reconnect = function(a) {
- wait(function() {
- try {
- // make this work with either dbs or connections
- if (typeof(a.getDB) == "function") {
- db = a.getDB('foo');
- }
- else {
- db = a;
- }
- db.bar.stats();
- if (jsTest.options().keyFile || jsTest.options().useX509) { // SERVER-4241: Shell connections don't re-authenticate on reconnect
- return jsTest.authenticate(db.getMongo());
- }
- return true;
- } catch(e) {
- print(e);
- return false;
- }
- });
-};
-
-
-var getLatestOp = function(server) {
- server.getDB("admin").getMongo().setSlaveOk();
- var log = server.getDB("local")['oplog.rs'];
- var cursor = log.find({}).sort({'$natural': -1}).limit(1);
- if (cursor.hasNext()) {
- return cursor.next();
- }
- return null;
-};
-
-
-var waitForAllMembers = function(master, timeout) {
- var failCount = 0;
-
- assert.soon( function() {
- var state = null
- try {
- state = master.getSisterDB("admin").runCommand({replSetGetStatus:1});
- failCount = 0;
- } catch ( e ) {
- // Connection can get reset on replica set failover causing a socket exception
- print( "Calling replSetGetStatus failed" );
- print( e );
- return false;
- }
- occasionally(function() { printjson(state); }, 10);
-
- for (var m in state.members) {
- if (state.members[m].state != 1 && // PRIMARY
- state.members[m].state != 2 && // SECONDARY
- state.members[m].state != 7) { // ARBITER
- return false;
- }
- }
- printjson( state );
- return true;
- }, "not all members ready", timeout || 60000);
-
- print( "All members are now in state PRIMARY, SECONDARY, or ARBITER" );
-};
-
-var reconfig = function(rs, config) {
- var admin = rs.getPrimary().getDB("admin");
-
- try {
- var ok = admin.runCommand({replSetReconfig : config});
- assert.eq(ok.ok,1);
- }
- catch(e) {
- print(e);
- }
-
- master = rs.getPrimary().getDB("admin");
- waitForAllMembers(master);
-
- return master;
-};
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/csv1.js b/src/mongo/gotools/test/legacy26/jstests/tool/csv1.js
deleted file mode 100644
index 5eb7ab0249a..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/csv1.js
+++ /dev/null
@@ -1,42 +0,0 @@
-// csv1.js
-
-t = new ToolTest( "csv1" )
-
-c = t.startDB( "foo" );
-
-base = { a : 1 , b : "foo,bar\"baz,qux" , c: 5, 'd d': -6 , e: '-', f : "."};
-
-assert.eq( 0 , c.count() , "setup1" );
-c.insert( base );
-delete base._id
-assert.eq( 1 , c.count() , "setup2" );
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv" , "-f" , "a,b,c,d d,e,f" )
-
-c.drop()
-assert.eq( 0 , c.count() , "after drop" )
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "-f" , "a,b,c,d d,e,f" );
-assert.soon( "2 == c.count()" , "restore 2" );
-
-a = c.find().sort( { a : 1 } ).toArray();
-delete a[0]._id
-delete a[1]._id
-assert.eq( tojson( { a : "a" , b : "b" , c : "c" , 'd d': "d d", e: 'e', f : "f"} ) , tojson( a[1] ) , "csv parse 1" );
-assert.eq( tojson( base ) , tojson(a[0]) , "csv parse 0" )
-
-c.drop()
-assert.eq( 0 , c.count() , "after drop 2" )
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" )
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-
-x = c.findOne()
-delete x._id;
-assert.eq( tojson( base ) , tojson(x) , "csv parse 2" )
-
-
-
-
-t.stop()
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/csvexport1.js b/src/mongo/gotools/test/legacy26/jstests/tool/csvexport1.js
deleted file mode 100644
index 2cd3c9c0447..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/csvexport1.js
+++ /dev/null
@@ -1,65 +0,0 @@
-// csvexport1.js
-
-
-t = new ToolTest( "csvexport1" )
-
-c = t.startDB( "foo" );
-
-assert.eq( 0 , c.count() , "setup1" );
-
-objId = ObjectId()
-
-c.insert({ a : new NumberInt(1) , b : objId , c: [1, 2, 3], d : {a : "hello", b : "world"} , e: '-'})
-c.insert({ a : -2.0, c : MinKey, d : "Then he said, \"Hello World!\"", e : new NumberLong(3)})
-c.insert({ a : new BinData(0, "1234"), b : ISODate("2009-08-27T12:34:56.789"),
- c : new Timestamp(1234, 9876), d : /foo*\"bar\"/i,
- e : function foo() { print("Hello World!"); }})
-
-assert.eq( 3 , c.count() , "setup2" );
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv", "-f", "a,b,c,d,e")
-
-
-c.drop()
-
-assert.eq( 0 , c.count() , "after drop" )
-
-t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--type", "csv", "--headerline");
-
-assert.soon ( 3 + " == c.count()", "after import");
-
-// Note: Exporting and Importing to/from CSV is not designed to be round-trippable
-expected = []
-expected.push({ a : 1, b : "ObjectId(" + objId.valueOf() + ")", c : [ 1, 2, 3 ], d : { "a" : "hello", "b" : "world" }, e : "-"})
-expected.push({ a : -2.0, b : "", c : "$MinKey", d : "Then he said, \"Hello World!\"", e : 3})
-// "t" should be 1234, but the shell interprets the first field of timestamps as milliseconds while
-// they are stored as seconds. See SERVER-7718.
-expected.push({ a : "D76DF8", b : "2009-08-27T12:34:56.789Z",
- c : { "$timestamp" : { "t" : 1234, "i" : 9876 } },
- d : "/foo*\\\"bar\\\"/i", e : tojson(function foo() { print("Hello World!"); })})
-
-actual = []
-actual.push(c.find({a : 1}).toArray()[0]);
-actual.push(c.find({a : -2.0}).toArray()[0]);
-actual.push(c.find({a : "D76DF8"}).toArray()[0]);
-
-for (i = 0; i < expected.length; i++) {
- delete actual[i]._id
- assert.eq(Object.keys(expected[i]).length, Object.keys(actual[i]).length)
- keys = Object.keys(expected[i])
- for(var j=0;j<keys.length;j++){
- expectedVal = expected[i][keys[j]]
- if((typeof expectedVal)== "object"){
- // For fields which contain arrays or objects, they have been
- // exported as JSON - parse the JSON in the output and verify
- // that it matches the original document's value
- assert.docEq(expectedVal, JSON.parse(actual[i][keys[j]]), "CSV export " + i)
- }else{
- // Otherwise just compare the values directly
- assert.eq(expectedVal, actual[i][keys[j]], "CSV export " + i)
- }
- }
-}
-
-
-t.stop()
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/csvexport2.js b/src/mongo/gotools/test/legacy26/jstests/tool/csvexport2.js
deleted file mode 100644
index 3e0dd2c6829..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/csvexport2.js
+++ /dev/null
@@ -1,31 +0,0 @@
-// csvexport2.js
-
-t = new ToolTest( "csvexport2" )
-
-c = t.startDB( "foo" );
-
-// This test is designed to test exporting of a CodeWithScope object.
-// However, due to SERVER-3391, it is not possible to create a CodeWithScope object in the mongo shell,
-// therefore this test does not work. Once SERVER-3391 is resolved, this test should be un-commented out
-
-//assert.eq( 0 , c.count() , "setup1" );
-
-//c.insert({ a : 1 , b : Code("print(\"Hello \" + x);", {"x" : "World!"})})
-//assert.eq( 1 , c.count() , "setup2" );
-//t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv", "-f", "a,b")
-
-
-//c.drop()
-
-//assert.eq( 0 , c.count() , "after drop" )
-//t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--type", "csv", "--headerline");
-//assert.soon ( 1 + " == c.count()", "after import");
-
-//expected = { a : 1, b : "\"{ \"$code\" : print(\"Hello \" + x); , \"$scope\" : { \"x\" : \"World!\" } }"};
-//actual = c.findOne()
-
-//delete actual._id;
-//assert.eq( expected, actual );
-
-
-t.stop() \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/csvimport1.js b/src/mongo/gotools/test/legacy26/jstests/tool/csvimport1.js
deleted file mode 100644
index 3bff1110cbe..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/csvimport1.js
+++ /dev/null
@@ -1,40 +0,0 @@
-// csvimport1.js
-
-t = new ToolTest( "csvimport1" )
-
-c = t.startDB( "foo" );
-
-base = []
-base.push({ a : 1, b : "this is some text.\nThis text spans multiple lines, and just for fun\ncontains a comma", "c" : "This has leading and trailing whitespace!" })
-base.push({a : 2, b : "When someone says something you \"put it in quotes\"", "c" : "I like embedded quotes/slashes\\backslashes" })
-base.push({a : 3, b : " This line contains the empty string and has leading and trailing whitespace inside the quotes! ", "c" : "" })
-base.push({a : 4, b : "", "c" : "How are empty entries handled?" })
-base.push({a : 5, b : "\"\"", c : "\"This string is in quotes and contains empty quotes (\"\")\""})
-base.push({ a : "a" , b : "b" , c : "c"})
-
-assert.eq( 0 , c.count() , "setup" );
-
-t.runTool( "import" , "--file" , "jstests/tool/data/csvimport1.csv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "-f" , "a,b,c" );
-assert.soon( base.length + " == c.count()" , "after import 1 " );
-
-a = c.find().sort( { a : 1 } ).toArray();
-for (i = 0; i < base.length; i++ ) {
- delete a[i]._id
- assert.eq( tojson(base[i]), tojson(a[i]), "csv parse " + i)
-}
-
-c.drop()
-assert.eq( 0 , c.count() , "after drop" )
-
-t.runTool( "import" , "--file" , "jstests/tool/data/csvimport1.csv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" )
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( base.length - 1 , c.count() , "after import 2" );
-
-x = c.find().sort( { a : 1 } ).toArray();
-for (i = 0; i < base.length - 1; i++ ) {
- delete x[i]._id
- assert.eq( tojson(base[i]), tojson(x[i]), "csv parse with headerline " + i)
-}
-
-
-t.stop()
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/data/a.tsv b/src/mongo/gotools/test/legacy26/jstests/tool/data/a.tsv
deleted file mode 100644
index 1e094179a63..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/data/a.tsv
+++ /dev/null
@@ -1,2 +0,0 @@
-a b c d e
- 1 foobar 5 -6
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/data/csvimport1.csv b/src/mongo/gotools/test/legacy26/jstests/tool/data/csvimport1.csv
deleted file mode 100644
index 256d40a9184..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/data/csvimport1.csv
+++ /dev/null
@@ -1,8 +0,0 @@
-a,b,c
-1,"this is some text.
-This text spans multiple lines, and just for fun
-contains a comma", "This has leading and trailing whitespace!"
-2, "When someone says something you ""put it in quotes""", I like embedded quotes/slashes\backslashes
- 3 , " This line contains the empty string and has leading and trailing whitespace inside the quotes! ", ""
- "4" ,, How are empty entries handled?
-"5","""""", """This string is in quotes and contains empty quotes ("""")"""
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/data/dumprestore6/foo.bson b/src/mongo/gotools/test/legacy26/jstests/tool/data/dumprestore6/foo.bson
deleted file mode 100644
index b8f8f99e6bf..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/data/dumprestore6/foo.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/data/dumprestore6/system.indexes.bson b/src/mongo/gotools/test/legacy26/jstests/tool/data/dumprestore6/system.indexes.bson
deleted file mode 100644
index dde25da302a..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/data/dumprestore6/system.indexes.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/dumpauth.js b/src/mongo/gotools/test/legacy26/jstests/tool/dumpauth.js
deleted file mode 100644
index 2a2d613b708..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/dumpauth.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// dumpauth.js
-// test mongodump with authentication
-port = allocatePorts( 1 )[ 0 ];
-baseName = "tool_dumpauth";
-
-m = startMongod( "--auth", "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
-db = m.getDB( "admin" );
-
-t = db[ baseName ];
-t.drop();
-
-for(var i = 0; i < 100; i++) {
- t["testcol"].save({ "x": i });
-}
-
-db.createUser({user: "testuser" , pwd: "testuser", roles: jsTest.adminUserRoles});
-
-assert( db.auth( "testuser" , "testuser" ) , "auth failed" );
-
-x = runMongoProgram( "mongodump",
- "--db", baseName,
- "--authenticationDatabase=admin",
- "-u", "testuser",
- "-p", "testuser",
- "-h", "127.0.0.1:"+port,
- "--collection", "testcol" );
-assert.eq(x, 0, "mongodump should succeed with authentication");
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/dumpfilename1.js b/src/mongo/gotools/test/legacy26/jstests/tool/dumpfilename1.js
deleted file mode 100644
index fbe24551929..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/dumpfilename1.js
+++ /dev/null
@@ -1,14 +0,0 @@
-//dumpfilename1.js
-
-//Test designed to make sure error that dumping a collection with "/" fails
-
-t = new ToolTest( "dumpfilename1" );
-
-t.startDB( "foo" );
-
-c = t.db;
-c.getCollection("df/").insert({ a: 3 })
-assert(c.getCollection("df/").count() > 0) // check write worked
-assert(t.runTool( "dump" , "--out" , t.ext ) != 0, "dump should fail with non-zero return code")
-t.stop();
-
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore1.js b/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore1.js
deleted file mode 100644
index fd1e8789ea6..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore1.js
+++ /dev/null
@@ -1,23 +0,0 @@
-// dumprestore1.js
-
-t = new ToolTest( "dumprestore1" );
-
-c = t.startDB( "foo" );
-assert.eq( 0 , c.count() , "setup1" );
-c.save( { a : 22 } );
-assert.eq( 1 , c.count() , "setup2" );
-
-t.runTool( "dump" , "--out" , t.ext );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" );
-
-t.runTool( "restore" , "--dir" , t.ext );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-assert.eq( 22 , c.findOne().a , "after restore 2" );
-
-// ensure that --collection is used with --db. See SERVER-7721
-var ret = t.runTool( "dump" , "--collection" , "col" );
-assert.neq( ret, 0, "mongodump should return failure code" );
-t.stop();
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore10.js b/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore10.js
deleted file mode 100644
index 49f008ea591..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore10.js
+++ /dev/null
@@ -1,63 +0,0 @@
-// simple test to ensure write concern functions as expected
-
-var name = "dumprestore10";
-
-function step(msg) {
- msg = msg || "";
- this.x = (this.x || 0) + 1;
- print('\n' + name + ".js step " + this.x + ' ' + msg);
-}
-
-step();
-
-var replTest = new ReplSetTest( {name: name, nodes: 2} );
-var nodes = replTest.startSet();
-replTest.initiate();
-var master = replTest.getPrimary();
-var total = 1000;
-
-{
- step("store data");
- var foo = master.getDB("foo");
- for (i = 0; i < total; i++) {
- foo.bar.insert({ x: i, y: "abc" });
- }
-}
-
-{
- step("wait");
- replTest.awaitReplication();
-}
-
-step("mongodump from replset");
-
-var data = MongoRunner.dataDir + "/dumprestore10-dump1/";
-
-runMongoProgram( "mongodump", "--host", "127.0.0.1:"+replTest.ports[0], "--out", data );
-
-
-{
- step("remove data after dumping");
- master.getDB("foo").getCollection("bar").drop();
-}
-
-{
- step("wait");
- replTest.awaitReplication();
-}
-
-step("try mongorestore with write concern");
-
-runMongoProgram( "mongorestore", "--writeConcern", "2", "--host", "127.0.0.1:"+replTest.ports[0], "--dir", data );
-
-var x = 0;
-
-// no waiting for replication
-x = master.getDB("foo").getCollection("bar").count();
-
-assert.eq(x, total, "mongorestore should have successfully restored the collection");
-
-step("stopSet");
-replTest.stopSet();
-
-step("SUCCESS");
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore3.js b/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore3.js
deleted file mode 100644
index f1e5941cbd0..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore3.js
+++ /dev/null
@@ -1,60 +0,0 @@
-// dumprestore3.js
-
-var name = "dumprestore3";
-
-function step(msg) {
- msg = msg || "";
- this.x = (this.x || 0) + 1;
- print('\n' + name + ".js step " + this.x + ' ' + msg);
-}
-
-step();
-
-var replTest = new ReplSetTest( {name: name, nodes: 2} );
-var nodes = replTest.startSet();
-replTest.initiate();
-var master = replTest.getPrimary();
-
-{
- step("populate master");
- var foo = master.getDB("foo");
- for (i = 0; i < 20; i++) {
- foo.bar.insert({ x: i, y: "abc" });
- }
-}
-
-{
- step("wait for slaves");
- replTest.awaitReplication();
-}
-
-{
- step("dump & restore a db into a slave");
- var port = 30020;
- var conn = startMongodTest(port, name + "-other");
- var c = conn.getDB("foo").bar;
- c.save({ a: 22 });
- assert.eq(1, c.count(), "setup2");
-}
-
-step("try mongorestore to slave");
-
-var data = MongoRunner.dataDir + "/dumprestore3-other1/";
-resetDbpath(data);
-runMongoProgram( "mongodump", "--host", "127.0.0.1:"+port, "--out", data );
-
-var x = runMongoProgram( "mongorestore", "--host", "127.0.0.1:"+replTest.ports[1], "--dir", data );
-assert.eq(x, 1, "mongorestore should exit w/ -1 on slave");
-
-step("try mongoimport to slave");
-
-dataFile = MongoRunner.dataDir + "/dumprestore3-other2.json";
-runMongoProgram( "mongoexport", "--host", "127.0.0.1:"+port, "--out", dataFile, "--db", "foo", "--collection", "bar" );
-
-x = runMongoProgram( "mongoimport", "--host", "127.0.0.1:"+replTest.ports[1], "--file", dataFile );
-assert.eq(x, 1, "mongoreimport should exit w/ 1 on slave"); // windows return is signed
-
-step("stopSet");
-replTest.stopSet();
-
-step("SUCCESS");
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore4.js b/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore4.js
deleted file mode 100644
index 568e196061f..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore4.js
+++ /dev/null
@@ -1,42 +0,0 @@
-// dumprestore4.js -- see SERVER-2186
-
-// The point of this test is to ensure that mongorestore successfully
-// constructs indexes when the database being restored into has a
-// different name than the database dumped from. There are 2
-// issues here: (1) if you dumped from database "A" and restore into
-// database "B", B should have exactly the right indexes; (2) if for
-// some reason you have another database called "A" at the time of the
-// restore, mongorestore shouldn't touch it.
-
-t = new ToolTest( "dumprestore4" );
-
-c = t.startDB( "dumprestore4" );
-
-db=t.db
-
-dbname = db.getName();
-dbname2 = "NOT_"+dbname;
-
-db2=db.getSisterDB( dbname2 );
-
-db.dropDatabase(); // make sure it's empty
-db2.dropDatabase(); // make sure everybody's empty
-
-assert.eq( 0 , db.system.indexes.count() , "setup1" );
-c.ensureIndex({ x : 1} );
-assert.eq( 2 , db.system.indexes.count() , "setup2" ); // _id and x_1
-
-assert.eq( 0, t.runTool( "dump" , "-d" , dbname, "--out", t.ext ), "dump")
-
-// to ensure issue (2), we have to clear out the first db.
-// By inspection, db.dropIndexes() doesn't get rid of the _id index on c,
-// so we have to drop the collection.
-c.drop();
-assert.eq( 0, t.runTool( "restore" , "--dir" , t.ext + "/" + dbname, "-d", dbname2 ), "restore" );
-
-// issue (1)
-assert.eq( 2 , db2.system.indexes.count() , "after restore 1" );
-// issue (2)
-assert.eq( 0 , db.system.indexes.count() , "after restore 2" );
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore6.js b/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore6.js
deleted file mode 100644
index d8b349e9589..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore6.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// dumprestore6.js
-// Test restoring from a dump with an old index version
-
-t = new ToolTest( "dumprestore6" );
-
-c = t.startDB( "foo" );
-db = t.db
-assert.eq( 0 , c.count() , "setup1" );
-
-t.runTool("restore", "--dir", "jstests/tool/data/dumprestore6", "--db", "jstests_tool_dumprestore6")
-
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore" );
-assert.eq( 1 , db.system.indexes.findOne({name:'a_1'}).v, "index version wasn't updated")
-assert.eq( 1, c.count({v:0}), "dropped the 'v' field from a non-index collection")
-
-db.dropDatabase()
-assert.eq( 0 , c.count() , "after drop" );
-
-t.runTool("restore", "--dir", "jstests/tool/data/dumprestore6", "--db", "jstests_tool_dumprestore6", "--keepIndexVersion")
-
-assert.soon( "c.findOne()" , "no data after sleep2" );
-assert.eq( 1 , c.count() , "after restore2" );
-assert.eq( 0 , db.system.indexes.findOne({name:'a_1'}).v, "index version wasn't maintained")
-assert.eq( 1, c.count({v:0}), "dropped the 'v' field from a non-index collection")
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore7.js b/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore7.js
deleted file mode 100644
index a71725f434b..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore7.js
+++ /dev/null
@@ -1,66 +0,0 @@
-var name = "dumprestore7";
-
-
-function step(msg) {
- msg = msg || "";
- this.x = (this.x || 0) + 1;
- print('\n' + name + ".js step " + this.x + ' ' + msg);
-}
-
-step();
-
-var replTest = new ReplSetTest( {name: name, nodes: 1} );
-var nodes = replTest.startSet();
-replTest.initiate();
-var master = replTest.getPrimary();
-
-{
- step("first chunk of data");
- var foo = master.getDB("foo");
- for (i = 0; i < 20; i++) {
- foo.bar.insert({ x: i, y: "abc" });
- }
-}
-
-{
- step("wait");
- replTest.awaitReplication();
- var time = replTest.getPrimary().getDB("local").getCollection("oplog.rs").find().limit(1).sort({$natural:-1}).next();
- step(time.ts.t);
-}
-
-{
- step("second chunk of data");
- var foo = master.getDB("foo");
- for (i = 30; i < 50; i++) {
- foo.bar.insert({ x: i, y: "abc" });
- }
-}
-{
- var port = 30020;
- var conn = startMongodTest(port, name + "-other");
-}
-
-step("try mongodump with $timestamp");
-
-var data = MongoRunner.dataDir + "/dumprestore7-dump1/";
-var query = "{\"ts\":{\"$gt\":{\"$timestamp\":{\"t\":"+ time.ts.t + ",\"i\":" + time.ts.i +"}}}}";
-
-MongoRunner.runMongoTool( "mongodump",
- { "host": "127.0.0.1:"+replTest.ports[0],
- "db": "local", "collection": "oplog.rs",
- "query": query, "out": data });
-
-step("try mongorestore from $timestamp");
-
-runMongoProgram( "mongorestore", "--host", "127.0.0.1:"+port, "--dir", data );
-var x = 9;
-x = conn.getDB("local").getCollection("oplog.rs").count();
-
-assert.eq(x, 20, "mongorestore should only have the latter 20 entries");
-
-step("stopSet");
-replTest.stopSet();
-
-step("SUCCESS");
-
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore8.js b/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore8.js
deleted file mode 100644
index 4e6591738d6..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore8.js
+++ /dev/null
@@ -1,105 +0,0 @@
-// dumprestore8.js
-
-// This file tests that indexes and capped collection options get properly dumped and restored.
-// It checks that this works both when doing a full database dump/restore and when doing it just for a single db or collection
-
-t = new ToolTest( "dumprestore8" );
-
-t.startDB( "foo" );
-db = t.db;
-
-dbname = db.getName();
-dbname2 = "NOT_"+dbname;
-
-db.dropDatabase();
-
-assert.eq( 0 , db.foo.count() , "setup1" );
-db.foo.save( { a : 1, b : 1 } );
-db.foo.ensureIndex({a:1});
-db.foo.ensureIndex({b:1, _id:-1});
-assert.eq( 1 , db.foo.count() , "setup2" );
-
-
-assert.eq( 0 , db.bar.count() , "setup3" );
-db.createCollection("bar", {capped:true, size:1000});
-
-for (var i = 0; i < 1000; i++) {
- db.bar.save( { x : i } );
-}
-db.bar.ensureIndex({x:1});
-
-barDocCount = db.bar.count();
-assert.gt( barDocCount, 0 , "No documents inserted" );
-assert.lt( db.bar.count(), 1000 , "Capped collection didn't evict documents" );
-assert.eq( 5 , db.system.indexes.count() , "Indexes weren't created right" );
-
-
-// Full dump/restore
-
-t.runTool( "dump" , "--out" , t.ext );
-
-db.dropDatabase();
-assert.eq( 0 , db.foo.count() , "foo not dropped" );
-assert.eq( 0 , db.bar.count() , "bar not dropped" );
-assert.eq( 0 , db.system.indexes.count() , "indexes not dropped" );
-
-t.runTool( "restore" , "--dir" , t.ext );
-
-assert.soon( "db.foo.findOne()" , "no data after sleep" );
-assert.eq( 1 , db.foo.count() , "wrong number of docs restored to foo" );
-assert.eq( barDocCount, db.bar.count(), "wrong number of docs restored to bar" );
-for (var i = 0; i < 10; i++) {
- db.bar.save({x:i});
-}
-assert.eq( barDocCount, db.bar.count(), "Capped collection didn't evict documents after restore." );
-assert.eq( 5 , db.system.indexes.count() , "Indexes weren't created correctly by restore" );
-
-
-// Dump/restore single DB
-
-dumppath = t.ext + "singledbdump/";
-mkdir(dumppath);
-t.runTool( "dump" , "-d", dbname, "--out" , dumppath );
-
-db.dropDatabase();
-assert.eq( 0 , db.foo.count() , "foo not dropped2" );
-assert.eq( 0 , db.bar.count() , "bar not dropped2" );
-assert.eq( 0 , db.system.indexes.count() , "indexes not dropped2" );
-
-t.runTool( "restore" , "-d", dbname2, "--dir" , dumppath + dbname );
-
-db = db.getSiblingDB(dbname2);
-
-assert.soon( "db.foo.findOne()" , "no data after sleep 2" );
-assert.eq( 1 , db.foo.count() , "wrong number of docs restored to foo 2" );
-assert.eq( barDocCount, db.bar.count(), "wrong number of docs restored to bar 2" );
-for (var i = 0; i < 10; i++) {
- db.bar.save({x:i});
-}
-assert.eq( barDocCount, db.bar.count(), "Capped collection didn't evict documents after restore 2." );
-assert.eq( 5 , db.system.indexes.count() , "Indexes weren't created correctly by restore 2" );
-
-
-// Dump/restore single collection
-
-dumppath = t.ext + "singlecolldump/";
-mkdir(dumppath);
-t.runTool( "dump" , "-d", dbname2, "-c", "bar", "--out" , dumppath );
-
-db.dropDatabase();
-assert.eq( 0 , db.bar.count() , "bar not dropped3" );
-assert.eq( 0 , db.system.indexes.count() , "indexes not dropped3" );
-
-t.runTool( "restore" , "-d", dbname, "-c", "baz", "--dir" , dumppath + dbname2 + "/bar.bson" );
-
-db = db.getSiblingDB(dbname);
-
-assert.soon( "db.baz.findOne()" , "no data after sleep 2" );
-assert.eq( barDocCount, db.baz.count(), "wrong number of docs restored to bar 2" );
-for (var i = 0; i < 10; i++) {
- db.baz.save({x:i});
-}
-assert.eq( barDocCount, db.baz.count(), "Capped collection didn't evict documents after restore 3." );
-assert.eq( 2 , db.system.indexes.count() , "Indexes weren't created correctly by restore 3" );
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore9.js b/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore9.js
deleted file mode 100644
index 4bbb2fc18b1..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore9.js
+++ /dev/null
@@ -1,79 +0,0 @@
-if (0) { // Test disabled until SERVER-3853 is finished.
-var name = "dumprestore9";
-function step(msg) {
- msg = msg || "";
- this.x = (this.x || 0) + 1;
- print('\n' + name + ".js step " + this.x + ' ' + msg);
-}
-
-s = new ShardingTest( "dumprestore9a", 2, 0, 3, {chunksize:1} );
-
-step("Shard collection");
-
-s.adminCommand( { enablesharding : "aaa" } ); // Make this db alphabetically before 'config' so it gets restored first
-s.adminCommand( { shardcollection : "aaa.foo" , key : { x : 1 } } );
-
-db = s.getDB( "aaa" );
-coll = db.foo;
-
-step("insert data");
-
-str = 'a';
-while (str.length < 1024*512) {
- str += str;
-}
-
-numDocs = 20;
-for (var i = 0; i < numDocs; i++) {
- coll.insert({x:i, str:str});
-}
-
-step("Wait for balancing");
-
-assert.soon( function(){ var x = s.chunkDiff( "foo" , "aaa" ); print( "chunk diff: " + x ); return x < 2; } , "no balance happened" , 8 * 60 * 1000 , 2000 );
-
-assert.eq(numDocs, coll.count(), "Documents weren't inserted correctly");
-
-step("dump cluster");
-
-dumpdir = MongoRunner.dataDir + "/dumprestore9-dump1/";
-resetDbpath(dumpdir);
-runMongoProgram( "mongodump", "--host", s._mongos[0].host, "--out", dumpdir );
-
-step("Shutting down cluster");
-
-s.stop();
-
-step("Starting up clean cluster");
-s = new ShardingTest( "dumprestore9b", 2, 0, 3, {chunksize:1} );
-
-db = s.getDB( "aaa" );
-coll = db.foo;
-
-assert.eq(0, coll.count(), "Data wasn't cleaned up by restarting sharding test");
-
-step("Restore data and config");
-
-runMongoProgram( "mongorestore", dumpdir, "--host", s._mongos[1].host, "--restoreShardingConfig", "--forceConfigRestore");
-
-config = s.getDB("config");
-assert(config.databases.findOne({_id:'aaa'}).partitioned, "Config data wasn't restored properly");
-
-assert( s.chunkDiff( "foo" , "aaa" ) < 2, "Chunk data wasn't restored properly");
-
-assert.eq(numDocs, coll.count(), "Didn't restore all documents properly2");
-assert.eq(numDocs, coll.find().itcount(), "Didn't restore all documents properly");
-
-for (var i = 0; i < numDocs; i++) {
- doc = coll.findOne({x:i});
- assert.eq(i, doc.x, "Doc missing from the shard it should be on");
-}
-
-for (var i = 0; i < s._connections.length; i++) {
- assert(s._connections[i].getDB("aaa").foo.count() > 0, "No data on shard: " + s._connections[i].host);
-}
-
-step("Stop cluster");
-s.stop();
-step("SUCCESS");
-} \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestoreWithNoOptions.js b/src/mongo/gotools/test/legacy26/jstests/tool/dumprestoreWithNoOptions.js
deleted file mode 100644
index d6b87ffe70c..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestoreWithNoOptions.js
+++ /dev/null
@@ -1,107 +0,0 @@
-// SERVER-6366
-// relates to SERVER-808
-//
-// This file tests that options are not restored upon
-// mongorestore with --noOptionsRestore
-//
-// It checks that this works both when doing a full
-// database dump/restore and when doing it just for a
-// single db or collection.
-
-t = new ToolTest( "dumprestoreWithNoOptions" );
-
-t.startDB( "foo" );
-db = t.db;
-
-dbname = db.getName();
-dbname2 = "NOT_"+dbname;
-
-db.dropDatabase();
-
-var options = { capped: true, size: 4096, autoIndexId: true };
-db.createCollection('capped', options);
-assert.eq( 1, db.system.indexes.count(), "auto index not created" );
-var cappedOptions = db.capped.exists().options;
-for ( var opt in options ) {
- assert.eq(options[opt], cappedOptions[opt],
- 'invalid option:' + tojson(options) + " " + tojson(cappedOptions));
-}
-db.capped.insert({ x: 1 });
-db.getLastError()
-
-// Full dump/restore
-
-t.runTool( "dump" , "--out" , t.ext );
-
-db.dropDatabase();
-assert.eq( 0, db.capped.count(), "capped not dropped");
-assert.eq( 0, db.system.indexes.count(), "indexes not dropped" );
-
-t.runTool( "restore" , "--dir" , t.ext , "--noOptionsRestore");
-
-assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
-assert(true !== db.capped.stats().capped, "restore options were not ignored");
-assert(undefined === db.capped.exists().options,
- "restore options not ignored: " + tojson( db.capped.exists() ) );
-
-// Dump/restore single DB
-
-db.dropDatabase();
-var options = { capped: true, size: 4096, autoIndexId: true };
-db.createCollection('capped', options);
-assert.eq( 1, db.system.indexes.count(), "auto index not created" );
-var cappedOptions = db.capped.exists().options;
-for ( var opt in options ) {
- assert.eq(options[opt], cappedOptions[opt], 'invalid option')
-}
-db.capped.insert({ x: 1 });
-db.getLastError()
-
-dumppath = t.ext + "noOptionsSingleDump/";
-mkdir(dumppath);
-t.runTool( "dump" , "-d", dbname, "--out" , dumppath );
-
-db.dropDatabase();
-assert.eq( 0, db.capped.count(), "capped not dropped");
-assert.eq( 0, db.system.indexes.count(), "indexes not dropped" );
-
-t.runTool( "restore" , "-d", dbname2, "--dir" , dumppath + dbname, "--noOptionsRestore");
-
-db = db.getSiblingDB(dbname2);
-
-assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
-assert(true !== db.capped.stats().capped, "restore options were not ignored");
-assert(undefined === db.capped.exists().options, "restore options not ignored");
-
-// Dump/restore single collection
-
-db.dropDatabase();
-var options = { capped: true, size: 4096, autoIndexId: true };
-db.createCollection('capped', options);
-assert.eq( 1, db.system.indexes.count(), "auto index not created" );
-var cappedOptions = db.capped.exists().options;
-for ( var opt in options ) {
- assert.eq(options[opt], cappedOptions[opt], 'invalid option')
-}
-db.capped.insert({ x: 1 });
-db.getLastError()
-
-dumppath = t.ext + "noOptionsSingleColDump/";
-mkdir(dumppath);
-dbname = db.getName();
-t.runTool( "dump" , "-d", dbname, "-c", "capped", "--out" , dumppath );
-
-db.dropDatabase();
-
-assert.eq( 0, db.capped.count(), "capped not dropped");
-assert.eq( 0, db.system.indexes.count(), "indexes not dropped" );
-
-t.runTool( "restore", "-d", dbname, "--drop", "--noOptionsRestore", dumppath + dbname );
-
-db = db.getSiblingDB(dbname);
-
-assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
-assert( true !== db.capped.stats().capped, "restore options were not ignored" );
-assert( undefined === db.capped.exists().options );
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore_auth.js b/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore_auth.js
deleted file mode 100644
index f99b5d0405c..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore_auth.js
+++ /dev/null
@@ -1,35 +0,0 @@
-// dumprestore_auth.js
-
-t = new ToolTest("dumprestore_auth", { auth : "" });
-
-c = t.startDB("foo");
-
-adminDB = c.getDB().getSiblingDB('admin');
-adminDB.createUser({user: 'admin', pwd: 'password', roles: ['root']});
-adminDB.auth('admin','password');
-adminDB.createUser({user: 'backup', pwd: 'password', roles: ['backup']});
-adminDB.createUser({user: 'restore', pwd: 'password', roles: ['restore']});
-
-assert.eq(0 , c.count() , "setup1");
-c.save({ a : 22 });
-assert.eq(1 , c.count() , "setup2");
-
-assert.commandWorked(c.runCommand("collMod", {usePowerOf2Sizes: false}));
-assert.eq(0, c.getDB().system.namespaces.findOne({name: c.getFullName()}).options.flags);
-
-t.runTool("dump" , "--out" , t.ext, "--username", "backup", "--password", "password");
-
-c.drop();
-assert.eq(0 , c.count() , "after drop");
-
-t.runTool("restore" , "--dir" , t.ext, "--writeConcern", "0"); // Should fail
-assert.eq(0 , c.count() , "after restore without auth");
-
-t.runTool("restore" , "--dir" , t.ext, "--username", "restore", "--password", "password", "--writeConcern", "0");
-assert.soon("c.findOne()" , "no data after sleep");
-assert.eq(1 , c.count() , "after restore 2");
-assert.eq(22 , c.findOne().a , "after restore 2");
-assert.eq(0, c.getDB().system.namespaces.findOne({name: c.getFullName()}).options.flags);
-assert.eq(3, adminDB.system.users.count());
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore_auth2.js b/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore_auth2.js
deleted file mode 100644
index fd7d9a034d3..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore_auth2.js
+++ /dev/null
@@ -1,96 +0,0 @@
-// dumprestore_auth2.js
-// Tests that mongodump and mongorestore properly handle access control information
-// Tests that the default auth roles of backup and restore work properly.
-
-t = new ToolTest("dumprestore_auth2", {auth: ""});
-
-coll = t.startDB("foo");
-admindb = coll.getDB().getSiblingDB("admin")
-
-// Create the relevant users and roles.
-admindb.createUser({user: "root", pwd: "pass", roles: ["root"]});
-admindb.auth("root", "pass");
-
-admindb.createUser({user: "backup", pwd: "pass", roles: ["backup"]});
-admindb.createUser({user: "restore", pwd: "pass", roles: ["restore"]});
-
-admindb.createRole({role: "customRole",
- privileges:[{resource: {db: "jstests_tool_dumprestore_auth2",
- collection: "foo"},
- actions: ["find"]}],
- roles:[]});
-admindb.createUser({user: "test", pwd: "pass", roles: ["customRole"]});
-
-coll.insert({word: "tomato"});
-assert.eq(1, coll.count());
-
-assert.eq(4, admindb.system.users.count(), "setup users")
-assert.eq(2, admindb.system.indexes.count({ns: "admin.system.users"}),
- "setup2: " + tojson( admindb.system.users.getIndexes() ) );
-assert.eq(1, admindb.system.roles.count(), "setup3")
-assert.eq(2, admindb.system.indexes.count({ns: "admin.system.roles"}), "setup4")
-assert.eq(1, admindb.system.version.count());
-var versionDoc = admindb.system.version.findOne();
-
-// Logout root user.
-admindb.logout();
-
-// Verify that the custom role works as expected.
-admindb.auth("test", "pass");
-assert.eq("tomato", coll.findOne().word);
-admindb.logout();
-
-// Dump the database.
-t.runTool("dump", "--out", t.ext, "--username", "backup", "--password", "pass");
-
-// Drop the relevant data in the database.
-admindb.auth("root", "pass");
-coll.getDB().dropDatabase();
-admindb.dropUser("backup");
-admindb.dropUser("test");
-admindb.dropRole("customRole");
-
-assert.eq(2, admindb.system.users.count(), "didn't drop backup and test users");
-assert.eq(0, admindb.system.roles.count(), "didn't drop roles");
-assert.eq(0, coll.count(), "didn't drop foo coll");
-
-t.runTool("restore", "--dir", t.ext, "--username", "restore", "--password", "pass", "--writeConcern", "0");
-
-assert.soon("admindb.system.users.findOne()", "no data after restore");
-assert.eq(4, admindb.system.users.count(), "didn't restore users");
-assert.eq(2, admindb.system.indexes.count({ns: "admin.system.users"}),
- "didn't restore user indexes");
-assert.eq(1, admindb.system.roles.find({role:'customRole'}).count(), "didn't restore roles");
-assert.eq(2, admindb.system.indexes.count({ns: "admin.system.roles"}),
- "didn't restore role indexes");
-
-admindb.logout();
-
-// Login as user with customRole to verify privileges are restored.
-admindb.auth("test", "pass");
-assert.eq("tomato", coll.findOne().word);
-admindb.logout();
-
-admindb.auth("root", "pass");
-admindb.createUser({user: "root2", pwd: "pass", roles: ["root"]});
-admindb.dropRole("customRole");
-admindb.createRole({role: "customRole2", roles: [], privileges:[]});
-admindb.dropUser("root");
-admindb.logout();
-
-t.runTool("restore", "--dir", t.ext, "--username", "restore", "--password", "pass", "--drop", "--writeConcern", "0");
-
-admindb.auth("root", "pass");
-assert.soon("1 == admindb.system.users.find({user:'root'}).count()", "didn't restore users 2");
-assert.eq(0, admindb.system.users.find({user:'root2'}).count(), "didn't drop users");
-assert.eq(0, admindb.system.roles.find({role:'customRole2'}).count(), "didn't drop roles");
-assert.eq(1, admindb.system.roles.find({role:'customRole'}).count(), "didn't restore roles");
-assert.eq(2, admindb.system.indexes.count({ns: "admin.system.users"}),
- "didn't maintain user indexes");
-assert.eq(2, admindb.system.indexes.count({ns: "admin.system.roles"}),
- "didn't maintain role indexes");
-assert.eq(1, admindb.system.version.count(), "didn't restore version");
-assert.docEq(versionDoc, admindb.system.version.findOne(), "version doc wasn't restored properly");
-admindb.logout();
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore_auth3.js b/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore_auth3.js
deleted file mode 100644
index b87418ed176..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/dumprestore_auth3.js
+++ /dev/null
@@ -1,199 +0,0 @@
-// dumprestore_auth3.js
-// Tests that mongodump and mongorestore properly handle access control information when doing
-// single-db dumps and restores
-
-
-// Runs the tool with the given name against the given mongod.
-function runTool(toolName, mongod, options) {
- var opts = {host: mongod.host};
- Object.extend(opts, options);
- MongoRunner.runMongoTool(toolName, opts);
-}
-
-var mongod = MongoRunner.runMongod();
-var admindb = mongod.getDB("admin");
-var db = mongod.getDB("foo");
-
-jsTestLog("Creating Admin user & initial data");
-admindb.createUser({user: 'root', pwd: 'pass', roles: ['root']});
-admindb.createUser({user: 'backup', pwd: 'pass', roles: ['backup']});
-admindb.createUser({user: 'restore', pwd: 'pass', roles: ['restore']});
-admindb.createRole({role: "dummyRole", roles: [], privileges:[]});
-db.createUser({user: 'user', pwd: 'pass', roles: jsTest.basicUserRoles});
-db.createRole({role: 'role', roles: [], privileges:[]});
-var backupActions = ['find'];
-db.createRole({role: 'backupFooChester',
- privileges: [{resource: {db: 'foo', collection: 'chester'}, actions: backupActions}],
- roles: []});
-db.createUser({user: 'backupFooChester', pwd: 'pass', roles: ['backupFooChester']});
-
-var userCount = db.getUsers().length;
-var rolesCount = db.getRoles().length;
-var adminUsersCount = admindb.getUsers().length;
-var adminRolesCount = admindb.getRoles().length;
-var systemUsersCount = admindb.system.users.count();
-var systemVersionCount = admindb.system.version.count();
-
-db.bar.insert({a:1});
-
-assert.eq(1, db.bar.findOne().a);
-assert.eq(userCount, db.getUsers().length, "setup");
-assert.eq(rolesCount, db.getRoles().length, "setup2");
-assert.eq(adminUsersCount, admindb.getUsers().length, "setup3");
-assert.eq(adminRolesCount, admindb.getRoles().length, "setup4");
-assert.eq(systemUsersCount, admindb.system.users.count(), "setup5");
-assert.eq(systemVersionCount, admindb.system.version.count(),"system version");
-assert.eq(1, admindb.system.users.count({user: "restore"}), "Restore user is missing");
-assert.eq(1, admindb.system.users.count({user: "backup"}), "Backup user is missing");
-var versionDoc = admindb.system.version.findOne();
-
-jsTestLog("Dump foo database without dumping user data");
-var dumpDir = MongoRunner.getAndPrepareDumpDirectory("dumprestore_auth3");
-runTool("mongodump", mongod, {out: dumpDir, db: "foo"});
-db = mongod.getDB('foo');
-
-db.dropDatabase();
-db.dropAllUsers();
-db.dropAllRoles();
-
-jsTestLog("Restore foo database from dump that doesn't contain user data ");
-runTool("mongorestore", mongod, {dir: dumpDir + "foo/", db: 'foo', restoreDbUsersAndRoles: "", writeConcern: "0"});
-
-db = mongod.getDB('foo');
-
-assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
-assert.eq(1, db.bar.findOne().a);
-assert.eq(0, db.getUsers().length, "Restore created users somehow");
-assert.eq(0, db.getRoles().length, "Restore created roles somehow");
-
-// Re-create user data
-db.createUser({user: 'user', pwd: 'password', roles: jsTest.basicUserRoles});
-db.createRole({role: 'role', roles: [], privileges:[]});
-userCount = 1;
-rolesCount = 1;
-
-assert.eq(1, db.bar.findOne().a);
-assert.eq(userCount, db.getUsers().length, "didn't create user");
-assert.eq(rolesCount, db.getRoles().length, "didn't create role");
-
-jsTestLog("Dump foo database *with* user data");
-runTool("mongodump", mongod, {out: dumpDir, db: "foo", dumpDbUsersAndRoles: ""});
-db = mongod.getDB('foo');
-
-db.dropDatabase();
-db.dropAllUsers();
-db.dropAllRoles();
-
-assert.eq(0, db.getUsers().length, "didn't drop users");
-assert.eq(0, db.getRoles().length, "didn't drop roles");
-assert.eq(0, db.bar.count(), "didn't drop 'bar' collection");
-
-jsTestLog("Restore foo database without restoring user data, even though it's in the dump");
-runTool("mongorestore", mongod, {dir: dumpDir + "foo/", db: 'foo', writeConcern: "0"});
-db = mongod.getDB('foo');
-
-assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
-assert.eq(1, db.bar.findOne().a);
-assert.eq(0, db.getUsers().length, "Restored users even though it shouldn't have");
-assert.eq(0, db.getRoles().length, "Restored roles even though it shouldn't have");
-
-jsTestLog("Restore foo database *with* user data");
-runTool("mongorestore", mongod, {dir: dumpDir + "foo/", db: 'foo', restoreDbUsersAndRoles: "", writeConcern: "0"});
-db = mongod.getDB('foo');
-admindb = mongod.getDB('admin');
-
-assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
-assert.eq(1, db.bar.findOne().a);
-assert.eq(userCount, db.getUsers().length, "didn't restore users");
-assert.eq(rolesCount, db.getRoles().length, "didn't restore roles");
-assert.eq(1, admindb.system.users.count({user: "restore", db: "admin"}), "Restore user is missing");
-assert.docEq(versionDoc,
- db.getSiblingDB('admin').system.version.findOne(),
- "version doc was changed by restore");
-
-jsTestLog("Make modifications to user data that should be overridden by the restore");
-db.dropUser('user')
-db.createUser({user: 'user2', pwd: 'password2', roles: jsTest.basicUserRoles});
-db.dropRole('role')
-db.createRole({role: 'role2', roles: [], privileges:[]});
-
-jsTestLog("Restore foo database (and user data) with --drop so it overrides the changes made");
-// Restore with --drop to override the changes to user data
-runTool("mongorestore", mongod,
- {dir: dumpDir + "foo/", db: 'foo', drop: "", restoreDbUsersAndRoles: "", writeConcern: "0"});
-db = mongod.getDB('foo');
-admindb = mongod.getDB('admin');
-
-assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
-assert.eq(adminUsersCount, admindb.getUsers().length, "Admin users were dropped");
-assert.eq(adminRolesCount, admindb.getRoles().length, "Admin roles were dropped");
-assert.eq(1, db.bar.findOne().a);
-assert.eq(userCount, db.getUsers().length, "didn't restore users");
-assert.eq("user", db.getUser('user').user, "didn't update user");
-assert.eq(rolesCount, db.getRoles().length, "didn't restore roles");
-assert.eq("role", db.getRole('role').role, "didn't update role");
-assert.docEq(versionDoc,
- db.getSiblingDB('admin').system.version.findOne(),
- "version doc was changed by restore");
-
-
-jsTestLog("Dump just the admin database. User data should be dumped by default");
-// Make a user in another database to make sure it is properly captured
-db.getSiblingDB('bar').createUser({user: "user", pwd: 'pwd', roles: []});
-db.getSiblingDB('admin').createUser({user: "user", pwd: 'pwd', roles: []});
-adminUsersCount += 1;
-runTool("mongodump", mongod, {out: dumpDir, db: "admin"});
-db = mongod.getDB('foo');
-
-// Change user data a bit.
-db.dropAllUsers();
-db.getSiblingDB('bar').createUser({user: "user2", pwd: 'pwd', roles: []});
-db.getSiblingDB('admin').dropAllUsers();
-
-jsTestLog("Restore just the admin database. User data should be restored by default");
-runTool("mongorestore", mongod, {dir: dumpDir + "admin/", db: 'admin', drop: "", writeConcern: "0"});
-db = mongod.getDB('foo');
-var otherdb = db.getSiblingDB('bar');
-var admindb = db.getSiblingDB('admin');
-
-assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
-assert.eq(1, db.bar.findOne().a);
-assert.eq(userCount, db.getUsers().length, "didn't restore users");
-assert.eq("user", db.getUser('user').user, "didn't restore user");
-assert.eq(rolesCount, db.getRoles().length, "didn't restore roles");
-assert.eq("role", db.getRole('role').role, "didn't restore role");
-assert.eq(1, otherdb.getUsers().length, "didn't restore users for bar database");
-assert.eq("user", otherdb.getUsers()[0].user, "didn't restore user for bar database");
-assert.eq(adminUsersCount, admindb.getUsers().length, "didn't restore users for admin database");
-assert.eq("user", admindb.getUser("user").user, "didn't restore user for admin database");
-assert.eq(6, admindb.system.users.count(), "has the wrong # of users for the whole server");
-assert.eq(2, admindb.system.roles.count(), "has the wrong # of roles for the whole server");
-assert.docEq(versionDoc,
- db.getSiblingDB('admin').system.version.findOne(),
- "version doc was changed by restore");
-
-jsTestLog("Dump all databases");
-runTool("mongodump", mongod, {out: dumpDir});
-db = mongod.getDB('foo');
-
-db.dropDatabase();
-db.dropAllUsers();
-db.dropAllRoles();
-
-assert.eq(0, db.getUsers().length, "didn't drop users");
-assert.eq(0, db.getRoles().length, "didn't drop roles");
-assert.eq(0, db.bar.count(), "didn't drop 'bar' collection");
-
-jsTestLog("Restore all databases");
-runTool("mongorestore", mongod, {dir: dumpDir, writeConcern: "0"});
-db = mongod.getDB('foo');
-
-assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
-assert.eq(1, db.bar.findOne().a);
-assert.eq(1, db.getUsers().length, "didn't restore users");
-assert.eq(1, db.getRoles().length, "didn't restore roles");
-assert.docEq(versionDoc,
- db.getSiblingDB('admin').system.version.findOne(),
- "version doc was changed by restore");
-
-MongoRunner.stopMongod(mongod);
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/dumpsecondary.js b/src/mongo/gotools/test/legacy26/jstests/tool/dumpsecondary.js
deleted file mode 100644
index 7a641542498..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/dumpsecondary.js
+++ /dev/null
@@ -1,38 +0,0 @@
-var replTest = new ReplSetTest( {name: 'testSet', nodes: 2} );
-
-var nodes = replTest.startSet();
-replTest.initiate();
-
-var master = replTest.getPrimary();
-db = master.getDB("foo")
-db.foo.save({a: 1000});
-replTest.awaitReplication();
-replTest.awaitSecondaryNodes();
-
-assert.eq( 1 , db.foo.count() , "setup" );
-
-var slaves = replTest.liveNodes.slaves;
-assert( slaves.length == 1, "Expected 1 slave but length was " + slaves.length );
-slave = slaves[0];
-
-var args = ['mongodump', '-h', slave.host, '--out', MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external/'];
-var authargs = ['--username', jsTest.options().authUser, '--password', jsTest.options().authPassword];
-if (jsTest.options().keyFile) {
- args = args.concat(authargs);
-}
-runMongoProgram.apply(null, args);
-db.foo.drop()
-
-assert.eq( 0 , db.foo.count() , "after drop" );
-args = ['mongorestore', '-h', master.host, MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external/'];
-if (jsTest.options().keyFile) {
- args = args.concat(authargs);
-}
-runMongoProgram.apply(null, args)
-assert.soon( "db.foo.findOne()" , "no data after sleep" );
-assert.eq( 1 , db.foo.count() , "after restore" );
-assert.eq( 1000 , db.foo.findOne().a , "after restore 2" );
-
-resetDbpath(MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external')
-
-replTest.stopSet(15)
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/exportimport1.js b/src/mongo/gotools/test/legacy26/jstests/tool/exportimport1.js
deleted file mode 100644
index a7a7bcee90c..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/exportimport1.js
+++ /dev/null
@@ -1,66 +0,0 @@
-// exportimport1.js
-
-t = new ToolTest( "exportimport1" );
-
-c = t.startDB( "foo" );
-assert.eq( 0 , c.count() , "setup1" );
-var arr = ["x", undefined, "y", undefined];
-c.save( { a : 22 , b : arr} );
-assert.eq( 1 , c.count() , "setup2" );
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-var doc = c.findOne();
-assert.eq( 22 , doc.a , "after restore 2" );
-for (var i=0; i<arr.length; i++) {
- if (typeof arr[i] == 'undefined') {
- // null should be { "$undefined" : true }, but this is a workaround for SERVER-6102
- assert.eq( null, doc.b[i] , "after restore array: "+i );
- } else {
- assert.eq( arr[i], doc.b[i] , "after restore array: "+i );
- }
-}
-
-// now with --jsonArray
-
-t.runTool( "export" , "--jsonArray" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
-
-t.runTool( "import" , "--jsonArray" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-assert.eq( 22 , c.findOne().a , "after restore 2" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-arr = ["a", undefined, "c"];
-c.save({a : arr});
-assert.eq( 1 , c.count() , "setup2" );
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-var doc = c.findOne();
-for (var i=0; i<arr.length; i++) {
- if (typeof arr[i] == 'undefined') {
- // null should be { "$undefined" : true }, but this is a workaround for SERVER-6102
- assert.eq( null, doc.a[i] , "after restore array: "+i );
- } else {
- assert.eq( arr[i], doc.a[i] , "after restore array: "+i );
- }
-}
-
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/exportimport3.js b/src/mongo/gotools/test/legacy26/jstests/tool/exportimport3.js
deleted file mode 100644
index f18ba6cbd4b..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/exportimport3.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// exportimport3.js
-
-t = new ToolTest( "exportimport3" );
-
-c = t.startDB( "foo" );
-assert.eq( 0 , c.count() , "setup1" );
-c.save({a:1})
-c.save({a:2})
-c.save({a:3})
-c.save({a:4})
-c.save({a:5})
-
-assert.eq( 5 , c.count() , "setup2" );
-
-
-t.runTool( "export" , "--jsonArray" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
-
-t.runTool( "import" , "--jsonArray" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 5 , c.count() , "after restore 2" );
-
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/exportimport4.js b/src/mongo/gotools/test/legacy26/jstests/tool/exportimport4.js
deleted file mode 100644
index c0d82a135bc..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/exportimport4.js
+++ /dev/null
@@ -1,57 +0,0 @@
-// exportimport4.js
-
-
-t = new ToolTest( "exportimport4" );
-c = t.startDB( "foo" );
-
-install_test_data = function() {
- c.drop();
-
- assert.eq( 0 , c.count() , "setup1" );
-
- c.save( { a : [1, 2, 3, NaN, 4, null, 5] } );
- c.save( { a : [1, 2, 3, 4, 5] } );
- c.save( { a : [ NaN ] } );
- c.save( { a : [1, 2, 3, 4, NaN, NaN, 5, NaN] } );
- c.save( { a : [1, 2, 3, 4, null, null, 5, null] } );
-
- assert.eq( 5 , c.count() , "setup2" );
-};
-
-// attempt to export fields without NaN
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[NaN]}}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 2 , c.count() , "after restore 1" );
-
-// attempt to export fields with NaN
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:NaN}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 3 , c.count() , "after restore 2" );
-
-// attempt to export everything
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 5 , c.count() , "after restore 3" );
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/exportimport5.js b/src/mongo/gotools/test/legacy26/jstests/tool/exportimport5.js
deleted file mode 100644
index 47dd98c2553..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/exportimport5.js
+++ /dev/null
@@ -1,82 +0,0 @@
-// exportimport4.js
-
-
-t = new ToolTest( "exportimport5" );
-c = t.startDB( "foo" );
-
-install_test_data = function() {
- c.drop();
-
- assert.eq( 0 , c.count() , "setup1" );
-
- c.save( { a : [1, 2, 3, Infinity, 4, null, 5] } );
- c.save( { a : [1, 2, 3, 4, 5] } );
- c.save( { a : [ Infinity ] } );
- c.save( { a : [1, 2, 3, 4, Infinity, Infinity, 5, -Infinity] } );
- c.save( { a : [1, 2, 3, 4, null, null, 5, null] } );
- c.save( { a : [ -Infinity ] } );
-
- assert.eq( 6 , c.count() , "setup2" );
-};
-
-// attempt to export fields without Infinity
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[Infinity]}}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 3 , c.count() , "after restore 1" );
-
-// attempt to export fields with Infinity
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:Infinity}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 3 , c.count() , "after restore 2" );
-
-// attempt to export fields without -Infinity
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[-Infinity]}}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 4 , c.count() , "after restore 3" );
-
-// attempt to export fields with -Infinity
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:-Infinity}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 2 , c.count() , "after restore 4" );
-
-// attempt to export everything
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 6 , c.count() , "after restore 5" );
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/exportimport6.js b/src/mongo/gotools/test/legacy26/jstests/tool/exportimport6.js
deleted file mode 100644
index a01d49a9c8b..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/exportimport6.js
+++ /dev/null
@@ -1,26 +0,0 @@
-// exportimport6.js
-// test export with skip, limit and sort
-
-t = new ToolTest("exportimport6");
-
-c = t.startDB("foo");
-assert.eq(0, c.count(), "setup1");
-c.save({a:1, b:1})
-c.save({a:1, b:2})
-c.save({a:2, b:3})
-c.save({a:2, b:3})
-c.save({a:3, b:4})
-c.save({a:3, b:5})
-
-assert.eq(6, c.count(), "setup2");
-
-t.runTool("export", "--out", t.extFile, "-d", t.baseName, "-c", "foo",
- "--sort", "{a:1, b:-1}", "--skip", "4", "--limit", "1");
-
-c.drop();
-assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
-t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo");
-assert.eq(1, c.count(), "count should be 1");
-assert.eq(5, c.findOne().b, printjson(c.findOne()));
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/exportimport_bigarray.js b/src/mongo/gotools/test/legacy26/jstests/tool/exportimport_bigarray.js
deleted file mode 100644
index 43a209b8453..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/exportimport_bigarray.js
+++ /dev/null
@@ -1,62 +0,0 @@
-// Test importing collections represented as a single line array above the maximum document size
-var tt = new ToolTest('exportimport_bigarray_test');
-
-var exportimport_db = tt.startDB();
-
-var src = exportimport_db.src;
-var dst = exportimport_db.dst;
-
-src.drop();
-dst.drop();
-
-// Calculate the number of documents it takes to get above 16MB (here using 20MB just to be safe)
-var bigString = new Array(1025).toString();
-var doc = {_id: new ObjectId(), x:bigString};
-var docSize = Object.bsonsize(doc);
-var numDocs = Math.floor(20*1024*1024 / docSize);
-
-print('Size of one document: ' + docSize)
-print('Number of documents to exceed maximum BSON size: ' + numDocs)
-
-print('About to insert ' + numDocs + ' documents into ' +
- exportimport_db.getName() + '.' + src.getName());
-var i;
-for (i = 0; i < numDocs; ++i) {
- src.insert({ x : bigString });
-}
-var lastError = exportimport_db.getLastError();
-if (lastError == null) {
- print('Finished inserting ' + numDocs + ' documents');
-}
-else {
- doassert('Insertion failed: ' + lastError);
-}
-
-data = 'data/exportimport_array_test.json';
-
-print('About to call mongoexport on: ' + exportimport_db.getName() + '.' + src.getName() +
- ' with file: ' + data);
-tt.runTool('export', '--out' , data, '-d', exportimport_db.getName(), '-c', src.getName(),
- '--jsonArray');
-
-print('About to call mongoimport on: ' + exportimport_db.getName() + '.' + dst.getName() +
- ' with file: ' + data);
-tt.runTool('import', '--file', data, '-d', exportimport_db.getName(), '-c', dst.getName(),
- '--jsonArray');
-
-print('About to verify that source and destination collections match');
-
-src_cursor = src.find().sort({ _id : 1 });
-dst_cursor = dst.find().sort({ _id : 1 });
-
-var documentCount = 0;
-while (src_cursor.hasNext()) {
- assert(dst_cursor.hasNext(), 'Source has more documents than destination. ' +
- 'Destination has ' + documentCount + ' documents.');
- assert.eq(src_cursor.next(), dst_cursor.next(), 'Mismatch on document ' + documentCount);
- ++documentCount;
-}
-assert(!dst_cursor.hasNext(), 'Destination has more documents than source. ' +
- 'Source has ' + documentCount + ' documents.');
-
-print('Verified that source and destination collections match');
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/exportimport_date.js b/src/mongo/gotools/test/legacy26/jstests/tool/exportimport_date.js
deleted file mode 100644
index 57a860ca1a8..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/exportimport_date.js
+++ /dev/null
@@ -1,49 +0,0 @@
-var tt = new ToolTest('exportimport_date_test');
-
-var exportimport_db = tt.startDB();
-
-var src = exportimport_db.src;
-var dst = exportimport_db.dst;
-
-src.drop();
-dst.drop();
-
-// Insert a date that we can format
-var formatable = ISODate("1970-01-02T05:00:00Z");
-assert.eq(formatable.valueOf(), 104400000);
-src.insert({ "_id" : formatable });
-
-// Insert a date that we cannot format as an ISODate string
-var nonformatable = ISODate("3001-01-01T00:00:00Z");
-assert.eq(nonformatable.valueOf(), 32535216000000);
-src.insert({ "_id" : nonformatable });
-
-// Verify number of documents inserted
-assert.eq(2, src.find().itcount());
-
-data = 'data/exportimport_date_test.json';
-
-print('About to call mongoexport on: ' + exportimport_db.getName() + '.' + src.getName() +
- ' with file: ' + data);
-tt.runTool('export', '--out' , data, '-d', exportimport_db.getName(), '-c', src.getName());
-
-print('About to call mongoimport on: ' + exportimport_db.getName() + '.' + dst.getName() +
- ' with file: ' + data);
-tt.runTool('import', '--file', data, '-d', exportimport_db.getName(), '-c', dst.getName());
-
-print('About to verify that source and destination collections match');
-
-src_cursor = src.find().sort({ _id : 1 });
-dst_cursor = dst.find().sort({ _id : 1 });
-
-var documentCount = 0;
-while (src_cursor.hasNext()) {
- assert(dst_cursor.hasNext(), 'Source has more documents than destination. ' +
- 'Destination has ' + documentCount + ' documents.');
- assert.eq(src_cursor.next(), dst_cursor.next(), 'Mismatch on document ' + documentCount);
- ++documentCount;
-}
-assert(!dst_cursor.hasNext(), 'Destination has more documents than source. ' +
- 'Source has ' + documentCount + ' documents.');
-
-print('Verified that source and destination collections match');
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/files1.js b/src/mongo/gotools/test/legacy26/jstests/tool/files1.js
deleted file mode 100644
index acfcc16dcc3..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/files1.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// files1.js
-
-t = new ToolTest( "files1" )
-
-db = t.startDB();
-
-filename = 'mongod'
-if ( _isWindows() )
- filename += '.exe'
-
-t.runTool( "files" , "-d" , t.baseName , "put" , filename );
-md5 = md5sumFile(filename);
-
-file_obj = db.fs.files.findOne()
-assert( file_obj , "A 0" );
-md5_stored = file_obj.md5;
-md5_computed = db.runCommand({filemd5: file_obj._id}).md5;
-assert.eq( md5 , md5_stored , "A 1" );
-assert.eq( md5 , md5_computed, "A 2" );
-
-mkdir(t.ext);
-
-t.runTool( "files" , "-d" , t.baseName , "get" , filename , '-l' , t.extFile );
-md5 = md5sumFile(t.extFile);
-assert.eq( md5 , md5_stored , "B" );
-
-t.stop()
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/oplog1.js b/src/mongo/gotools/test/legacy26/jstests/tool/oplog1.js
deleted file mode 100644
index e9a002bfb65..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/oplog1.js
+++ /dev/null
@@ -1,26 +0,0 @@
-// oplog1.js
-
-// very basic test for mongooplog
-// need a lot more, but test that it functions at all
-
-t = new ToolTest( "oplog1" );
-
-db = t.startDB();
-
-output = db.output
-
-doc = { _id : 5 , x : 17 };
-
-db.oplog.insert( { ts : new Timestamp() , "op" : "i" , "ns" : output.getFullName() , "o" : doc } );
-
-assert.eq( 0 , output.count() , "before" )
-
-t.runTool( "oplog" , "--oplogns" , db.getName() + ".oplog" , "--from" , "127.0.0.1:" + t.port , "-vv" );
-
-assert.eq( 1 , output.count() , "after" );
-
-assert.eq( doc , output.findOne() , "after check" );
-
-t.stop();
-
-
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/oplog_all_ops.js b/src/mongo/gotools/test/legacy26/jstests/tool/oplog_all_ops.js
deleted file mode 100644
index 8f231cb233d..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/oplog_all_ops.js
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * Performs a simple test on mongooplog by doing different types of operations
- * that will show up in the oplog then replaying it on another replica set.
- * Correctness is verified using the dbhash command.
- */
-
-var repl1 = new ReplSetTest({ name: 'rs1', nodes: [{ nopreallocj: '' },
- { arbiter: true }, { arbiter: true }]});
-
-repl1.startSet({ oplogSize: 10 });
-repl1.initiate();
-repl1.awaitSecondaryNodes();
-
-var repl1Conn = new Mongo(repl1.getURL());
-var testDB = repl1Conn.getDB('test');
-var testColl = testDB.user;
-
-// op i
-testColl.insert({ x: 1 });
-testColl.insert({ x: 2 });
-
-// op c
-testDB.dropDatabase();
-
-testColl.insert({ y: 1 });
-testColl.insert({ y: 2 });
-testColl.insert({ y: 3 });
-
-// op u
-testColl.update({}, { $inc: { z: 1 }}, true, true);
-
-// op d
-testColl.remove({ y: 2 });
-
-// op n
-var oplogColl = repl1Conn.getCollection('local.oplog.rs');
-oplogColl.insert({ ts: new Timestamp(), op: 'n', ns: testColl.getFullName(), 'o': { x: 'noop' }});
-
-var repl2 = new ReplSetTest({ name: 'rs2', startPort: 31100, nodes: [{ nopreallocj: '' },
- { arbiter: true }, { arbiter: true }]});
-
-repl2.startSet({ oplogSize: 10 });
-repl2.initiate();
-repl2.awaitSecondaryNodes();
-
-var srcConn = repl1.getPrimary();
-runMongoProgram('mongooplog', '--from', repl1.getPrimary().host,
- '--host', repl2.getPrimary().host);
-
-var repl1Hash = testDB.runCommand({ dbhash: 1 });
-
-var repl2Conn = new Mongo(repl2.getURL());
-var testDB2 = repl2Conn.getDB(testDB.getName());
-var repl2Hash = testDB2.runCommand({ dbhash: 1 });
-
-assert(repl1Hash.md5);
-assert.eq(repl1Hash.md5, repl2Hash.md5);
-
-repl1.stopSet();
-repl2.stopSet();
-
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/restorewithauth.js b/src/mongo/gotools/test/legacy26/jstests/tool/restorewithauth.js
deleted file mode 100644
index ac9e7bc756b..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/restorewithauth.js
+++ /dev/null
@@ -1,113 +0,0 @@
-/* SERVER-4972
- * Test for mongorestore on server with --auth allows restore without credentials of colls
- * with no index
- */
-/*
- * 1) Start mongo without auth.
- * 2) Write to collection
- * 3) Take dump of the collection using mongodump.
- * 4) Drop the collection.
- * 5) Stop mongod from step 1.
- * 6) Restart mongod with auth.
- * 7) Add admin user to kick authentication
- * 8) Try restore without auth credentials. The restore should fail
- * 9) Try restore with correct auth credentials. The restore should succeed this time.
- */
-
-var port = allocatePorts(1)[0];
-baseName = "jstests_restorewithauth";
-var conn = startMongod( "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface",
- "--nojournal", "--bind_ip", "127.0.0.1" );
-
-// write to ns foo.bar
-var foo = conn.getDB( "foo" );
-for( var i = 0; i < 4; i++ ) {
- foo["bar"].save( { "x": i } );
- foo["baz"].save({"x": i});
-}
-
-// make sure the collection exists
-assert.eq( foo.system.namespaces.count({name: "foo.bar"}), 1 )
-
-//make sure it has no index except _id
-assert.eq(foo.system.indexes.count(), 2);
-
-foo.bar.createIndex({x:1});
-assert.eq(foo.system.indexes.count(), 3);
-
-// get data dump
-var dumpdir = MongoRunner.dataDir + "/restorewithauth-dump1/";
-resetDbpath( dumpdir );
-x = runMongoProgram("mongodump", "--db", "foo", "-h", "127.0.0.1:"+port, "--out", dumpdir);
-
-// now drop the db
-foo.dropDatabase();
-
-// stop mongod
-stopMongod( port );
-
-// start mongod with --auth
-conn = startMongod( "--auth", "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface",
- "--nojournal", "--bind_ip", "127.0.0.1" );
-
-// admin user
-var admin = conn.getDB( "admin" )
-admin.createUser({user: "admin" , pwd: "admin", roles: jsTest.adminUserRoles});
-admin.auth( "admin" , "admin" );
-
-var foo = conn.getDB( "foo" )
-
-// make sure no collection with the same name exists
-assert.eq(foo.system.namespaces.count( {name: "foo.bar"}), 0);
-assert.eq(foo.system.namespaces.count( {name: "foo.baz"}), 0);
-
-// now try to restore dump
-x = runMongoProgram( "mongorestore", "-h", "127.0.0.1:" + port, "--dir" , dumpdir, "-vvvvv" );
-
-// make sure that the collection isn't restored
-assert.eq(foo.system.namespaces.count({name: "foo.bar"}), 0);
-assert.eq(foo.system.namespaces.count({name: "foo.baz"}), 0);
-
-// now try to restore dump with correct credentials
-x = runMongoProgram( "mongorestore",
- "-h", "127.0.0.1:" + port,
- "-d", "foo",
- "--authenticationDatabase=admin",
- "-u", "admin",
- "-p", "admin",
- "--dir", dumpdir + "foo/",
- "-vvvvv");
-
-// make sure that the collection was restored
-assert.eq(foo.system.namespaces.count({name: "foo.bar"}), 1);
-assert.eq(foo.system.namespaces.count({name: "foo.baz"}), 1);
-
-// make sure the collection has 4 documents
-assert.eq(foo.bar.count(), 4);
-assert.eq(foo.baz.count(), 4);
-
-foo.dropDatabase();
-
-// make sure that the collection is empty
-assert.eq(foo.system.namespaces.count({name: "foo.bar"}), 0);
-assert.eq(foo.system.namespaces.count({name: "foo.baz"}), 0);
-
-foo.createUser({user: 'user', pwd: 'password', roles: jsTest.basicUserRoles});
-
-// now try to restore dump with foo database credentials
-x = runMongoProgram("mongorestore",
- "-h", "127.0.0.1:" + port,
- "-d", "foo",
- "-u", "user",
- "-p", "password",
- "--dir", dumpdir + "foo/",
- "-vvvvv");
-
-// make sure that the collection was restored
-assert.eq(foo.system.namespaces.count({name: "foo.bar"}), 1);
-assert.eq(foo.system.namespaces.count({name: "foo.baz"}), 1);
-assert.eq(foo.bar.count(), 4);
-assert.eq(foo.baz.count(), 4);
-assert.eq(foo.system.indexes.count(), 3); // _id on foo, _id on bar, x on foo
-
-stopMongod( port );
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/stat1.js b/src/mongo/gotools/test/legacy26/jstests/tool/stat1.js
deleted file mode 100644
index 539827e1704..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/stat1.js
+++ /dev/null
@@ -1,22 +0,0 @@
-// stat1.js
-// test mongostat with authentication SERVER-3875
-port = allocatePorts( 1 )[ 0 ];
-baseName = "tool_stat1";
-
-m = startMongod( "--auth", "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
-db = m.getDB( "admin" );
-
-t = db[ baseName ];
-t.drop();
-
-db.dropAllUsers();
-
-db.createUser({user: "eliot" , pwd: "eliot", roles: jsTest.adminUserRoles});
-
-assert( db.auth( "eliot" , "eliot" ) , "auth failed" );
-
-x = runMongoProgram( "mongostat", "--host", "127.0.0.1:"+port, "--username", "eliot", "--password", "eliot", "--rowcount", "1", "--authenticationDatabase=admin" );
-assert.eq(x, 0, "mongostat should exit successfully with eliot:eliot");
-
-x = runMongoProgram( "mongostat", "--host", "127.0.0.1:"+port, "--username", "eliot", "--password", "wrong", "--rowcount", "1", "--authenticationDatabase=admin" );
-assert.eq(x, 1, "mongostat should exit with 1 with eliot:wrong");
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/tool1.js b/src/mongo/gotools/test/legacy26/jstests/tool/tool1.js
deleted file mode 100644
index f7c6f769e72..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/tool1.js
+++ /dev/null
@@ -1,44 +0,0 @@
-// mongo tool tests, very basic to start with
-
-
-baseName = "jstests_tool_tool1";
-dbPath = MongoRunner.dataPath + baseName + "/";
-externalPath = MongoRunner.dataPath + baseName + "_external/";
-externalBaseName = "export.json";
-externalFile = externalPath + externalBaseName;
-
-function fileSize(){
- var l = listFiles( externalPath );
- for ( var i=0; i<l.length; i++ ){
- if ( l[i].baseName == externalBaseName )
- return l[i].size;
- }
- return -1;
-}
-
-
-port = allocatePorts( 1 )[ 0 ];
-resetDbpath( externalPath );
-
-m = startMongod( "--port", port, "--dbpath", dbPath, "--nohttpinterface", "--noprealloc" , "--bind_ip", "127.0.0.1" );
-c = m.getDB( baseName ).getCollection( baseName );
-c.save( { a: 1 } );
-assert( c.findOne() );
-
-runMongoProgram( "mongodump", "--host", "127.0.0.1:" + port, "--out", externalPath );
-c.drop();
-runMongoProgram( "mongorestore", "--host", "127.0.0.1:" + port, "--dir", externalPath );
-assert.soon( "c.findOne()" , "mongodump then restore has no data w/sleep" );
-assert( c.findOne() , "mongodump then restore has no data" );
-assert.eq( 1 , c.findOne().a , "mongodump then restore has no broken data" );
-
-resetDbpath( externalPath );
-
-assert.eq( -1 , fileSize() , "mongoexport prep invalid" );
-runMongoProgram( "mongoexport", "--host", "127.0.0.1:" + port, "-d", baseName, "-c", baseName, "--out", externalFile );
-assert.lt( 10 , fileSize() , "file size changed" );
-
-c.drop();
-runMongoProgram( "mongoimport", "--host", "127.0.0.1:" + port, "-d", baseName, "-c", baseName, "--file", externalFile );
-assert.soon( "c.findOne()" , "mongo import json A" );
-assert( c.findOne() && 1 == c.findOne().a , "mongo import json B" );
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/tool_replset.js b/src/mongo/gotools/test/legacy26/jstests/tool/tool_replset.js
deleted file mode 100644
index bc50a0fd7d4..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/tool_replset.js
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Test to ensure that (dump/restore/export/import/oplog) works with a replica set connection string
- * 1. Start a replica set.
- * 2. Add data to a collection.
- * 3. Take a dump of the database.
- * 4. Drop the db.
- * 5. Restore the db.
- * 6. Export a collection.
- * 7. Drop the collection.
- * 8. Import the collection.
- * 9. Add data to the oplog.rs collection.
- * 10. Ensure that the document doesn't exist yet.
- * 11. Now play the mongooplog tool.
- * 12. Make sure that the oplog was played
-*/
-
-// Load utility methods for replica set tests
-load("jstests/replsets/rslib.js");
-
-print("starting the replica set")
-
-var replTest = new ReplSetTest({ name: 'tool_replset', nodes: 2, oplogSize: 5 });
-var nodes = replTest.startSet();
-replTest.initiate();
-var master = replTest.getPrimary();
-for (var i = 0; i < 100; i++) {
- master.getDB("foo").bar.insert({ a: i });
-}
-replTest.awaitReplication();
-
-var replSetConnString = "tool_replset/127.0.0.1:" + replTest.ports[0] +
- ",127.0.0.1:" + replTest.ports[1];
-
-// Test with mongodump/mongorestore
-print("dump the db");
-var data = MongoRunner.dataDir + "/tool_replset-dump1/";
-runMongoProgram("mongodump", "--host", replSetConnString, "--out", data);
-
-print("db successfully dumped, dropping now");
-master.getDB("foo").dropDatabase();
-replTest.awaitReplication();
-
-print("restore the db");
-runMongoProgram("mongorestore", "--host", replSetConnString, "--dir", data);
-
-print("db successfully restored, checking count")
-var x = master.getDB("foo").getCollection("bar").count();
-assert.eq(x, 100, "mongorestore should have successfully restored the collection");
-
-replTest.awaitReplication();
-
-// Test with mongoexport/mongoimport
-print("export the collection");
-var extFile = MongoRunner.dataDir + "/tool_replset/export";
-runMongoProgram("mongoexport", "--host", replSetConnString, "--out", extFile,
- "-d", "foo", "-c", "bar");
-
-print("collection successfully exported, dropping now");
-master.getDB("foo").getCollection("bar").drop();
-replTest.awaitReplication();
-
-print("import the collection");
-runMongoProgram("mongoimport", "--host", replSetConnString, "--file", extFile,
- "-d", "foo", "-c", "bar");
-
-var x = master.getDB("foo").getCollection("bar").count();
-assert.eq(x, 100, "mongoimport should have successfully imported the collection");
-
-// Test with mongooplog
-var doc = { _id : 5, x : 17 };
-master.getDB("local").oplog.rs.insert({ ts : new Timestamp(), "op" : "i", "ns" : "foo.bar",
- "o" : doc });
-
-assert.eq(100, master.getDB("foo").getCollection("bar").count(), "count before running mongooplog " +
- "was not 100 as expected");
-
-runMongoProgram("mongooplog" , "--from", "127.0.0.1:" + replTest.ports[0],
- "--host", replSetConnString);
-
-print("running mongooplog to replay the oplog")
-
-assert.eq(101, master.getDB("foo").getCollection("bar").count(), "count after running mongooplog " +
- "was not 101 as expected")
-
-print("all tests successful, stopping replica set")
-
-replTest.stopSet();
-
-print("replica set stopped, test complete")
diff --git a/src/mongo/gotools/test/legacy26/jstests/tool/tsv1.js b/src/mongo/gotools/test/legacy26/jstests/tool/tsv1.js
deleted file mode 100644
index 1b0ddbb7c9e..00000000000
--- a/src/mongo/gotools/test/legacy26/jstests/tool/tsv1.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// tsv1.js
-
-t = new ToolTest( "tsv1" )
-
-c = t.startDB( "foo" );
-
-base = { a : "", b : 1 , c : "foobar" , d: 5, e: -6 };
-
-t.runTool( "import" , "--file" , "jstests/tool/data/a.tsv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "tsv" , "-f" , "a,b,c,d,e" );
-assert.soon( "2 == c.count()" , "restore 2" );
-
-a = c.find().sort( { a : 1 } ).toArray();
-delete a[0]._id
-delete a[1]._id
-
-assert.eq( { a : "a" , b : "b" , c : "c" , d: "d", e: "e"} , a[1] , "tsv parse 1" );
-assert.eq( base , a[0] , "tsv parse 0" )
-
-c.drop()
-assert.eq( 0 , c.count() , "after drop 2" )
-
-t.runTool( "import" , "--file" , "jstests/tool/data/a.tsv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "tsv" , "--headerline" )
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-
-x = c.findOne()
-delete x._id;
-assert.eq( base , x , "tsv parse 2" )
-
-
-
-t.stop()
diff --git a/src/mongo/gotools/test/legacy28/buildscripts/buildlogger.py b/src/mongo/gotools/test/legacy28/buildscripts/buildlogger.py
deleted file mode 100644
index d2466e495c0..00000000000
--- a/src/mongo/gotools/test/legacy28/buildscripts/buildlogger.py
+++ /dev/null
@@ -1,479 +0,0 @@
-"""
-buildlogger.py
-
-Wrap a command (specified on the command line invocation of buildlogger.py)
-and send output in batches to the buildlogs web application via HTTP POST.
-
-The script configures itself from environment variables:
-
- required env vars:
- MONGO_BUILDER_NAME (e.g. "Nightly Linux 64-bit")
- MONGO_BUILD_NUMBER (an integer)
- MONGO_TEST_FILENAME (not required when invoked with -g)
-
- optional env vars:
- MONGO_PHASE (e.g. "core", "slow nightly", etc)
- MONGO_* (any other environment vars are passed to the web app)
- BUILDLOGGER_CREDENTIALS (see below)
-
-This script has two modes: a "test" mode, intended to wrap the invocation of
-an individual test file, and a "global" mode, intended to wrap the mongod
-instances that run throughout the duration of a mongo test phase (the logs
-from "global" invocations are displayed interspersed with the logs of each
-test, in order to let the buildlogs web app display the full output sensibly.)
-
-If the BUILDLOGGER_CREDENTIALS environment variable is set, it should be a
-path to a valid Python file containing "username" and "password" variables,
-which should be valid credentials for authenticating to the buildlogger web
-app. For example:
-
- username = "hello"
- password = "world"
-
-If BUILDLOGGER_CREDENTIALS is a relative path, then the working directory
-and the directories one, two, and three levels up, are searched, in that
-order.
-"""
-
-import functools
-import os
-import os.path
-import re
-import signal
-import socket
-import subprocess
-import sys
-import time
-import traceback
-import urllib2
-import utils
-
-# suppress deprecation warnings that happen when
-# we import the 'buildbot.tac' file below
-import warnings
-warnings.simplefilter('ignore', DeprecationWarning)
-
-try:
- import json
-except:
- try:
- import simplejson as json
- except:
- json = None
-
-# try to load the shared secret from settings.py
-# which will be one, two, or three directories up
-# from this file's location
-credentials_file = os.environ.get('BUILDLOGGER_CREDENTIALS', 'buildbot.tac')
-credentials_loc, credentials_name = os.path.split(credentials_file)
-if not credentials_loc:
- here = os.path.abspath(os.path.dirname(__file__))
- possible_paths = [
- os.path.abspath(os.path.join(here, '..')),
- os.path.abspath(os.path.join(here, '..', '..')),
- os.path.abspath(os.path.join(here, '..', '..', '..')),
- ]
-else:
- possible_paths = [credentials_loc]
-
-username, password = None, None
-for path in possible_paths:
- credentials_path = os.path.join(path, credentials_name)
- if os.path.isfile(credentials_path):
- credentials = {}
- try:
- execfile(credentials_path, credentials, credentials)
- username = credentials.get('slavename', credentials.get('username'))
- password = credentials.get('passwd', credentials.get('password'))
- break
- except:
- pass
-
-
-URL_ROOT = os.environ.get('BUILDLOGGER_URL', 'http://buildlogs.mongodb.org/')
-TIMEOUT_SECONDS = 10
-socket.setdefaulttimeout(TIMEOUT_SECONDS)
-
-digest_handler = urllib2.HTTPDigestAuthHandler()
-digest_handler.add_password(
- realm='buildlogs',
- uri=URL_ROOT,
- user=username,
- passwd=password)
-
-# This version of HTTPErrorProcessor is copied from
-# Python 2.7, and allows REST response codes (e.g.
-# "201 Created") which are treated as errors by
-# older versions.
-class HTTPErrorProcessor(urllib2.HTTPErrorProcessor):
- def http_response(self, request, response):
- code, msg, hdrs = response.code, response.msg, response.info()
-
- # According to RFC 2616, "2xx" code indicates that the client's
- # request was successfully received, understood, and accepted.
- if not (200 <= code < 300):
- response = self.parent.error(
- 'http', request, response, code, msg, hdrs)
-
- return response
-
-url_opener = urllib2.build_opener(digest_handler, HTTPErrorProcessor())
-
-def url(endpoint):
- if not endpoint.endswith('/'):
- endpoint = '%s/' % endpoint
-
- return '%s/%s' % (URL_ROOT.rstrip('/'), endpoint)
-
-def post(endpoint, data, headers=None):
- data = json.dumps(data, encoding='utf-8')
-
- headers = headers or {}
- headers.update({'Content-Type': 'application/json; charset=utf-8'})
-
- req = urllib2.Request(url=url(endpoint), data=data, headers=headers)
- try:
- response = url_opener.open(req)
- except urllib2.URLError:
- import traceback
- traceback.print_exc(file=sys.stderr)
- sys.stderr.flush()
- # indicate that the request did not succeed
- return None
-
- response_headers = dict(response.info())
-
- # eg "Content-Type: application/json; charset=utf-8"
- content_type = response_headers.get('content-type')
- match = re.match(r'(?P<mimetype>[^;]+).*(?:charset=(?P<charset>[^ ]+))?$', content_type)
- if match and match.group('mimetype') == 'application/json':
- encoding = match.group('charset') or 'utf-8'
- return json.load(response, encoding=encoding)
-
- return response.read()
-
-def traceback_to_stderr(func):
- """
- decorator which logs any exceptions encountered to stderr
- and returns none.
- """
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- try:
- return func(*args, **kwargs)
- except urllib2.HTTPError, err:
- sys.stderr.write('error: HTTP code %d\n----\n' % err.code)
- if hasattr(err, 'hdrs'):
- for k, v in err.hdrs.items():
- sys.stderr.write("%s: %s\n" % (k, v))
- sys.stderr.write('\n')
- sys.stderr.write(err.read())
- sys.stderr.write('\n----\n')
- sys.stderr.flush()
- except:
- sys.stderr.write('Traceback from buildlogger:\n')
- traceback.print_exc(file=sys.stderr)
- sys.stderr.flush()
- return None
- return wrapper
-
-
-@traceback_to_stderr
-def get_or_create_build(builder, buildnum, extra={}):
- data = {'builder': builder, 'buildnum': buildnum}
- data.update(extra)
- response = post('build', data)
- if response is None:
- return None
- return response['id']
-
-@traceback_to_stderr
-def create_test(build_id, test_filename, test_command, test_phase):
- response = post('build/%s/test' % build_id, {
- 'test_filename': test_filename,
- 'command': test_command,
- 'phase': test_phase,
- })
- if response is None:
- return None
- return response['id']
-
-@traceback_to_stderr
-def append_test_logs(build_id, test_id, log_lines):
- response = post('build/%s/test/%s' % (build_id, test_id), data=log_lines)
- if response is None:
- return False
- return True
-
-@traceback_to_stderr
-def append_global_logs(build_id, log_lines):
- """
- "global" logs are for the mongod(s) started by smoke.py
- that last the duration of a test phase -- since there
- may be output in here that is important but spans individual
- tests, the buildlogs webapp handles these logs specially.
- """
- response = post('build/%s' % build_id, data=log_lines)
- if response is None:
- return False
- return True
-
-@traceback_to_stderr
-def finish_test(build_id, test_id, failed=False):
- response = post('build/%s/test/%s' % (build_id, test_id), data=[], headers={
- 'X-Sendlogs-Test-Done': 'true',
- 'X-Sendlogs-Test-Failed': failed and 'true' or 'false',
- })
- if response is None:
- return False
- return True
-
-def run_and_echo(command):
- """
- this just calls the command, and returns its return code,
- allowing stdout and stderr to work as normal. it is used
- as a fallback when environment variables or python
- dependencies cannot be configured, or when the logging
- webapp is unavailable, etc
- """
- proc = subprocess.Popen(command)
-
- def handle_sigterm(signum, frame):
- try:
- proc.send_signal(signum)
- except AttributeError:
- os.kill(proc.pid, signum)
- orig_handler = signal.signal(signal.SIGTERM, handle_sigterm)
-
- proc.wait()
-
- signal.signal(signal.SIGTERM, orig_handler)
- return proc.returncode
-
-class LogAppender(object):
- def __init__(self, callback, args, send_after_lines=2000, send_after_seconds=10):
- self.callback = callback
- self.callback_args = args
-
- self.send_after_lines = send_after_lines
- self.send_after_seconds = send_after_seconds
-
- self.buf = []
- self.retrybuf = []
- self.last_sent = time.time()
-
- def __call__(self, line):
- self.buf.append((time.time(), line))
-
- delay = time.time() - self.last_sent
- if len(self.buf) >= self.send_after_lines or delay >= self.send_after_seconds:
- self.submit()
-
- # no return value is expected
-
- def submit(self):
- if len(self.buf) + len(self.retrybuf) == 0:
- return True
-
- args = list(self.callback_args)
- args.append(list(self.buf) + self.retrybuf)
-
- self.last_sent = time.time()
-
- if self.callback(*args):
- self.buf = []
- self.retrybuf = []
- return True
- else:
- self.retrybuf += self.buf
- self.buf = []
- return False
-
-
-def wrap_test(command):
- """
- call the given command, intercept its stdout and stderr,
- and send results in batches of 100 lines or 10s to the
- buildlogger webapp
- """
-
- # get builder name and build number from environment
- builder = os.environ.get('MONGO_BUILDER_NAME')
- buildnum = os.environ.get('MONGO_BUILD_NUMBER')
-
- if builder is None or buildnum is None:
- return run_and_echo(command)
-
- try:
- buildnum = int(buildnum)
- except ValueError:
- sys.stderr.write('buildlogger: build number ("%s") was not an int\n' % buildnum)
- sys.stderr.flush()
- return run_and_echo(command)
-
- # test takes some extra info
- phase = os.environ.get('MONGO_PHASE', 'unknown')
- test_filename = os.environ.get('MONGO_TEST_FILENAME', 'unknown')
-
- build_info = dict((k, v) for k, v in os.environ.items() if k.startswith('MONGO_'))
- build_info.pop('MONGO_BUILDER_NAME', None)
- build_info.pop('MONGO_BUILD_NUMBER', None)
- build_info.pop('MONGO_PHASE', None)
- build_info.pop('MONGO_TEST_FILENAME', None)
-
- build_id = get_or_create_build(builder, buildnum, extra=build_info)
- if not build_id:
- return run_and_echo(command)
-
- test_id = create_test(build_id, test_filename, ' '.join(command), phase)
- if not test_id:
- return run_and_echo(command)
-
- # the peculiar formatting here matches what is printed by
- # smoke.py when starting tests
- output_url = '%s/build/%s/test/%s/' % (URL_ROOT.rstrip('/'), build_id, test_id)
- sys.stdout.write(' (output suppressed; see %s)\n' % output_url)
- sys.stdout.flush()
-
- callback = LogAppender(callback=append_test_logs, args=(build_id, test_id))
- returncode = loop_and_callback(command, callback)
- failed = bool(returncode != 0)
-
- # this will append any remaining unsubmitted logs, or
- # return True if there are none left to submit
- tries = 5
- while not callback.submit() and tries > 0:
- sys.stderr.write('failed to finish sending test logs, retrying in 1s\n')
- sys.stderr.flush()
- time.sleep(1)
- tries -= 1
-
- tries = 5
- while not finish_test(build_id, test_id, failed) and tries > 5:
- sys.stderr.write('failed to mark test finished, retrying in 1s\n')
- sys.stderr.flush()
- time.sleep(1)
- tries -= 1
-
- return returncode
-
-def wrap_global(command):
- """
- call the given command, intercept its stdout and stderr,
- and send results in batches of 100 lines or 10s to the
- buildlogger webapp. see :func:`append_global_logs` for the
- difference between "global" and "test" log output.
- """
-
- # get builder name and build number from environment
- builder = os.environ.get('MONGO_BUILDER_NAME')
- buildnum = os.environ.get('MONGO_BUILD_NUMBER')
-
- if builder is None or buildnum is None:
- return run_and_echo(command)
-
- try:
- buildnum = int(buildnum)
- except ValueError:
- sys.stderr.write('int(os.environ["MONGO_BUILD_NUMBER"]):\n')
- sys.stderr.write(traceback.format_exc())
- sys.stderr.flush()
- return run_and_echo(command)
-
- build_info = dict((k, v) for k, v in os.environ.items() if k.startswith('MONGO_'))
- build_info.pop('MONGO_BUILDER_NAME', None)
- build_info.pop('MONGO_BUILD_NUMBER', None)
-
- build_id = get_or_create_build(builder, buildnum, extra=build_info)
- if not build_id:
- return run_and_echo(command)
-
- callback = LogAppender(callback=append_global_logs, args=(build_id, ))
- returncode = loop_and_callback(command, callback)
-
- # this will append any remaining unsubmitted logs, or
- # return True if there are none left to submit
- tries = 5
- while not callback.submit() and tries > 0:
- sys.stderr.write('failed to finish sending global logs, retrying in 1s\n')
- sys.stderr.flush()
- time.sleep(1)
- tries -= 1
-
- return returncode
-
-def loop_and_callback(command, callback):
- """
- run the given command (a sequence of arguments, ordinarily
- from sys.argv), and call the given callback with each line
- of stdout or stderr encountered. after the command is finished,
- callback is called once more with None instead of a string.
- """
- proc = subprocess.Popen(
- command,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- )
-
- def handle_sigterm(signum, frame):
- try:
- proc.send_signal(signum)
- except AttributeError:
- os.kill(proc.pid, signum)
-
- # register a handler to delegate SIGTERM
- # to the child process
- orig_handler = signal.signal(signal.SIGTERM, handle_sigterm)
-
- while proc.poll() is None:
- try:
- line = proc.stdout.readline().strip('\r\n')
- line = utils.unicode_dammit(line)
- callback(line)
- except IOError:
- # if the signal handler is called while
- # we're waiting for readline() to return,
- # don't show a traceback
- break
-
- # There may be additional buffered output
- for line in proc.stdout.readlines():
- callback(line.strip('\r\n'))
-
- # restore the original signal handler, if any
- signal.signal(signal.SIGTERM, orig_handler)
- return proc.returncode
-
-
-if __name__ == '__main__':
- # argv[0] is 'buildlogger.py'
- del sys.argv[0]
-
- if sys.argv[0] in ('-g', '--global'):
- # then this is wrapping a "global" command, and should
- # submit global logs to the build, not test logs to a
- # test within the build
- del sys.argv[0]
- wrapper = wrap_global
-
- else:
- wrapper = wrap_test
-
- # if we are missing credentials or the json module, then
- # we can't use buildlogger; so just echo output, but also
- # log why we can't work.
- if json is None:
- sys.stderr.write('buildlogger: could not import a json module\n')
- sys.stderr.flush()
- wrapper = run_and_echo
-
- elif username is None or password is None:
- sys.stderr.write('buildlogger: could not find or import %s for authentication\n' % credentials_file)
- sys.stderr.flush()
- wrapper = run_and_echo
-
- # otherwise wrap a test command as normal; the
- # wrapper functions return the return code of
- # the wrapped command, so that should be our
- # exit code as well.
- sys.exit(wrapper(sys.argv))
diff --git a/src/mongo/gotools/test/legacy28/buildscripts/cleanbb.py b/src/mongo/gotools/test/legacy28/buildscripts/cleanbb.py
deleted file mode 100644
index fee7efdc0c1..00000000000
--- a/src/mongo/gotools/test/legacy28/buildscripts/cleanbb.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/env python
-
-import re
-import sys
-import os, os.path
-import utils
-import time
-from optparse import OptionParser
-
-def shouldKill( c, root=None ):
-
- if "smoke.py" in c:
- return False
-
- if "emr.py" in c:
- return False
-
- if "java" in c:
- return False
-
- # if root directory is provided, see if command line matches mongod process running
- # with the same data directory
-
- if root and re.compile("(\W|^)mongod(.exe)?\s+.*--dbpath(\s+|=)%s(\s+|$)" % root).search( c ):
- return True
-
- if ( c.find( "buildbot" ) >= 0 or c.find( "slave" ) >= 0 ) and c.find( "/mongo/" ) >= 0:
- return True
-
- if c.find( "xml-data/build-dir" ) >= 0: # for bamboo
- return True
-
- return False
-
-def killprocs( signal="", root=None ):
- killed = 0
-
- if sys.platform == 'win32':
- return killed
-
- l = utils.getprocesslist()
- print( "num procs:" + str( len( l ) ) )
- if len(l) == 0:
- print( "no procs" )
- try:
- print( execsys( "/sbin/ifconfig -a" ) )
- except Exception,e:
- print( "can't get interfaces" + str( e ) )
-
- for x in l:
- x = x.lstrip()
- if not shouldKill( x, root=root ):
- continue
-
- pid = x.split( " " )[0]
- print( "killing: " + x )
- utils.execsys( "/bin/kill " + signal + " " + pid )
- killed = killed + 1
-
- return killed
-
-
-def tryToRemove(path):
- for _ in range(60):
- try:
- os.remove(path)
- return True
- except OSError, e:
- errno = getattr(e, 'winerror', None)
- # check for the access denied and file in use WindowsErrors
- if errno in (5, 32):
- print("os.remove(%s) failed, retrying in one second." % path)
- time.sleep(1)
- else:
- raise e
- return False
-
-
-def cleanup( root , nokill ):
- if nokill:
- print "nokill requested, not killing anybody"
- else:
- if killprocs( root=root ) > 0:
- time.sleep(3)
- killprocs( "-9", root=root )
-
- # delete all regular files, directories can stay
- # NOTE: if we delete directories later, we can't delete diskfulltest
- for ( dirpath , dirnames , filenames ) in os.walk( root , topdown=False ):
- for x in filenames:
- foo = dirpath + "/" + x
- if os.path.exists(foo):
- if not tryToRemove(foo):
- raise Exception("Couldn't remove file '%s' after 60 seconds" % foo)
-
-if __name__ == "__main__":
- parser = OptionParser(usage="read the script")
- parser.add_option("--nokill", dest='nokill', default=False, action='store_true')
- (options, args) = parser.parse_args()
-
- root = "/data/db/"
- if len(args) > 0:
- root = args[0]
-
- cleanup( root , options.nokill )
diff --git a/src/mongo/gotools/test/legacy28/buildscripts/smoke.py b/src/mongo/gotools/test/legacy28/buildscripts/smoke.py
deleted file mode 100755
index cd06b315664..00000000000
--- a/src/mongo/gotools/test/legacy28/buildscripts/smoke.py
+++ /dev/null
@@ -1,1447 +0,0 @@
-#!/usr/bin/env python
-
-# smoke.py: run some mongo tests.
-
-# Bugs, TODOs:
-
-# 0 Some tests hard-code pathnames relative to the mongo repository,
-# so the smoke.py process and all its children must be run with the
-# mongo repo as current working directory. That's kinda icky.
-
-# 1 The tests that are implemented as standalone executables ("test"),
-# don't take arguments for the dbpath, but unconditionally use
-# "/tmp/unittest".
-
-# 2 mongod output gets intermingled with mongo output, and it's often
-# hard to find error messages in the slop. Maybe have smoke.py do
-# some fancier wrangling of child process output?
-
-# 3 Some test suites run their own mongods, and so don't need us to
-# run any mongods around their execution. (It's harmless to do so,
-# but adds noise in the output.)
-
-# 4 Running a separate mongo shell for each js file is slower than
-# loading js files into one mongo shell process. Maybe have runTest
-# queue up all filenames ending in ".js" and run them in one mongo
-# shell at the "end" of testing?
-
-# 5 Right now small-oplog implies master/slave replication. Maybe
-# running with replication should be an orthogonal concern. (And
-# maybe test replica set replication, too.)
-
-# 6 We use cleanbb.py to clear out the dbpath, but cleanbb.py kills
-# off all mongods on a box, which means you can't run two smoke.py
-# jobs on the same host at once. So something's gotta change.
-
-from datetime import datetime
-from itertools import izip
-import glob
-from optparse import OptionParser
-import os
-import pprint
-import re
-import shlex
-import signal
-import socket
-import stat
-from subprocess import (PIPE, Popen, STDOUT)
-import sys
-import time
-import threading
-import traceback
-
-from pymongo import MongoClient
-from pymongo.errors import OperationFailure
-from pymongo import ReadPreference
-
-import cleanbb
-import smoke
-import utils
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-
-try:
- from hashlib import md5 # new in 2.5
-except ImportError:
- from md5 import md5 # deprecated in 2.5
-
-try:
- import json
-except:
- try:
- import simplejson as json
- except:
- json = None
-
-
-# TODO clean this up so we don't need globals...
-mongo_repo = os.getcwd() #'./'
-failfile = os.path.join(mongo_repo, 'failfile.smoke')
-test_path = None
-mongod_executable = None
-mongod_port = None
-shell_executable = None
-continue_on_failure = None
-file_of_commands_mode = False
-start_mongod = True
-temp_path = None
-clean_every_n_tests = 1
-clean_whole_dbroot = False
-
-tests = []
-winners = []
-losers = {}
-fails = [] # like losers but in format of tests
-
-# For replication hash checking
-replicated_collections = []
-lost_in_slave = []
-lost_in_master = []
-screwy_in_slave = {}
-
-smoke_db_prefix = ''
-small_oplog = False
-small_oplog_rs = False
-
-test_report = { "results": [] }
-report_file = None
-
-# This class just implements the with statement API
-class NullMongod(object):
- def start(self):
- pass
-
- def stop(self):
- pass
-
- def __enter__(self):
- self.start()
- return self
-
- def __exit__(self, type, value, traceback):
- self.stop()
- return not isinstance(value, Exception)
-
-
-def dump_stacks(signal, frame):
- print "======================================"
- print "DUMPING STACKS due to SIGUSR1 signal"
- print "======================================"
- threads = threading.enumerate();
-
- print "Total Threads: " + str(len(threads))
-
- for id, stack in sys._current_frames().items():
- print "Thread %d" % (id)
- print "".join(traceback.format_stack(stack))
- print "======================================"
-
-
-def buildlogger(cmd, is_global=False):
- # if the environment variable MONGO_USE_BUILDLOGGER
- # is set to 'true', then wrap the command with a call
- # to buildlogger.py, which sends output to the buidlogger
- # machine; otherwise, return as usual.
- if os.environ.get('MONGO_USE_BUILDLOGGER', '').lower().strip() == 'true':
- if is_global:
- return [utils.find_python(), 'buildscripts/buildlogger.py', '-g'] + cmd
- else:
- return [utils.find_python(), 'buildscripts/buildlogger.py'] + cmd
- return cmd
-
-
-def clean_dbroot(dbroot="", nokill=False):
- # Clean entire /data/db dir if --with-cleanbb, else clean specific database path.
- if clean_whole_dbroot and not small_oplog:
- dbroot = os.path.normpath(smoke_db_prefix + "/data/db")
- if os.path.exists(dbroot):
- print("clean_dbroot: %s" % dbroot)
- cleanbb.cleanup(dbroot, nokill)
-
-
-class mongod(NullMongod):
- def __init__(self, **kwargs):
- self.kwargs = kwargs
- self.proc = None
- self.auth = False
-
- def ensure_test_dirs(self):
- utils.ensureDir(smoke_db_prefix + "/tmp/unittest/")
- utils.ensureDir(smoke_db_prefix + "/data/")
- utils.ensureDir(smoke_db_prefix + "/data/db/")
-
- def check_mongo_port(self, port=27017):
- sock = socket.socket()
- sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
- sock.settimeout(1)
- sock.connect(("localhost", int(port)))
- sock.close()
-
- def is_mongod_up(self, port=mongod_port):
- if not start_mongod:
- return False
- try:
- self.check_mongo_port(int(port))
- return True
- except Exception,e:
- print >> sys.stderr, e
- return False
-
- def did_mongod_start(self, port=mongod_port, timeout=300):
- while timeout > 0:
- time.sleep(1)
- is_up = self.is_mongod_up(port)
- if is_up:
- return True
- timeout = timeout - 1
- print >> sys.stderr, "timeout starting mongod"
- return False
-
- def start(self):
- global mongod_port
- global mongod
- if self.proc:
- print >> sys.stderr, "probable bug: self.proc already set in start()"
- return
- self.ensure_test_dirs()
- dir_name = smoke_db_prefix + "/data/db/sconsTests/"
- self.port = int(mongod_port)
- self.slave = False
- if 'slave' in self.kwargs:
- dir_name = smoke_db_prefix + '/data/db/sconsTestsSlave/'
- srcport = mongod_port
- self.port += 1
- self.slave = True
-
- clean_dbroot(dbroot=dir_name, nokill=self.slave)
- utils.ensureDir(dir_name)
-
- argv = [mongod_executable, "--port", str(self.port), "--dbpath", dir_name]
- # These parameters are always set for tests
- # SERVER-9137 Added httpinterface parameter to keep previous behavior
- argv += ['--setParameter', 'enableTestCommands=1', '--httpinterface']
- if self.kwargs.get('small_oplog'):
- argv += ["--master", "--oplogSize", "511"]
- if self.kwargs.get('storage_engine'):
- argv += ["--storageEngine", self.kwargs.get('storage_engine')]
- if self.kwargs.get('wiredtiger_engine_config'):
- argv += ["--wiredTigerEngineConfig", self.kwargs.get('wiredtiger_engine_config')]
- if self.kwargs.get('wiredtiger_collection_config'):
- argv += ["--wiredTigerCollectionConfig", self.kwargs.get('wiredtiger_collection_config')]
- if self.kwargs.get('wiredtiger_index_config'):
- argv += ["--wiredTigerIndexConfig", self.kwargs.get('wiredtiger_index_config')]
- params = self.kwargs.get('set_parameters', None)
- if params:
- for p in params.split(','): argv += ['--setParameter', p]
- if self.kwargs.get('small_oplog_rs'):
- argv += ["--replSet", "foo", "--oplogSize", "511"]
- if self.slave:
- argv += ['--slave', '--source', 'localhost:' + str(srcport)]
- if self.kwargs.get('no_journal'):
- argv += ['--nojournal']
- if self.kwargs.get('no_preallocj'):
- argv += ['--nopreallocj']
- if self.kwargs.get('auth'):
- argv += ['--auth', '--setParameter', 'enableLocalhostAuthBypass=false']
- authMechanism = self.kwargs.get('authMechanism', 'SCRAM-SHA-1')
- if authMechanism != 'SCRAM-SHA-1':
- argv += ['--setParameter', 'authenticationMechanisms=' + authMechanism]
- self.auth = True
- if self.kwargs.get('keyFile'):
- argv += ['--keyFile', self.kwargs.get('keyFile')]
- if self.kwargs.get('use_ssl') or self.kwargs.get('use_x509'):
- argv += ['--sslMode', "requireSSL",
- '--sslPEMKeyFile', 'jstests/libs/server.pem',
- '--sslCAFile', 'jstests/libs/ca.pem',
- '--sslWeakCertificateValidation']
- if self.kwargs.get('use_x509'):
- argv += ['--clusterAuthMode','x509'];
- self.auth = True
- print "running " + " ".join(argv)
- self.proc = self._start(buildlogger(argv, is_global=True))
-
- if not self.did_mongod_start(self.port):
- raise Exception("Failed to start mongod")
-
- if self.slave:
- local = MongoClient(port=self.port,
- read_preference=ReadPreference.SECONDARY_PREFERRED).local
- synced = False
- while not synced:
- synced = True
- for source in local.sources.find(fields=["syncedTo"]):
- synced = synced and "syncedTo" in source and source["syncedTo"]
-
- def _start(self, argv):
- """In most cases, just call subprocess.Popen(). On windows,
- add the started process to a new Job Object, so that any
- child processes of this process can be killed with a single
- call to TerminateJobObject (see self.stop()).
- """
-
- if os.sys.platform == "win32":
- # Create a job object with the "kill on job close"
- # flag; this is inherited by child processes (ie
- # the mongod started on our behalf by buildlogger)
- # and lets us terminate the whole tree of processes
- # rather than orphaning the mongod.
- import win32job
-
- # Magic number needed to allow job reassignment in Windows 7
- # see: MSDN - Process Creation Flags - ms684863
- CREATE_BREAKAWAY_FROM_JOB = 0x01000000
-
- proc = Popen(argv, creationflags=CREATE_BREAKAWAY_FROM_JOB)
-
- self.job_object = win32job.CreateJobObject(None, '')
-
- job_info = win32job.QueryInformationJobObject(
- self.job_object, win32job.JobObjectExtendedLimitInformation)
- job_info['BasicLimitInformation']['LimitFlags'] |= win32job.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE
- win32job.SetInformationJobObject(
- self.job_object,
- win32job.JobObjectExtendedLimitInformation,
- job_info)
-
- win32job.AssignProcessToJobObject(self.job_object, proc._handle)
-
- else:
- proc = Popen(argv)
-
- return proc
-
- def stop(self):
- if not self.proc:
- print >> sys.stderr, "probable bug: self.proc unset in stop()"
- return
- try:
- if os.sys.platform == "win32":
- import win32job
- win32job.TerminateJobObject(self.job_object, -1)
- # Windows doesn't seem to kill the process immediately, so give it some time to die
- time.sleep(5)
- elif hasattr(self.proc, "terminate"):
- # This method added in Python 2.6
- self.proc.terminate()
- else:
- os.kill(self.proc.pid, 15)
- except Exception, e:
- print >> sys.stderr, "error shutting down mongod"
- print >> sys.stderr, e
- self.proc.wait()
- sys.stderr.flush()
- sys.stdout.flush()
-
- # Fail hard if mongod terminates with an error. That might indicate that an
- # instrumented build (e.g. LSAN) has detected an error. For now we aren't doing this on
- # windows because the exit code seems to be unpredictable. We don't have LSAN there
- # anyway.
- retcode = self.proc.returncode
- if os.sys.platform != "win32" and retcode != 0:
- raise(Exception('mongod process exited with non-zero code %d' % retcode))
-
- def wait_for_repl(self):
- print "Awaiting replicated (w:2, wtimeout:5min) insert (port:" + str(self.port) + ")"
- MongoClient(port=self.port).testing.smokeWait.insert({}, w=2, wtimeout=5*60*1000)
- print "Replicated write completed -- done wait_for_repl"
-
-class Bug(Exception):
- def __str__(self):
- return 'bug in smoke.py: ' + super(Bug, self).__str__()
-
-class TestFailure(Exception):
- pass
-
-class TestExitFailure(TestFailure):
- def __init__(self, *args):
- self.path = args[0]
- self.status=args[1]
-
- def __str__(self):
- return "test %s exited with status %d" % (self.path, self.status)
-
-class TestServerFailure(TestFailure):
- def __init__(self, *args):
- self.path = args[0]
- self.status = -1 # this is meaningless as an exit code, but
- # that's the point.
- def __str__(self):
- return 'mongod not running after executing test %s' % self.path
-
-def check_db_hashes(master, slave):
- # Need to pause a bit so a slave might catch up...
- if not slave.slave:
- raise(Bug("slave instance doesn't have slave attribute set"))
-
- print "waiting for slave (%s) to catch up to master (%s)" % (slave.port, master.port)
- master.wait_for_repl()
- print "caught up!"
-
- # FIXME: maybe make this run dbhash on all databases?
- for mongod in [master, slave]:
- client = MongoClient(port=mongod.port, read_preference=ReadPreference.SECONDARY_PREFERRED)
- mongod.dbhash = client.test.command("dbhash")
- mongod.dict = mongod.dbhash["collections"]
-
- global lost_in_slave, lost_in_master, screwy_in_slave, replicated_collections
-
- replicated_collections += master.dict.keys()
-
- for coll in replicated_collections:
- if coll not in slave.dict and coll not in lost_in_slave:
- lost_in_slave.append(coll)
- mhash = master.dict[coll]
- shash = slave.dict[coll]
- if mhash != shash:
- mTestDB = MongoClient(port=master.port).test
- sTestDB = MongoClient(port=slave.port,
- read_preference=ReadPreference.SECONDARY_PREFERRED).test
- mCount = mTestDB[coll].count()
- sCount = sTestDB[coll].count()
- stats = {'hashes': {'master': mhash, 'slave': shash},
- 'counts':{'master': mCount, 'slave': sCount}}
- try:
- mDocs = list(mTestDB[coll].find().sort("_id", 1))
- sDocs = list(sTestDB[coll].find().sort("_id", 1))
- mDiffDocs = list()
- sDiffDocs = list()
- for left, right in izip(mDocs, sDocs):
- if left != right:
- mDiffDocs.append(left)
- sDiffDocs.append(right)
-
- stats["docs"] = {'master': mDiffDocs, 'slave': sDiffDocs }
- except Exception, e:
- stats["error-docs"] = e;
-
- screwy_in_slave[coll] = stats
- if mhash == "no _id _index":
- mOplog = mTestDB.connection.local["oplog.$main"];
- oplog_entries = list(mOplog.find({"$or": [{"ns":mTestDB[coll].full_name}, \
- {"op":"c"}]}).sort("$natural", 1))
- print "oplog for %s" % mTestDB[coll].full_name
- for doc in oplog_entries:
- pprint.pprint(doc, width=200)
-
-
- for db in slave.dict.keys():
- if db not in master.dict and db not in lost_in_master:
- lost_in_master.append(db)
-
-
-def ternary( b , l="true", r="false" ):
- if b:
- return l
- return r
-
-# Blech.
-def skipTest(path):
- basename = os.path.basename(path)
- parentPath = os.path.dirname(path)
- parentDir = os.path.basename(parentPath)
- if small_oplog: # For tests running in parallel
- if basename in ["cursor8.js", "indexh.js", "dropdb.js", "dropdb_race.js",
- "connections_opened.js", "opcounters_write_cmd.js", "dbadmin.js"]:
- return True
- if use_ssl:
- # Skip tests using mongobridge since it does not support SSL
- # TODO: Remove when SERVER-10910 has been resolved.
- if basename in ["gridfs.js", "initial_sync3.js", "majority.js", "no_chaining.js",
- "rollback4.js", "slavedelay3.js", "sync2.js", "tags.js"]:
- return True
- # TODO: For now skip tests using MongodRunner, remove when SERVER-10909 has been resolved
- if basename in ["fastsync.js", "index_retry.js", "ttl_repl_maintenance.js",
- "unix_socket1.js"]:
- return True;
- if auth or keyFile or use_x509: # For tests running with auth
- # Skip any tests that run with auth explicitly
- if parentDir.lower() == "auth" or "auth" in basename.lower():
- return True
- if parentPath == mongo_repo: # Skip client tests
- return True
- if parentDir == "tool": # SERVER-6368
- return True
- if parentDir == "dur": # SERVER-7317
- return True
- if parentDir == "disk": # SERVER-7356
- return True
-
- authTestsToSkip = [("jstests", "drop2.js"), # SERVER-8589,
- ("jstests", "killop.js"), # SERVER-10128
- ("sharding", "sync3.js"), # SERVER-6388 for this and those below
- ("sharding", "sync6.js"),
- ("sharding", "parallel.js"),
- ("sharding", "copydb_from_mongos.js"), # SERVER-13080
- ("jstests", "bench_test1.js"),
- ("jstests", "bench_test2.js"),
- ("jstests", "bench_test3.js"),
- ("core", "bench_test1.js"),
- ("core", "bench_test2.js"),
- ("core", "bench_test3.js"),
- ]
-
- if os.path.join(parentDir,basename) in [ os.path.join(*test) for test in authTestsToSkip ]:
- return True
-
- return False
-
-legacyWriteRE = re.compile(r"jstests[/\\]multiVersion")
-def setShellWriteModeForTest(path, argv):
- swm = shell_write_mode
- if legacyWriteRE.search(path):
- swm = "legacy"
- argv += ["--writeMode", swm]
-
-def runTest(test, result):
- # result is a map containing test result details, like result["url"]
-
- # test is a tuple of ( filename , usedb<bool> )
- # filename should be a js file to run
- # usedb is true if the test expects a mongod to be running
-
- (path, usedb) = test
- (ignore, ext) = os.path.splitext(path)
- test_mongod = mongod()
- mongod_is_up = test_mongod.is_mongod_up(mongod_port)
- result["mongod_running_at_start"] = mongod_is_up;
-
- if file_of_commands_mode:
- # smoke.py was invoked like "--mode files --from-file foo",
- # so don't try to interpret the test path too much
- if os.sys.platform == "win32":
- argv = [path]
- else:
- argv = shlex.split(path)
- path = argv[0]
- # if the command is a python script, use the script name
- if os.path.basename(path) in ('python', 'python.exe'):
- path = argv[1]
- elif ext == ".js":
- argv = [shell_executable, "--port", mongod_port, '--authenticationMechanism', authMechanism]
-
- setShellWriteModeForTest(path, argv)
-
- if not usedb:
- argv += ["--nodb"]
- if small_oplog or small_oplog_rs:
- argv += ["--eval", 'testingReplication = true;']
- if use_ssl:
- argv += ["--ssl",
- "--sslPEMKeyFile", "jstests/libs/client.pem",
- "--sslCAFile", "jstests/libs/ca.pem",
- "--sslAllowInvalidCertificates"]
- argv += [path]
- elif ext in ["", ".exe"]:
- # Blech.
- if os.path.basename(path) in ["dbtest", "dbtest.exe"]:
- argv = [path]
- # default data directory for dbtest is /tmp/unittest
- if smoke_db_prefix:
- dir_name = smoke_db_prefix + '/unittests'
- argv.extend(["--dbpath", dir_name] )
-
- if storage_engine:
- argv.extend(["--storageEngine", storage_engine])
- if wiredtiger_engine_config:
- argv.extend(["--wiredTigerEngineConfig", wiredtiger_engine_config])
- if wiredtiger_collection_config:
- argv.extend(["--wiredTigerCollectionConfig", wiredtiger_collection_config])
- if wiredtiger_index_config:
- argv.extend(["--wiredTigerIndexConfig", wiredtiger_index_config])
-
- # more blech
- elif os.path.basename(path) in ['mongos', 'mongos.exe']:
- argv = [path, "--test"]
- else:
- argv = [test_path and os.path.abspath(os.path.join(test_path, path)) or path,
- "--port", mongod_port]
- else:
- raise Bug("fell off in extension case: %s" % path)
-
- mongo_test_filename = os.path.basename(path)
-
- # sys.stdout.write() is more atomic than print, so using it prevents
- # lines being interrupted by, e.g., child processes
- sys.stdout.write(" *******************************************\n")
- sys.stdout.write(" Test : %s ...\n" % mongo_test_filename)
- sys.stdout.flush()
-
- # FIXME: we don't handle the case where the subprocess
- # hangs... that's bad.
- if ( argv[0].endswith( 'mongo' ) or argv[0].endswith( 'mongo.exe' ) ) and not '--eval' in argv :
- evalString = 'load("jstests/libs/servers.js");load("jstests/libs/servers_misc.js");' +\
- 'TestData = new Object();' + \
- 'TestData.storageEngine = "' + ternary( storage_engine, storage_engine, "" ) + '";' + \
- 'TestData.wiredTigerEngineConfig = "' + ternary( wiredtiger_engine_config, wiredtiger_engine_config, "" ) + '";' + \
- 'TestData.wiredTigerCollectionConfig = "' + ternary( wiredtiger_collection_config, wiredtiger_collection_config, "" ) + '";' + \
- 'TestData.wiredTigerIndexConfig = "' + ternary( wiredtiger_index_config, wiredtiger_index_config, "" ) + '";' + \
- 'TestData.testPath = "' + path + '";' + \
- 'TestData.testFile = "' + os.path.basename( path ) + '";' + \
- 'TestData.testName = "' + re.sub( ".js$", "", os.path.basename( path ) ) + '";' + \
- 'TestData.setParameters = "' + ternary( set_parameters, set_parameters, "" ) + '";' + \
- 'TestData.setParametersMongos = "' + ternary( set_parameters_mongos, set_parameters_mongos, "" ) + '";' + \
- 'TestData.noJournal = ' + ternary( no_journal ) + ";" + \
- 'TestData.noJournalPrealloc = ' + ternary( no_preallocj ) + ";" + \
- 'TestData.auth = ' + ternary( auth ) + ";" + \
- 'TestData.keyFile = ' + ternary( keyFile , '"' + str(keyFile) + '"' , 'null' ) + ";" + \
- 'TestData.keyFileData = ' + ternary( keyFile , '"' + str(keyFileData) + '"' , 'null' ) + ";" + \
- 'TestData.authMechanism = ' + ternary( authMechanism,
- '"' + str(authMechanism) + '"', 'null') + ";" + \
- 'TestData.useSSL = ' + ternary( use_ssl ) + ";" + \
- 'TestData.useX509 = ' + ternary( use_x509 ) + ";"
- # this updates the default data directory for mongod processes started through shell (src/mongo/shell/servers.js)
- evalString += 'MongoRunner.dataDir = "' + os.path.abspath(smoke_db_prefix + '/data/db') + '";'
- evalString += 'MongoRunner.dataPath = MongoRunner.dataDir + "/";'
- if temp_path:
- evalString += 'TestData.tmpPath = "' + temp_path + '";'
- if os.sys.platform == "win32":
- # double quotes in the evalString on windows; this
- # prevents the backslashes from being removed when
- # the shell (i.e. bash) evaluates this string. yuck.
- evalString = evalString.replace('\\', '\\\\')
-
- if auth and usedb:
- evalString += 'jsTest.authenticate(db.getMongo());'
-
- if os.getenv('SMOKE_EVAL') is not None:
- evalString += os.getenv('SMOKE_EVAL')
-
- argv = argv + [ '--eval', evalString]
-
-
- if argv[0].endswith( 'dbtest' ) or argv[0].endswith( 'dbtest.exe' ):
- if no_preallocj :
- argv = argv + [ '--nopreallocj' ]
- if temp_path:
- argv = argv + [ '--tempPath', temp_path ]
-
-
- sys.stdout.write(" Command : %s\n" % ' '.join(argv))
- sys.stdout.write(" Date : %s\n" % datetime.now().ctime())
- sys.stdout.flush()
-
- os.environ['MONGO_TEST_FILENAME'] = mongo_test_filename
- t1 = time.time()
-
- proc = Popen(buildlogger(argv), cwd=test_path, stdout=PIPE, stderr=STDOUT, bufsize=0)
- first_line = proc.stdout.readline() # Get suppressed output URL
- m = re.search(r"\s*\(output suppressed; see (?P<url>.*)\)" + os.linesep, first_line)
- if m:
- result["url"] = m.group("url")
- sys.stdout.write(first_line)
- sys.stdout.flush()
- while True:
- # print until subprocess's stdout closed.
- # Not using "for line in file" since that has unwanted buffering.
- line = proc.stdout.readline()
- if not line:
- break;
-
- sys.stdout.write(line)
- sys.stdout.flush()
-
- proc.wait() # wait if stdout is closed before subprocess exits.
- r = proc.returncode
-
- t2 = time.time()
- del os.environ['MONGO_TEST_FILENAME']
-
- timediff = t2 - t1
- # timediff is seconds by default
- scale = 1
- suffix = "seconds"
- # if timediff is less than 10 seconds use ms
- if timediff < 10:
- scale = 1000
- suffix = "ms"
- # if timediff is more than 60 seconds use minutes
- elif timediff > 60:
- scale = 1.0 / 60.0
- suffix = "minutes"
- sys.stdout.write(" %10.4f %s\n" % ((timediff) * scale, suffix))
- sys.stdout.flush()
-
- result["exit_code"] = r
-
-
- is_mongod_still_up = test_mongod.is_mongod_up(mongod_port)
- if start_mongod and not is_mongod_still_up:
- print "mongod is not running after test"
- result["mongod_running_at_end"] = is_mongod_still_up;
- raise TestServerFailure(path)
-
- result["mongod_running_at_end"] = is_mongod_still_up;
-
- if r != 0:
- raise TestExitFailure(path, r)
-
- print ""
-
-def run_tests(tests):
- # FIXME: some suites of tests start their own mongod, so don't
- # need this. (So long as there are no conflicts with port,
- # dbpath, etc., and so long as we shut ours down properly,
- # starting this mongod shouldn't break anything, though.)
-
- # The reason we want to use "with" is so that we get __exit__ semantics
- # but "with" is only supported on Python 2.5+
-
- master = NullMongod()
- slave = NullMongod()
-
- try:
- if start_mongod:
- master = mongod(small_oplog_rs=small_oplog_rs,
- small_oplog=small_oplog,
- no_journal=no_journal,
- storage_engine=storage_engine,
- wiredtiger_engine_config=wiredtiger_engine_config,
- wiredtiger_collection_config=wiredtiger_collection_config,
- wiredtiger_index_config=wiredtiger_index_config,
- set_parameters=set_parameters,
- no_preallocj=no_preallocj,
- auth=auth,
- authMechanism=authMechanism,
- keyFile=keyFile,
- use_ssl=use_ssl,
- use_x509=use_x509)
- master.start()
-
- if small_oplog:
- slave = mongod(slave=True,
- storage_engine=storage_engine,
- wiredtiger_engine_config=wiredtiger_engine_config,
- wiredtiger_collection_config=wiredtiger_collection_config,
- wiredtiger_index_config=wiredtiger_index_config,
- set_parameters=set_parameters)
- slave.start()
- elif small_oplog_rs:
- slave = mongod(slave=True,
- small_oplog_rs=small_oplog_rs,
- small_oplog=small_oplog,
- no_journal=no_journal,
- storage_engine=storage_engine,
- wiredtiger_engine_config=wiredtiger_engine_config,
- wiredtiger_collection_config=wiredtiger_collection_config,
- wiredtiger_index_config=wiredtiger_index_config,
- set_parameters=set_parameters,
- no_preallocj=no_preallocj,
- auth=auth,
- authMechanism=authMechanism,
- keyFile=keyFile,
- use_ssl=use_ssl,
- use_x509=use_x509)
- slave.start()
- primary = MongoClient(port=master.port);
-
- primary.admin.command({'replSetInitiate' : {'_id' : 'foo', 'members' : [
- {'_id': 0, 'host':'localhost:%s' % master.port},
- {'_id': 1, 'host':'localhost:%s' % slave.port,'priority':0}]}})
-
- ismaster = False
- while not ismaster:
- result = primary.admin.command("ismaster");
- ismaster = result["ismaster"]
- time.sleep(1)
-
- if small_oplog or small_oplog_rs:
- master.wait_for_repl()
-
- for tests_run, test in enumerate(tests):
- tests_run += 1 # enumerate from 1, python 2.5 compatible
- test_result = { "start": time.time() }
-
- (test_path, use_db) = test
-
- if test_path.startswith(mongo_repo + os.path.sep):
- test_result["test_file"] = test_path[len(mongo_repo)+1:]
- else:
- # user could specify a file not in repo. leave it alone.
- test_result["test_file"] = test_path
-
- try:
- if skipTest(test_path):
- test_result["status"] = "skip"
-
- print "skipping " + test_path
- else:
- fails.append(test)
- runTest(test, test_result)
- fails.pop()
- winners.append(test)
-
- test_result["status"] = "pass"
-
- test_result["end"] = time.time()
- test_result["elapsed"] = test_result["end"] - test_result["start"]
- test_report["results"].append( test_result )
- if small_oplog or small_oplog_rs:
- master.wait_for_repl()
- # check the db_hashes
- if isinstance(slave, mongod):
- check_db_hashes(master, slave)
- check_and_report_replication_dbhashes()
-
- elif use_db: # reach inside test and see if "usedb" is true
- if clean_every_n_tests and (tests_run % clean_every_n_tests) == 0:
- # Restart mongod periodically to clean accumulated test data
- # clean_dbroot() is invoked by mongod.start()
- master.stop()
- master = mongod(small_oplog_rs=small_oplog_rs,
- small_oplog=small_oplog,
- no_journal=no_journal,
- storage_engine=storage_engine,
- wiredtiger_engine_config=wiredtiger_engine_config,
- wiredtiger_collection_config=wiredtiger_collection_config,
- wiredtiger_index_config=wiredtiger_index_config,
- set_parameters=set_parameters,
- no_preallocj=no_preallocj,
- auth=auth,
- authMechanism=authMechanism,
- keyFile=keyFile,
- use_ssl=use_ssl,
- use_x509=use_x509)
- master.start()
-
- except TestFailure, f:
- test_result["end"] = time.time()
- test_result["elapsed"] = test_result["end"] - test_result["start"]
- test_result["error"] = str(f)
- test_result["status"] = "fail"
- test_report["results"].append( test_result )
- try:
- print f
- # Record the failing test and re-raise.
- losers[f.path] = f.status
- raise f
- except TestServerFailure, f:
- return 2
- except TestFailure, f:
- if not continue_on_failure:
- return 1
- if isinstance(slave, mongod):
- check_db_hashes(master, slave)
-
- finally:
- slave.stop()
- master.stop()
- return 0
-
-
-def check_and_report_replication_dbhashes():
- def missing(lst, src, dst):
- if lst:
- print """The following collections were present in the %s but not the %s
-at the end of testing:""" % (src, dst)
- for db in lst:
- print db
-
- missing(lost_in_slave, "master", "slave")
- missing(lost_in_master, "slave", "master")
- if screwy_in_slave:
- print """The following collections has different hashes in master and slave
-at the end of testing:"""
- for coll in screwy_in_slave.keys():
- stats = screwy_in_slave[coll]
- # Counts are "approx" because they are collected after the dbhash runs and may not
- # reflect the states of the collections that were hashed. If the hashes differ, one
- # possibility is that a test exited with writes still in-flight.
- print "collection: %s\t (master/slave) hashes: %s/%s counts (approx): %i/%i" % (coll, stats['hashes']['master'], stats['hashes']['slave'], stats['counts']['master'], stats['counts']['slave'])
- if "docs" in stats:
- if (("master" in stats["docs"] and len(stats["docs"]["master"]) != 0) or
- ("slave" in stats["docs"] and len(stats["docs"]["slave"]) != 0)):
- print "All docs matched!"
- else:
- print "Different Docs"
- print "Master docs:"
- pprint.pprint(stats["docs"]["master"], indent=2)
- print "Slave docs:"
- pprint.pprint(stats["docs"]["slave"], indent=2)
- if "error-docs" in stats:
- print "Error getting docs to diff:"
- pprint.pprint(stats["error-docs"])
- return True
-
- if (small_oplog or small_oplog_rs) and not (lost_in_master or lost_in_slave or screwy_in_slave):
- print "replication ok for %d collections" % (len(replicated_collections))
-
- return False
-
-
-def report():
- print "%d tests succeeded" % len(winners)
- num_missed = len(tests) - (len(winners) + len(losers.keys()))
- if num_missed:
- print "%d tests didn't get run" % num_missed
- if losers:
- print "The following tests failed (with exit code):"
- for loser in losers:
- print "%s\t%d" % (loser, losers[loser])
-
- test_result = { "start": time.time() }
- if check_and_report_replication_dbhashes():
- test_result["end"] = time.time()
- test_result["elapsed"] = test_result["end"] - test_result["start"]
- test_result["test_file"] = "/#dbhash#"
- test_result["error"] = "dbhash mismatch"
- test_result["status"] = "fail"
- test_report["results"].append( test_result )
-
- if report_file:
- f = open( report_file, "wb" )
- f.write( json.dumps( test_report ) )
- f.close()
-
- if losers or lost_in_slave or lost_in_master or screwy_in_slave:
- raise Exception("Test failures")
-
-# Keys are the suite names (passed on the command line to smoke.py)
-# Values are pairs: (filenames, <start mongod before running tests>)
-suiteGlobalConfig = {"js": ("core/*.js", True),
- "quota": ("quota/*.js", True),
- "jsPerf": ("perf/*.js", True),
- "disk": ("disk/*.js", True),
- "noPassthroughWithMongod": ("noPassthroughWithMongod/*.js", True),
- "noPassthrough": ("noPassthrough/*.js", False),
- "parallel": ("parallel/*.js", True),
- "clone": ("clone/*.js", False),
- "repl": ("repl/*.js", False),
- "replSets": ("replsets/*.js", False),
- "dur": ("dur/*.js", False),
- "auth": ("auth/*.js", False),
- "sharding": ("sharding/*.js", False),
- "tool": ("tool/*.js", False),
- "aggregation": ("aggregation/*.js", True),
- "multiVersion": ("multiVersion/*.js", True),
- "failPoint": ("fail_point/*.js", False),
- "ssl": ("ssl/*.js", True),
- "sslSpecial": ("sslSpecial/*.js", True),
- "jsCore": ("core/*.js", True),
- "mmap_v1": ("mmap_v1/*.js", True),
- "gle": ("gle/*.js", True),
- "rocksDB": ("rocksDB/*.js", True),
- "slow1": ("slow1/*.js", True),
- "slow2": ("slow2/*.js", True),
- }
-
-def get_module_suites():
- """Attempts to discover and return information about module test suites
-
- Returns a dictionary of module suites in the format:
-
- {
- "<suite_name>" : "<full_path_to_suite_directory/[!_]*.js>",
- ...
- }
-
- This means the values of this dictionary can be used as "glob"s to match all jstests in the
- suite directory that don't start with an underscore
-
- The module tests should be put in 'src/mongo/db/modules/<module_name>/<suite_name>/*.js'
-
- NOTE: This assumes that if we have more than one module the suite names don't conflict
- """
- modules_directory = 'src/mongo/db/modules'
- test_suites = {}
-
- # Return no suites if we have no modules
- if not os.path.exists(modules_directory) or not os.path.isdir(modules_directory):
- return {}
-
- module_directories = os.listdir(modules_directory)
- for module_directory in module_directories:
-
- test_directory = os.path.join(modules_directory, module_directory, "jstests")
-
- # Skip this module if it has no "jstests" directory
- if not os.path.exists(test_directory) or not os.path.isdir(test_directory):
- continue
-
- # Get all suites for this module
- for test_suite in os.listdir(test_directory):
- test_suites[test_suite] = os.path.join(test_directory, test_suite, "[!_]*.js")
-
- return test_suites
-
-def expand_suites(suites,expandUseDB=True):
- """Takes a list of suites and expands to a list of tests according to a set of rules.
-
- Keyword arguments:
- suites -- list of suites specified by the user
- expandUseDB -- expand globs (such as [!_]*.js) for tests that are run against a database
- (default True)
-
- This function handles expansion of globs (such as [!_]*.js), aliases (such as "client" and
- "all"), detection of suites in the "modules" directory, and enumerating the test files in a
- given suite. It returns a list of tests of the form (path_to_test, usedb), where the second
- part of the tuple specifies whether the test is run against the database (see --nodb in the
- mongo shell)
-
- """
- globstr = None
- tests = []
- module_suites = get_module_suites()
- for suite in suites:
- if suite == 'all':
- return expand_suites(['dbtest',
- 'jsCore',
- 'jsPerf',
- 'mmap_v1',
- 'noPassthroughWithMongod',
- 'noPassthrough',
- 'clone',
- 'parallel',
- 'repl',
- 'auth',
- 'sharding',
- 'slow1',
- 'slow2',
- 'tool'],
- expandUseDB=expandUseDB)
- if suite == 'dbtest' or suite == 'test':
- if os.sys.platform == "win32":
- program = 'dbtest.exe'
- else:
- program = 'dbtest'
- (globstr, usedb) = (program, False)
- elif suite == 'mongosTest':
- if os.sys.platform == "win32":
- program = 'mongos.exe'
- else:
- program = 'mongos'
- tests += [(os.path.join(mongo_repo, program), False)]
- elif os.path.exists( suite ):
- usedb = True
- for name in suiteGlobalConfig:
- if suite in glob.glob( "jstests/" + suiteGlobalConfig[name][0] ):
- usedb = suiteGlobalConfig[name][1]
- break
- tests += [ ( os.path.join( mongo_repo , suite ) , usedb ) ]
- elif suite in module_suites:
- # Currently we connect to a database in all module tests since there's no mechanism yet
- # to configure it independently
- usedb = True
- paths = glob.glob(module_suites[suite])
- paths.sort()
- tests += [(path, usedb) for path in paths]
- else:
- try:
- globstr, usedb = suiteGlobalConfig[suite]
- except KeyError:
- raise Exception('unknown test suite %s' % suite)
-
- if globstr:
- if usedb and not expandUseDB:
- tests += [ (suite,False) ]
- else:
- if globstr.endswith('.js'):
- loc = 'jstests/'
- else:
- loc = ''
- globstr = os.path.join(mongo_repo, (os.path.join(loc, globstr)))
- globstr = os.path.normpath(globstr)
- paths = glob.glob(globstr)
- paths.sort()
- tests += [(path, usedb) for path in paths]
-
- return tests
-
-
-def filter_tests_by_tag(tests, tag_query):
- """Selects tests from a list based on a query over the tags in the tests."""
-
- test_map = {}
- roots = []
- for test in tests:
- root = os.path.abspath(test[0])
- roots.append(root)
- test_map[root] = test
-
- new_style_tests = smoke.tests.build_tests(roots, extract_metadata=True)
- new_style_tests = smoke.suites.build_suite(new_style_tests, tag_query)
-
- print "\nTag query matches %s tests out of %s.\n" % (len(new_style_tests),
- len(tests))
-
- tests = []
- for new_style_test in new_style_tests:
- tests.append(test_map[os.path.abspath(new_style_test.filename)])
-
- return tests
-
-
-def add_exe(e):
- if os.sys.platform.startswith( "win" ) and not e.endswith( ".exe" ):
- e += ".exe"
- return e
-
-
-def set_globals(options, tests):
- global mongod_executable, mongod_port, shell_executable, continue_on_failure
- global small_oplog, small_oplog_rs
- global no_journal, set_parameters, set_parameters_mongos, no_preallocj, storage_engine, wiredtiger_engine_config, wiredtiger_collection_config, wiredtiger_index_config
- global auth, authMechanism, keyFile, keyFileData, smoke_db_prefix, test_path, start_mongod
- global use_ssl, use_x509
- global file_of_commands_mode
- global report_file, shell_write_mode, use_write_commands
- global temp_path
- global clean_every_n_tests
- global clean_whole_dbroot
-
- start_mongod = options.start_mongod
- if hasattr(options, 'use_ssl'):
- use_ssl = options.use_ssl
- if hasattr(options, 'use_x509'):
- use_x509 = options.use_x509
- use_ssl = use_ssl or use_x509
- #Careful, this can be called multiple times
- test_path = options.test_path
-
- mongod_executable = add_exe(options.mongod_executable)
- if not os.path.exists(mongod_executable):
- raise Exception("no mongod found in this directory.")
-
- mongod_port = options.mongod_port
-
- shell_executable = add_exe( options.shell_executable )
- if not os.path.exists(shell_executable):
- raise Exception("no mongo shell found in this directory.")
-
- continue_on_failure = options.continue_on_failure
- smoke_db_prefix = options.smoke_db_prefix
- small_oplog = options.small_oplog
- if hasattr(options, "small_oplog_rs"):
- small_oplog_rs = options.small_oplog_rs
- no_journal = options.no_journal
- storage_engine = options.storage_engine
- wiredtiger_engine_config = options.wiredtiger_engine_config
- wiredtiger_collection_config = options.wiredtiger_collection_config
- wiredtiger_index_config = options.wiredtiger_index_config
- set_parameters = options.set_parameters
- set_parameters_mongos = options.set_parameters_mongos
- no_preallocj = options.no_preallocj
- auth = options.auth
- authMechanism = options.authMechanism
- keyFile = options.keyFile
-
- clean_every_n_tests = options.clean_every_n_tests
- clean_whole_dbroot = options.with_cleanbb
-
- if auth and not keyFile:
- # if only --auth was given to smoke.py, load the
- # default keyFile from jstests/libs/authTestsKey
- keyFile = os.path.join(mongo_repo, 'jstests', 'libs', 'authTestsKey')
-
- if keyFile:
- f = open(keyFile, 'r')
- keyFileData = re.sub(r'\s', '', f.read()) # Remove all whitespace
- f.close()
- os.chmod(keyFile, stat.S_IRUSR | stat.S_IWUSR)
- else:
- keyFileData = None
-
- # if smoke.py is running a list of commands read from a
- # file (or stdin) rather than running a suite of js tests
- file_of_commands_mode = options.File and options.mode == 'files'
- # generate json report
- report_file = options.report_file
- temp_path = options.temp_path
-
- use_write_commands = options.use_write_commands
- shell_write_mode = options.shell_write_mode
-
-def file_version():
- return md5(open(__file__, 'r').read()).hexdigest()
-
-def clear_failfile():
- if os.path.exists(failfile):
- os.remove(failfile)
-
-def run_old_fails():
- global tests
-
- try:
- f = open(failfile, 'r')
- state = pickle.load(f)
- f.close()
- except Exception:
- try:
- f.close()
- except:
- pass
- clear_failfile()
- return # This counts as passing so we will run all tests
-
- if ('version' not in state or state['version'] != file_version()):
- print "warning: old version of failfile.smoke detected. skipping recent fails"
- clear_failfile()
- return
-
- testsAndOptions = state['testsAndOptions']
- tests = [x[0] for x in testsAndOptions]
- passed = []
- try:
- for (i, (test, options)) in enumerate(testsAndOptions):
- # SERVER-5102: until we can figure out a better way to manage
- # dependencies of the --only-old-fails build phase, just skip
- # tests which we can't safely run at this point
- path, usedb = test
-
- if not os.path.exists(path):
- passed.append(i)
- winners.append(test)
- continue
-
- filename = os.path.basename(path)
- if filename in ('dbtest', 'dbtest.exe') or filename.endswith('.js'):
- set_globals(options, [filename])
- oldWinners = len(winners)
- run_tests([test])
- if len(winners) != oldWinners: # can't use return value due to continue_on_failure
- passed.append(i)
- finally:
- for offset, i in enumerate(passed):
- testsAndOptions.pop(i - offset)
-
- if testsAndOptions:
- f = open(failfile, 'w')
- state = {'version':file_version(), 'testsAndOptions':testsAndOptions}
- pickle.dump(state, f)
- else:
- clear_failfile()
-
- report() # exits with failure code if there is an error
-
-def add_to_failfile(tests, options):
- try:
- f = open(failfile, 'r')
- testsAndOptions = pickle.load(f)["testsAndOptions"]
- except Exception:
- testsAndOptions = []
-
- for test in tests:
- if (test, options) not in testsAndOptions:
- testsAndOptions.append( (test, options) )
-
- state = {'version':file_version(), 'testsAndOptions':testsAndOptions}
- f = open(failfile, 'w')
- pickle.dump(state, f)
-
-
-
-def main():
- global mongod_executable, mongod_port, shell_executable, continue_on_failure, small_oplog
- global no_journal, set_parameters, set_parameters_mongos, no_preallocj, auth, storage_engine, wiredtiger_engine_config, wiredtiger_collection_config, wiredtiger_index_config
- global keyFile, smoke_db_prefix, test_path, use_write_commands
-
- try:
- signal.signal(signal.SIGUSR1, dump_stacks)
- except AttributeError:
- print "Cannot catch signals on Windows"
-
- parser = OptionParser(usage="usage: smoke.py [OPTIONS] ARGS*")
- parser.add_option('--mode', dest='mode', default='suite',
- help='If "files", ARGS are filenames; if "suite", ARGS are sets of tests (%default)')
- # Some of our tests hard-code pathnames e.g., to execute, so until
- # that changes we don't have the freedom to run from anyplace.
- # parser.add_option('--mongo-repo', dest='mongo_repo', default=None,
- parser.add_option('--test-path', dest='test_path', default=None,
- help="Path to the test executables to run, "
- "currently only used for 'client' (%default)")
- parser.add_option('--mongod', dest='mongod_executable', default=os.path.join(mongo_repo, 'mongod'),
- help='Path to mongod to run (%default)')
- parser.add_option('--port', dest='mongod_port', default="27999",
- help='Port the mongod will bind to (%default)')
- parser.add_option('--mongo', dest='shell_executable', default=os.path.join(mongo_repo, 'mongo'),
- help='Path to mongo, for .js test files (%default)')
- parser.add_option('--continue-on-failure', dest='continue_on_failure',
- action="store_true", default=False,
- help='If supplied, continue testing even after a test fails')
- parser.add_option('--from-file', dest='File',
- help="Run tests/suites named in FILE, one test per line, '-' means stdin")
- parser.add_option('--smoke-db-prefix', dest='smoke_db_prefix', default=smoke_db_prefix,
- help="Prefix to use for the mongods' dbpaths ('%default')")
- parser.add_option('--small-oplog', dest='small_oplog', default=False,
- action="store_true",
- help='Run tests with master/slave replication & use a small oplog')
- parser.add_option('--small-oplog-rs', dest='small_oplog_rs', default=False,
- action="store_true",
- help='Run tests with replica set replication & use a small oplog')
- parser.add_option('--storageEngine', dest='storage_engine', default=None,
- help='What storage engine to start mongod with')
- parser.add_option('--wiredTigerEngineConfig', dest='wiredtiger_engine_config', default=None,
- help='Wired Tiger configuration to pass through to mongod')
- parser.add_option('--wiredTigerCollectionConfig', dest='wiredtiger_collection_config', default=None,
- help='Wired Tiger collection configuration to pass through to mongod')
- parser.add_option('--wiredTigerIndexConfig', dest='wiredtiger_index_config', default=None,
- help='Wired Tiger index configuration to pass through to mongod')
- parser.add_option('--nojournal', dest='no_journal', default=False,
- action="store_true",
- help='Do not turn on journaling in tests')
- parser.add_option('--nopreallocj', dest='no_preallocj', default=False,
- action="store_true",
- help='Do not preallocate journal files in tests')
- parser.add_option('--auth', dest='auth', default=False,
- action="store_true",
- help='Run standalone mongods in tests with authentication enabled')
- parser.add_option('--use-x509', dest='use_x509', default=False,
- action="store_true",
- help='Use x509 auth for internal cluster authentication')
- parser.add_option('--authMechanism', dest='authMechanism', default='SCRAM-SHA-1',
- help='Use the given authentication mechanism, when --auth is used.')
- parser.add_option('--keyFile', dest='keyFile', default=None,
- help='Path to keyFile to use to run replSet and sharding tests with authentication enabled')
- parser.add_option('--ignore', dest='ignore_files', default=None,
- help='Pattern of files to ignore in tests')
- parser.add_option('--only-old-fails', dest='only_old_fails', default=False,
- action="store_true",
- help='Check the failfile and only run all tests that failed last time')
- parser.add_option('--reset-old-fails', dest='reset_old_fails', default=False,
- action="store_true",
- help='Clear the failfile. Do this if all tests pass')
- parser.add_option('--with-cleanbb', dest='with_cleanbb', action="store_true",
- default=False,
- help='Clear database files before first test')
- parser.add_option('--clean-every', dest='clean_every_n_tests', type='int',
- default=(1 if 'detect_leaks=1' in os.getenv("ASAN_OPTIONS", "") else 20),
- help='Clear database files every N tests [default %default]')
- parser.add_option('--dont-start-mongod', dest='start_mongod', default=True,
- action='store_false',
- help='Do not start mongod before commencing test running')
- parser.add_option('--use-ssl', dest='use_ssl', default=False,
- action='store_true',
- help='Run mongo shell and mongod instances with SSL encryption')
- parser.add_option('--set-parameters', dest='set_parameters', default="",
- help='Adds --setParameter to mongod for each passed in item in the csv list - ex. "param1=1,param2=foo" ')
- parser.add_option('--set-parameters-mongos', dest='set_parameters_mongos', default="",
- help='Adds --setParameter to mongos for each passed in item in the csv list - ex. "param1=1,param2=foo" ')
- parser.add_option('--temp-path', dest='temp_path', default=None,
- help='If present, passed as --tempPath to unittests and dbtests or TestData.tmpPath to mongo')
- # Buildlogger invocation from command line
- parser.add_option('--buildlogger-builder', dest='buildlogger_builder', default=None,
- action="store", help='Set the "builder name" for buildlogger')
- parser.add_option('--buildlogger-buildnum', dest='buildlogger_buildnum', default=None,
- action="store", help='Set the "build number" for buildlogger')
- parser.add_option('--buildlogger-url', dest='buildlogger_url', default=None,
- action="store", help='Set the url root for the buildlogger service')
- parser.add_option('--buildlogger-credentials', dest='buildlogger_credentials', default=None,
- action="store", help='Path to Python file containing buildlogger credentials')
- parser.add_option('--buildlogger-phase', dest='buildlogger_phase', default=None,
- action="store", help='Set the "phase" for buildlogger (e.g. "core", "auth") for display in the webapp (optional)')
- parser.add_option('--report-file', dest='report_file', default=None,
- action='store',
- help='Path to generate detailed json report containing all test details')
- parser.add_option('--use-write-commands', dest='use_write_commands', default=False,
- action='store_true',
- help='Deprecated(use --shell-write-mode): Sets the shell to use write commands by default')
- parser.add_option('--shell-write-mode', dest='shell_write_mode', default="commands",
- help='Sets the shell to use a specific write mode: commands/compatibility/legacy (default:legacy)')
-
- parser.add_option('--include-tags', dest='include_tags', default="", action='store',
- help='Filters jstests run by tag regex(es) - a tag in the test must match the regexes. ' +
- 'Specify single regex string or JSON array.')
-
- parser.add_option('--exclude-tags', dest='exclude_tags', default="", action='store',
- help='Filters jstests run by tag regex(es) - no tags in the test must match the regexes. ' +
- 'Specify single regex string or JSON array.')
-
- global tests
- (options, tests) = parser.parse_args()
-
- set_globals(options, tests)
-
- buildlogger_opts = (options.buildlogger_builder, options.buildlogger_buildnum, options.buildlogger_credentials)
- if all(buildlogger_opts):
- os.environ['MONGO_USE_BUILDLOGGER'] = 'true'
- os.environ['MONGO_BUILDER_NAME'] = options.buildlogger_builder
- os.environ['MONGO_BUILD_NUMBER'] = options.buildlogger_buildnum
- os.environ['BUILDLOGGER_CREDENTIALS'] = options.buildlogger_credentials
- if options.buildlogger_phase:
- os.environ['MONGO_PHASE'] = options.buildlogger_phase
- elif any(buildlogger_opts):
- # some but not all of the required options were sete
- raise Exception("you must set all of --buildlogger-builder, --buildlogger-buildnum, --buildlogger-credentials")
-
- if options.buildlogger_url: #optional; if None, defaults to const in buildlogger.py
- os.environ['BUILDLOGGER_URL'] = options.buildlogger_url
-
- if options.File:
- if options.File == '-':
- tests = sys.stdin.readlines()
- else:
- f = open(options.File)
- tests = f.readlines()
- tests = [t.rstrip('\n') for t in tests]
-
- if options.only_old_fails:
- run_old_fails()
- return
- elif options.reset_old_fails:
- clear_failfile()
- return
-
- # If we're in suite mode, tests is a list of names of sets of tests.
- if options.mode == 'suite':
- tests = expand_suites(tests)
- elif options.mode == 'files':
- tests = [(os.path.abspath(test), start_mongod) for test in tests]
-
- if options.ignore_files != None :
- ignore_patt = re.compile( options.ignore_files )
- print "Ignoring files with pattern: ", ignore_patt
-
- def ignore_test( test ):
- if ignore_patt.search( test[0] ) != None:
- print "Ignoring test ", test[0]
- return False
- else:
- return True
-
- tests = filter( ignore_test, tests )
-
- if options.include_tags or options.exclude_tags:
-
- def to_regex_array(tags_option):
- if not tags_option:
- return []
-
- tags_list = smoke.json_options.json_coerce(tags_option)
- if isinstance(tags_list, basestring):
- tags_list = [tags_list]
-
- return map(re.compile, tags_list)
-
- tests = filter_tests_by_tag(tests,
- smoke.suites.RegexQuery(include_res=to_regex_array(options.include_tags),
- exclude_res=to_regex_array(options.exclude_tags)))
-
- if not tests:
- print "warning: no tests specified"
- return
-
- if options.with_cleanbb:
- clean_dbroot(nokill=True)
-
- test_report["start"] = time.time()
- test_report["mongod_running_at_start"] = mongod().is_mongod_up(mongod_port)
- try:
- run_tests(tests)
- finally:
- add_to_failfile(fails, options)
-
- test_report["end"] = time.time()
- test_report["elapsed"] = test_report["end"] - test_report["start"]
- test_report["failures"] = len(losers.keys())
- test_report["mongod_running_at_end"] = mongod().is_mongod_up(mongod_port)
- if report_file:
- f = open( report_file, "wb" )
- f.write( json.dumps( test_report, indent=4, separators=(',', ': ')) )
- f.close()
-
- report()
-
-if __name__ == "__main__":
- main()
diff --git a/src/mongo/gotools/test/legacy28/buildscripts/utils.py b/src/mongo/gotools/test/legacy28/buildscripts/utils.py
deleted file mode 100644
index 0a46ef440d4..00000000000
--- a/src/mongo/gotools/test/legacy28/buildscripts/utils.py
+++ /dev/null
@@ -1,235 +0,0 @@
-
-import codecs
-import re
-import socket
-import time
-import os
-import os.path
-import itertools
-import subprocess
-import sys
-import hashlib
-
-# various utilities that are handy
-
-def getAllSourceFiles( arr=None , prefix="." ):
- if arr is None:
- arr = []
-
- if not os.path.isdir( prefix ):
- # assume a file
- arr.append( prefix )
- return arr
-
- for x in os.listdir( prefix ):
- if x.startswith( "." ) or x.startswith( "pcre-" ) or x.startswith( "32bit" ) or x.startswith( "mongodb-" ) or x.startswith("debian") or x.startswith( "mongo-cxx-driver" ):
- continue
- # XXX: Avoid conflict between v8 and v8-3.25 source files in
- # src/mongo/scripting
- # Remove after v8-3.25 migration.
- if x.find("v8-3.25") != -1:
- continue
- full = prefix + "/" + x
- if os.path.isdir( full ) and not os.path.islink( full ):
- getAllSourceFiles( arr , full )
- else:
- if full.endswith( ".cpp" ) or full.endswith( ".h" ) or full.endswith( ".c" ):
- full = full.replace( "//" , "/" )
- arr.append( full )
-
- return arr
-
-
-def getGitBranch():
- if not os.path.exists( ".git" ) or not os.path.isdir(".git"):
- return None
-
- version = open( ".git/HEAD" ,'r' ).read().strip()
- if not version.startswith( "ref: " ):
- return version
- version = version.split( "/" )
- version = version[len(version)-1]
- return version
-
-def getGitBranchString( prefix="" , postfix="" ):
- t = re.compile( '[/\\\]' ).split( os.getcwd() )
- if len(t) > 2 and t[len(t)-1] == "mongo":
- par = t[len(t)-2]
- m = re.compile( ".*_([vV]\d+\.\d+)$" ).match( par )
- if m is not None:
- return prefix + m.group(1).lower() + postfix
- if par.find("Nightly") > 0:
- return ""
-
-
- b = getGitBranch()
- if b == None or b == "master":
- return ""
- return prefix + b + postfix
-
-def getGitVersion():
- if not os.path.exists( ".git" ) or not os.path.isdir(".git"):
- return "nogitversion"
-
- version = open( ".git/HEAD" ,'r' ).read().strip()
- if not version.startswith( "ref: " ):
- return version
- version = version[5:]
- f = ".git/" + version
- if not os.path.exists( f ):
- return version
- return open( f , 'r' ).read().strip()
-
-def execsys( args ):
- import subprocess
- if isinstance( args , str ):
- r = re.compile( "\s+" )
- args = r.split( args )
- p = subprocess.Popen( args , stdout=subprocess.PIPE , stderr=subprocess.PIPE )
- r = p.communicate()
- return r;
-
-def getprocesslist():
- raw = ""
- try:
- raw = execsys( "/bin/ps axww" )[0]
- except Exception,e:
- print( "can't get processlist: " + str( e ) )
-
- r = re.compile( "[\r\n]+" )
- return r.split( raw )
-
-def removeIfInList( lst , thing ):
- if thing in lst:
- lst.remove( thing )
-
-def findVersion( root , choices ):
- for c in choices:
- if ( os.path.exists( root + c ) ):
- return root + c
- raise "can't find a version of [" + root + "] choices: " + choices
-
-def choosePathExist( choices , default=None):
- for c in choices:
- if c != None and os.path.exists( c ):
- return c
- return default
-
-def filterExists(paths):
- return filter(os.path.exists, paths)
-
-def ensureDir( name ):
- d = os.path.dirname( name )
- if not os.path.exists( d ):
- print( "Creating dir: " + name );
- os.makedirs( d )
- if not os.path.exists( d ):
- raise "Failed to create dir: " + name
-
-
-def distinctAsString( arr ):
- s = set()
- for x in arr:
- s.add( str(x) )
- return list(s)
-
-def checkMongoPort( port=27017 ):
- sock = socket.socket()
- sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
- sock.settimeout(1)
- sock.connect(("localhost", port))
- sock.close()
-
-
-def didMongodStart( port=27017 , timeout=20 ):
- while timeout > 0:
- time.sleep( 1 )
- try:
- checkMongoPort( port )
- return True
- except Exception,e:
- print( e )
- timeout = timeout - 1
- return False
-
-def which(executable):
- if sys.platform == 'win32':
- paths = os.environ.get('Path', '').split(';')
- else:
- paths = os.environ.get('PATH', '').split(':')
-
- for path in paths:
- path = os.path.expandvars(path)
- path = os.path.expanduser(path)
- path = os.path.abspath(path)
- executable_path = os.path.join(path, executable)
- if os.path.exists(executable_path):
- return executable_path
-
- return executable
-
-def md5sum( file ):
- #TODO error handling, etc..
- return execsys( "md5sum " + file )[0].partition(" ")[0]
-
-def md5string( a_string ):
- return hashlib.md5(a_string).hexdigest()
-
-def find_python(min_version=(2, 5)):
- try:
- if sys.version_info >= min_version:
- return sys.executable
- except AttributeError:
- # In case the version of Python is somehow missing sys.version_info or sys.executable.
- pass
-
- version = re.compile(r'[Pp]ython ([\d\.]+)', re.MULTILINE)
- binaries = ('python27', 'python2.7', 'python26', 'python2.6', 'python25', 'python2.5', 'python')
- for binary in binaries:
- try:
- out, err = subprocess.Popen([binary, '-V'], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
- for stream in (out, err):
- match = version.search(stream)
- if match:
- versiontuple = tuple(map(int, match.group(1).split('.')))
- if versiontuple >= min_version:
- return which(binary)
- except:
- pass
-
- raise Exception('could not find suitable Python (version >= %s)' % '.'.join(str(v) for v in min_version))
-
-def smoke_command(*args):
- # return a list of arguments that comprises a complete
- # invocation of smoke.py
- here = os.path.dirname(__file__)
- smoke_py = os.path.abspath(os.path.join(here, 'smoke.py'))
- # the --with-cleanbb argument causes smoke.py to run
- # buildscripts/cleanbb.py before each test phase; this
- # prevents us from running out of disk space on slaves
- return [find_python(), smoke_py, '--with-cleanbb'] + list(args)
-
-def run_smoke_command(*args):
- # to run a command line script from a scons Alias (or any
- # Action), the command sequence must be enclosed in a list,
- # otherwise SCons treats it as a list of dependencies.
- return [smoke_command(*args)]
-
-# unicode is a pain. some strings cannot be unicode()'d
-# but we want to just preserve the bytes in a human-readable
-# fashion. this codec error handler will substitute the
-# repr() of the offending bytes into the decoded string
-# at the position they occurred
-def replace_with_repr(unicode_error):
- offender = unicode_error.object[unicode_error.start:unicode_error.end]
- return (unicode(repr(offender).strip("'").strip('"')), unicode_error.end)
-
-codecs.register_error('repr', replace_with_repr)
-
-def unicode_dammit(string, encoding='utf8'):
- # convert a string to a unicode, using the Python
- # representation of non-ascii bytes when necessary
- #
- # name inpsired by BeautifulSoup's "UnicodeDammit"
- return string.decode(encoding, 'repr')
-
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/analyze_plan.js b/src/mongo/gotools/test/legacy28/jstests/libs/analyze_plan.js
deleted file mode 100644
index 9c2ebffd890..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/analyze_plan.js
+++ /dev/null
@@ -1,80 +0,0 @@
-// Contains helpers for checking, based on the explain output, properties of a
-// plan. For instance, there are helpers for checking whether a plan is a collection
-// scan or whether the plan is covered (index only).
-
-/**
- * Given the root stage of explain's BSON representation of a query plan ('root'),
- * returns true if the plan has a stage called 'stage'.
- */
-function planHasStage(root, stage) {
- if (root.stage === stage) {
- return true;
- }
- else if ("inputStage" in root) {
- return planHasStage(root.inputStage, stage);
- }
- else if ("inputStages" in root) {
- for (var i = 0; i < root.inputStages.length; i++) {
- if (planHasStage(root.inputStages[i], stage)) {
- return true;
- }
- }
- }
-
- return false;
-}
-
-/**
- * A query is covered iff it does *not* have a FETCH stage or a COLLSCAN.
- *
- * Given the root stage of explain's BSON representation of a query plan ('root'),
- * returns true if the plan is index only. Otherwise returns false.
- */
-function isIndexOnly(root) {
- return !planHasStage(root, "FETCH") && !planHasStage(root, "COLLSCAN");
-}
-
-/**
- * Returns true if the BSON representation of a plan rooted at 'root' is using
- * an index scan, and false otherwise.
- */
-function isIxscan(root) {
- return planHasStage(root, "IXSCAN");
-}
-
-/**
- * Returns true if the BSON representation of a plan rooted at 'root' is using
- * the idhack fast path, and false otherwise.
- */
-function isIdhack(root) {
- return planHasStage(root, "IDHACK");
-}
-
-/**
- * Returns true if the BSON representation of a plan rooted at 'root' is using
- * a collection scan, and false otherwise.
- */
-function isCollscan(root) {
- return planHasStage(root, "COLLSCAN");
-}
-
-/**
- * Get the number of chunk skips for the BSON exec stats tree rooted at 'root'.
- */
-function getChunkSkips(root) {
- if (root.stage === "SHARDING_FILTER") {
- return root.chunkSkips;
- }
- else if ("inputStage" in root) {
- return getChunkSkips(root.inputStage);
- }
- else if ("inputStages" in root) {
- var skips = 0;
- for (var i = 0; i < root.inputStages.length; i++) {
- skips += getChunkSkips(root.inputStages[0]);
- }
- return skips;
- }
-
- return 0;
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/authTestsKey b/src/mongo/gotools/test/legacy28/jstests/libs/authTestsKey
deleted file mode 100644
index 573898a4f05..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/authTestsKey
+++ /dev/null
@@ -1 +0,0 @@
-This key is only for running the suite with authentication dont use it in any tests directly
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/badSAN.pem b/src/mongo/gotools/test/legacy28/jstests/libs/badSAN.pem
deleted file mode 100644
index d8e362731e0..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/badSAN.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDdTCCAl2gAwIBAgIDAYKXMA0GCSqGSIb3DQEBBQUAMHQxFzAVBgNVBAMTDktl
-cm5lbCBUZXN0IENBMQ8wDQYDVQQLEwZLZXJuZWwxEDAOBgNVBAoTB01vbmdvREIx
-FjAUBgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYD
-VQQGEwJVUzAeFw0xNDA5MjMxNTE3MjNaFw0zNDA5MjMxNTE3MjNaMG8xEjAQBgNV
-BAMTCTEyNy4wLjAuMTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RC
-MRYwFAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkG
-A1UEBhMCVVMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCDB/lxuzeU
-OHR5nnOTJM0fHz0WeicnuUfGG5wP89Mbkd3Y+BNS0ozbnkW+NAGhD+ehNBjogISZ
-jLCd+uaYu7TLWpkgki+1+gM99Ro0vv7dIc8vD7ToILKMbM8xQmLbSxDT2tCUoXlc
-m7ccgDZl9oW1scQYQ8gWHjmk3yK8sCoGa/uwr49u74aVM7673tLsK41m8oYPzt/q
-VGT+mXpBJQcGXkTNQtIPxBtD25jr+aPietS3u70zrVPY6ZDsGE7DofEeRl97kVoF
-NcpaQmVEwEo8KCWaT6OaPaUUUjAMwzqiZaHNZ6mL1pCr65bLXP6T9tiMtWLw5+SG
-3E09fhQuWod5AgMBAAGjFTATMBEGA1UdEQQKMAiCBmJhZFNBTjANBgkqhkiG9w0B
-AQUFAAOCAQEAQzlibJvlUpJG3vc5JppdrudpXoVAP3wtpzvnkrY0GTWIUE52mCIf
-MJ5sARvjzs/uMhV5GLnjqTcT+DFkihqKyFo1tKBD7LSuSjfDvjmggG9lq0/xDvVU
-uczAuNtI1T7N+6P7LyTG4HqniYouPMDWyCKBOmzzNsk+r1OJb6cxU7QQwmSWw1n1
-ztNcF6JzCQVcd9Isau9AEXZ9q0M0sjD9mL67Qo3Dh3Mvf4UkJKqm3KOQOupUHZLU
-vJwfsS2u+gfHY1Plywzq3AuT7ygbksR3Pqfs8LFPnuRAH+41sFTGUM52hiU7mNPj
-ebl8s1tjK7WQ+a8GTABJV0hDNeWd3Sr+Og==
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEAgwf5cbs3lDh0eZ5zkyTNHx89FnonJ7lHxhucD/PTG5Hd2PgT
-UtKM255FvjQBoQ/noTQY6ICEmYywnfrmmLu0y1qZIJIvtfoDPfUaNL7+3SHPLw+0
-6CCyjGzPMUJi20sQ09rQlKF5XJu3HIA2ZfaFtbHEGEPIFh45pN8ivLAqBmv7sK+P
-bu+GlTO+u97S7CuNZvKGD87f6lRk/pl6QSUHBl5EzULSD8QbQ9uY6/mj4nrUt7u9
-M61T2OmQ7BhOw6HxHkZfe5FaBTXKWkJlRMBKPCglmk+jmj2lFFIwDMM6omWhzWep
-i9aQq+uWy1z+k/bYjLVi8OfkhtxNPX4ULlqHeQIDAQABAoIBAC4Bx8jyJmKpq+Pk
-CcqZelg6HLXesA7XlGbv3M0RHIeqoM2E1SwYd5LJMM3G7ueBcR/97dz8+xH6/yyJ
-Ixxvk9xu9CMmkRABN9AyVkA867nzHA73Idr7WBXMQreWCqXa5o6sXt5BEB6/If0k
-23TTqUERqLuoWQHDHRRRsJ218RuNmbvBe8TGXcfunC0eeDVKDeqAXol6bD5lztdu
-B6jkdLt5UZSQ7X8OmClbeDlac90B8usNi+pUE9q1p7X462vAw8LohkxLY2nyIcmU
-feNdTNHP+lklv+E+p9w/Az7Hf6zxm525tw90QVI048fr9SL3ftLHOt4FhucSCn0Z
-CjylP4ECgYEA+nQrNVdVwmxcWCVn69LR1grNXUSz+fLHCo+QKma4IyC1kuuZ+BBo
-Iwdf9t/S1tgtTYru3uxzCpQg7J1iDeEFEsMHl0rc6U1MmIE+6OvACVG3yotqoOqE
-852pi1OWIe94yTk2ZmNXJ8gpUE/gtMprbcSWOb7IzzrXy2lDcaEMuGkCgYEAhe7L
-ZvYI4LEvu6GSPp97qBzDH9m5UrHaTZIJk/Nu7ie919Sdg62LTfphsaK+pSyA55XQ
-8L9P7wNUPC44NnE+7CIJZsIuKdYqR5QI6No9RdTyij0Hgljfc7KuH2b8lf8EjvuH
-qZAf5zL3pIOQs8E8/MYHlGIqmTkYK41eCAcS9JECgYEADnra6KmU9rmnGR2IhZTZ
-tuNG/kZzlVbY9R5ZumnX6YgBl23xp+ri6muJu88y9GLpM5t9tfu7pvfrc2KiAaVp
-0qzd6nxUi1SBwituxK6kmqVT1+z5jDYi26bY34pEms+qjw+0unSx3EXxRYhouGsf
-jOgZu1rxZzHCuirq0E38W0kCgYBzOK16RX37t9OFywlioJekWCIxu4BouSNCirl8
-s/eiIUR8cqiUCPAIRLhZNtZmiTPYiBW5mAyvZiDIqUao56InSVznL3TBf0LeU2ea
-023VLs79yGU2aTjLc1PDJjl03XDRhWj/okMgBsPvn1QUoNDT8ZXBvPZC3VCC31qe
-818GUQKBgQDBUP2BC/Th/0dErOQ5lWkY3YbmzrTp2pDsHGZJRD+OdQ5B8FUvCP8m
-JESk/0ATn7niUqawnOy/2KlKIkeBBV2XL1rjIGEhCkBUuhCiInNDqz1AGdXzIKaT
-myoZ4PhIsH1D643e6iLhyAZuUAA4yB31E2a3l7EMyhV3vKbdWWygGQ==
------END RSA PRIVATE KEY-----
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/ca.pem b/src/mongo/gotools/test/legacy28/jstests/libs/ca.pem
deleted file mode 100644
index d1a5689cf0f..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/ca.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDczCCAlugAwIBAgIBATANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjB0MRcwFQYDVQQD
-Ew5LZXJuZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25n
-b0RCMRYwFAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazEL
-MAkGA1UEBhMCVVMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCBxSXj
-qA5y2EMQkcmvLDNikE88Og3+spJ3ex60HWVPk8EeXN68jyfbKLYsoCcBE2rBAE/N
-shVBJa8irh0o/UTh1XNW4iGCsfMvYamXiHnaOjmGVKjfBoj6pzQH0uK0X5olm3Sa
-zZPkLLCR81yxsK6woJZMFTvrlEjxj/SmDZ9tVXW692bC4i6nGvOCSpgv9kms85xO
-Ed2xbuCLXFDXKafXZd5AK+iegkDs3ah7VXMEE8sbqGnlqC1nsy5bpCnb7aC+3af7
-SV2XEFlSQT5kwTmk9CvTDzM9O78SO8nNhEOFBLQEdGDGd3BShE8dCdh2JTy3zKsb
-WeE+mxy0mEwxNfGfAgMBAAGjEDAOMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEF
-BQADggEBAANwbvhM5K/Jcl6yzUSqAawvyAypT5aWBob7rt9KFq/aemjMN0gY2nsS
-8WTGd9jiXlxGc/TzrK6MOsJ904UAFE1L9uR//G1gIBa9dNbYoiii2Fc8b1xDVJEP
-b23rl/+GAT6UTSY+YgEjeA4Jk6H9zotO07lSw06rbCQam5SdA5UiMvuLHWCo3BHY
-8WzqLiW/uHlb4K5prF9yuTUBEIgkRvvvyOKXlRvm1Ed5UopT2hmwA86mffAfgJc2
-vSbm9/8Q00fYwO7mluB6mbEcnbquaqRLoB83k+WbwUAZ2yjWHXuXVMPwyaysazcp
-nOjaLwQJQgKejY62PiNcw7xC/nIxBeI=
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEowIBAAKCAQEAgcUl46gOcthDEJHJrywzYpBPPDoN/rKSd3setB1lT5PBHlze
-vI8n2yi2LKAnARNqwQBPzbIVQSWvIq4dKP1E4dVzVuIhgrHzL2Gpl4h52jo5hlSo
-3waI+qc0B9LitF+aJZt0ms2T5CywkfNcsbCusKCWTBU765RI8Y/0pg2fbVV1uvdm
-wuIupxrzgkqYL/ZJrPOcThHdsW7gi1xQ1ymn12XeQCvonoJA7N2oe1VzBBPLG6hp
-5agtZ7MuW6Qp2+2gvt2n+0ldlxBZUkE+ZME5pPQr0w8zPTu/EjvJzYRDhQS0BHRg
-xndwUoRPHQnYdiU8t8yrG1nhPpsctJhMMTXxnwIDAQABAoIBAD5iGOnM800wO2Uu
-wGbOd9FNEFoiinHDRHfdnw/1BavwmqjO+mBo7T8E3jarsrRosiwfyz1V+7O6uuuQ
-CgKXZlKuOuksgfGDPCWt7EolWHaZAOhbsGaujJD6ah/MuCD/yGmFxtNYOl05QpSX
-Cht9lSzhtf7TQl/og/xkOLbO27JB540ck/OCSOczXg9Z/O8AmIUyDn7AKb6G1Zhk
-2IN//HQoAvDUMZLWrzy+L7YGbA8pBR3yiPsYBH0rX2Oc9INpiGA+B9Nf1HDDsxeZ
-/o+5xLbRDDfIDtlYO0cekJ053W0zUQLrMEIn9991EpG2O/fPgs10NlKJtaFH8CmT
-ExgVA9ECgYEA+6AjtUdxZ0BL3Wk773nmhesNH5/5unWFaGgWpMEaEM7Ou7i6QApL
-KAbzOYItV3NNCbkcrejq7jsDGEmiwUOdXeQx6XN7/Gb2Byc/wezy5ALi0kcUwaur
-6s9+Ah+T4vcU2AjfuCWXIpe46KLEbwORmCRQGwkCBCwRhHGt5sGGxTkCgYEAhAaw
-voHI6Cb+4z3PNAKRnf2rExBYRyCz1KF16ksuwJyQSLzFleXRyRWFUEwLuVRL0+EZ
-JXhMbtrILrc23dJGEsB8kOCFehSH/IuL5eB0QfKpDFA+e6pimsbVeggx/rZhcERB
-WkcV3jN4O82gSL3EnIgvAT1/nwhmbmjvDhFJhZcCgYBaW4E3IbaZaz9S/O0m69Fa
-GbQWvS3CRV1oxqgK9cTUcE9Qnd9UC949O3GwHw0FMERjz3N7B/8FGW/dEuQ9Hniu
-NLmvqWbGlnqWywNcMihutJKbDCdp/Km5olUPkiNbB3sWsOkViXoiU/V0pK6BZvir
-d67EZpGwydpogyH9kVVCEQKBgGHXc3Q7SmCBRbOyQrQQk0m6i+V8328W1S5m2bPg
-M62aWXMOMn976ZRT1pBDSwz1Y5yJ3NDf7gTZLjEwpgCNrFCJRcc4HLL0NDL8V5js
-VjvpUU5GyYdsJdb+M4ZUPHi/QEaqzqPQumwJSLlJEdfWirZWVj9dDA8XcpGwQjjy
-psHRAoGBAJUTgeJYhjK7k5sgfh+PRqiRJP0msIH8FK7SenBGRUkelWrW6td2Riey
-EcOCMFkRWBeDgnZN5xDyWLBgrzpw9iHQQIUyyBaFknQcRUYKHkCx+k+fr0KHHCUb
-X2Kvf0rbeMucb4y/h7950HkBBq83AYKMAoI8Ql3cx7pKmyOLXRov
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/client.pem b/src/mongo/gotools/test/legacy28/jstests/libs/client.pem
deleted file mode 100644
index 50a64e41728..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/client.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDXTCCAkWgAwIBAgIBAzANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjBwMQ8wDQYDVQQD
-EwZjbGllbnQxEzARBgNVBAsTCktlcm5lbFVzZXIxEDAOBgNVBAoTB01vbmdvREIx
-FjAUBgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYD
-VQQGEwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJIFboAk9Fdi
-DY5Xld2iw36vB3IpHEfgWIimd+l1HX4jyp35i6xoqkZZHJUL/NMbUFJ6+44EfFJ5
-biB1y1Twr6GqpYp/3R30jKQU4PowO7DSal38MR34yiRFYPG4ZPPXXfwPSuwKrSNo
-bjqa0/DRJRVQlnGwzJkPsWxIgCjc8KNO/dSHv/CGymc9TjiFAI0VVOhMok1CBNvc
-ifwWjGBg5V1s3ItMw9x5qk+b9ff5hiOAGxPiCrr8R0C7RoeXg7ZG8K/TqXbsOZEG
-AOQPRGcrmqG3t4RNBJpZugarPWW6lr11zMpiPLFTrbq3ZNYB9akdsps4R43TKI4J
-AOtGMJmK430CAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAA+nPgVT4addi13yB6mjW
-+UhdUkFwtb1Wcg0sLtnNucopHZLlCj5FfDdp1RQxe3CyMonxyHTKkrWtQmVtUyvf
-C/fjpIKt9A9kAmveMHBiu9FTNTc0sbiXcrEBeHF5cD7N+Uwfoc/4rJm0WjEGNkAd
-pYLCCLVZXPVr3bnc3ZLY1dFZPsJrdH3nJGMjLgUmoNsKnaGozcjiKiXqm6doFzkg
-0Le5yD4C/QTaie2ycFa1X5bJfrgoMP7NqKko05h4l0B0+DnjpoTJN+zRreNTMKvE
-ETGvpUu0IYGxe8ZVAFnlEO/lUeMrPFvH+nDmJYsxO1Sjpds2hi1M1JoeyrTQPwXj
-2Q==
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEAkgVugCT0V2INjleV3aLDfq8HcikcR+BYiKZ36XUdfiPKnfmL
-rGiqRlkclQv80xtQUnr7jgR8UnluIHXLVPCvoaqlin/dHfSMpBTg+jA7sNJqXfwx
-HfjKJEVg8bhk89dd/A9K7AqtI2huOprT8NElFVCWcbDMmQ+xbEiAKNzwo0791Ie/
-8IbKZz1OOIUAjRVU6EyiTUIE29yJ/BaMYGDlXWzci0zD3HmqT5v19/mGI4AbE+IK
-uvxHQLtGh5eDtkbwr9Opduw5kQYA5A9EZyuaobe3hE0Emlm6Bqs9ZbqWvXXMymI8
-sVOturdk1gH1qR2ymzhHjdMojgkA60YwmYrjfQIDAQABAoIBAB249VEoNIRE9TVw
-JpVCuEBlKELYk2UeCWdnWykuKZ6vcmLNlNy3QVGoeeTs172w5ZykY+f4icXP6da5
-o3XauCVUMvYKKNwcFzSe+1xxzPSlH/mZh/Xt2left6f8PLBVuk/AXSPG2I9Ihodv
-VIzERaQdD0J9FmhhhV/hMhUfQ+w5rTCaDpq1KVGU61ks+JAtlQ46g+cvPF9c80cI
-TEC875n2LqWKmLRN43JUnctV3uGTmolIqCRMHPAs/egl+lG2RXJjqXSQ2uFLOvC/
-PXtBb597yadSs2BWPnTu/r7LbLGBAExzlQK1uFsTvuKsBPb3qrvUux0L68qwPuiv
-W24N8BECgYEAydtAvVB7OymQEX3mck2j7ixDN01wc1ZaCLBDvYPYS/Pvzq4MBiAD
-lHRtbIa6HPGA5jskbccPqQn8WGnJWCaYvCQryvgaA+BBgo1UTLfQJUo/7N5517vv
-KvbUa6NF0nj3VwfDV1vvy+amoWi9NOVn6qOh0K84PF4gwagb1EVy9MsCgYEAuTAt
-KCWdZ/aNcKgJc4NCUqBpLPF7EQypX14teixrbF/IRNS1YC9S20hpkG25HMBXjpBe
-tVg/MJe8R8CKzYjCt3z5Ff1bUQ2bzivbAtgjcaO0Groo8WWjnamQlrIQcvWM7vBf
-dnIflQ0slxbHfCi3XEe8tj2T69R7wJZ8L7PxR9cCgYEACgwNtt6Qo6s37obzt3DB
-3hL57YC/Ph5oMNKFLKOpWm5z2zeyhYOGahc5cxNppBMpNUxwTb6AuwsyMjxhty+E
-nqi2PU4IDXVWDWd3cLIdfB2r/OA99Ez4ZI0QmaLw0L8QoJZUVL7QurdqR9JsyHs6
-puUqIrb195s/yiPR7sjeJe0CgYEAuJviKEd3JxCN52RcJ58OGrh2oKsJ9/EbV0rX
-Ixfs7th9GMDDHuOOQbNqKOR4yMSlhCU/hKA4PgTFWPIEbOiM08XtuZIb2i0qyNjH
-N4qnqr166bny3tJnzOAgl1ljNHa8y+UsBTO3cCr17Jh0vL0KLSAGa9XvBAWKaG6b
-1iIXwXkCgYAVz+DA1yy0qfXdS1pgPiCJGlGZXpbBcFnqvbpGSclKWyUG4obYCbrb
-p5VKVfoK7uU0ly60w9+PNIRsX/VN/6SVcoOzKx40qQBMuYfJ72DQrsPjPYvNg/Nb
-4SK94Qhp9TlAyXbqKJ02DjtuDim44sGZ8g7b+k3FfoK4OtzNsqdVdQ==
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/client_revoked.pem b/src/mongo/gotools/test/legacy28/jstests/libs/client_revoked.pem
deleted file mode 100644
index 03db67deb50..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/client_revoked.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDZTCCAk2gAwIBAgIBAjANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjB4MRcwFQYDVQQD
-Ew5jbGllbnRfcmV2b2tlZDETMBEGA1UECxMKS2VybmVsVXNlcjEQMA4GA1UEChMH
-TW9uZ29EQjEWMBQGA1UEBxMNTmV3IFlvcmsgQ2l0eTERMA8GA1UECBMITmV3IFlv
-cmsxCzAJBgNVBAYTAlVTMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
-lJog+grPpvDFKFn9mxxToLgkx1uM+LmuRf1XG707TIccGfSKyZMGCcqlYVQmqT/J
-Fnq2rvtXGG9yxPhHoBwKw4x9rfQEy8Z93BAMNRoIlbt505HaWv7b+M3qksRHDLpw
-/Ix0Yay+fjT9DGmcrahC9h8c8SVtyHoODvxdmR7P+p7e6F6/6vcUkg/aU/50MRUN
-qGUSMkm/kpcjFNmDqRSLQoDPE8G2UOIolG/m95uaCOkGCT4lQjrCpR36fkr5a+vI
-UbDJP8M26Kp2fFnvObKzoSFSEVOMGeBM9p4qa88I4hwfthNFvGDOCNBKZK5ZxLZ3
-gGFcR6kL6u11y4zoLrZ6xwIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQB8WQMn/cjh
-9qFtr7JL4VPIz/+96QaBmkHxMqiYL/iMg5Vko3GllLc1mgfWQfaWOvyRJClKj395
-595L2u8wBKon3DXUPAkinc6+VOwDWsxFLNtWl+jhigat5UDzGm8ZKFhl0WwNhqzZ
-dlNPrh2LJZzPFfimfGyVkhPHYYdELvn+bnEMT8ae1jw2yQEeVFzHe7ZdlV5nMOE7
-Gx6ZZhYlS+jgpIxez5aiKqit/0azq5GGkpCv2H8/EXxkR4gLZGYnIqGuZP3r34NY
-Lkh5J3Qnpyhdopa/34yOCa8mY1wW7vEro0fb/Dh21bpyEOz6tBk3C1QRaGD+XQOM
-cedxtUjYmWqn
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEAlJog+grPpvDFKFn9mxxToLgkx1uM+LmuRf1XG707TIccGfSK
-yZMGCcqlYVQmqT/JFnq2rvtXGG9yxPhHoBwKw4x9rfQEy8Z93BAMNRoIlbt505Ha
-Wv7b+M3qksRHDLpw/Ix0Yay+fjT9DGmcrahC9h8c8SVtyHoODvxdmR7P+p7e6F6/
-6vcUkg/aU/50MRUNqGUSMkm/kpcjFNmDqRSLQoDPE8G2UOIolG/m95uaCOkGCT4l
-QjrCpR36fkr5a+vIUbDJP8M26Kp2fFnvObKzoSFSEVOMGeBM9p4qa88I4hwfthNF
-vGDOCNBKZK5ZxLZ3gGFcR6kL6u11y4zoLrZ6xwIDAQABAoIBAFlu0T3q18Iu0VlR
-n5GEYMgvSuM4NAVVKo8wwwYMwu1xuvXb+NMLyuyFqzaCQKpHmywOOnfhCC/KkxX8
-Ho87kTbTDKhuXZyOHx0cA1zKCDSlGdK8yt9M1vJMa0pdGi2M34b+uOQ35IVsOocH
-4KWayIH7g52V2xZ2bpOSSnpm0uCPZSBTgClCgTUYepOT2wbLn/8V0NtVpZhDsBqg
-fORuEHkiurrbLa8yjQsvbR+hsR/XbGhre8sTQapj4EITXvkEuOL/vwbRebhOFHgh
-8sipsXZ9CMaJkBpVoLZTxTKQID/9006cczJK2MGKFhn6mvP6AeFuJAM3xqLGZTc4
-xxpfJyECgYEA0+iKxy5r1WUpBHR8jTh7WjLc6r5MFJQlGgLPjdQW6gCIe/PZc+b9
-x5vDp27EQ1cAEePEu0glQ/yk19yfxbxrqHsRjRrgwoiYTXjGI5zZSjXKArHyEgBj
-XOyo5leO5XMFnk2AShPlh+/RhAW3NhxcWkBEAsCD6QyC3BPvP6aaAXkCgYEAs4WH
-dTuweTdnyquHQm59ijatvBeP8h4tBozSupflQjB9WxJeW5uEa8lNQ3lSz1F4TV3M
-xvGdDSqwftLRS2mWGho/1jaCeAzjsiUQ2WUHChxprt0+QU7XkJbaBY9eF+6THZFw
-sDG688TiolxqoD8OYi8EtxmIvbQhXHmXnrk3jj8CgYBSi74rkrisuqg8tQejl0Ht
-w+xsgM5wIblGJZwmOlzmsGh6KGYnkO6Ap/uSKELJnIVJcrk63wKtNigccjPGufwR
-+EbA+ZxeCwmQ/B/q1XmLP+K+JAUQ4BfUpdexSqA+XwzsOnJj6NY7mr65t+RDbs7G
-1Uvo6oc37Ai5pAZJfCN3uQKBgQAJr5qvaJkM8UBYXwjdPLjpTCnzjBHoLlifkdmM
-18U23QbmcwdESg/LAQF6MoGVTf//rJ/v2/ltTHBZZ2aDex7uKZxoImjHsWpXokhW
-cmz+zqmlFarWOzrGQl1hD2s0P1sQrVg3KXe8z1KrD/Fw0/Yitga7GlWWZrGmG6li
-lvu4YQKBgQANODQYEaz739IoPNnMfTpTqAoQIOR4PNdMfCXSQrCB8i0Hh4z48E4F
-DEAd1xIYyxI8pu7r52dQlBk7yrILOTG0gmgLJd5xKdtCTrasYAICI3hsRLtP8dVA
-8WeykXY4Wf1bYQ+VzKVImkwL/SBm2ik5woyxCzT8JSjyoAwRrQp9Vw==
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/cluster_cert.pem b/src/mongo/gotools/test/legacy28/jstests/libs/cluster_cert.pem
deleted file mode 100644
index a8623ab67ef..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/cluster_cert.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDXjCCAkagAwIBAgIBBDANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjBxMRQwEgYDVQQD
-EwtjbHVzdGVydGVzdDEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RC
-MRYwFAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkG
-A1UEBhMCVVMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCX42ZTwADG
-sEkS7ijfADlDQaJpbdgrnQKa5ssMQK3oRGSqXfTp0ThsJiVBbYZ8ZZRpPMgJdowa
-pFCGHQJh6VOdKelR0f/uNVpBGVz1yD4E4AtkA6UYcIJq6ywcj+W7Pli1Ed8VUN3Q
-tBU+HvHiEdMj74kLJb4ID1cP3gehvRv/0szkN8/ODFKCgYb1619BdFb9gRn8eily
-Wcg1m1gXz2xSfqRZkFEcEYet3BeOEGZBhaufJFzinvQjocH+kWFKlZf0+2DEFFbH
-NRqmabMmqMBUke629EUn8a7PBWBYNLld9afoNHwNY68wpONf5IqR2mNar5bVz8/d
-4g7BuVNvEFdJAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAA3U2O+cE/ZS8SDBw/sr
-BVFf0uaoME7+XX2jdTi4RUpWPfQ6uTkhKnXKzTzGrQtKwA96slGp4c3mxGBaAbC5
-IuTS97mLCju9NFvJVtazIajO4eNlG6dJSk0pQzjc0RAeLYksX/9NRNKZ+lQ5QVS2
-NVLce70QZBIvujjVJZ5hqDdjPV0JGOOUzNGyyUhzgY7s9MQagNnBSu5HO4CK1onc
-goOkizulq/5WF+JtqW8VKKx+/CH6SnTkS4b3qbjgKRmHZcOshH/d4KqhoLya7sfH
-pedmm7WgO9p8umXXqNj+04ehuPKTnD8tLMhj+GbJ9eIChPCBf1XnIzOXYep+fq9j
-n/g=
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEogIBAAKCAQEAl+NmU8AAxrBJEu4o3wA5Q0GiaW3YK50CmubLDECt6ERkql30
-6dE4bCYlQW2GfGWUaTzICXaMGqRQhh0CYelTnSnpUdH/7jVaQRlc9cg+BOALZAOl
-GHCCaussHI/luz5YtRHfFVDd0LQVPh7x4hHTI++JCyW+CA9XD94Hob0b/9LM5DfP
-zgxSgoGG9etfQXRW/YEZ/HopclnINZtYF89sUn6kWZBRHBGHrdwXjhBmQYWrnyRc
-4p70I6HB/pFhSpWX9PtgxBRWxzUapmmzJqjAVJHutvRFJ/GuzwVgWDS5XfWn6DR8
-DWOvMKTjX+SKkdpjWq+W1c/P3eIOwblTbxBXSQIDAQABAoIBAHhjNFMDZ1oUlgbc
-ICcI/VoyprFb8DA5ZbwzXBMdHPpxYzyp9hpxy3/lCDiAwEzPEsAK/h6KCOiA/pYe
-XioPD0gN1TIV+f3r6dqZaNYi3g1tK3odbXkejDCEsFT/NT6hXxw9yw0RKI9ofUHc
-synVqP3duUjNpH6s8fvQp0nqI0wzoNm1kklpTWVjZmbtSZF9m/xfv7NGwQEYUL2V
-f5YvX6aHPVDtUXAqyPBgv6SGuogSSjwRTsNTef3aY6Se5MlP3YIfRqdad8+ORkKu
-WSrO+GjQccV4sztD8Sn3LR7qe6Lmid4yopHSS4EFq0Sc8LznTeflWcRAsBLezRp5
-xZB/blECgYEA8yrEzFA247AOXbhL1CdqMyPs523oy5+dmByyovjYjEhjUCRlAa9D
-ApvID4TfAkA4n0rUdICCtwbZlFrBZbn6rXNvJ362ufZjvaFIucQm90YkG1J6Ldek
-8ohJfLyyLLWzVHJIS7WxFqqsGmDhYUTErFbJZjI8tNSglrc81jUWT7UCgYEAn+dw
-ICyc09f6+xm3nFZIOq2Gtpw8lrOJlwZugn1AqY2D5Ko2gq1Fx2oZWpVaBivjH3gU
-ONlnPuealE0RJHvCm/+axy7Rcj65IwTrN5V+j6rg1tuEdi70PvNKmN6XQqRvEjOX
-HOh3gQYP6EFAoVINZZqUkwJzqpv4tnOSpEHXncUCgYB3+Z8Vq3IZjtDXvslzCGtm
-hhAp81mLtdocpfQhYqP9Ou39KafIV/+49sGTnpwlUShet53xSUK1KSULBGgtV8Bt
-+ela1DM1t3Joqn3mYfhTwoCoFl5/5cjVfRa8+6DxXEj5nlU7PY79PwIhFbG9ux9K
-ZJuD17+J/Oqq0gerLJAwjQKBgAS4AbkRV/dwcjmiwqZcbXk90bHl3mvcFH1edTho
-ldXrFS9UTpOApYSC/wiLS8LO3L76/i3HTKKwlwE1XQIknNOZsWmbWhby/uenp4FW
-agu3UTdF9xy9uft5loP4XaJb0+NHnnf97DjkgueptUyNbVPIQgYsllk8jRRlSLiM
-MN65AoGAUPLlh8ok/iNirO5YKqc5/3FKA1o1V1KSTHYVUK+Y+vuVJxQZeO3LMybe
-7AJ1cLHEWc8V4B27e6g33rfGGAW+/+RJ7/uHxuYCuKhstbq/x+rf9i4nl93emlMV
-PC3yuZsCmpk9Uypzi2+PT10yVgXkXRYtLpuUpoABWRzVXGnEsXo=
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/command_line/test_parsed_options.js b/src/mongo/gotools/test/legacy28/jstests/libs/command_line/test_parsed_options.js
deleted file mode 100644
index f194b73ce7f..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/command_line/test_parsed_options.js
+++ /dev/null
@@ -1,214 +0,0 @@
-// Merge the two options objects. Used as a helper when we are trying to actually compare options
-// despite the fact that our test framework adds extra stuff to it. Anything set in the second
-// options object overrides the first options object. The two objects must have the same structure.
-function mergeOptions(obj1, obj2) {
- var obj3 = {};
- for (var attrname in obj1) {
- if (typeof obj1[attrname] === "object" &&
- typeof obj2[attrname] !== "undefined") {
- if (typeof obj2[attrname] !== "object") {
- throw Error("Objects being merged must have the same structure");
- }
- obj3[attrname] = mergeOptions(obj1[attrname], obj2[attrname]);
- }
- else {
- obj3[attrname] = obj1[attrname];
- }
- }
- for (var attrname in obj2) {
- if (typeof obj2[attrname] === "object" &&
- typeof obj1[attrname] !== "undefined") {
- if (typeof obj1[attrname] !== "object") {
- throw Error("Objects being merged must have the same structure");
- }
- // Already handled above
- }
- else {
- obj3[attrname] = obj2[attrname];
- }
- }
- return obj3;
-}
-
-// Test that the parsed result of setting certain command line options has the correct format in
-// mongod. See SERVER-13379.
-//
-// Arguments:
-// mongoRunnerConfig - Configuration object to pass to the mongo runner
-// expectedResult - Object formatted the same way as the result of running the "getCmdLineOpts"
-// command, but with only the fields that should be set by the options implied by the first
-// argument set.
-//
-// Example:
-//
-// testGetCmdLineOptsMongod({ port : 10000 }, { "parsed" : { "net" : { "port" : 10000 } } });
-//
-var getCmdLineOptsBaseMongod;
-function testGetCmdLineOptsMongod(mongoRunnerConfig, expectedResult) {
-
- // Get the options object returned by "getCmdLineOpts" when we spawn a mongod using our test
- // framework without passing any additional options. We need this because the framework adds
- // options of its own, and we only want to compare against the options we care about.
- function getBaseOptsObject() {
-
- // Start mongod with no options
- var baseMongod = MongoRunner.runMongod();
-
- // Get base command line opts. Needed because the framework adds its own options
- var getCmdLineOptsBaseMongod = baseMongod.adminCommand("getCmdLineOpts");
-
- // Stop the mongod we used to get the options
- MongoRunner.stopMongod(baseMongod.port);
-
- return getCmdLineOptsBaseMongod;
- }
-
- if (typeof getCmdLineOptsBaseMongod === "undefined") {
- getCmdLineOptsBaseMongod = getBaseOptsObject();
- }
-
- // Get base command line opts. Needed because the framework adds its own options
- var getCmdLineOptsExpected = getCmdLineOptsBaseMongod;
-
- // Delete port and dbPath if we are not explicitly setting them, since they will change on
- // multiple runs of the test framework and cause false failures.
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.net === "undefined" ||
- typeof expectedResult.parsed.net.port === "undefined") {
- delete getCmdLineOptsExpected.parsed.net.port;
- }
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.storage === "undefined" ||
- typeof expectedResult.parsed.storage.dbPath === "undefined") {
- delete getCmdLineOptsExpected.parsed.storage.dbPath;
- }
-
- // Merge with the result that we expect
- expectedResult = mergeOptions(getCmdLineOptsExpected, expectedResult);
-
- // Start mongod with options
- var mongod = MongoRunner.runMongod(mongoRunnerConfig);
-
- // Create and authenticate high-privilege user in case mongod is running with authorization.
- // Try/catch is necessary in case this is being run on an uninitiated replset, by a test
- // such as repl_options.js for example.
- var ex;
- try {
- mongod.getDB("admin").createUser({user: "root", pwd: "pass", roles: ["root"]});
- mongod.getDB("admin").auth("root", "pass");
- }
- catch (ex) {
- }
-
- // Get the parsed options
- var getCmdLineOptsResult = mongod.adminCommand("getCmdLineOpts");
-
- // Delete port and dbPath if we are not explicitly setting them, since they will change on
- // multiple runs of the test framework and cause false failures.
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.net === "undefined" ||
- typeof expectedResult.parsed.net.port === "undefined") {
- delete getCmdLineOptsResult.parsed.net.port;
- }
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.storage === "undefined" ||
- typeof expectedResult.parsed.storage.dbPath === "undefined") {
- delete getCmdLineOptsResult.parsed.storage.dbPath;
- }
-
- // Make sure the options are equal to what we expect
- assert.docEq(getCmdLineOptsResult.parsed, expectedResult.parsed);
-
- // Cleanup
- mongod.getDB("admin").logout();
- MongoRunner.stopMongod(mongod.port);
-}
-
-// Test that the parsed result of setting certain command line options has the correct format in
-// mongos. See SERVER-13379.
-//
-// Arguments:
-// mongoRunnerConfig - Configuration object to pass to the mongo runner
-// expectedResult - Object formatted the same way as the result of running the "getCmdLineOpts"
-// command, but with only the fields that should be set by the options implied by the first
-// argument set.
-//
-// Example:
-//
-// testGetCmdLineOptsMongos({ port : 10000 }, { "parsed" : { "net" : { "port" : 10000 } } });
-//
-var getCmdLineOptsBaseMongos;
-function testGetCmdLineOptsMongos(mongoRunnerConfig, expectedResult) {
-
- // Get the options object returned by "getCmdLineOpts" when we spawn a mongos using our test
- // framework without passing any additional options. We need this because the framework adds
- // options of its own, and we only want to compare against the options we care about.
- function getBaseOptsObject() {
-
- // Start mongod with no options
- var baseMongod = MongoRunner.runMongod();
-
- // Start mongos with only the configdb option
- var baseMongos = MongoRunner.runMongos({ configdb : baseMongod.host });
-
- // Get base command line opts. Needed because the framework adds its own options
- var getCmdLineOptsBaseMongos = baseMongos.adminCommand("getCmdLineOpts");
-
- // Remove the configdb option
- delete getCmdLineOptsBaseMongos.parsed.sharding.configDB;
-
- // Stop the mongod and mongos we used to get the options
- MongoRunner.stopMongos(baseMongos.port);
- MongoRunner.stopMongod(baseMongod.port);
-
- return getCmdLineOptsBaseMongos;
- }
-
- if (typeof getCmdLineOptsBaseMongos === "undefined") {
- getCmdLineOptsBaseMongos = getBaseOptsObject();
- }
-
- // Get base command line opts. Needed because the framework adds its own options
- var getCmdLineOptsExpected = getCmdLineOptsBaseMongos;
-
- // Delete port if we are not explicitly setting it, since it will change on multiple runs of the
- // test framework and cause false failures.
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.net === "undefined" ||
- typeof expectedResult.parsed.net.port === "undefined") {
- delete getCmdLineOptsExpected.parsed.net.port;
- }
-
- // Merge with the result that we expect
- expectedResult = mergeOptions(getCmdLineOptsExpected, expectedResult);
-
- // Start mongod with no options
- var mongod = MongoRunner.runMongod();
-
- // Add configdb option
- mongoRunnerConfig['configdb'] = mongod.host;
-
- // Start mongos connected to mongod
- var mongos = MongoRunner.runMongos(mongoRunnerConfig);
-
- // Get the parsed options
- var getCmdLineOptsResult = mongos.adminCommand("getCmdLineOpts");
-
- // Delete port if we are not explicitly setting it, since it will change on multiple runs of the
- // test framework and cause false failures.
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.net === "undefined" ||
- typeof expectedResult.parsed.net.port === "undefined") {
- delete getCmdLineOptsResult.parsed.net.port;
- }
-
- // Remove the configdb option
- delete getCmdLineOptsResult.parsed.sharding.configDB;
-
- // Make sure the options are equal to what we expect
- assert.docEq(getCmdLineOptsResult.parsed, expectedResult.parsed);
-
- // Cleanup
- MongoRunner.stopMongos(mongos.port);
- MongoRunner.stopMongod(mongod.port);
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_auth.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_auth.ini
deleted file mode 100644
index c1193be1b03..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_auth.ini
+++ /dev/null
@@ -1 +0,0 @@
-auth=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_dur.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_dur.ini
deleted file mode 100644
index 8f83f3ae5a7..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_dur.ini
+++ /dev/null
@@ -1 +0,0 @@
-dur=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_httpinterface.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_httpinterface.ini
deleted file mode 100644
index fc839a98a76..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_httpinterface.ini
+++ /dev/null
@@ -1 +0,0 @@
-httpinterface=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_ipv6.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_ipv6.ini
deleted file mode 100644
index a091421022d..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_ipv6.ini
+++ /dev/null
@@ -1 +0,0 @@
-ipv6=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_journal.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_journal.ini
deleted file mode 100644
index d0010a86906..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_journal.ini
+++ /dev/null
@@ -1 +0,0 @@
-journal=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_jsonp.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_jsonp.ini
deleted file mode 100644
index 82847f50b2b..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_jsonp.ini
+++ /dev/null
@@ -1 +0,0 @@
-jsonp=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_jsonp.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_jsonp.json
deleted file mode 100644
index 4d5477a8547..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_jsonp.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "net" : {
- "http" : {
- "JSONPEnabled" : false
- }
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_moveparanoia.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_moveparanoia.ini
deleted file mode 100644
index f21b50f9513..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_moveparanoia.ini
+++ /dev/null
@@ -1 +0,0 @@
-moveParanoia=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noauth.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noauth.ini
deleted file mode 100644
index a65f909baf3..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noauth.ini
+++ /dev/null
@@ -1 +0,0 @@
-noauth=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noautosplit.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noautosplit.ini
deleted file mode 100644
index b490f9038dd..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noautosplit.ini
+++ /dev/null
@@ -1 +0,0 @@
-noAutoSplit=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nodur.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nodur.ini
deleted file mode 100644
index b0c73a48b30..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nodur.ini
+++ /dev/null
@@ -1 +0,0 @@
-nodur=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nohttpinterface.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nohttpinterface.ini
deleted file mode 100644
index 52c4958da6e..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nohttpinterface.ini
+++ /dev/null
@@ -1 +0,0 @@
-nohttpinterface=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noindexbuildretry.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noindexbuildretry.ini
deleted file mode 100644
index 79e428c492f..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noindexbuildretry.ini
+++ /dev/null
@@ -1 +0,0 @@
-noIndexBuildRetry=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nojournal.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nojournal.ini
deleted file mode 100644
index 17172363d25..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nojournal.ini
+++ /dev/null
@@ -1 +0,0 @@
-nojournal=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nomoveparanoia.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nomoveparanoia.ini
deleted file mode 100644
index 4696304134f..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nomoveparanoia.ini
+++ /dev/null
@@ -1 +0,0 @@
-noMoveParanoia=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noobjcheck.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noobjcheck.ini
deleted file mode 100644
index 471e83c3172..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noobjcheck.ini
+++ /dev/null
@@ -1 +0,0 @@
-noobjcheck=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noprealloc.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noprealloc.ini
deleted file mode 100644
index 08c78be3507..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noprealloc.ini
+++ /dev/null
@@ -1 +0,0 @@
-noprealloc=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noscripting.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noscripting.ini
deleted file mode 100644
index 4cfaf3395f6..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_noscripting.ini
+++ /dev/null
@@ -1 +0,0 @@
-noscripting=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nounixsocket.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nounixsocket.ini
deleted file mode 100644
index 66da9f08391..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_nounixsocket.ini
+++ /dev/null
@@ -1 +0,0 @@
-nounixsocket=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_objcheck.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_objcheck.ini
deleted file mode 100644
index bd19d026bbf..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_objcheck.ini
+++ /dev/null
@@ -1 +0,0 @@
-objcheck=false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_rest_interface.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_rest_interface.json
deleted file mode 100644
index f9ad93a4f5d..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/disable_rest_interface.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "net" : {
- "http" : {
- "RESTInterfaceEnabled" : false
- }
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_auth.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_auth.json
deleted file mode 100644
index 9f9cc84d107..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_auth.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "security" : {
- "authorization" : "enabled"
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_autosplit.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_autosplit.json
deleted file mode 100644
index a0d4f8af1be..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_autosplit.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "sharding" : {
- "autoSplit" : true
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_httpinterface.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_httpinterface.json
deleted file mode 100644
index c87dabe125d..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_httpinterface.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "net" : {
- "http" : {
- "enabled" : true
- }
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_indexbuildretry.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_indexbuildretry.json
deleted file mode 100644
index 362db08edd3..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_indexbuildretry.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "storage" : {
- "indexBuildRetry" : true
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_journal.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_journal.json
deleted file mode 100644
index d75b94ccbc7..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_journal.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "storage" : {
- "journal" : {
- "enabled" : false
- }
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_objcheck.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_objcheck.json
deleted file mode 100644
index b52be7382ed..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_objcheck.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "net" : {
- "wireObjectCheck" : true
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_paranoia.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_paranoia.json
deleted file mode 100644
index 218646b1662..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_paranoia.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "sharding" : {
- "archiveMovedChunks" : true
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_prealloc.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_prealloc.json
deleted file mode 100644
index 15ecefbb546..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_prealloc.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "storage" : {
- "preallocDataFiles" : true
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_scripting.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_scripting.json
deleted file mode 100644
index e8f32f2c23c..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_scripting.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "security" : {
- "javascriptEnabled" : true
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_unixsocket.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_unixsocket.json
deleted file mode 100644
index 660d21eb17f..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/enable_unixsocket.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "net" : {
- "unixDomainSocket" : {
- "enabled" : true
- }
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/implicitly_enable_dur.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/implicitly_enable_dur.ini
deleted file mode 100644
index 43495fbd0bd..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/implicitly_enable_dur.ini
+++ /dev/null
@@ -1 +0,0 @@
-dur=
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/implicitly_enable_journal.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/implicitly_enable_journal.ini
deleted file mode 100644
index f750ac2e185..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/implicitly_enable_journal.ini
+++ /dev/null
@@ -1 +0,0 @@
-journal=
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/implicitly_enable_nodur.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/implicitly_enable_nodur.ini
deleted file mode 100644
index f1046df16a9..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/implicitly_enable_nodur.ini
+++ /dev/null
@@ -1 +0,0 @@
-nodur=
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/implicitly_enable_nojournal.ini b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/implicitly_enable_nojournal.ini
deleted file mode 100644
index 737e5c28029..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/implicitly_enable_nojournal.ini
+++ /dev/null
@@ -1 +0,0 @@
-nojournal=
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_component_verbosity.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_component_verbosity.json
deleted file mode 100644
index 69c200834a1..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_component_verbosity.json
+++ /dev/null
@@ -1,16 +0,0 @@
-{
- "systemLog" : {
- "verbosity" : 2,
- "component" : {
- "accessControl" : {
- "verbosity" : 0
- },
- "storage" : {
- "verbosity" : 3,
- "journaling" : {
- "verbosity" : 5
- }
- }
- }
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_profiling.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_profiling.json
deleted file mode 100644
index 944f0de1575..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_profiling.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "operationProfiling" : {
- "mode" : "all"
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_replsetname.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_replsetname.json
deleted file mode 100644
index 522ca2b766f..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_replsetname.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "replication" : {
- "replSetName" : "myconfigname"
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_shardingrole.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_shardingrole.json
deleted file mode 100644
index 71f92f122db..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_shardingrole.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "sharding" : {
- "clusterRole" : "configsvr"
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_verbosity.json b/src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_verbosity.json
deleted file mode 100644
index 47a1cce1b03..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/config_files/set_verbosity.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "systemLog" : {
- "verbosity" : 5
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/crl.pem b/src/mongo/gotools/test/legacy28/jstests/libs/crl.pem
deleted file mode 100644
index 275c9e2d91c..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/crl.pem
+++ /dev/null
@@ -1,38 +0,0 @@
-Certificate Revocation List (CRL):
- Version 2 (0x1)
- Signature Algorithm: sha256WithRSAEncryption
- Issuer: /CN=Kernel Test CA/OU=Kernel/O=MongoDB/L=New York City/ST=New York/C=US
- Last Update: Aug 21 13:56:28 2014 GMT
- Next Update: Aug 18 13:56:28 2024 GMT
- CRL extensions:
- X509v3 CRL Number:
- 4096
-No Revoked Certificates.
- Signature Algorithm: sha256WithRSAEncryption
- 48:1b:0b:b1:89:f5:6f:af:3c:dd:2a:a0:e5:55:04:80:16:b4:
- 23:98:39:bb:9f:16:c9:25:73:72:c6:a6:73:21:1d:1a:b6:99:
- fc:47:5e:bc:af:64:29:02:9c:a5:db:15:8a:65:48:3c:4f:a6:
- cd:35:47:aa:c6:c0:39:f5:a6:88:8f:1b:6c:26:61:4e:10:d7:
- e2:b0:20:3a:64:92:c1:d3:2a:11:3e:03:e2:50:fd:4e:3c:de:
- e2:e5:78:dc:8e:07:a5:69:55:13:2b:8f:ae:21:00:42:85:ff:
- b6:b1:2b:69:08:40:5a:25:8c:fe:57:7f:b1:06:b0:72:ff:61:
- de:21:59:05:a8:1b:9e:c7:8a:08:ab:f5:bc:51:b3:36:68:0f:
- 54:65:3c:8d:b7:80:d0:27:01:3e:43:97:89:19:89:0e:c5:01:
- 2c:55:9f:b6:e4:c8:0b:35:f8:52:45:d3:b4:09:ce:df:73:98:
- f5:4c:e4:5a:06:ac:63:4c:f8:4d:9c:af:88:fc:19:f7:77:ea:
- ee:56:18:49:16:ce:62:66:d1:1b:8d:66:33:b5:dc:b1:25:b3:
- 6c:81:e9:d0:8a:1d:83:61:49:0e:d9:94:6a:46:80:41:d6:b6:
- 59:a9:30:55:3d:5b:d3:5b:f1:37:ec:2b:76:d0:3a:ac:b2:c8:
- 7c:77:04:78
------BEGIN X509 CRL-----
-MIIBzjCBtwIBATANBgkqhkiG9w0BAQsFADB0MRcwFQYDVQQDEw5LZXJuZWwgVGVz
-dCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYwFAYDVQQH
-Ew1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMX
-DTE0MDgyMTEzNTYyOFoXDTI0MDgxODEzNTYyOFqgDzANMAsGA1UdFAQEAgIQADAN
-BgkqhkiG9w0BAQsFAAOCAQEASBsLsYn1b6883Sqg5VUEgBa0I5g5u58WySVzcsam
-cyEdGraZ/EdevK9kKQKcpdsVimVIPE+mzTVHqsbAOfWmiI8bbCZhThDX4rAgOmSS
-wdMqET4D4lD9Tjze4uV43I4HpWlVEyuPriEAQoX/trEraQhAWiWM/ld/sQawcv9h
-3iFZBagbnseKCKv1vFGzNmgPVGU8jbeA0CcBPkOXiRmJDsUBLFWftuTICzX4UkXT
-tAnO33OY9UzkWgasY0z4TZyviPwZ93fq7lYYSRbOYmbRG41mM7XcsSWzbIHp0Iod
-g2FJDtmUakaAQda2WakwVT1b01vxN+wrdtA6rLLIfHcEeA==
------END X509 CRL-----
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/crl_client_revoked.pem b/src/mongo/gotools/test/legacy28/jstests/libs/crl_client_revoked.pem
deleted file mode 100644
index 0b99d56936e..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/crl_client_revoked.pem
+++ /dev/null
@@ -1,41 +0,0 @@
-Certificate Revocation List (CRL):
- Version 2 (0x1)
- Signature Algorithm: sha256WithRSAEncryption
- Issuer: /CN=Kernel Test CA/OU=Kernel/O=MongoDB/L=New York City/ST=New York/C=US
- Last Update: Aug 21 13:43:27 2014 GMT
- Next Update: Aug 18 13:43:27 2024 GMT
- CRL extensions:
- X509v3 CRL Number:
- 4096
-Revoked Certificates:
- Serial Number: 02
- Revocation Date: Aug 21 13:43:27 2014 GMT
- Signature Algorithm: sha256WithRSAEncryption
- 24:86:73:8d:7f:55:15:d0:d6:8a:47:53:cf:97:f7:e5:3d:0b:
- 4a:ea:fb:02:6a:2e:79:c6:b1:38:b2:ac:f0:c0:64:47:b0:3e:
- ad:4e:2e:94:e6:64:ed:79:34:bd:74:c0:d4:3d:b9:a1:bb:38:
- 89:5c:02:6a:ad:6b:dc:3b:64:34:6a:2d:4c:90:36:82:95:0c:
- 19:88:e2:a3:bf:8e:1b:56:98:37:32:87:ed:f0:bd:dd:e2:0d:
- f9:80:dc:f2:a5:b4:ee:d9:bb:83:fe:b8:3a:13:e0:da:fc:04:
- 77:fb:ce:f9:c5:2a:54:a7:f0:34:09:2a:b2:3d:46:1b:48:e6:
- e8:16:c7:a1:3c:88:8c:72:cd:cc:53:dc:f8:54:63:1f:b9:8b:
- ea:2c:e5:26:c5:b4:a4:9f:8b:e1:6c:85:9b:c6:63:6f:2f:ae:
- 18:c5:6a:23:f0:58:27:85:5c:0f:01:04:da:d2:8b:de:9e:ab:
- 46:00:22:07:28:e1:ef:46:91:90:06:58:95:05:68:67:58:6e:
- 67:a8:0b:06:1a:73:d9:04:18:c9:a3:e4:e3:d6:94:a3:e1:5c:
- e5:08:1b:b3:9d:ab:3e:ea:20:b1:04:e5:90:e1:42:54:b2:58:
- bb:51:1a:48:87:60:b0:95:4a:2e:ce:a0:4f:8c:17:6d:6b:4c:
- 37:aa:4d:d7
------BEGIN X509 CRL-----
-MIIB5DCBzQIBATANBgkqhkiG9w0BAQsFADB0MRcwFQYDVQQDEw5LZXJuZWwgVGVz
-dCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYwFAYDVQQH
-Ew1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMX
-DTE0MDgyMTEzNDMyN1oXDTI0MDgxODEzNDMyN1owFDASAgECFw0xNDA4MjExMzQz
-MjdaoA8wDTALBgNVHRQEBAICEAAwDQYJKoZIhvcNAQELBQADggEBACSGc41/VRXQ
-1opHU8+X9+U9C0rq+wJqLnnGsTiyrPDAZEewPq1OLpTmZO15NL10wNQ9uaG7OIlc
-Amqta9w7ZDRqLUyQNoKVDBmI4qO/jhtWmDcyh+3wvd3iDfmA3PKltO7Zu4P+uDoT
-4Nr8BHf7zvnFKlSn8DQJKrI9RhtI5ugWx6E8iIxyzcxT3PhUYx+5i+os5SbFtKSf
-i+FshZvGY28vrhjFaiPwWCeFXA8BBNrSi96eq0YAIgco4e9GkZAGWJUFaGdYbmeo
-CwYac9kEGMmj5OPWlKPhXOUIG7Odqz7qILEE5ZDhQlSyWLtRGkiHYLCVSi7OoE+M
-F21rTDeqTdc=
------END X509 CRL-----
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/crl_expired.pem b/src/mongo/gotools/test/legacy28/jstests/libs/crl_expired.pem
deleted file mode 100644
index c9b3abb05a7..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/crl_expired.pem
+++ /dev/null
@@ -1,38 +0,0 @@
-Certificate Revocation List (CRL):
- Version 2 (0x1)
- Signature Algorithm: sha256WithRSAEncryption
- Issuer: /CN=Kernel Test CA/OU=Kernel/O=MongoDB/L=New York City/ST=New York/C=US
- Last Update: Jul 21 19:45:56 2014 GMT
- Next Update: Jul 21 20:45:56 2014 GMT
- CRL extensions:
- X509v3 CRL Number:
- 4096
-No Revoked Certificates.
- Signature Algorithm: sha256WithRSAEncryption
- 14:e8:6d:51:fc:0e:66:08:22:b2:4d:fb:da:7a:5f:4d:d1:a0:
- 80:f0:18:f3:c5:ca:c7:05:6c:70:59:fa:d5:96:68:fa:c7:1d:
- 7e:fb:53:3b:4a:8f:ed:bb:51:04:e8:fb:db:d7:b8:96:d9:e2:
- 8d:bb:54:cc:11:60:c8:20:ea:81:28:5f:e1:eb:d6:8c:94:bf:
- 42:e0:7f:a3:13:0c:76:05:f2:f0:34:98:a3:e8:64:74:4c:cb:
- bf:39:bb:fa:d5:2d:72:02:d1:fa:56:15:59:12:b7:ff:a3:cc:
- c9:d6:14:ca:4a:1e:0b:b4:47:cf:58:b0:e5:24:d2:21:71:0d:
- 2d:09:77:5c:2f:ef:40:f8:74:90:03:cc:37:2e:ea:6a:25:59:
- c0:bf:48:90:00:55:9c:db:bf:1f:f0:7b:b6:5a:90:94:b6:8d:
- 7c:7d:bb:2d:11:5f:0c:f5:4a:9b:c5:ed:ab:e3:fd:35:c8:76:
- 3b:2e:41:cb:df:76:b5:f4:e9:05:72:f6:56:7a:fc:34:07:d6:
- a2:55:eb:7c:58:33:5b:9d:3e:b2:03:89:01:c6:d1:54:75:1a:
- 5c:73:3f:5e:2e:fd:3b:38:ed:d4:e1:fa:ec:ff:84:f0:55:ee:
- 83:e0:f0:13:97:e7:f0:55:8c:00:a3:1a:31:e4:31:9e:68:d0:
- 6d:3e:81:b0
------BEGIN X509 CRL-----
-MIIBzjCBtwIBATANBgkqhkiG9w0BAQsFADB0MRcwFQYDVQQDEw5LZXJuZWwgVGVz
-dCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYwFAYDVQQH
-Ew1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMX
-DTE0MDcyMTE5NDU1NloXDTE0MDcyMTIwNDU1NlqgDzANMAsGA1UdFAQEAgIQADAN
-BgkqhkiG9w0BAQsFAAOCAQEAFOhtUfwOZggisk372npfTdGggPAY88XKxwVscFn6
-1ZZo+scdfvtTO0qP7btRBOj729e4ltnijbtUzBFgyCDqgShf4evWjJS/QuB/oxMM
-dgXy8DSYo+hkdEzLvzm7+tUtcgLR+lYVWRK3/6PMydYUykoeC7RHz1iw5STSIXEN
-LQl3XC/vQPh0kAPMNy7qaiVZwL9IkABVnNu/H/B7tlqQlLaNfH27LRFfDPVKm8Xt
-q+P9Nch2Oy5By992tfTpBXL2Vnr8NAfWolXrfFgzW50+sgOJAcbRVHUaXHM/Xi79
-Ozjt1OH67P+E8FXug+DwE5fn8FWMAKMaMeQxnmjQbT6BsA==
------END X509 CRL-----
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/dur_checksum_bad_first.journal b/src/mongo/gotools/test/legacy28/jstests/libs/dur_checksum_bad_first.journal
deleted file mode 100644
index 687317844a7..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/dur_checksum_bad_first.journal
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/dur_checksum_bad_last.journal b/src/mongo/gotools/test/legacy28/jstests/libs/dur_checksum_bad_last.journal
deleted file mode 100644
index 7dd98e2c97b..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/dur_checksum_bad_last.journal
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/dur_checksum_good.journal b/src/mongo/gotools/test/legacy28/jstests/libs/dur_checksum_good.journal
deleted file mode 100644
index d76790d2451..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/dur_checksum_good.journal
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/expired.pem b/src/mongo/gotools/test/legacy28/jstests/libs/expired.pem
deleted file mode 100644
index e1d2ceb8de8..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/expired.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDfzCCAmegAwIBAgIBEDANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMTQwNzEwMTYwMDAwWhcNMTQwNzE2MTYwMDAwWjBtMRAwDgYDVQQD
-EwdleHBpcmVkMQ8wDQYDVQQLEwZLZXJuZWwxEDAOBgNVBAoTB01vbmdvREIxFjAU
-BgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYDVQQG
-EwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPFSQZlHvJpi3dmA
-1X5U1qaUN/O/EQy5IZ5Rw+cfFHWOZ84EsLZxehWyqDZRH49Rg06xSYdO2WZOopP8
-OnUVCLGL819K83ikZ5sCbvB/gKCSCenwveEN992gJfs70HaZfiJNC7/cFigSb5Jg
-5G77E1/Uml4hIThfYG2NbCsTuP/P4JLwuzCkfgEUWRbCioMPEpIpxQw2LCx5DCy6
-Llhct0Hp14N9dZ4nA1h1621wOckgGJHw9DXdt9rGzulY1UgOOPczyqT08CdpaVxK
-VzrJCcUxfUjhO4ukHz+LBFQY+ZEm+tVboDbinbiHxY24urP46/u+BwRvBvjOovJi
-NVUh5GsCAwEAAaMjMCEwHwYDVR0RBBgwFoIJbG9jYWxob3N0ggkxMjcuMC4wLjEw
-DQYJKoZIhvcNAQEFBQADggEBAG3rRSFCSG3hilGK9SMtTpFnrquJNlL+yG0TP8VG
-1qVt1JGaDJ8YUc5HXXtKBeLnRYii7EUx1wZIKn78RHRdITo5OJvlmcwwh0bt+/eK
-u9XFgR3z35w5UPr/YktgoX39SOzAZUoorgNw500pfxfneqCZtcRufVvjtk8TUdlN
-lcd2HfIxtUHWJeTcVM18g0JdHMYdMBXDKuXOW9VWLIBC2G6nAL/8SZJtUaDllPb4
-NisuIGjfjGgNxMpEXn+sQjFTupAoJru21OtAgERWFJhKQ0hbO0kucEPKEfxHDBVG
-dKSRIl6b0XSDLfxEXPv5ZhdrK4KEw1dYYXySvIVXtn0Ys38=
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEA8VJBmUe8mmLd2YDVflTWppQ3878RDLkhnlHD5x8UdY5nzgSw
-tnF6FbKoNlEfj1GDTrFJh07ZZk6ik/w6dRUIsYvzX0rzeKRnmwJu8H+AoJIJ6fC9
-4Q333aAl+zvQdpl+Ik0Lv9wWKBJvkmDkbvsTX9SaXiEhOF9gbY1sKxO4/8/gkvC7
-MKR+ARRZFsKKgw8SkinFDDYsLHkMLLouWFy3QenXg311nicDWHXrbXA5ySAYkfD0
-Nd232sbO6VjVSA449zPKpPTwJ2lpXEpXOskJxTF9SOE7i6QfP4sEVBj5kSb61Vug
-NuKduIfFjbi6s/jr+74HBG8G+M6i8mI1VSHkawIDAQABAoIBAGAO1QvVkU6HAjX8
-4X6a+KJwJ2F/8aJ14trpQyixp2wv1kQce9bzjpwqdGjCm+RplvHxAgq5KTJfJLnx
-UbefOsmpoqOQ6x9fmdoK+uwCZMoFt6qGaJ63960hfVzm71D2Qk4XCxFA4xTqWb0T
-knpWuNyRfSzw1Q9ib7jL7X2sKRyx9ZP+1a41ia/Ko6iYPUUnRb1Ewo10alYVWVIE
-upeIlWqv+1DGfda9f34pGVh3ldIDh1LHqaAZhdn6sKtcgIUGcWatZRmQiA5kSflP
-VBpOI2c2tkQv0j5cPGwD7GGaJ2aKayHG0EwnoNmxCeR0Ay3MO0vBAsxn7Wy6yqrS
-EfkYhFkCgYEA/OA2AHFIH7mE0nrMwegXrEy7BZUgLRCRFWTjxwnCKFQj2Uo2dtYD
-2QQKuQWeiP+LD2nHj4n1KXuSJiB1GtmEF3JkYV4Wd7mPWEVNDHa0G8ZndquPK40s
-YSjh9u0KesUegncBFfIiwzxsk9724iaXq3aXOexc0btQB2xltRzj6/0CgYEA9E2A
-QU6pnCOzGDyOV7+TFr0ha7TXaMOb5aIVz6tJ7r5Nb7oZP9T9UCdUnw2Tls5Ce5tI
-J23O7JqwT4CudnWnk5ZtVtGBYA23mUryrgf/Utfg08hU2uRyq9LOxVaVqfV/AipN
-62GmfuxkK4PatOcAOhKqmS/zGfZqIg7V6rtX2ocCgYEAlY1ogpR8ij6mvfBgPmGr
-9nues+uBDwXYOCXlzCYKTN2OIgkQ8vEZb3RDfy9CllVDgccWfd6iPnlVcvUJLOrt
-gwxlL2x8ryvwCc1ahv+A/1g0gmtuDdy9HW0XTnjcFMWViKUm4DrGsl5+/GkF67PV
-SVOmllwifOthpjJGaHmAlmUCgYB6EFMZzlzud+PfIzqX20952Avfzd6nKL03EjJF
-rbbmA82bGmfNPfVHXC9qvRTWD76mFeMKWFJAY9XeE1SYOZb+JfYBn/I9dP0cKZdx
-nutSkCx0hK7pI6Wr9kt7zBRBdDj+cva1ufe/iQtPtrTLGHRDj9oPaibT/Qvwcmst
-umdd9wKBgQDM7j6Rh7v8AeLy2bw73Qtk0ORaHqRBHSQw87srOLwtfQzE92zSGMj+
-FVt/BdPgzyaddegKvJ9AFCPAxbA8Glnmc89FO7pcXn9Wcy+ZoZIF6YwgUPhPCp/4
-r9bKuXuQiutFbKyes/5PTXqbJ/7xKRZIpQCvxg2syrW3hxx8LIx/kQ==
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/fts.js b/src/mongo/gotools/test/legacy28/jstests/libs/fts.js
deleted file mode 100644
index 73b7d339ba5..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/fts.js
+++ /dev/null
@@ -1,18 +0,0 @@
-
-function queryIDS( coll, search, filter, extra ){
- var cmd = { search : search }
- if ( filter )
- cmd.filter = filter;
- if ( extra )
- Object.extend( cmd, extra );
- lastCommadResult = coll.runCommand( "text" , cmd);
-
- return getIDS( lastCommadResult );
-}
-
-function getIDS( commandResult ){
- if ( ! ( commandResult && commandResult.results ) )
- return []
-
- return commandResult.results.map( function(z){ return z.obj._id; } )
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/geo_near_random.js b/src/mongo/gotools/test/legacy28/jstests/libs/geo_near_random.js
deleted file mode 100644
index 248f5e49a6c..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/geo_near_random.js
+++ /dev/null
@@ -1,101 +0,0 @@
-GeoNearRandomTest = function(name) {
- this.name = name;
- this.t = db[name];
- this.nPts = 0;
-
- // reset state
- this.t.drop();
- Random.srand(1234);
-
- print("starting test: " + name);
-}
-
-
-GeoNearRandomTest.prototype.mkPt = function mkPt(scale, indexBounds){
- if(!indexBounds){
- scale = scale || 1; // scale is good for staying away from edges
- return [((Random.rand() * 359.8) - 179.9) * scale, ((Random.rand() * 180) - 90) * scale];
- }
- else{
- var range = indexBounds.max - indexBounds.min;
- var eps = Math.pow(2, -40);
- // Go very close to the borders but not quite there.
- return [( Random.rand() * (range - eps) + eps) + indexBounds.min, ( Random.rand() * (range - eps) + eps ) + indexBounds.min];
- }
-
-}
-
-GeoNearRandomTest.prototype.insertPts = function(nPts, indexBounds, scale) {
- assert.eq(this.nPts, 0, "insertPoints already called");
- this.nPts = nPts;
-
- var bulk = this.t.initializeUnorderedBulkOp();
- for (var i=0; i<nPts; i++){
- bulk.insert({ _id: i, loc: this.mkPt(scale, indexBounds) });
- }
- assert.writeOK(bulk.execute());
-
- if(!indexBounds)
- this.t.ensureIndex({loc: '2d'});
- else
- this.t.ensureIndex({loc: '2d'}, indexBounds)
-}
-
-GeoNearRandomTest.prototype.assertIsPrefix = function(short, long) {
- for (var i=0; i < short.length; i++){
-
- var xS = short[i].obj ? short[i].obj.loc[0] : short[i].loc[0]
- var yS = short[i].obj ? short[i].obj.loc[1] : short[i].loc[1]
- var dS = short[i].obj ? short[i].dis : 1
-
- var xL = long[i].obj ? long[i].obj.loc[0] : long[i].loc[0]
- var yL = long[i].obj ? long[i].obj.loc[1] : long[i].loc[1]
- var dL = long[i].obj ? long[i].dis : 1
-
- assert.eq([xS, yS, dS], [xL, yL, dL]);
- }
-}
-
-GeoNearRandomTest.prototype.testPt = function(pt, opts) {
- assert.neq(this.nPts, 0, "insertPoints not yet called");
-
- opts = opts || {};
- opts['sphere'] = opts['sphere'] || 0;
- opts['nToTest'] = opts['nToTest'] || this.nPts; // be careful, test is O( N^2 )
-
- print("testing point: " + tojson(pt) + " opts: " + tojson(opts));
-
-
- var cmd = {geoNear:this.t.getName(), near: pt, num: 1, spherical:opts.sphere};
-
- var last = db.runCommand(cmd).results;
- for (var i=2; i <= opts.nToTest; i++){
- //print(i); // uncomment to watch status
- cmd.num = i
- var ret = db.runCommand(cmd).results;
-
- try {
- this.assertIsPrefix(last, ret);
- } catch (e) {
- print("*** failed while compairing " + (i-1) + " and " + i);
- printjson(cmd);
- throw e; // rethrow
- }
-
- last = ret;
- }
-
-
- if (!opts.sharded){
- last = last.map(function(x){return x.obj});
-
- var query = {loc:{}};
- query.loc[ opts.sphere ? '$nearSphere' : '$near' ] = pt;
- var near = this.t.find(query).limit(opts.nToTest).toArray();
-
- this.assertIsPrefix(last, near);
- assert.eq(last, near);
- }
-}
-
-
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/host_ipaddr.js b/src/mongo/gotools/test/legacy28/jstests/libs/host_ipaddr.js
deleted file mode 100644
index 7db1417e977..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/host_ipaddr.js
+++ /dev/null
@@ -1,38 +0,0 @@
-// Returns non-localhost ipaddr of host running the mongo shell process
-function get_ipaddr() {
- // set temp path, if it exists
- var path = "";
- try {
- path = TestData.tmpPath;
- if (typeof path == "undefined") {
- path = "";
- } else if (path.slice(-1) != "/") {
- // Terminate path with / if defined
- path += "/";
- }
- }
- catch (err) {}
-
- var ipFile = path+"ipaddr.log";
- var windowsCmd = "ipconfig > "+ipFile;
- var unixCmd = "/sbin/ifconfig | grep inet | grep -v '127.0.0.1' > "+ipFile;
- var ipAddr = null;
- var hostType = null;
-
- try {
- hostType = getBuildInfo().sysInfo.split(' ')[0];
-
- // os-specific methods
- if (hostType == "windows") {
- runProgram('cmd.exe', '/c', windowsCmd);
- ipAddr = cat(ipFile).match(/IPv4.*: (.*)/)[1];
- } else {
- runProgram('bash', '-c', unixCmd);
- ipAddr = cat(ipFile).replace(/addr:/g, "").match(/inet (.[^ ]*) /)[1];
- }
- }
- finally {
- removeFile(ipFile);
- }
- return ipAddr;
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/key1 b/src/mongo/gotools/test/legacy28/jstests/libs/key1
deleted file mode 100644
index b5c19e4092f..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/key1
+++ /dev/null
@@ -1 +0,0 @@
-foop de doop
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/key2 b/src/mongo/gotools/test/legacy28/jstests/libs/key2
deleted file mode 100644
index cbde8212841..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/key2
+++ /dev/null
@@ -1 +0,0 @@
-other key
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/localhostnameCN.pem b/src/mongo/gotools/test/legacy28/jstests/libs/localhostnameCN.pem
deleted file mode 100644
index e6aca6a217d..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/localhostnameCN.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDgTCCAmmgAwIBAgIBBTANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjBvMRIwEAYDVQQD
-EwkxMjcuMC4wLjExDzANBgNVBAsTBktlcm5lbDEQMA4GA1UEChMHTW9uZ29EQjEW
-MBQGA1UEBxMNTmV3IFlvcmsgQ2l0eTERMA8GA1UECBMITmV3IFlvcmsxCzAJBgNV
-BAYTAlVTMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAiqQNGgQggL8S
-LlxRgpM3qoktt3J9Pd3WXkknz7OjQr4dyj9wd0VNuD6+Ycg1mBbopO+M/K/ZWv8c
-7WDMM13DDZtpWjm+Q6uFc+vlI9Q9bLHgsZF9kVGRb2bNAfV2bC5/UnO1ulQdHoIB
-p3e/Jtko2WmruzVQFGVMBGCY7RlMRyxe3R8rwNNFvooMRg8yq8tq0qBkVhh85kct
-HHPggXD4/uM2Yc/Q94t5MhDFabewdzsFOLqvI/Y5eIeZLhdIzAv37kolr8AuyqIR
-qcJKztXIrFbLgEZBjoCNkOYZOQE+l8iwwiZxoZN9Jv7x5LyiA+ijtQ+5aI/kMPqG
-nox+/bNFCQIDAQABoyMwITAfBgNVHREEGDAWgglsb2NhbGhvc3SCCTEyNy4wLjAu
-MTANBgkqhkiG9w0BAQUFAAOCAQEAVJJNuUVzMRaft17NH6AzMSTiJxMFWoafmYgx
-jZnzA42XDPoPROuN7Bst6WVYDNpPb1AhPDco9qDylSZl0d341nHAuZNc84fD0omN
-Mbqieu8WseRQ300cbnS8p11c9aYpO/fNQ5iaYhGsRT7pnLs9MIgR468KVjY2xt49
-V0rshG6RxZj83KKuJd0T4X+5UeYz4B677y+SR0aoK2I2Sh+cffrMX2LotHc2I+JI
-Y9SDLvQT7chD9GzaWz634kmy3EEY0LreMm6AxhMOsr0lbZx5O8wLTScSjKARJ6OH
-nPxM1gYT07mkNmfyEnl1ChAN0MPgcLHQqEfe7x7ZQSbAv2gWfA==
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpQIBAAKCAQEAiqQNGgQggL8SLlxRgpM3qoktt3J9Pd3WXkknz7OjQr4dyj9w
-d0VNuD6+Ycg1mBbopO+M/K/ZWv8c7WDMM13DDZtpWjm+Q6uFc+vlI9Q9bLHgsZF9
-kVGRb2bNAfV2bC5/UnO1ulQdHoIBp3e/Jtko2WmruzVQFGVMBGCY7RlMRyxe3R8r
-wNNFvooMRg8yq8tq0qBkVhh85kctHHPggXD4/uM2Yc/Q94t5MhDFabewdzsFOLqv
-I/Y5eIeZLhdIzAv37kolr8AuyqIRqcJKztXIrFbLgEZBjoCNkOYZOQE+l8iwwiZx
-oZN9Jv7x5LyiA+ijtQ+5aI/kMPqGnox+/bNFCQIDAQABAoIBAQAMiUT+Az2FJsHY
-G1Trf7Ba5UiS+/FDNNn7cJX++/lZQaOj9BSRVFzwuguw/8+Izxl+QIL5HlWDGupc
-tJICWwoWIuVl2S7RI6NPlhcEJF7hgzwUElnOWBfUgPEsqitpINM2e2wFSzHO3maT
-5AoO0zgUYK+8n9d74KT9CFcLqWvyS3iksK/FXfCZt0T1EoJ4LsDjeCTfVKqrku2U
-+fCnZZYNkrgUI7Hku94EJfOh462V4KQAUGsvllwb1lfmR5NR86G6VX6oyMGctL5e
-1M6XQv+JQGEmAe6uULtCUGh32fzwJ9Un3j2GXOHT0LWrVc5iLuXwwzQvCGaMYtKm
-FAIDpPxhAoGBAMtwzpRyhf2op/REzZn+0aV5FWKjeq69Yxd62RaOf2EetcPwvUOs
-yQXcP0KZv15VWU/XhZUmTkPf52f0YHV/b1Sm6wUOiMNQ4XpnRj2THf0N7RS4idMm
-VwtMf1pxqttxQVKPpOvPEiTyIh2Nx/juyfD4CWkOVNTvOCd1w+av6ukNAoGBAK51
-gIXDuwJ2e5h3IJyewN/HOZqlgPKyMjnACaeXQ5wPJSrz4+UkJkuXT2dYKhv6u7K/
-GtucTdvBIJeq61+LjjkYk7OVDzoqP/uWU7p1y7gU9LZq+7tgq7r8cgeaC3IBQe7X
-jdFPEy1+zAEBh6MfFjnLZ2Kop9qbH3cNih/g9pTtAoGBAJ8dmdUtRXNByCsa7Rv2
-243qiDlf14J4CdrBcK1dwm75j/yye7VEnO2Cd8/lZHGpm3MBBC/FiA06QElkL1V2
-2GKDMun/liP9TH1p7NwYBqp3i+ha9SE6qXXi3PCmWpXLnOWwB7OPf4d6AgjPbYpb
-aYKY3PNYDC2G9IqYZyI0kSy5AoGBAJ5Fe5PfPom9c+OeL7fnTpO16kyiWZnUkDxU
-PG4OjQfHtbCCEv6PDS8G1sKq+Yjor+A5/+O8qeX0D92I8oB720txQI5rbKUYL3PP
-raY7t9YJLPlRlY8o5KN+4vSCjF+hRG+qnr6FPqDHp8xB1wvl6AQGxIR8/csVcDZR
-0j2ZmhsBAoGAO1Cpk/hWXOLAhSj8P8Q/+3439HEctTZheVBd8q/TtdwXocaZMLi8
-MXURuVTw0GtS9TmdqOFXzloFeaMhJx6TQzZ2aPcxu95b7RjEDtVHus3ed2cSJ2El
-AuRvFT2RCVvTu1mM0Ti7id+d8QBcpbIpPjNjK2Wxir/19gtEawlqlkA=
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/localhostnameSAN.pem b/src/mongo/gotools/test/legacy28/jstests/libs/localhostnameSAN.pem
deleted file mode 100644
index 480300f29e1..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/localhostnameSAN.pem
+++ /dev/null
@@ -1,49 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDpDCCAoygAwIBAgIBBjANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjB5MRwwGgYDVQQD
-ExNzYW50ZXN0aG9zdG5hbWUuY29tMQ8wDQYDVQQLEwZLZXJuZWwxEDAOBgNVBAoT
-B01vbmdvREIxFjAUBgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5ldyBZ
-b3JrMQswCQYDVQQGEwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
-AJKOLTNEPv08IVmhfkv6Xq1dT6pki76ggpJ7UpwdUSsTsWDKO2o1c7wnzEjfhYQ+
-CtlEvbYyL3O7f8AaO15WJdi53SMuWS+QfCKs6b0symYbinSXlZGb4oZYFSrodSxH
-+G8u+TUxyeaXgTHowMWArmTRi2LgtIwXwwHJawfhFDxji3cSmLAr5YQMAaXUynq3
-g0DEAGMaeOlyn1PkJ2ZfJsX2di+sceKb+KK1xT+2vUSsvnIumBCYqMhU6y3WjBWK
-6WrmOcsldWo4IcgyzwVRlZiuuYoe6ZsxZ4nMyTdYebALPqgkt8QVXqkgcjWK8F18
-nuqWIAn1ISTjj73H4cnzYv0CAwEAAaM8MDowOAYDVR0RBDEwL4INKi5leGFtcGxl
-LmNvbYIJMTI3LjAuMC4xgghtb3JlZnVuIYIJbG9jYWxob3N0MA0GCSqGSIb3DQEB
-BQUAA4IBAQA5M3U4wvQYI3jz/+Eh4POrJAs9eSRGkUhz1lP7D6Fcyp+BbbXB1fa9
-5qpD4bp1ZoDP2R2zca2uwwfd3DTWPbmwFMNqs2D7d0hgX71Vg9DCAwExFjoeRo44
-cCE9kakZtE3kT/tiH6SpYpnBa3dizxTmiY48z212Pw813SSXSPMN1myx5sMJof5I
-whJNQhSQOw6WHw5swZJZT4FkzxjQMrTWdF6r0d5EU9K2WWk5DTwq4QaysplB5l0H
-8qm+fnC6xI+2qgqMO9xqc6qMtHHICXtdUOup6wj/bdeo7bAQdVDyKlFKiYivDXvO
-RJNp2cwsBgxU+qdrtOLp7/j/0R3tUqWb
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEAko4tM0Q+/TwhWaF+S/perV1PqmSLvqCCkntSnB1RKxOxYMo7
-ajVzvCfMSN+FhD4K2US9tjIvc7t/wBo7XlYl2LndIy5ZL5B8IqzpvSzKZhuKdJeV
-kZvihlgVKuh1LEf4by75NTHJ5peBMejAxYCuZNGLYuC0jBfDAclrB+EUPGOLdxKY
-sCvlhAwBpdTKereDQMQAYxp46XKfU+QnZl8mxfZ2L6xx4pv4orXFP7a9RKy+ci6Y
-EJioyFTrLdaMFYrpauY5yyV1ajghyDLPBVGVmK65ih7pmzFniczJN1h5sAs+qCS3
-xBVeqSByNYrwXXye6pYgCfUhJOOPvcfhyfNi/QIDAQABAoIBADqGMkClwS2pJHxB
-hEjc+4/pklWt/ywpttq+CpgzEOXN4GiRebaJD+WUUvzK3euYRwbKb6PhWJezyWky
-UID0j/qDBm71JEJdRWUnfdPAnja2Ss0Sd3UFNimF5TYUTC5ZszjbHkOC1WiTGdGP
-a+Oy5nF2SF4883x6RLJi963W0Rjn3jIW9LoLeTgm9bjWXg3iqonCo3AjREdkR/SG
-BZaCvulGEWl/A3a7NmW5EGGNUMvzZOxrqQz4EX+VnYdb7SPrH3pmQJyJpAqUlvD5
-y7pO01fI0wg9kOWiIR0vd3Gbm9NaFmlH9Gr2oyan3CWt1h1gPzkH/V17rZzVYb5L
-RnjLdyECgYEA6X16A5Gpb5rOVR/SK/JZGd+3z52+hRR8je4WhXkZqRZmbn2deKha
-LKZi1eVl11t8zitLg/OSN1uZ/873iESKtp/R6vcGcriUCd87cDh7KTyW/7ZW5jdj
-o6Y3Liai3Xrf6dL+V2xYw964Map9oK9qatYw/L+Ke6b9wbGi+hduf1kCgYEAoK8n
-pzctajS3Ntmk147n4ZVtcv78nWItBNH2B8UaofdkBlSRyUURsEY9nA34zLNWI0f3
-k59+cR13iofkQ0rKqJw1HbTTncrSsFqptyEDt23iWSmmaU3/9Us8lcNGqRm7a35V
-Km0XBFLnE0mGFGFoTpNt8oiR4WGASJPi482xkEUCgYEAwPmQn2SDCheDEr2zAdlR
-pN3O2EwCi5DMBK3TdUsKV0KJNCajwHY72Q1HQItQ6XXWp7sGta7YmOIfXFodIUWs
-85URdMXnUWeWCrayNGSp/gHytrNoDOuYcUfN8VnDX5PPfjyBM5X7ox7vUzUakXSJ
-WnVelXZlKR9yOOTs0xAMpjkCgYAbF61N6mXD5IOHwgajObsrM/CyVP/u4WDJ0UT0
-Zm1pJbc9wgCauQSUfiNhLpHmoc5CQJ4jy96b3+YJ+4OnPPMSntPt4FFV557CkWbQ
-M8bWpLZnZjhixP4FM9xRPA2r8WTCaRifAKnC1t+TRvBOe2YE6aK+I/zEzZW9pwG4
-ezQXKQKBgQAIBSJLa6xWbfbzqyPsvmRNgiEjIamF7wcb1sRjgqWM6sCzYwYv8f5v
-9C4YhNXEn+c5V2KevgYeg6iPSQuzEAfJx64QV7JD8kEBf5GNETnuW45Yg7KwKPD6
-ZCealfpy/o9iiNqbWqDNND91pj2/g5oZnac3misJg5tGCJbJsBFXag==
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/mockkrb5.conf b/src/mongo/gotools/test/legacy28/jstests/libs/mockkrb5.conf
deleted file mode 100644
index 0f004f2de8a..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/mockkrb5.conf
+++ /dev/null
@@ -1,13 +0,0 @@
-[libdefaults]
- default_realm = 10GEN.ME
-
-[realms]
- 10GEN.ME = {
- kdc = kdc.10gen.me
- admin_server = kdc.10gen.me
- default_domain = 10gen.me
- }
-
-[domain_realm]
- .10gen.me = 10GEN.ME
- 10gen.me = 10GEN.ME
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/mockservice.keytab b/src/mongo/gotools/test/legacy28/jstests/libs/mockservice.keytab
deleted file mode 100644
index 3529d5fcbc6..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/mockservice.keytab
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/mockuser.keytab b/src/mongo/gotools/test/legacy28/jstests/libs/mockuser.keytab
deleted file mode 100644
index 35fd2ff06e7..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/mockuser.keytab
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/not_yet_valid.pem b/src/mongo/gotools/test/legacy28/jstests/libs/not_yet_valid.pem
deleted file mode 100644
index 7c021c0becd..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/not_yet_valid.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDhTCCAm2gAwIBAgIBETANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMjAwNzE3MTYwMDAwWhcNMjUwNzE3MTYwMDAwWjBzMRYwFAYDVQQD
-Ew1ub3RfeWV0X3ZhbGlkMQ8wDQYDVQQLEwZLZXJuZWwxEDAOBgNVBAoTB01vbmdv
-REIxFjAUBgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5ldyBZb3JrMQsw
-CQYDVQQGEwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM2gF+Fo
-CeBKVlPyDAaEA7cjK75CxnzQy+oqw1j/vcfe/CfKL9MvDDXauR/9v1RRlww5zlxQ
-XJJtcMJtxN1EpP21cHrHCpJ/fRsCdMfJdD9MO6gcnclEI0Odwy5YI/57rAgxEuDC
-7z4d+M6z7PLq8DIwvRuhAZVTszeyTsCCkwfTJ/pisD2Ace75pS37t/ttQp+kQ+Vl
-QrfccHYxrScQ9i0JqBfrTULDl6ST76aINOaFKWqrLLkRUvE6pEkL/iP6xXUSKOsm
-uyc0yb0PK5Y/IVdrzwWUkabWEM27RAMH+CAx2iobk6REj0fsGySBzT2CaETZPjck
-vn/LYKqr+CvYjc8CAwEAAaMjMCEwHwYDVR0RBBgwFoIJbG9jYWxob3N0ggkxMjcu
-MC4wLjEwDQYJKoZIhvcNAQEFBQADggEBADw37jpmhj/fgCZdF1NrDKLmWxb4hovQ
-Y9PRe6GsBOc1wH8Gbe4UkYAE41WUuT3xW9YpfCHLXxC7da6dhaBISWryX7n72abM
-xbfAghV3je5JAmC0E/OzQz8tTgENxJN/c4oqCQ9nVOOLjwWiim5kF0/NY8HCc/Sg
-OG9IdseRX72CavDaPxcqR9/5KKY/pxARMeyy3/D0FIB1Fwu5h9vjHEi5fGOqcizf
-S1KHfzAmTxVtjw6HWRGKmkPX0W0/lURWVkKRxvC8KkJIeKx3fl9U1PqCw0AVi5d/
-whYn4qHNFFp4OiVzXq3b5YoBy0dlHUePCIPT2GkGlV4NQKosZMJUkKo=
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEowIBAAKCAQEAzaAX4WgJ4EpWU/IMBoQDtyMrvkLGfNDL6irDWP+9x978J8ov
-0y8MNdq5H/2/VFGXDDnOXFBckm1wwm3E3USk/bVwescKkn99GwJ0x8l0P0w7qByd
-yUQjQ53DLlgj/nusCDES4MLvPh34zrPs8urwMjC9G6EBlVOzN7JOwIKTB9Mn+mKw
-PYBx7vmlLfu3+21Cn6RD5WVCt9xwdjGtJxD2LQmoF+tNQsOXpJPvpog05oUpaqss
-uRFS8TqkSQv+I/rFdRIo6ya7JzTJvQ8rlj8hV2vPBZSRptYQzbtEAwf4IDHaKhuT
-pESPR+wbJIHNPYJoRNk+NyS+f8tgqqv4K9iNzwIDAQABAoIBAFWTmjyyOuIArhrz
-snOHv7AZUBw32DmcADGtqG1Cyi4DrHe22t6ORwumwsMArP8fkbiB2lNrEovSRkp0
-uqjH5867E1vVuJ2tt1hlVkrLmbi6Nl3JwxU/aVm7r7566kgAGmGyYsPt/PmiKamF
-Ekkq49pPlHSKNol6My0r5UCTVzO6uwW7dAa4GOQRI7bM7PVlxRVVeNzPH3yOsTzk
-smrkRgf8HbjtY7m/EHG281gu14ZQRCqzLshO2BtWbkx9dMXnNU5dRRaZ8Pe8XN0Z
-umsStcX6So6VFAqlwknZTi1/sqyIuQLfE+S9DocVQkvKFUgKpFddK8Nmqc8xPCKt
-UwR9hEECgYEA9kZ5KmUbzxQrF8Kn9G18AbZ/Cf6rE9fhs/J8OGcuuJ9QTjPO7pxV
-T7lGrIOX3dVu3+iHrYXZUZv+UTOePWx+ghqJ8ML7RdVsxAWMqh+1J0eBJKIdc9mt
-0hGkLEyyBbAlfNmvw8JugTUeZH2gA+VK9HoMTAjD+LvH164rrktauKECgYEA1b6z
-lZypAbAqnuCndcetcgatdd/bYNH5WWTgdZHqInt3k94EsUEHFNMQUbO+FNkOJ4qJ
-Jp7xrqkOUX+MPrzV5XYVapamlht9gvUtyxGq7DYndlq4mIsN5kReH++lqONBnWoG
-ZlbxvadkvPo+bK003hsl+E4F8X7xUssGGLvygG8CgYEAm/yLJkUgVgsqOER86R6n
-mtYipQv/A/SK6tU9xOPl/d46mS3LderjRjnN/9rhyAo1zfCUb14GBeDONlSBd9pO
-Ts3MbQiy6sqBt67kJ6UpspVhwPhFu2k25YVy/PQfFec591hSMaXnJEOm2nOPdKg4
-z5y2STqMFfGqZHvXAvCLp8ECgYA8oVGTmNKf9fbBBny5/iAG/jnp+8vg1O7kGqdI
-8lD14wvyV8IA/a8iixRP+Kpsg31uXe+1ktR/dNjo6UNA8JPD+RDuITmzzqx1n1KU
-DbjsNBhRjD5cluUkcjQ43uOg2oXcPxz9nqAH6hm7OUjHzwH2FsFYg9lPvXB6ybg6
-/+Uz5QKBgBxvTtLsZ3Cvvb3qezn4DdpLjlsrT6HWaTGqwEx8NYVBTFX/lT8P04tv
-NqFuQsDJ4gw0AZF7HqF49qdpnHEJ8tdHgBc/xDLFUMuKjON4IZtr0/j407K6V530
-m4q3ziHOu/lORDcZTz/YUjEzT8r7Qiv7QusWncvIWEiLSCC2dvvb
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/parallelTester.js b/src/mongo/gotools/test/legacy28/jstests/libs/parallelTester.js
deleted file mode 100644
index 8c44d2df553..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/parallelTester.js
+++ /dev/null
@@ -1,259 +0,0 @@
-/**
- * The ParallelTester class is used to test more than one test concurrently
- */
-
-
-if ( typeof _threadInject != "undefined" ){
- //print( "fork() available!" );
-
- Thread = function(){
- this.init.apply( this, arguments );
- }
- _threadInject( Thread.prototype );
-
- ScopedThread = function() {
- this.init.apply( this, arguments );
- }
- ScopedThread.prototype = new Thread( function() {} );
- _scopedThreadInject( ScopedThread.prototype );
-
- fork = function() {
- var t = new Thread( function() {} );
- Thread.apply( t, arguments );
- return t;
- }
-
- // Helper class to generate a list of events which may be executed by a ParallelTester
- EventGenerator = function( me, collectionName, mean, host ) {
- this.mean = mean;
- if (host == undefined) host = db.getMongo().host;
- this.events = new Array( me, collectionName, host );
- }
-
- EventGenerator.prototype._add = function( action ) {
- this.events.push( [ Random.genExp( this.mean ), action ] );
- }
-
- EventGenerator.prototype.addInsert = function( obj ) {
- this._add( "t.insert( " + tojson( obj ) + " )" );
- }
-
- EventGenerator.prototype.addRemove = function( obj ) {
- this._add( "t.remove( " + tojson( obj ) + " )" );
- }
-
- EventGenerator.prototype.addUpdate = function( objOld, objNew ) {
- this._add( "t.update( " + tojson( objOld ) + ", " + tojson( objNew ) + " )" );
- }
-
- EventGenerator.prototype.addCheckCount = function( count, query, shouldPrint, checkQuery ) {
- query = query || {};
- shouldPrint = shouldPrint || false;
- checkQuery = checkQuery || false;
- var action = "assert.eq( " + count + ", t.count( " + tojson( query ) + " ) );"
- if ( checkQuery ) {
- action += " assert.eq( " + count + ", t.find( " + tojson( query ) + " ).toArray().length );"
- }
- if ( shouldPrint ) {
- action += " print( me + ' ' + " + count + " );";
- }
- this._add( action );
- }
-
- EventGenerator.prototype.getEvents = function() {
- return this.events;
- }
-
- EventGenerator.dispatch = function() {
- var args = argumentsToArray( arguments );
- var me = args.shift();
- var collectionName = args.shift();
- var host = args.shift();
- var m = new Mongo( host );
- var t = m.getDB( "test" )[ collectionName ];
- for( var i in args ) {
- sleep( args[ i ][ 0 ] );
- eval( args[ i ][ 1 ] );
- }
- }
-
- // Helper class for running tests in parallel. It assembles a set of tests
- // and then calls assert.parallelests to run them.
- ParallelTester = function() {
- assert.neq(db.getMongo().writeMode(), "legacy", "wrong shell write mode")
- this.params = new Array();
- }
-
- ParallelTester.prototype.add = function( fun, args ) {
- args = args || [];
- args.unshift( fun );
- this.params.push( args );
- }
-
- ParallelTester.prototype.run = function( msg, newScopes ) {
- newScopes = newScopes || false;
- assert.parallelTests( this.params, msg, newScopes );
- }
-
- // creates lists of tests from jstests dir in a format suitable for use by
- // ParallelTester.fileTester. The lists will be in random order.
- // n: number of lists to split these tests into
- ParallelTester.createJstestsLists = function( n ) {
- var params = new Array();
- for( var i = 0; i < n; ++i ) {
- params.push( [] );
- }
-
- var makeKeys = function( a ) {
- var ret = {};
- for( var i in a ) {
- ret[ a[ i ] ] = 1;
- }
- return ret;
- }
-
- // some tests can't run in parallel with most others
- var skipTests = makeKeys([ "dbadmin.js",
- "repair.js",
- "cursor8.js",
- "recstore.js",
- "extent.js",
- "indexb.js",
-
- // tests turn on profiling
- "profile1.js",
- "profile3.js",
- "profile4.js",
- "profile5.js",
-
- "mr_drop.js",
- "mr3.js",
- "indexh.js",
- "apitest_db.js",
- "evalb.js",
- "evald.js",
- "evalf.js",
- "killop.js",
- "run_program1.js",
- "notablescan.js",
- "drop2.js",
- "dropdb_race.js",
- "fsync2.js", // May be placed in serialTestsArr once SERVER-4243 is fixed.
- "bench_test1.js",
- "padding.js",
- "queryoptimizera.js",
- "loglong.js",// log might overflow before
- // this has a chance to see the message
- "connections_opened.js", // counts connections, globally
- "opcounters_write_cmd.js",
- "currentop.js", // SERVER-8673, plus rwlock yielding issues
- "set_param1.js", // changes global state
- "geo_update_btree2.js", // SERVER-11132 test disables table scans
- "update_setOnInsert.js", // SERVER-9982
- ] );
-
- var parallelFilesDir = "jstests/core";
-
- // some tests can't be run in parallel with each other
- var serialTestsArr = [ parallelFilesDir + "/fsync.js",
- parallelFilesDir + "/auth1.js",
-
- // These tests expect the profiler to be on or off at specific points
- // during the test run.
- parallelFilesDir + "/cursor6.js",
- parallelFilesDir + "/profile2.js",
- parallelFilesDir + "/updatee.js"
- ];
- var serialTests = makeKeys( serialTestsArr );
-
- // prefix the first thread with the serialTests
- // (which we will exclude from the rest of the threads below)
- params[ 0 ] = serialTestsArr;
- var files = listFiles( parallelFilesDir );
- files = Array.shuffle( files );
-
- var i = 0;
- files.forEach(
- function(x) {
- if ( ( /[\/\\]_/.test(x.name) ) ||
- ( ! /\.js$/.test(x.name) ) ||
- ( x.name.match(parallelFilesDir + "/(.*\.js)")[1] in skipTests ) || //
- ( x.name in serialTests )) {
- print(" >>>>>>>>>>>>>>> skipping " + x.name);
- return;
- }
- // add the test to run in one of the threads.
- params[ i % n ].push( x.name );
- ++i;
- }
- );
-
- // randomize ordering of the serialTests
- params[ 0 ] = Array.shuffle( params[ 0 ] );
-
- for( var i in params ) {
- params[ i ].unshift( i );
- }
-
- return params;
- }
-
- // runs a set of test files
- // first argument is an identifier for this tester, remaining arguments are file names
- ParallelTester.fileTester = function() {
- var args = argumentsToArray( arguments );
- var suite = args.shift();
- args.forEach(
- function( x ) {
- print(" S" + suite + " Test : " + x + " ...");
- var time = Date.timeFunc( function() { load(x); }, 1);
- print(" S" + suite + " Test : " + x + " " + time + "ms" );
- }
- );
- }
-
- // params: array of arrays, each element of which consists of a function followed
- // by zero or more arguments to that function. Each function and its arguments will
- // be called in a separate thread.
- // msg: failure message
- // newScopes: if true, each thread starts in a fresh scope
- assert.parallelTests = function( params, msg, newScopes ) {
- newScopes = newScopes || false;
- var wrapper = function( fun, argv ) {
- eval (
- "var z = function() {" +
- "var __parallelTests__fun = " + fun.toString() + ";" +
- "var __parallelTests__argv = " + tojson( argv ) + ";" +
- "var __parallelTests__passed = false;" +
- "try {" +
- "__parallelTests__fun.apply( 0, __parallelTests__argv );" +
- "__parallelTests__passed = true;" +
- "} catch ( e ) {" +
- "print('');" +
- "print( '********** Parallel Test FAILED: ' + tojson(e) );" +
- "print('');" +
- "}" +
- "return __parallelTests__passed;" +
- "}"
- );
- return z;
- }
- var runners = new Array();
- for( var i in params ) {
- var param = params[ i ];
- var test = param.shift();
- var t;
- if ( newScopes )
- t = new ScopedThread( wrapper( test, param ) );
- else
- t = new Thread( wrapper( test, param ) );
- runners.push( t );
- }
-
- runners.forEach( function( x ) { x.start(); } );
- var nFailed = 0;
- // v8 doesn't like it if we exit before all threads are joined (SERVER-529)
- runners.forEach( function( x ) { if( !x.returnData() ) { ++nFailed; } } );
- assert.eq( 0, nFailed, msg );
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/password_protected.pem b/src/mongo/gotools/test/legacy28/jstests/libs/password_protected.pem
deleted file mode 100644
index 25e47bc2402..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/password_protected.pem
+++ /dev/null
@@ -1,51 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDezCCAmOgAwIBAgIBCTANBgkqhkiG9w0BAQUFADB4MRswGQYDVQQDExJwYXNz
-d29yZF9wcm90ZWN0ZWQxDzANBgNVBAsTBktlcm5lbDEQMA4GA1UEChMHTW9uZ29E
-QjEWMBQGA1UEBxMNTmV3IFlvcmsgQ2l0eTERMA8GA1UECBMITmV3IFlvcmsxCzAJ
-BgNVBAYTAlVTMB4XDTE0MDcxNzE2MDAwMFoXDTIwMDcxNzE2MDAwMFoweDEbMBkG
-A1UEAxMScGFzc3dvcmRfcHJvdGVjdGVkMQ8wDQYDVQQLEwZLZXJuZWwxEDAOBgNV
-BAoTB01vbmdvREIxFjAUBgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5l
-dyBZb3JrMQswCQYDVQQGEwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBALT4r3Hcou2auIOHeihBSjk4bKQTVqI6r/stnkul359SRfKuzVA9gMQaRRDi
-MJoxczHJzS2FX+wElzBt2EUhfu3qpUJ4gJw7H4WjLx+mNnj/+6b4HUO4eRzH5hTE
-A+qgDH40qYjFDEjiARvybWo3IlDLeI/uFwlyUj5PZBUBc1LBBzNtCBfJ2MmHLhIx
-jzTFhkJZll673LL6BPHtJclXCazqKUZDLqObW4Ei6X4hdBOdC8v8Q6GMgC4BxLe0
-wsOpKYYeM3il4BtfiqDQB5ZPG0lgo1Y7OOyFHFXBA7oNkK8lykhdyH4iLt5L9mWo
-VKyZ79VqSODFuCqWo8n8kUTgA/0CAwEAAaMQMA4wDAYDVR0TBAUwAwEB/zANBgkq
-hkiG9w0BAQUFAAOCAQEAntxk8a0HcuPG8Fdjckp6WL+HKQQnUKdvSk06rPF0SHpN
-Ma4eZcaumROdtAYnPKvtbpq3DRCQlj59dlWPksEcYiXqf56TgcyAz5K5g5z9RbFi
-ArvAXJNRcDz1080NWGBUTPYyiKWR3PhtlYhJZ4r7fQIWLv4mifXHViw2roXXhsAY
-ubk9HOtrqE7x6NJXgR24aybxqI6TfAKfM+LJNtMwMFrPC+GHnhqMOs/jHJS38NIB
-TrKA63TdpYUroVu23/tGLQaJz352qgF4Di91RkUfnI528goj57pX78H8KRsSNVvs
-KHVNrxtZIez+pxxjBPnyfCH81swkiAPG9fdX+Hcu5A==
------END CERTIFICATE-----
------BEGIN ENCRYPTED PRIVATE KEY-----
-MIIFHzBJBgkqhkiG9w0BBQ0wPDAbBgkqhkiG9w0BBQwwDgQI6y3b7IxcANECAggA
-MB0GCWCGSAFlAwQBAgQQrTslOKC0GZZwq48v7niXYQSCBNBNKsTN7fyw60/EEDH0
-JUgxL83Wfb7pNP97/lV5qiclY1mwcKz44kXQaesFTzhiwzAMOpbI/ijEtsNU25wV
-wtTgjAC3Em/+5/ygrmAu7hgacIRssspovmsgw029E9iOkyBd1VIrDVMi7HLHf0iU
-2Zq18QF20az2pXNMDipmVJkpc9NvjSdqka5+375pJuisspEWCDBd11K10jzCWqB5
-q3Rm1IIeq+mql6KT1rJcUoeE0facDc9GDYBiF/MfIKQ3FrZy/psqheCfL1UDUMyc
-mnm9GJO5+bCuHkg8ni0Zo5XXsf2VEFt0yt6lSucoOP43flucQaHnFKcn+5DHjDXv
-S6Eb5wEG9qWtzwWy/9DfRbkj6FxUgT3SFgizo/uLmdqFCJCnYkHUD1OuYCDmoIXP
-VTinwgK4lO/vrGfoPQrgJmdlnwHRWYjlB8edMCbmItaj2Esh3FBS12y976+UT0Sk
-8n5HsZAEYScDyNArVhrLUZRgF+r+bgZ28TDFO0MISPCAbZjhvq6lygS3dEmdTUW3
-cFDe1deNknWxZcv4UpJW4Nq6ckxwXBfTB1VFzjp7/vXrK/Sd9t8zi6vKTO8OTqc4
-KrlLXBgz0ouP/cxhYDykUrKXE2Eb0TjeAN1txZWo3fIFzXUvDZCphQEZNUqsFUxH
-86V2lwqVzKrFq6UpTgKrfTw/2ePQn9dQgd7iFWDTWjRkbzA5aAgTSVP8xQRoIOeQ
-epXtP9202kEz3h28SZYK7QBOTTX9xNmV/dzDTsi9nXZ6KtsP/aGFE5hh95jvESx/
-wlOBAPW4HR33rSYalvQPE7RjjLZHOKuYIllUBGlTOfgdA+WUXR3KxiLNPdslPBPV
-+O6aDyerhWoQwE7TFwhP/FpxL/46hOu4iq4fgqfjddBTq8z5jG3c3zzogDjoDzBF
-LEQDcbenUCGbEQ7zxXsXtr3QinJ+aAejDO38hp1h9ROb5LF53/9H2j/16nby/jPX
-7kp2weRSKGJ0B6AVuS9pTsQz4+E3icsIgBWSU6qtcUz2GO2QxnFuvT9LEVnyMNN2
-IKMIEKi2FsUMddHGXLULTANlzUMocdHrd5j81eqcFPhMOFOiHpgwiwxqZyBYOLRl
-Fe7x5dLVWoLgjJagZj8uYnJbExDsfFLjEx8p4Z+rejJIC5CqZLbz9sDgCtIL+92k
-+x4mlT1Rfmz9pU+RQqik83nFFRBGWxeW9iWWEgocWtmezvnK6E241v78zkqxNkvF
-JJo7BsBw7DiEHEfLhBZYuqV2q6+kwqgYrzyGIwAJkBGrkYfalVzgR+3/uN04h005
-M3jQRpSkDVGYr3JKEAlh3Sc+JD9VPbu6/RXNwy5mY67UCgWGaFwRqJE3DC9aKfNC
-OET8m8+8oQgFzhw3pNpENsgwR+Sx3K4q0GI3YwxT02pieBFNQaw53O3B3TtoCjkk
-UsuyIWqcLonwo4I3z0kjU3gEFN+0m4E4/A1DNt0J3rsKN+toCk1FqbxQg9xTZzXu
-hYmA3HMMwugzXmCanqBhmMsniPg+dRxCIfiHZhLuEpjKxZWcMWcW4M6l/wbM+LbE
-oDcTuI9ezfPTZ3xA8hNIHBT3MhuI7EJQnvKKvJDJeyX5sAtmSsSFqhEr8QZD8RgV
-5H9eOyUdfcWxLlstcq982V0oGg==
------END ENCRYPTED PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/server.pem b/src/mongo/gotools/test/legacy28/jstests/libs/server.pem
deleted file mode 100644
index df2b49163d6..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/server.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDfjCCAmagAwIBAgIBBzANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjBsMQ8wDQYDVQQD
-EwZzZXJ2ZXIxDzANBgNVBAsTBktlcm5lbDEQMA4GA1UEChMHTW9uZ29EQjEWMBQG
-A1UEBxMNTmV3IFlvcmsgQ2l0eTERMA8GA1UECBMITmV3IFlvcmsxCzAJBgNVBAYT
-AlVTMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp76KJeDczBqjSPJj
-5f8DHdtrWpQDK9AWNDlslWpi6+pL8hMqwbX0D7hC2r3kAgccMyFoNIudPqIXfXVd
-1LOh6vyY+jveRvqjKW/UZVzZeiL4Gy4bhke6R8JRC3O5aMKIAbaiQUAI1Nd8LxIt
-LGvH+ia/DFza1whgB8ym/uzVQB6igOifJ1qHWJbTtIhDKaW8gvjOhv5R3jzjfLEb
-R9r5Q0ZyE0lrO27kTkqgBnHKPmu54GSzU/r0HM3B+Sc/6UN+xNhNbuR+LZ+EvJHm
-r4de8jhW8wivmjTIvte33jlLibQ5nYIHrlpDLEwlzvDGaIio+OfWcgs2WuPk98MU
-tht0IQIDAQABoyMwITAfBgNVHREEGDAWgglsb2NhbGhvc3SCCTEyNy4wLjAuMTAN
-BgkqhkiG9w0BAQUFAAOCAQEANoYxvVFsIol09BQA0fwryAye/Z4dYItvKhmwB9VS
-t99DsmJcyx0P5meB3Ed8SnwkD0NGCm5TkUY/YLacPP9uJ4SkbPkNZ1fRISyShCCn
-SGgQUJWHbCbcIEj+vssFb91c5RFJbvnenDkQokRvD2VJWspwioeLzuwtARUoMH3Y
-qg0k0Mn7Bx1bW1Y6xQJHeVlnZtzxfeueoFO55ZRkZ0ceAD/q7q1ohTXi0vMydYgu
-1CB6VkDuibGlv56NdjbttPJm2iQoPaez8tZGpBo76N/Z1ydan0ow2pVjDXVOR84Y
-2HSZgbHOGBiycNw2W3vfw7uK0OmiPRTFpJCmewDjYwZ/6w==
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEAp76KJeDczBqjSPJj5f8DHdtrWpQDK9AWNDlslWpi6+pL8hMq
-wbX0D7hC2r3kAgccMyFoNIudPqIXfXVd1LOh6vyY+jveRvqjKW/UZVzZeiL4Gy4b
-hke6R8JRC3O5aMKIAbaiQUAI1Nd8LxItLGvH+ia/DFza1whgB8ym/uzVQB6igOif
-J1qHWJbTtIhDKaW8gvjOhv5R3jzjfLEbR9r5Q0ZyE0lrO27kTkqgBnHKPmu54GSz
-U/r0HM3B+Sc/6UN+xNhNbuR+LZ+EvJHmr4de8jhW8wivmjTIvte33jlLibQ5nYIH
-rlpDLEwlzvDGaIio+OfWcgs2WuPk98MUtht0IQIDAQABAoIBACgi1ilECXCouwMc
-RDzm7Jb7Rk+Q9MVJ79YlG08Q+oRaNjvAzE03PSN5wj1WjDTUALJXPvi7oy82V4qE
-R6Q6Kvbv46aUJpYzKFEk2dw7ACpSLa1LNfjGNtMusnecA/QF/8bxLReRu8s5mBQn
-NDnZvCqllLbfjNlAvsF+/UIn5sqFZpAZPMtPwkTAeh5ge8H9JvrG8y8aXsiFGAhV
-Z7tMZyn8wPCUrRi14NLvVB4hxM66G/tuTp8r9AmeTU+PV+qbCnKXd+v0IS52hvX9
-z75OPfAc66nm4bbPCapb6Yx7WaewPXXU0HDxeaT0BeQ/YfoNa5OT+ZOX1KndSfHa
-VhtmEsECgYEA3m86yYMsNOo+dkhqctNVRw2N+8gTO28GmWxNV9AC+fy1epW9+FNR
-yTQXpBkRrR7qrd5mF7WBc7vAIiSfVs021RMofzn5B1x7jzkH34VZtlviNdE3TZhx
-lPinqo0Yy3UEksgsCBJFIofuCmeTLk4ZtqoiZnXr35RYibaZoQdUT4kCgYEAwQ6Y
-xsKFYFks1+HYl29kR0qUkXFlVbKOhQIlj/dPm0JjZ0xYkUxmzoXD68HrOWgz7hc2
-hZaQTgWf+8cRaZNfh7oL+Iglczc2UXuwuUYguYssD/G6/ZPY15PhItgCghaU5Ewy
-hMwIJ81NENY2EQTgk/Z1KZitXdVJfHl/IPMQgdkCgYASdqkqkPjaa5dDuj8byO8L
-NtTSUYlHJbAmjBbfcyTMG230/vkF4+SmDuznci1FcYuJYyyWSzqzoKISM3gGfIJQ
-rYZvCSDiu4qGGPXOWANaX8YnMXalukGzW/CO96dXPB9lD7iX8uxKMX5Q3sgYz+LS
-hszUNHWf2XB//ehCtZkKAQKBgQCxL2luepeZHx82H9T+38BkYgHLHw0HQzLkxlyd
-LjlE4QCEjSB4cmukvkZbuYXfEVEgAvQKVW6p/SWhGkpT4Gt8EXftKV9dyF21GVXQ
-JZnhUOcm1xBsrWYGLXYi2agrpvgONBTlprERfq5tdnz2z8giZL+RZswu45Nnh8bz
-AcKzuQKBgQCGOQvKvNL5XKKmws/KRkfJbXgsyRT2ubO6pVL9jGQG5wntkeIRaEpT
-oxFtWMdPx3b3cxtgSP2ojllEiISk87SFIN1zEhHZy/JpTF0GlU1qg3VIaA78M1p2
-ZdpUsuqJzYmc3dDbQMepIaqdW4xMoTtZFyenUJyoezz6eWy/NlZ/XQ==
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/servers.js b/src/mongo/gotools/test/legacy28/jstests/libs/servers.js
deleted file mode 100755
index b752b820eae..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/servers.js
+++ /dev/null
@@ -1,961 +0,0 @@
-// Wrap whole file in a function to avoid polluting the global namespace
-(function() {
-
-_parsePath = function() {
- var dbpath = "";
- for( var i = 0; i < arguments.length; ++i )
- if ( arguments[ i ] == "--dbpath" )
- dbpath = arguments[ i + 1 ];
-
- if ( dbpath == "" )
- throw Error("No dbpath specified");
-
- return dbpath;
-}
-
-_parsePort = function() {
- var port = "";
- for( var i = 0; i < arguments.length; ++i )
- if ( arguments[ i ] == "--port" )
- port = arguments[ i + 1 ];
-
- if ( port == "" )
- throw Error("No port specified");
- return port;
-}
-
-connectionURLTheSame = function( a , b ){
-
- if ( a == b )
- return true;
-
- if ( ! a || ! b )
- return false;
-
- if( a.host ) return connectionURLTheSame( a.host, b )
- if( b.host ) return connectionURLTheSame( a, b.host )
-
- if( a.name ) return connectionURLTheSame( a.name, b )
- if( b.name ) return connectionURLTheSame( a, b.name )
-
- if( a.indexOf( "/" ) < 0 && b.indexOf( "/" ) < 0 ){
- a = a.split( ":" )
- b = b.split( ":" )
-
- if( a.length != b.length ) return false
-
- if( a.length == 2 && a[1] != b[1] ) return false
-
- if( a[0] == "localhost" || a[0] == "127.0.0.1" ) a[0] = getHostName()
- if( b[0] == "localhost" || b[0] == "127.0.0.1" ) b[0] = getHostName()
-
- return a[0] == b[0]
- }
- else {
- var a0 = a.split( "/" )[0]
- var b0 = b.split( "/" )[0]
- return a0 == b0
- }
-}
-
-assert( connectionURLTheSame( "foo" , "foo" ) )
-assert( ! connectionURLTheSame( "foo" , "bar" ) )
-
-assert( connectionURLTheSame( "foo/a,b" , "foo/b,a" ) )
-assert( ! connectionURLTheSame( "foo/a,b" , "bar/a,b" ) )
-
-createMongoArgs = function( binaryName , args ){
- var fullArgs = [ binaryName ];
-
- if ( args.length == 1 && isObject( args[0] ) ){
- var o = args[0];
- for ( var k in o ){
- if ( o.hasOwnProperty(k) ){
- if ( k == "v" && isNumber( o[k] ) ){
- var n = o[k];
- if ( n > 0 ){
- if ( n > 10 ) n = 10;
- var temp = "-";
- while ( n-- > 0 ) temp += "v";
- fullArgs.push( temp );
- }
- }
- else {
- fullArgs.push( "--" + k );
- if ( o[k] != "" )
- fullArgs.push( "" + o[k] );
- }
- }
- }
- }
- else {
- for ( var i=0; i<args.length; i++ )
- fullArgs.push( args[i] )
- }
-
- return fullArgs;
-}
-
-
-MongoRunner = function(){}
-
-MongoRunner.dataDir = "/data/db"
-MongoRunner.dataPath = "/data/db/"
-MongoRunner.usedPortMap = {}
-
-MongoRunner.VersionSub = function(regex, version) {
- this.regex = regex;
- this.version = version;
-}
-
-// These patterns allow substituting the binary versions used for each
-// version string to support the dev/stable MongoDB release cycle.
-MongoRunner.binVersionSubs = [ new MongoRunner.VersionSub(/^latest$/, ""),
- new MongoRunner.VersionSub(/^oldest-supported$/, "1.8"),
- // To-be-updated when 2.8 becomes available
- new MongoRunner.VersionSub(/^last-stable$/, "2.6"),
- // Latest unstable and next stable are effectively the
- // same release
- new MongoRunner.VersionSub(/^2\.7(\..*){0,1}/, ""),
- new MongoRunner.VersionSub(/^2\.8(\..*){0,1}/, "") ];
-
-MongoRunner.getBinVersionFor = function(version) {
-
- // If this is a version iterator, iterate the version via toString()
- if (version instanceof MongoRunner.versionIterator.iterator) {
- version = version.toString();
- }
-
- // No version set means we use no suffix, this is *different* from "latest"
- // since latest may be mapped to a different version.
- if (version == null) version = "";
- version = version.trim();
- if (version === "") return "";
-
- // See if this version is affected by version substitutions
- for (var i = 0; i < MongoRunner.binVersionSubs.length; i++) {
- var sub = MongoRunner.binVersionSubs[i];
- if (sub.regex.test(version)) {
- version = sub.version;
- }
- }
-
- return version;
-}
-
-MongoRunner.areBinVersionsTheSame = function(versionA, versionB) {
-
- versionA = MongoRunner.getBinVersionFor(versionA);
- versionB = MongoRunner.getBinVersionFor(versionB);
-
- if (versionA === "" || versionB === "") {
- return versionA === versionB;
- }
-
- return versionA.startsWith(versionB) ||
- versionB.startsWith(versionA);
-}
-
-MongoRunner.logicalOptions = { runId : true,
- pathOpts : true,
- remember : true,
- noRemember : true,
- appendOptions : true,
- restart : true,
- noCleanData : true,
- cleanData : true,
- startClean : true,
- forceLock : true,
- useLogFiles : true,
- logFile : true,
- useHostName : true,
- useHostname : true,
- noReplSet : true,
- forgetPort : true,
- arbiter : true,
- noJournalPrealloc : true,
- noJournal : true,
- binVersion : true,
- waitForConnect : true }
-
-MongoRunner.toRealPath = function( path, pathOpts ){
-
- // Replace all $pathOptions with actual values
- pathOpts = pathOpts || {}
- path = path.replace( /\$dataPath/g, MongoRunner.dataPath )
- path = path.replace( /\$dataDir/g, MongoRunner.dataDir )
- for( key in pathOpts ){
- path = path.replace( RegExp( "\\$" + RegExp.escape(key), "g" ), pathOpts[ key ] )
- }
-
- // Relative path
- // Detect Unix and Windows absolute paths
- // as well as Windows drive letters
- // Also captures Windows UNC paths
-
- if( ! path.match( /^(\/|\\|[A-Za-z]:)/ ) ){
- if( path != "" && ! path.endsWith( "/" ) )
- path += "/"
-
- path = MongoRunner.dataPath + path
- }
-
- return path
-
-}
-
-MongoRunner.toRealDir = function( path, pathOpts ){
-
- path = MongoRunner.toRealPath( path, pathOpts )
-
- if( path.endsWith( "/" ) )
- path = path.substring( 0, path.length - 1 )
-
- return path
-}
-
-MongoRunner.toRealFile = MongoRunner.toRealDir
-
-MongoRunner.nextOpenPort = function(){
-
- var i = 0;
- while( MongoRunner.usedPortMap[ "" + ( 27000 + i ) ] ) i++;
- MongoRunner.usedPortMap[ "" + ( 27000 + i ) ] = true
-
- return 27000 + i
-
-}
-
-/**
- * Returns an iterator object which yields successive versions on toString(), starting from a
- * random initial position, from an array of versions.
- *
- * If passed a single version string or an already-existing version iterator, just returns the
- * object itself, since it will yield correctly on toString()
- *
- * @param {Array.<String>}|{String}|{versionIterator}
- */
-MongoRunner.versionIterator = function( arr, isRandom ){
-
- // If this isn't an array of versions, or is already an iterator, just use it
- if( typeof arr == "string" ) return arr
- if( arr.isVersionIterator ) return arr
-
- if (isRandom == undefined) isRandom = false;
-
- // Starting pos
- var i = isRandom ? parseInt( Random.rand() * arr.length ) : 0;
-
- return new MongoRunner.versionIterator.iterator(i, arr);
-}
-
-MongoRunner.versionIterator.iterator = function(i, arr) {
-
- this.toString = function() {
- i = ( i + 1 ) % arr.length
- print( "Returning next version : " + i +
- " (" + arr[i] + ") from " + tojson( arr ) + "..." );
- return arr[ i ]
- }
-
- this.isVersionIterator = true;
-
-}
-
-/**
- * Converts the args object by pairing all keys with their value and appending
- * dash-dash (--) to the keys. The only exception to this rule are keys that
- * are defined in MongoRunner.logicalOptions, of which they will be ignored.
- *
- * @param {string} binaryName
- * @param {Object} args
- *
- * @return {Array.<String>} an array of parameter strings that can be passed
- * to the binary.
- */
-MongoRunner.arrOptions = function( binaryName , args ){
-
- var fullArgs = [ "" ]
-
- // isObject returns true even if "args" is an array, so the else branch of this statement is
- // dead code. See SERVER-14220.
- if ( isObject( args ) || ( args.length == 1 && isObject( args[0] ) ) ){
-
- var o = isObject( args ) ? args : args[0]
-
- // If we've specified a particular binary version, use that
- if (o.binVersion && o.binVersion != "") {
- binaryName += "-" + o.binVersion;
- }
-
- // Manage legacy options
- var isValidOptionForBinary = function( option, value ){
-
- if( ! o.binVersion ) return true
-
- // Version 1.x options
- if( o.binVersion.startsWith( "1." ) ){
-
- return [ "nopreallocj" ].indexOf( option ) < 0
- }
-
- return true
- }
-
- for ( var k in o ){
-
- // Make sure our logical option should be added to the array of options
- if( ! o.hasOwnProperty( k ) ||
- k in MongoRunner.logicalOptions ||
- ! isValidOptionForBinary( k, o[k] ) ) continue
-
- if ( ( k == "v" || k == "verbose" ) && isNumber( o[k] ) ){
- var n = o[k]
- if ( n > 0 ){
- if ( n > 10 ) n = 10
- var temp = "-"
- while ( n-- > 0 ) temp += "v"
- fullArgs.push( temp )
- }
- }
- else {
- if( o[k] == undefined || o[k] == null ) continue
- fullArgs.push( "--" + k )
- if ( o[k] != "" )
- fullArgs.push( "" + o[k] )
- }
- }
- }
- else {
- for ( var i=0; i<args.length; i++ )
- fullArgs.push( args[i] )
- }
-
- fullArgs[ 0 ] = binaryName
- return fullArgs
-}
-
-MongoRunner.arrToOpts = function( arr ){
-
- var opts = {}
- for( var i = 1; i < arr.length; i++ ){
- if( arr[i].startsWith( "-" ) ){
- var opt = arr[i].replace( /^-/, "" ).replace( /^-/, "" )
-
- if( arr.length > i + 1 && ! arr[ i + 1 ].startsWith( "-" ) ){
- opts[ opt ] = arr[ i + 1 ]
- i++
- }
- else{
- opts[ opt ] = ""
- }
-
- if( opt.replace( /v/g, "" ) == "" ){
- opts[ "verbose" ] = opt.length
- }
- }
- }
-
- return opts
-}
-
-MongoRunner.savedOptions = {}
-
-MongoRunner.mongoOptions = function( opts ){
-
- // Don't remember waitForConnect
- var waitForConnect = opts.waitForConnect;
- delete opts.waitForConnect;
-
- // If we're a mongo object
- if( opts.getDB ){
- opts = { restart : opts.runId }
- }
-
- // Initialize and create a copy of the opts
- opts = Object.merge( opts || {}, {} )
-
- if( ! opts.restart ) opts.restart = false
-
- // RunId can come from a number of places
- // If restart is passed as an old connection
- if( opts.restart && opts.restart.getDB ){
- opts.runId = opts.restart.runId
- opts.restart = true
- }
- // If it's the runId itself
- else if( isObject( opts.restart ) ){
- opts.runId = opts.restart
- opts.restart = true
- }
-
- if( isObject( opts.remember ) ){
- opts.runId = opts.remember
- opts.remember = true
- }
- else if( opts.remember == undefined ){
- // Remember by default if we're restarting
- opts.remember = opts.restart
- }
-
- // If we passed in restart : <conn> or runId : <conn>
- if( isObject( opts.runId ) && opts.runId.runId ) opts.runId = opts.runId.runId
-
- if( opts.restart && opts.remember ) opts = Object.merge( MongoRunner.savedOptions[ opts.runId ], opts )
-
- // Create a new runId
- opts.runId = opts.runId || ObjectId()
-
- // Save the port if required
- if( ! opts.forgetPort ) opts.port = opts.port || MongoRunner.nextOpenPort()
-
- var shouldRemember = ( ! opts.restart && ! opts.noRemember ) || ( opts.restart && opts.appendOptions )
-
- // Normalize and get the binary version to use
- opts.binVersion = MongoRunner.getBinVersionFor(opts.binVersion);
-
- if ( shouldRemember ){
- MongoRunner.savedOptions[ opts.runId ] = Object.merge( opts, {} )
- }
-
- // Default for waitForConnect is true
- opts.waitForConnect = (waitForConnect == undefined || waitForConnect == null) ?
- true : waitForConnect;
-
- if( jsTestOptions().useSSL ) {
- if (!opts.sslMode) opts.sslMode = "requireSSL";
- if (!opts.sslPEMKeyFile) opts.sslPEMKeyFile = "jstests/libs/server.pem";
- if (!opts.sslCAFile) opts.sslCAFile = "jstests/libs/ca.pem";
-
- // Needed for jstest/ssl/upgrade_to_ssl.js
- opts.sslWeakCertificateValidation = "";
-
- // Needed for jstest/ssl/ssl_hostname_validation.js
- opts.sslAllowInvalidHostnames = "";
- }
-
- if ( jsTestOptions().useX509 && !opts.clusterAuthMode ) {
- opts.clusterAuthMode = "x509";
- }
-
- opts.port = opts.port || MongoRunner.nextOpenPort()
- MongoRunner.usedPortMap[ "" + parseInt( opts.port ) ] = true
-
- opts.pathOpts = Object.merge( opts.pathOpts || {}, { port : "" + opts.port, runId : "" + opts.runId } )
-
- return opts
-}
-
-/**
- * @option {object} opts
- *
- * {
- * dbpath {string}
- * useLogFiles {boolean}: use with logFile option.
- * logFile {string}: path to the log file. If not specified and useLogFiles
- * is true, automatically creates a log file inside dbpath.
- * noJournalPrealloc {boolean}
- * noJournal {boolean}
- * keyFile
- * replSet
- * oplogSize
- * }
- */
-MongoRunner.mongodOptions = function( opts ){
-
- opts = MongoRunner.mongoOptions( opts )
-
- opts.dbpath = MongoRunner.toRealDir( opts.dbpath || "$dataDir/mongod-$port",
- opts.pathOpts )
-
- opts.pathOpts = Object.merge( opts.pathOpts, { dbpath : opts.dbpath } )
-
- if( ! opts.logFile && opts.useLogFiles ){
- opts.logFile = opts.dbpath + "/mongod.log"
- }
- else if( opts.logFile ){
- opts.logFile = MongoRunner.toRealFile( opts.logFile, opts.pathOpts )
- }
-
- if ( opts.logFile !== undefined ) {
- opts.logpath = opts.logFile;
- }
-
- if( jsTestOptions().noJournalPrealloc || opts.noJournalPrealloc )
- opts.nopreallocj = ""
-
- if( jsTestOptions().noJournal || opts.noJournal )
- opts.nojournal = ""
-
- if( jsTestOptions().keyFile && !opts.keyFile) {
- opts.keyFile = jsTestOptions().keyFile
- }
-
- if( jsTestOptions().useSSL ) {
- if (!opts.sslMode) opts.sslMode = "requireSSL";
- if (!opts.sslPEMKeyFile) opts.sslPEMKeyFile = "jstests/libs/server.pem";
- if (!opts.sslCAFile) opts.sslCAFile = "jstests/libs/ca.pem";
-
- // Needed for jstest/ssl/upgrade_to_ssl.js
- opts.sslWeakCertificateValidation = "";
-
- // Needed for jstest/ssl/ssl_hostname_validation.js
- opts.sslAllowInvalidHostnames = "";
- }
-
- if ( jsTestOptions().useX509 && !opts.clusterAuthMode ) {
- opts.clusterAuthMode = "x509";
- }
-
- if( opts.noReplSet ) opts.replSet = null
- if( opts.arbiter ) opts.oplogSize = 1
-
- return opts
-}
-
-MongoRunner.mongosOptions = function( opts ){
-
- opts = MongoRunner.mongoOptions( opts )
-
- // Normalize configdb option to be host string if currently a host
- if( opts.configdb && opts.configdb.getDB ){
- opts.configdb = opts.configdb.host
- }
-
- opts.pathOpts = Object.merge( opts.pathOpts,
- { configdb : opts.configdb.replace( /:|,/g, "-" ) } )
-
- if( ! opts.logFile && opts.useLogFiles ){
- opts.logFile = MongoRunner.toRealFile( "$dataDir/mongos-$configdb-$port.log",
- opts.pathOpts )
- }
- else if( opts.logFile ){
- opts.logFile = MongoRunner.toRealFile( opts.logFile, opts.pathOpts )
- }
-
- if ( opts.logFile !== undefined ){
- opts.logpath = opts.logFile;
- }
-
- if( jsTestOptions().keyFile && !opts.keyFile) {
- opts.keyFile = jsTestOptions().keyFile
- }
-
- return opts
-}
-
-/**
- * Starts a mongod instance.
- *
- * @param {Object} opts
- *
- * {
- * useHostName {boolean}: Uses hostname of machine if true
- * forceLock {boolean}: Deletes the lock file if set to true
- * dbpath {string}: location of db files
- * cleanData {boolean}: Removes all files in dbpath if true
- * startClean {boolean}: same as cleanData
- * noCleanData {boolean}: Do not clean files (cleanData takes priority)
- *
- * @see MongoRunner.mongodOptions for other options
- * }
- *
- * @return {Mongo} connection object to the started mongod instance.
- *
- * @see MongoRunner.arrOptions
- */
-MongoRunner.runMongod = function( opts ){
-
- opts = opts || {}
- var useHostName = false;
- var runId = null;
- var waitForConnect = true;
- var fullOptions = opts;
-
- if( isObject( opts ) ) {
-
- opts = MongoRunner.mongodOptions( opts );
- fullOptions = opts;
-
- useHostName = opts.useHostName || opts.useHostname;
- runId = opts.runId;
- waitForConnect = opts.waitForConnect;
-
- if( opts.forceLock ) removeFile( opts.dbpath + "/mongod.lock" )
- if( ( opts.cleanData || opts.startClean ) || ( ! opts.restart && ! opts.noCleanData ) ){
- print( "Resetting db path '" + opts.dbpath + "'" )
- resetDbpath( opts.dbpath )
- }
-
- opts = MongoRunner.arrOptions( "mongod", opts )
- }
-
- var mongod = MongoRunner.startWithArgs(opts, waitForConnect);
- if (!waitForConnect) mongos = {};
- if (!mongod) return null;
-
- mongod.commandLine = MongoRunner.arrToOpts( opts )
- mongod.name = (useHostName ? getHostName() : "localhost") + ":" + mongod.commandLine.port
- mongod.host = mongod.name
- mongod.port = parseInt( mongod.commandLine.port )
- mongod.runId = runId || ObjectId()
- mongod.savedOptions = MongoRunner.savedOptions[ mongod.runId ];
- mongod.fullOptions = fullOptions;
-
- return mongod
-}
-
-MongoRunner.runMongos = function( opts ){
-
- opts = opts || {}
- var useHostName = false;
- var runId = null;
- var waitForConnect = true;
- var fullOptions = opts;
-
- if( isObject( opts ) ) {
-
- opts = MongoRunner.mongosOptions( opts );
- fullOptions = opts;
-
- useHostName = opts.useHostName || opts.useHostname;
- runId = opts.runId;
- waitForConnect = opts.waitForConnect;
-
- opts = MongoRunner.arrOptions( "mongos", opts )
- }
-
- var mongos = MongoRunner.startWithArgs(opts, waitForConnect);
- if (!waitForConnect) mongos = {};
- if (!mongos) return null;
-
- mongos.commandLine = MongoRunner.arrToOpts( opts )
- mongos.name = (useHostName ? getHostName() : "localhost") + ":" + mongos.commandLine.port
- mongos.host = mongos.name
- mongos.port = parseInt( mongos.commandLine.port )
- mongos.runId = runId || ObjectId()
- mongos.savedOptions = MongoRunner.savedOptions[ mongos.runId ]
- mongos.fullOptions = fullOptions;
-
- return mongos
-}
-
-/**
- * Kills a mongod process.
- *
- * @param {number} port the port of the process to kill
- * @param {number} signal The signal number to use for killing
- * @param {Object} opts Additional options. Format:
- * {
- * auth: {
- * user {string}: admin user name
- * pwd {string}: admin password
- * }
- * }
- *
- * Note: The auth option is required in a authenticated mongod running in Windows since
- * it uses the shutdown command, which requires admin credentials.
- */
-MongoRunner.stopMongod = function( port, signal, opts ){
-
- if( ! port ) {
- print( "Cannot stop mongo process " + port )
- return
- }
-
- signal = signal || 15
-
- if( port.port )
- port = parseInt( port.port )
-
- if( port instanceof ObjectId ){
- var opts = MongoRunner.savedOptions( port )
- if( opts ) port = parseInt( opts.port )
- }
-
- var exitCode = stopMongod( parseInt( port ), parseInt( signal ), opts )
-
- delete MongoRunner.usedPortMap[ "" + parseInt( port ) ]
-
- return exitCode
-}
-
-MongoRunner.stopMongos = MongoRunner.stopMongod
-
-MongoRunner.isStopped = function( port ){
-
- if( ! port ) {
- print( "Cannot detect if process " + port + " is stopped." )
- return
- }
-
- if( port.port )
- port = parseInt( port.port )
-
- if( port instanceof ObjectId ){
- var opts = MongoRunner.savedOptions( port )
- if( opts ) port = parseInt( opts.port )
- }
-
- return MongoRunner.usedPortMap[ "" + parseInt( port ) ] ? false : true
-}
-
-/**
- * Starts an instance of the specified mongo tool
- *
- * @param {String} binaryName The name of the tool to run
- * @param {Object} opts options to pass to the tool
- * {
- * binVersion {string}: version of tool to run
- * }
- *
- * @see MongoRunner.arrOptions
- */
-MongoRunner.runMongoTool = function( binaryName, opts ){
-
- var opts = opts || {}
- // Normalize and get the binary version to use
- opts.binVersion = MongoRunner.getBinVersionFor(opts.binVersion);
-
- var argsArray = MongoRunner.arrOptions(binaryName, opts)
-
- return runMongoProgram.apply(null, argsArray);
-
-}
-
-// Given a test name figures out a directory for that test to use for dump files and makes sure
-// that directory exists and is empty.
-MongoRunner.getAndPrepareDumpDirectory = function(testName) {
- var dir = MongoRunner.dataPath + testName + "_external/";
- resetDbpath(dir);
- return dir;
-}
-
-// Start a mongod instance and return a 'Mongo' object connected to it.
-// This function's arguments are passed as command line arguments to mongod.
-// The specified 'dbpath' is cleared if it exists, created if not.
-// var conn = startMongodEmpty("--port", 30000, "--dbpath", "asdf");
-startMongodEmpty = function () {
- var args = createMongoArgs("mongod", arguments);
-
- var dbpath = _parsePath.apply(null, args);
- resetDbpath(dbpath);
-
- return startMongoProgram.apply(null, args);
-}
-startMongod = function () {
- print("startMongod WARNING DELETES DATA DIRECTORY THIS IS FOR TESTING ONLY");
- return startMongodEmpty.apply(null, arguments);
-}
-startMongodNoReset = function(){
- var args = createMongoArgs( "mongod" , arguments );
- return startMongoProgram.apply( null, args );
-}
-
-startMongos = function(args){
- return MongoRunner.runMongos(args);
-}
-
-/**
- * Returns a new argArray with any test-specific arguments added.
- */
-function appendSetParameterArgs(argArray) {
- var programName = argArray[0];
- if (programName.endsWith('mongod') || programName.endsWith('mongos')) {
- if (jsTest.options().enableTestCommands) {
- argArray.push.apply(argArray, ['--setParameter', "enableTestCommands=1"]);
- }
- if (jsTest.options().authMechanism && jsTest.options().authMechanism != "SCRAM-SHA-1") {
- var hasAuthMechs = false;
- for (i in argArray) {
- if (typeof argArray[i] === 'string' &&
- argArray[i].indexOf('authenticationMechanisms') != -1) {
- hasAuthMechs = true;
- break;
- }
- }
- if (!hasAuthMechs) {
- argArray.push.apply(argArray,
- ['--setParameter',
- "authenticationMechanisms=" + jsTest.options().authMechanism]);
- }
- }
- if (jsTest.options().auth) {
- argArray.push.apply(argArray, ['--setParameter', "enableLocalhostAuthBypass=false"]);
- }
-
- if ( jsTestOptions().useSSL ) {
- if ( argArray.indexOf('--sslMode') < 0 ) {
- argArray.push.apply(argArray, [ '--sslMode', 'requireSSL', '--sslPEMKeyFile', 'jstests/libs/server.pem', '--sslCAFile', 'jstests/libs/ca.pem', '--sslWeakCertificateValidation' ] );
- }
- }
-
- // mongos only options
- if (programName.endsWith('mongos')) {
- // apply setParameters for mongos
- if (jsTest.options().setParametersMongos) {
- var params = jsTest.options().setParametersMongos.split(",");
- if (params && params.length > 0) {
- params.forEach(function(p) {
- if (p) argArray.push.apply(argArray, ['--setParameter', p])
- });
- }
- }
- }
- // mongod only options
- else if (programName.endsWith('mongod')) {
- // set storageEngine for mongod
- if (jsTest.options().storageEngine) {
- argArray.push.apply(argArray, ['--storageEngine', jsTest.options().storageEngine]);
- }
- // apply setParameters for mongod
- if (jsTest.options().setParameters) {
- var params = jsTest.options().setParameters.split(",");
- if (params && params.length > 0) {
- params.forEach(function(p) {
- if (p) argArray.push.apply(argArray, ['--setParameter', p])
- });
- }
- }
- }
- }
- return argArray;
-};
-
-/**
- * Start a mongo process with a particular argument array. If we aren't waiting for connect,
- * return null.
- */
-MongoRunner.startWithArgs = function(argArray, waitForConnect) {
- // TODO: Make there only be one codepath for starting mongo processes
-
- argArray = appendSetParameterArgs(argArray);
- var port = _parsePort.apply(null, argArray);
- var pid = _startMongoProgram.apply(null, argArray);
-
- var conn = null;
- if (waitForConnect) {
- assert.soon( function() {
- try {
- conn = new Mongo("127.0.0.1:" + port);
- return true;
- } catch( e ) {
- if (!checkProgram(pid)) {
-
- print("Could not start mongo program at " + port + ", process ended")
-
- // Break out
- return true;
- }
- }
- return false;
- }, "unable to connect to mongo program on port " + port, 600 * 1000);
- }
-
- return conn;
-}
-
-/**
- * DEPRECATED
- *
- * Start mongod or mongos and return a Mongo() object connected to there.
- * This function's first argument is "mongod" or "mongos" program name, \
- * and subsequent arguments to this function are passed as
- * command line arguments to the program.
- */
-startMongoProgram = function(){
- var port = _parsePort.apply( null, arguments );
-
- // Enable test commands.
- // TODO: Make this work better with multi-version testing so that we can support
- // enabling this on 2.4 when testing 2.6
- var args = argumentsToArray( arguments );
- args = appendSetParameterArgs(args);
- var pid = _startMongoProgram.apply( null, args );
-
- var m;
- assert.soon
- ( function() {
- try {
- m = new Mongo( "127.0.0.1:" + port );
- return true;
- } catch( e ) {
- if (!checkProgram(pid)) {
-
- print("Could not start mongo program at " + port + ", process ended")
-
- // Break out
- m = null;
- return true;
- }
- }
- return false;
- }, "unable to connect to mongo program on port " + port, 600 * 1000 );
-
- return m;
-}
-
-runMongoProgram = function() {
- var args = argumentsToArray( arguments );
- var progName = args[0];
-
- if ( jsTestOptions().auth ) {
- args = args.slice(1);
- args.unshift( progName,
- '-u', jsTestOptions().authUser,
- '-p', jsTestOptions().authPassword,
- '--authenticationMechanism', DB.prototype._defaultAuthenticationMechanism,
- '--authenticationDatabase=admin'
- );
- }
-
- if (progName !== "bsondump" && args.indexOf("--dialTimeout") === -1) {
- args.push("--dialTimeout", "30");
- }
-
- if ( jsTestOptions().useSSL ) {
- args.push("--ssl", "--sslPEMKeyFile", "jstests/libs/server.pem", "--sslCAFile", "jstests/libs/ca.pem", "--sslAllowInvalidHostnames");
- }
-
- if (progName == 'mongo' && !_useWriteCommandsDefault()) {
- progName = args[0];
- args = args.slice(1);
- args.unshift(progName, '--useLegacyWriteOps');
- }
-
- return _runMongoProgram.apply( null, args );
-}
-
-// Start a mongo program instance. This function's first argument is the
-// program name, and subsequent arguments to this function are passed as
-// command line arguments to the program. Returns pid of the spawned program.
-startMongoProgramNoConnect = function() {
- var args = argumentsToArray( arguments );
- var progName = args[0];
-
- if ( jsTestOptions().auth ) {
- args = args.slice(1);
- args.unshift(progName,
- '-u', jsTestOptions().authUser,
- '-p', jsTestOptions().authPassword,
- '--authenticationMechanism', DB.prototype._defaultAuthenticationMechanism,
- '--authenticationDatabase=admin');
- }
-
- if (progName == 'mongo' && !_useWriteCommandsDefault()) {
- args = args.slice(1);
- args.unshift(progName, '--useLegacyWriteOps');
- }
-
- return _startMongoProgram.apply( null, args );
-}
-
-myPort = function() {
- var m = db.getMongo();
- if ( m.host.match( /:/ ) )
- return m.host.match( /:(.*)/ )[ 1 ];
- else
- return 27017;
-}
-
-}());
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/servers_misc.js b/src/mongo/gotools/test/legacy28/jstests/libs/servers_misc.js
deleted file mode 100644
index 4f6d3f9b9ef..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/servers_misc.js
+++ /dev/null
@@ -1,357 +0,0 @@
-/**
- * Run a mongod process.
- *
- * After initializing a MongodRunner, you must call start() on it.
- * @param {int} port port to run db on, use allocatePorts(num) to requision
- * @param {string} dbpath path to use
- * @param {boolean} peer pass in false (DEPRECATED, was used for replica pair host)
- * @param {boolean} arbiter pass in false (DEPRECATED, was used for replica pair host)
- * @param {array} extraArgs other arguments for the command line
- * @param {object} options other options include no_bind to not bind_ip to 127.0.0.1
- * (necessary for replica set testing)
- */
-MongodRunner = function( port, dbpath, peer, arbiter, extraArgs, options ) {
- this.port_ = port;
- this.dbpath_ = dbpath;
- this.peer_ = peer;
- this.arbiter_ = arbiter;
- this.extraArgs_ = extraArgs;
- this.options_ = options ? options : {};
-};
-
-/**
- * Start this mongod process.
- *
- * @param {boolean} reuseData If the data directory should be left intact (default is to wipe it)
- */
-MongodRunner.prototype.start = function( reuseData ) {
- var args = [];
- if ( reuseData ) {
- args.push( "mongod" );
- }
- args.push( "--port" );
- args.push( this.port_ );
- args.push( "--dbpath" );
- args.push( this.dbpath_ );
- args.push( "--nohttpinterface" );
- args.push( "--noprealloc" );
- args.push( "--smallfiles" );
- if (!this.options_.no_bind) {
- args.push( "--bind_ip" );
- args.push( "127.0.0.1" );
- }
- if ( this.extraArgs_ ) {
- args = args.concat( this.extraArgs_ );
- }
- removeFile( this.dbpath_ + "/mongod.lock" );
- if ( reuseData ) {
- return startMongoProgram.apply( null, args );
- } else {
- return startMongod.apply( null, args );
- }
-}
-
-MongodRunner.prototype.port = function() { return this.port_; }
-
-MongodRunner.prototype.toString = function() { return [ this.port_, this.dbpath_, this.peer_, this.arbiter_ ].toString(); }
-
-ToolTest = function( name, extraOptions ){
- this.name = name;
- this.options = extraOptions;
- this.port = allocatePorts(1)[0];
- this.baseName = "jstests_tool_" + name;
- this.root = MongoRunner.dataPath + this.baseName;
- this.dbpath = this.root + "/";
- this.ext = this.root + "_external/";
- this.extFile = this.root + "_external/a";
- this.useSSL = jsTestOptions().useSSL
- resetDbpath( this.dbpath );
- resetDbpath( this.ext );
-}
-
-ToolTest.prototype.startDB = function( coll ){
- assert( ! this.m , "db already running" );
-
- var options = {port : this.port,
- dbpath : this.dbpath,
- nohttpinterface : "",
- noprealloc : "",
- smallfiles : "",
- bind_ip : "127.0.0.1"};
-
- Object.extend(options, this.options);
-
- if ( this.useSSL ) {
- Object.extend(options, { sslMode: "requireSSL", sslPEMKeyFile: "jstests/libs/server.pem", sslCAFile: "jstests/libs/ca.pem", sslWeakCertificateValidation: "" } );
- }
-
- this.m = startMongoProgram.apply(null, MongoRunner.arrOptions("mongod", options));
- this.db = this.m.getDB( this.baseName );
- if ( coll )
- return this.db.getCollection( coll );
- return this.db;
-}
-
-ToolTest.prototype.stop = function(){
- if ( ! this.m )
- return;
- stopMongod( this.port );
- this.m = null;
- this.db = null;
-
- print('*** ' + this.name + " completed successfully ***");
-}
-
-ToolTest.prototype.runTool = function(){
- var a = [ "mongo" + arguments[0] ];
-
- var hasdbpath = false;
-
- for ( var i=1; i<arguments.length; i++ ){
- a.push( arguments[i] );
- if ( arguments[i] == "--dbpath" )
- hasdbpath = true;
- }
-
- if ( this.useSSL ) {
- a = a.concat(["--ssl", "--sslPEMKeyFile", "jstests/libs/server.pem", "--sslCAFile", "jstests/libs/ca.pem", "--sslAllowInvalidHostnames"]);
- }
-
- if ( ! hasdbpath ){
- a.push( "--host" );
- a.push( "127.0.0.1:" + this.port );
- }
-
- return runMongoProgram.apply( null , a );
-}
-
-
-ReplTest = function( name, ports ){
- this.name = name;
- this.ports = ports || allocatePorts( 2 );
-}
-
-ReplTest.prototype.getPort = function( master ){
- if ( master )
- return this.ports[ 0 ];
- return this.ports[ 1 ]
-}
-
-ReplTest.prototype.getPath = function( master ){
- var p = MongoRunner.dataPath + this.name + "-";
- if ( master )
- p += "master";
- else
- p += "slave"
- return p;
-}
-
-ReplTest.prototype.getOptions = function( master , extra , putBinaryFirst, norepl ){
-
- if ( ! extra )
- extra = {};
-
- if ( ! extra.oplogSize )
- extra.oplogSize = "40";
-
- var a = []
- if ( putBinaryFirst )
- a.push( "mongod" )
- a.push( "--nohttpinterface", "--noprealloc", "--bind_ip" , "127.0.0.1" , "--smallfiles" );
-
- a.push( "--port" );
- a.push( this.getPort( master ) );
-
- a.push( "--dbpath" );
- a.push( this.getPath( master ) );
-
- if( jsTestOptions().noJournal ) a.push( "--nojournal" )
- if( jsTestOptions().noJournalPrealloc ) a.push( "--nopreallocj" )
- if( jsTestOptions().keyFile ) {
- a.push( "--keyFile" )
- a.push( jsTestOptions().keyFile )
- }
-
- if( jsTestOptions().useSSL ) {
- if (!a.contains("--sslMode")) {
- a.push( "--sslMode" )
- a.push( "requireSSL" )
- }
- if (!a.contains("--sslPEMKeyFile")) {
- a.push( "--sslPEMKeyFile" )
- a.push( "jstests/libs/server.pem" )
- }
- if (!a.contains("--sslCAFile")) {
- a.push( "--sslCAFile" )
- a.push( "jstests/libs/ca.pem" )
- }
- a.push( "--sslWeakCertificateValidation" )
- }
- if( jsTestOptions().useX509 && !a.contains("--clusterAuthMode")) {
- a.push( "--clusterAuthMode" )
- a.push( "x509" )
- }
-
- if ( !norepl ) {
- if ( master ){
- a.push( "--master" );
- }
- else {
- a.push( "--slave" );
- a.push( "--source" );
- a.push( "127.0.0.1:" + this.ports[0] );
- }
- }
-
- for ( var k in extra ){
- var v = extra[k];
- if( k in MongoRunner.logicalOptions ) continue
- a.push( "--" + k );
- if ( v != null )
- a.push( v );
- }
-
- return a;
-}
-
-ReplTest.prototype.start = function( master , options , restart, norepl ){
- var lockFile = this.getPath( master ) + "/mongod.lock";
- removeFile( lockFile );
- var o = this.getOptions( master , options , restart, norepl );
-
- if (restart) {
- return startMongoProgram.apply(null, o);
- } else {
- var conn = startMongod.apply(null, o);
- if (jsTestOptions().keyFile || jsTestOptions().auth || jsTestOptions().useX509) {
- jsTest.authenticate(conn);
- }
- return conn;
- }
-}
-
-ReplTest.prototype.stop = function( master , signal ){
- if ( arguments.length == 0 ){
- this.stop( true );
- this.stop( false );
- return;
- }
-
- print('*** ' + this.name + " completed successfully ***");
- return stopMongod( this.getPort( master ) , signal || 15 );
-}
-
-allocatePorts = function( n , startPort ) {
- var ret = [];
- var start = startPort || 31000;
- for( var i = start; i < start + n; ++i )
- ret.push( i );
- return ret;
-}
-
-
-SyncCCTest = function( testName , extraMongodOptions ){
- this._testName = testName;
- this._connections = [];
-
- for ( var i=0; i<3; i++ ){
- this._connections.push( startMongodTest( 30000 + i , testName + i , false, extraMongodOptions ) );
- }
-
- this.url = this._connections.map( function(z){ return z.name; } ).join( "," );
- this.conn = new Mongo( this.url );
-}
-
-SyncCCTest.prototype.stop = function(){
- for ( var i=0; i<this._connections.length; i++){
- stopMongod( 30000 + i );
- }
-
- print('*** ' + this._testName + " completed successfully ***");
-}
-
-SyncCCTest.prototype.checkHashes = function( dbname , msg ){
- var hashes = this._connections.map(
- function(z){
- return z.getDB( dbname ).runCommand( "dbhash" );
- }
- );
-
- for ( var i=1; i<hashes.length; i++ ){
- assert.eq( hashes[0].md5 , hashes[i].md5 , "checkHash on " + dbname + " " + msg + "\n" + tojson( hashes ) )
- }
-}
-
-SyncCCTest.prototype.tempKill = function( num ){
- num = num || 0;
- stopMongod( 30000 + num );
-}
-
-SyncCCTest.prototype.tempStart = function( num ){
- num = num || 0;
- this._connections[num] = startMongodTest( 30000 + num , this._testName + num , true );
-}
-
-
-function startParallelShell( jsCode, port, noConnect ){
- var x;
-
- var args = ["mongo"];
-
- // Convert function into call-string
- if (typeof(jsCode) == "function") {
- var id = Math.floor(Math.random() * 100000);
- jsCode = "var f" + id + " = " + jsCode.toString() + ";f" + id + "();";
- }
- else if(typeof(jsCode) == "string") {}
- // do nothing
- else {
- throw Error("bad first argument to startParallelShell");
- }
-
- if (noConnect) {
- args.push("--nodb");
- } else if (typeof(db) == "object") {
- jsCode = "db = db.getSiblingDB('" + db.getName() + "');" + jsCode;
- }
-
- if (TestData) {
- jsCode = "TestData = " + tojson(TestData) + ";" + jsCode;
- }
-
- args.push("--eval", jsCode);
-
- if (typeof db == "object") {
- var hostAndPort = db.getMongo().host.split(':');
- var host = hostAndPort[0];
- args.push("--host", host);
- if (!port && hostAndPort.length >= 2) {
- var port = hostAndPort[1];
- }
- }
- if (port) {
- args.push("--port", port);
- }
-
- if( jsTestOptions().useSSL ) {
- args.push( "--ssl" )
- args.push( "--sslPEMKeyFile" )
- args.push( "jstests/libs/client.pem" )
- args.push( "--sslCAFile" )
- args.push( "jstests/libs/ca.pem" )
- }
-
- x = startMongoProgramNoConnect.apply(null, args);
- return function(){
- waitProgram( x );
- };
-}
-
-var testingReplication = false;
-
-function skipIfTestingReplication(){
- if (testingReplication) {
- print("skipIfTestingReplication skipping");
- quit(0);
- }
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/slow_weekly_util.js b/src/mongo/gotools/test/legacy28/jstests/libs/slow_weekly_util.js
deleted file mode 100644
index f5f89643f16..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/slow_weekly_util.js
+++ /dev/null
@@ -1,20 +0,0 @@
-
-SlowWeeklyMongod = function( name ) {
- this.name = name;
- this.port = 30201;
-
- this.start = new Date();
-
- this.conn = startMongodEmpty("--port", this.port, "--dbpath", MongoRunner.dataPath + this.name , "--smallfiles", "--nojournal" );
-};
-
-SlowWeeklyMongod.prototype.getDB = function( name ) {
- return this.conn.getDB( name );
-}
-
-SlowWeeklyMongod.prototype.stop = function(){
- stopMongod( this.port );
- var end = new Date();
- print( "slowWeekly test: " + this.name + " completed successfully in " + ( ( end.getTime() - this.start.getTime() ) / 1000 ) + " seconds" );
-};
-
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/smoke.pem b/src/mongo/gotools/test/legacy28/jstests/libs/smoke.pem
deleted file mode 100644
index 7dddf222386..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/smoke.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDYTCCAkmgAwIBAgIBCDANBgkqhkiG9w0BAQUFADBrMQ4wDAYDVQQDEwVzbW9r
-ZTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYwFAYDVQQHEw1O
-ZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMwHhcN
-MTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjBrMQ4wDAYDVQQDEwVzbW9rZTEP
-MA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYwFAYDVQQHEw1OZXcg
-WW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMwggEiMA0G
-CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDb4fOWDomCPyYesh42pQ/bEHdK7r73
-06x1hdku9i+nytCSxhhuITGC1FA4ZIbYdQC/jgfzC0D+SDFKCCyNZA/2Pxam9y3F
-QHrueNtD9bw/OB98D6hC2fCow5OxUqWDkee2hQRTwLKDzec+H72AkwURh8oTfJsl
-LL/1YITZs9kfs59r8HG2YAT7QBbg3xBmK0wZvL4V/FY/OeeR92pIgjUU/6xm/1LU
-bhNHl5JTrXQxPpmvDb1ysiI0mMLeUz7UI+Pe/9mn91dHwgkprWyFi6VnV3/aW7DC
-nW/DklOPD8vMWu2A6iYU0fZbcj4vGM607vst5QLDMoD5Y2ilrKLiTRa5AgMBAAGj
-EDAOMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAJc64d76+eyNGX6C
-5r4IdFF3zJjkLs/NcSMReUTEv4zAdJCn7c1FNRkQBS3Ky2CeSGmiyYOhWZ7usv7x
-EvprmHouWsrQXV+o5EIW366e5wzg0c5KWO3oBIRjx4hDkRSQSjJjy5NFrc8fAW9x
-eeaHFWdqk3CHvqBhd32QYEs4+7v8hBYM3PBkj8qghXta4ZZS89cTMSjhu5s4Opje
-qUzGzoHat2VBdYzIpVOorYMFXObwCeQkCAXO5epuGZ0QhML66hc7FuOsW75kI9aW
-QXVoM/z2Gb1wbBYnwHOXtClK783S3RdV0uJun/pVj+VeHb6fyIQRmC5d0eJ0C8mY
-X+acnvA=
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpQIBAAKCAQEA2+Hzlg6Jgj8mHrIeNqUP2xB3Su6+99OsdYXZLvYvp8rQksYY
-biExgtRQOGSG2HUAv44H8wtA/kgxSggsjWQP9j8WpvctxUB67njbQ/W8PzgffA+o
-QtnwqMOTsVKlg5HntoUEU8Cyg83nPh+9gJMFEYfKE3ybJSy/9WCE2bPZH7Ofa/Bx
-tmAE+0AW4N8QZitMGby+FfxWPznnkfdqSII1FP+sZv9S1G4TR5eSU610MT6Zrw29
-crIiNJjC3lM+1CPj3v/Zp/dXR8IJKa1shYulZ1d/2luwwp1vw5JTjw/LzFrtgOom
-FNH2W3I+LxjOtO77LeUCwzKA+WNopayi4k0WuQIDAQABAoIBAQDRFgAaDcLGfqQS
-Bk/iqHz2U6cMMxCW+sqAioGmPWW9iYdiOkra1meNP7T0mur7A+9tN3LpsybfZeiw
-vCsZXDAteXph1KPKcPE0uOnPqumRuB2ATCc1Qqas5CUaNju7a8/J6Jzfw1o9KVud
-4HLDw4nLTLNkalXhOLdkbp6FoZZypAgc8OnSdw7z9Kri6VndkddX3fWv4t203XwT
-AvBxvy4Qfblz6VKYRnjj2CPvo/kD+ncFEg+S6u8/LkghTX7CYeMHdTC0P9jOcEK2
-PMm3kS3sX7VkypsAirYK5QtBWxur+mINxfOBDtRlA2RaJQnikRiGb14bMkLx8Liy
-JNjEHSLdAoGBAP9+KpjniozZIbrcS79wdRrW+ARyDp1Plzyd4nQxfWmQ//nsnK5T
-EYCFXWTR/ldkAoHpD+bGGU02p1+1u4vmWqw/x+Qy56Gh/eylhe0RvYEjkVLyreuc
-bXu0BFlKVgRlBq1ZyXnr2lz3bAIZxvZs13lZn6qVPMt7w2/JTCal9jw7AoGBANxR
-sGik9mq/678nzLiNlf/LcwIz7siuyISoWDOaVEVva0uorqctVqL95w0f+3FXqBO/
-5BiJRFo5D8SfzRjkNkJ7V+rm+7/CjtsjEw2Ue+ZJYPlm+Wr545GYmhU9QH9NLZIN
-JBwTVWjLgdsyQyi0Gc+xMraBwEwoyS8cO17uHO2bAoGBANRmO91/6BPt0ve4epR9
-Vi1o9yki9PlcmHtBOmikWAFyFQvd4+eckVlKBflyBkL6locPjTOqDpC9VengeDj2
-2PyHzZLtqtkZhbK9bJhIfkWknwTZUTMliXMkldTxUo82uZVVpoRgSdmtq7IXYeut
-UnjExFMY3EDB9BizvUYIBKvPAoGAViQ6bS/SiPpxGlRdXus88r6BQSM9AYoVLIkF
-s2dr+5oMwZA6eXLopOHRLPiMP0yekto8PLuu1ffpil9QuaLA9E11moqlc9yGLngQ
-QwcDSo72M41nh8Qcjhi0ZgmE5kEuyCQLMk783fRz2VhVmdyRGvuVcHZa0WxA/QJ0
-1DEVbnECgYEA3i2PGHUvU2TIFNvubw3qdH5y7FXafF+O0ulQ8e6r/CbVAG14Z6xP
-RHLc7/JIYK9CG1PWCbkjiHZ4MsKFuRWFrUMrwSj8M3euCaEIxa/Co60qQ/CnZiZ6
-geleTtUcTZ2T0pqGLnrHwlzhLpCkPJPyjcfQjjEZRwd0bVFX6b3C/rw=
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/test_background_ops.js b/src/mongo/gotools/test/legacy28/jstests/libs/test_background_ops.js
deleted file mode 100644
index b3f6f593947..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/test_background_ops.js
+++ /dev/null
@@ -1,340 +0,0 @@
-//
-// Utilities related to background operations while other operations are working
-//
-
-/**
- * Allows synchronization between background ops and the test operations
- */
-var waitForLock = function( mongo, name ){
-
- var ts = new ObjectId()
- var lockColl = mongo.getCollection( "config.testLocks" )
-
- lockColl.update({ _id : name, state : 0 }, { $set : { state : 0 } }, true)
-
- //
- // Wait until we can set the state to 1 with our id
- //
-
- var startTime = new Date().getTime()
-
- assert.soon( function() {
- lockColl.update({ _id : name, state : 0 }, { $set : { ts : ts, state : 1 } })
- var gleObj = lockColl.getDB().getLastErrorObj()
-
- if( new Date().getTime() - startTime > 20 * 1000 ){
- print( "Waiting for..." )
- printjson( gleObj )
- printjson( lockColl.findOne() )
- printjson( ts )
- }
-
- return gleObj.n == 1 || gleObj.updatedExisting
- }, "could not acquire lock", 30 * 1000, 100 )
-
- print( "Acquired lock " + tojson( { _id : name, ts : ts } ) + " curr : " +
- tojson( lockColl.findOne({ _id : name }) ) )
-
- // Set the state back to 0
- var unlock = function(){
- print( "Releasing lock " + tojson( { _id : name, ts : ts } ) + " curr : " +
- tojson( lockColl.findOne({ _id : name }) ) )
- lockColl.update({ _id : name, ts : ts }, { $set : { state : 0 } })
- }
-
- // Return an object we can invoke unlock on
- return { unlock : unlock }
-}
-
-/**
- * Allows a test or background op to say it's finished
- */
-var setFinished = function( mongo, name, finished ){
- if( finished || finished == undefined )
- mongo.getCollection( "config.testFinished" ).update({ _id : name }, { _id : name }, true )
- else
- mongo.getCollection( "config.testFinished" ).remove({ _id : name })
-}
-
-/**
- * Checks whether a test or background op is finished
- */
-var isFinished = function( mongo, name ){
- return mongo.getCollection( "config.testFinished" ).findOne({ _id : name }) != null
-}
-
-/**
- * Sets the result of a background op
- */
-var setResult = function( mongo, name, result, err ){
- mongo.getCollection( "config.testResult" ).update({ _id : name }, { _id : name, result : result, err : err }, true )
-}
-
-/**
- * Gets the result for a background op
- */
-var getResult = function( mongo, name ){
- return mongo.getCollection( "config.testResult" ).findOne({ _id : name })
-}
-
-/**
- * Overrides the parallel shell code in mongo
- */
-function startParallelShell( jsCode, port ){
-
- var x;
- if ( port ) {
- x = startMongoProgramNoConnect( "mongo" , "--port" , port , "--eval" , jsCode );
- } else {
- x = startMongoProgramNoConnect( "mongo" , "--eval" , jsCode , db ? db.getMongo().host : null );
- }
-
- return function(){
- jsTestLog( "Waiting for shell " + x + "..." )
- waitProgram( x );
- jsTestLog( "Shell " + x + " finished." )
- };
-}
-
-startParallelOps = function( mongo, proc, args, context ){
-
- var procName = proc.name + "-" + new ObjectId()
- var seed = new ObjectId( new ObjectId().valueOf().split("").reverse().join("") )
- .getTimestamp().getTime()
-
- // Make sure we aren't finished before we start
- setFinished( mongo, procName, false )
- setResult( mongo, procName, undefined, undefined )
-
- // TODO: Make this a context of its own
- var procContext = { procName : procName,
- seed : seed,
- waitForLock : waitForLock,
- setFinished : setFinished,
- isFinished : isFinished,
- setResult : setResult,
-
- setup : function( context, stored ){
-
- waitForLock = function(){
- return context.waitForLock( db.getMongo(), context.procName )
- }
- setFinished = function( finished ){
- return context.setFinished( db.getMongo(), context.procName, finished )
- }
- isFinished = function(){
- return context.isFinished( db.getMongo(), context.procName )
- }
- setResult = function( result, err ){
- return context.setResult( db.getMongo(), context.procName, result, err )
- }
- }}
-
- var bootstrapper = function( stored ){
-
- var procContext = stored.procContext
- procContext.setup( procContext, stored )
-
- var contexts = stored.contexts
- eval( "contexts = " + contexts )
-
- for( var i = 0; i < contexts.length; i++ ){
- if( typeof( contexts[i] ) != "undefined" ){
- // Evaluate all contexts
- contexts[i]( procContext )
- }
- }
-
- var operation = stored.operation
- eval( "operation = " + operation )
-
- var args = stored.args
- eval( "args = " + args )
-
- result = undefined
- err = undefined
-
- try{
- result = operation.apply( null, args )
- }
- catch( e ){
- err = e
- }
-
- setResult( result, err )
- }
-
- var contexts = [ RandomFunctionContext, context ]
-
- var testDataColl = mongo.getCollection( "config.parallelTest" )
-
- testDataColl.insert({ _id : procName,
- bootstrapper : tojson( bootstrapper ),
- operation : tojson( proc ),
- args : tojson( args ),
- procContext : procContext,
- contexts : tojson( contexts ) })
-
- assert.eq( null, testDataColl.getDB().getLastError() )
-
- var bootstrapStartup =
- "{ var procName = '" + procName + "'; " +
- "var stored = db.getMongo().getCollection( '" + testDataColl + "' )" +
- ".findOne({ _id : procName }); " +
- "var bootstrapper = stored.bootstrapper; " +
- "eval( 'bootstrapper = ' + bootstrapper ); " +
- "bootstrapper( stored ); " +
- "}"
-
-
- var oldDB = db
- db = mongo.getDB( "test" )
-
- jsTest.log( "Starting " + proc.name + " operations..." )
-
- var rawJoin = startParallelShell( bootstrapStartup )
-
- db = oldDB
-
-
- var join = function(){
- setFinished( mongo, procName, true )
-
- rawJoin();
- result = getResult( mongo, procName )
-
- assert.neq( result, null )
-
- if( result.err ) throw Error("Error in parallel ops " + procName + " : "
- + tojson( result.err ) )
-
- else return result.result
- }
-
- join.isFinished = function(){
- return isFinished( mongo, procName )
- }
-
- join.setFinished = function( finished ){
- return setFinished( mongo, procName, finished )
- }
-
- join.waitForLock = function( name ){
- return waitForLock( mongo, name )
- }
-
- return join
-}
-
-var RandomFunctionContext = function( context ){
-
- Random.srand( context.seed );
-
- Random.randBool = function(){ return Random.rand() > 0.5 }
-
- Random.randInt = function( min, max ){
-
- if( max == undefined ){
- max = min
- min = 0
- }
-
- return min + Math.floor( Random.rand() * max )
- }
-
- Random.randShardKey = function(){
-
- var numFields = 2 //Random.randInt(1, 3)
-
- var key = {}
- for( var i = 0; i < numFields; i++ ){
- var field = String.fromCharCode( "a".charCodeAt() + i )
- key[ field ] = 1
- }
-
- return key
- }
-
- Random.randShardKeyValue = function( shardKey ){
-
- var keyValue = {}
- for( field in shardKey ){
- keyValue[ field ] = Random.randInt(1, 100)
- }
-
- return keyValue
- }
-
- Random.randCluster = function(){
-
- var numShards = 2 //Random.randInt( 1, 10 )
- var rs = false //Random.randBool()
- var st = new ShardingTest({ shards : numShards,
- mongos : 4,
- other : { separateConfig : true, rs : rs } })
-
- return st
- }
-}
-
-
-//
-// Some utility operations
-//
-
-function moveOps( collName, options ){
-
- options = options || {}
-
- var admin = db.getMongo().getDB( "admin" )
- var config = db.getMongo().getDB( "config" )
- var shards = config.shards.find().toArray()
- var shardKey = config.collections.findOne({ _id : collName }).key
-
- while( ! isFinished() ){
-
- var findKey = Random.randShardKeyValue( shardKey )
- var toShard = shards[ Random.randInt( shards.length ) ]._id
-
- try {
- printjson( admin.runCommand({ moveChunk : collName,
- find : findKey,
- to : toShard }) )
- }
- catch( e ){
- printjson( e )
- }
-
- sleep( 1000 )
- }
-
- jsTest.log( "Stopping moveOps..." )
-}
-
-function splitOps( collName, options ){
-
- options = options || {}
-
- var admin = db.getMongo().getDB( "admin" )
- var config = db.getMongo().getDB( "config" )
- var shards = config.shards.find().toArray()
- var shardKey = config.collections.findOne({ _id : collName }).key
-
- while( ! isFinished() ){
-
- var middleKey = Random.randShardKeyValue( shardKey )
-
- try {
- printjson( admin.runCommand({ split : collName,
- middle : middleKey }) )
- }
- catch( e ){
- printjson( e )
- }
-
- sleep( 1000 )
- }
-
- jsTest.log( "Stopping splitOps..." )
-}
-
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/testconfig b/src/mongo/gotools/test/legacy28/jstests/libs/testconfig
deleted file mode 100644
index 4b09f37ad13..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/testconfig
+++ /dev/null
@@ -1,6 +0,0 @@
-fastsync = true
-#comment line
-#commentedflagwithan = false
-version = false
-help = false
-sysinfo = false
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/testconfig.json b/src/mongo/gotools/test/legacy28/jstests/libs/testconfig.json
deleted file mode 100644
index 5af32aad7d3..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/testconfig.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "fastsync" : true,
- "version" : false
-}
diff --git a/src/mongo/gotools/test/legacy28/jstests/libs/trace_missing_docs.js b/src/mongo/gotools/test/legacy28/jstests/libs/trace_missing_docs.js
deleted file mode 100644
index 3faf50b4606..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/libs/trace_missing_docs.js
+++ /dev/null
@@ -1,90 +0,0 @@
-
-//
-// On error inserting documents, traces back and shows where the document was dropped
-//
-
-function traceMissingDoc( coll, doc, mongos ) {
-
- if (mongos) coll = mongos.getCollection(coll + "");
- else mongos = coll.getMongo();
-
- var config = mongos.getDB( "config" );
- var shards = config.shards.find().toArray();
- for ( var i = 0; i < shards.length; i++ ) {
- shards[i].conn = new Mongo( shards[i].host );
- }
-
- var shardKeyPatt = config.collections.findOne({ _id : coll + "" }).key;
-
- // Project out the shard key
- var shardKey = {};
- for ( var k in shardKeyPatt ) {
- if ( doc[k] == undefined ) {
- jsTest.log( "Shard key " + tojson( shardKey ) +
- " not found in doc " + tojson( doc ) +
- ", falling back to _id search..." );
- shardKeyPatt = { _id : 1 };
- shardKey = { _id : doc['_id'] };
- break;
- }
- shardKey[k] = doc[k];
- }
-
- if ( doc['_id'] == undefined ) {
- jsTest.log( "Id not found in doc " + tojson( doc ) + " cannot trace oplog entries." );
- return;
- }
-
- jsTest.log( "Using shard key : " + tojson( shardKey ) );
-
- var allOps = [];
- for ( var i = 0; i < shards.length; i++ ) {
-
- var oplog = shards[i].conn.getCollection( "local.oplog.rs" );
- if ( !oplog.findOne() ) {
- oplog = shards[i].conn.getCollection( "local.oplog.$main" );
- }
-
- if ( !oplog.findOne() ) {
- jsTest.log( "No oplog was found on shard " + shards[i]._id );
- continue;
- }
-
- var addKeyQuery = function( query, prefix ) {
- for ( var k in shardKey ) {
- query[prefix + '.' + k] = shardKey[k];
- }
- return query;
- };
-
- var addToOps = function( cursor ) {
- cursor.forEach( function( doc ) {
- doc.shard = shards[i]._id;
- doc.realTime = new Date( doc.ts.getTime() * 1000 );
- allOps.push( doc );
- });
- };
-
- // Find ops
- addToOps( oplog.find( addKeyQuery( { op : 'i' }, 'o' ) ) );
- var updateQuery = { $or : [ addKeyQuery( { op : 'u' }, 'o2' ),
- { op : 'u', 'o2._id' : doc['_id'] } ] };
- addToOps( oplog.find( updateQuery ) );
- addToOps( oplog.find({ op : 'd', 'o._id' : doc['_id'] }) );
- }
-
- var compareOps = function( opA, opB ) {
- if ( opA.ts < opB.ts ) return -1;
- if ( opB.ts < opA.ts ) return 1;
- else return 0;
- }
-
- allOps.sort( compareOps );
-
- print( "Ops found for doc " + tojson( doc ) + " on each shard:\n" );
- for ( var i = 0; i < allOps.length; i++ ) {
- printjson( allOps[i] );
- }
-
- return allOps;
-} \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy28/jstests/replsets/rslib.js b/src/mongo/gotools/test/legacy28/jstests/replsets/rslib.js
deleted file mode 100644
index 6a16db232e4..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/replsets/rslib.js
+++ /dev/null
@@ -1,115 +0,0 @@
-
-var count = 0;
-var w = 0;
-
-var wait = function(f,msg) {
- w++;
- var n = 0;
- while (!f()) {
- if( n % 4 == 0 )
- print("waiting " + w);
- if (++n == 4) {
- print("" + f);
- }
- assert(n < 200, 'tried 200 times, giving up on ' + msg );
- sleep(1000);
- }
-};
-
-/**
- * Use this to do something once every 4 iterations.
- *
- * <pre>
- * for (i=0; i<1000; i++) {
- * occasionally(function() { print("4 more iterations"); });
- * }
- * </pre>
- */
-var occasionally = function(f, n) {
- var interval = n || 4;
- if (count % interval == 0) {
- f();
- }
- count++;
-};
-
-var reconnect = function(a) {
- wait(function() {
- try {
- // make this work with either dbs or connections
- if (typeof(a.getDB) == "function") {
- db = a.getDB('foo');
- }
- else {
- db = a;
- }
- db.bar.stats();
- if (jsTest.options().keyFile || jsTest.options().useX509) { // SERVER-4241: Shell connections don't re-authenticate on reconnect
- return jsTest.authenticate(db.getMongo());
- }
- return true;
- } catch(e) {
- print(e);
- return false;
- }
- });
-};
-
-
-var getLatestOp = function(server) {
- server.getDB("admin").getMongo().setSlaveOk();
- var log = server.getDB("local")['oplog.rs'];
- var cursor = log.find({}).sort({'$natural': -1}).limit(1);
- if (cursor.hasNext()) {
- return cursor.next();
- }
- return null;
-};
-
-
-var waitForAllMembers = function(master, timeout) {
- var failCount = 0;
-
- assert.soon( function() {
- var state = null
- try {
- state = master.getSisterDB("admin").runCommand({replSetGetStatus:1});
- failCount = 0;
- } catch ( e ) {
- // Connection can get reset on replica set failover causing a socket exception
- print( "Calling replSetGetStatus failed" );
- print( e );
- return false;
- }
- occasionally(function() { printjson(state); }, 10);
-
- for (var m in state.members) {
- if (state.members[m].state != 1 && // PRIMARY
- state.members[m].state != 2 && // SECONDARY
- state.members[m].state != 7) { // ARBITER
- return false;
- }
- }
- printjson( state );
- return true;
- }, "not all members ready", timeout || 60000);
-
- print( "All members are now in state PRIMARY, SECONDARY, or ARBITER" );
-};
-
-var reconfig = function(rs, config) {
- var admin = rs.getPrimary().getDB("admin");
-
- try {
- var ok = admin.runCommand({replSetReconfig : config});
- assert.eq(ok.ok,1);
- }
- catch(e) {
- print(e);
- }
-
- master = rs.getPrimary().getDB("admin");
- waitForAllMembers(master);
-
- return master;
-};
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/csv1.js b/src/mongo/gotools/test/legacy28/jstests/tool/csv1.js
deleted file mode 100644
index e95d8aa8b41..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/csv1.js
+++ /dev/null
@@ -1,43 +0,0 @@
-// csv1.js
-
-
-t = new ToolTest( "csv1" )
-
-c = t.startDB( "foo" );
-
-base = { a : 1 , b : "foo,bar\"baz,qux" , c: 5, 'd d': -6 , e: '-', f : "."};
-
-assert.eq( 0 , c.count() , "setup1" );
-c.insert( base );
-delete base._id
-assert.eq( 1 , c.count() , "setup2" );
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv" , "-f" , "a,b,c,d d,e,f" )
-
-c.drop()
-assert.eq( 0 , c.count() , "after drop" )
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "-f" , "a,b,c,d d,e,f" );
-assert.soon( "2 == c.count()" , "restore 2" );
-
-a = c.find().sort( { a : 1 } ).toArray();
-delete a[0]._id
-delete a[1]._id
-assert.docEq( { a : "a" , b : "b" , c : "c" , 'd d': "d d", e: 'e', f : "f"}, a[1], "csv parse 1" );
-assert.docEq( base, a[0], "csv parse 0" )
-
-c.drop()
-assert.eq( 0 , c.count() , "after drop 2" )
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" )
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-
-x = c.findOne()
-delete x._id;
-assert.docEq( base, x, "csv parse 2" )
-
-
-
-
-t.stop()
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/csvexport1.js b/src/mongo/gotools/test/legacy28/jstests/tool/csvexport1.js
deleted file mode 100644
index 2cd3c9c0447..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/csvexport1.js
+++ /dev/null
@@ -1,65 +0,0 @@
-// csvexport1.js
-
-
-t = new ToolTest( "csvexport1" )
-
-c = t.startDB( "foo" );
-
-assert.eq( 0 , c.count() , "setup1" );
-
-objId = ObjectId()
-
-c.insert({ a : new NumberInt(1) , b : objId , c: [1, 2, 3], d : {a : "hello", b : "world"} , e: '-'})
-c.insert({ a : -2.0, c : MinKey, d : "Then he said, \"Hello World!\"", e : new NumberLong(3)})
-c.insert({ a : new BinData(0, "1234"), b : ISODate("2009-08-27T12:34:56.789"),
- c : new Timestamp(1234, 9876), d : /foo*\"bar\"/i,
- e : function foo() { print("Hello World!"); }})
-
-assert.eq( 3 , c.count() , "setup2" );
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv", "-f", "a,b,c,d,e")
-
-
-c.drop()
-
-assert.eq( 0 , c.count() , "after drop" )
-
-t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--type", "csv", "--headerline");
-
-assert.soon ( 3 + " == c.count()", "after import");
-
-// Note: Exporting and Importing to/from CSV is not designed to be round-trippable
-expected = []
-expected.push({ a : 1, b : "ObjectId(" + objId.valueOf() + ")", c : [ 1, 2, 3 ], d : { "a" : "hello", "b" : "world" }, e : "-"})
-expected.push({ a : -2.0, b : "", c : "$MinKey", d : "Then he said, \"Hello World!\"", e : 3})
-// "t" should be 1234, but the shell interprets the first field of timestamps as milliseconds while
-// they are stored as seconds. See SERVER-7718.
-expected.push({ a : "D76DF8", b : "2009-08-27T12:34:56.789Z",
- c : { "$timestamp" : { "t" : 1234, "i" : 9876 } },
- d : "/foo*\\\"bar\\\"/i", e : tojson(function foo() { print("Hello World!"); })})
-
-actual = []
-actual.push(c.find({a : 1}).toArray()[0]);
-actual.push(c.find({a : -2.0}).toArray()[0]);
-actual.push(c.find({a : "D76DF8"}).toArray()[0]);
-
-for (i = 0; i < expected.length; i++) {
- delete actual[i]._id
- assert.eq(Object.keys(expected[i]).length, Object.keys(actual[i]).length)
- keys = Object.keys(expected[i])
- for(var j=0;j<keys.length;j++){
- expectedVal = expected[i][keys[j]]
- if((typeof expectedVal)== "object"){
- // For fields which contain arrays or objects, they have been
- // exported as JSON - parse the JSON in the output and verify
- // that it matches the original document's value
- assert.docEq(expectedVal, JSON.parse(actual[i][keys[j]]), "CSV export " + i)
- }else{
- // Otherwise just compare the values directly
- assert.eq(expectedVal, actual[i][keys[j]], "CSV export " + i)
- }
- }
-}
-
-
-t.stop()
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/csvexport2.js b/src/mongo/gotools/test/legacy28/jstests/tool/csvexport2.js
deleted file mode 100644
index 2dc87b3c641..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/csvexport2.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// csvexport2.js
-
-
-t = new ToolTest( "csvexport2" )
-
-c = t.startDB( "foo" );
-
-// This test is designed to test exporting of a CodeWithScope object.
-// However, due to SERVER-3391, it is not possible to create a CodeWithScope object in the mongo shell,
-// therefore this test does not work. Once SERVER-3391 is resolved, this test should be un-commented out
-
-//assert.eq( 0 , c.count() , "setup1" );
-
-//c.insert({ a : 1 , b : Code("print(\"Hello \" + x);", {"x" : "World!"})})
-//assert.eq( 1 , c.count() , "setup2" );
-//t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv", "-f", "a,b")
-
-
-//c.drop()
-
-//assert.eq( 0 , c.count() , "after drop" )
-//t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--type", "csv", "--headerline");
-//assert.soon ( 1 + " == c.count()", "after import");
-
-//expected = { a : 1, b : "\"{ \"$code\" : print(\"Hello \" + x); , \"$scope\" : { \"x\" : \"World!\" } }"};
-//actual = c.findOne()
-
-//delete actual._id;
-//assert.eq( expected, actual );
-
-
-t.stop()
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/csvimport1.js b/src/mongo/gotools/test/legacy28/jstests/tool/csvimport1.js
deleted file mode 100644
index 87320afec87..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/csvimport1.js
+++ /dev/null
@@ -1,41 +0,0 @@
-// csvimport1.js
-
-
-t = new ToolTest( "csvimport1" )
-
-c = t.startDB( "foo" );
-
-base = []
-base.push({ a : 1, b : "this is some text.\nThis text spans multiple lines, and just for fun\ncontains a comma", "c" : "This has leading and trailing whitespace!" })
-base.push({a : 2, b : "When someone says something you \"put it in quotes\"", "c" : "I like embedded quotes/slashes\\backslashes" })
-base.push({a : 3, b : " This line contains the empty string and has leading and trailing whitespace inside the quotes! ", "c" : "" })
-base.push({a : 4, b : "", "c" : "How are empty entries handled?" })
-base.push({a : 5, b : "\"\"", c : "\"This string is in quotes and contains empty quotes (\"\")\""})
-base.push({ a : "a" , b : "b" , c : "c"})
-
-assert.eq( 0 , c.count() , "setup" );
-
-t.runTool( "import" , "--file" , "jstests/tool/data/csvimport1.csv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "-f" , "a,b,c" );
-assert.soon( base.length + " == c.count()" , "after import 1 " );
-
-a = c.find().sort( { a : 1 } ).toArray();
-for (i = 0; i < base.length; i++ ) {
- delete a[i]._id
- assert.docEq( base[i], a[i], "csv parse " + i)
-}
-
-c.drop()
-assert.eq( 0 , c.count() , "after drop" )
-
-t.runTool( "import" , "--file" , "jstests/tool/data/csvimport1.csv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" )
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( base.length - 1 , c.count() , "after import 2" );
-
-x = c.find().sort( { a : 1 } ).toArray();
-for (i = 0; i < base.length - 1; i++ ) {
- delete x[i]._id
- assert.docEq( base[i], x[i], "csv parse with headerline " + i)
-}
-
-
-t.stop()
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/data/a.tsv b/src/mongo/gotools/test/legacy28/jstests/tool/data/a.tsv
deleted file mode 100644
index 1e094179a63..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/data/a.tsv
+++ /dev/null
@@ -1,2 +0,0 @@
-a b c d e
- 1 foobar 5 -6
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/data/csvimport1.csv b/src/mongo/gotools/test/legacy28/jstests/tool/data/csvimport1.csv
deleted file mode 100644
index 256d40a9184..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/data/csvimport1.csv
+++ /dev/null
@@ -1,8 +0,0 @@
-a,b,c
-1,"this is some text.
-This text spans multiple lines, and just for fun
-contains a comma", "This has leading and trailing whitespace!"
-2, "When someone says something you ""put it in quotes""", I like embedded quotes/slashes\backslashes
- 3 , " This line contains the empty string and has leading and trailing whitespace inside the quotes! ", ""
- "4" ,, How are empty entries handled?
-"5","""""", """This string is in quotes and contains empty quotes ("""")"""
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/data/dumprestore6/foo.bson b/src/mongo/gotools/test/legacy28/jstests/tool/data/dumprestore6/foo.bson
deleted file mode 100644
index b8f8f99e6bf..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/data/dumprestore6/foo.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/data/dumprestore6/system.indexes.bson b/src/mongo/gotools/test/legacy28/jstests/tool/data/dumprestore6/system.indexes.bson
deleted file mode 100644
index dde25da302a..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/data/dumprestore6/system.indexes.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/dumpauth.js b/src/mongo/gotools/test/legacy28/jstests/tool/dumpauth.js
deleted file mode 100644
index af6706d107d..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/dumpauth.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// dumpauth.js
-// test mongodump with authentication
-
-
-port = allocatePorts( 1 )[ 0 ];
-baseName = "tool_dumpauth";
-
-m = startMongod( "--auth", "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
-db = m.getDB( "admin" );
-
-db.createUser({user: "testuser" , pwd: "testuser", roles: jsTest.adminUserRoles});
-assert( db.auth( "testuser" , "testuser" ) , "auth failed" );
-
-t = db[ baseName ];
-t.drop();
-
-for(var i = 0; i < 100; i++) {
- t["testcol"].save({ "x": i });
-}
-
-x = runMongoProgram( "mongodump",
- "--db", "admin",
- "--authenticationDatabase=admin",
- "-u", "testuser",
- "-p", "testuser",
- "-h", "127.0.0.1:"+port,
- "-vvv",
- "--collection", baseName+".testcol" );
-assert.eq(x, 0, "mongodump should succeed with authentication");
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/dumpfilename1.js b/src/mongo/gotools/test/legacy28/jstests/tool/dumpfilename1.js
deleted file mode 100644
index 38b430896bf..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/dumpfilename1.js
+++ /dev/null
@@ -1,13 +0,0 @@
-//dumpfilename1.js
-
-//Test designed to make sure error that dumping a collection with "/" fails
-
-t = new ToolTest( "dumpfilename1" );
-
-t.startDB( "foo" );
-
-c = t.db;
-assert.writeOK(c.getCollection("df/").insert({ a: 3 }));
-assert(t.runTool( "dump" , "--out" , t.ext ) != 0, "dump should fail with non-zero return code")
-t.stop();
-
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore1.js b/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore1.js
deleted file mode 100644
index a0f6f844d9e..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore1.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// dumprestore1.js
-
-
-t = new ToolTest( "dumprestore1" );
-
-c = t.startDB( "foo" );
-assert.eq( 0 , c.count() , "setup1" );
-c.save( { a : 22 } );
-assert.eq( 1 , c.count() , "setup2" );
-
-t.runTool( "dump" , "--out" , t.ext );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" );
-
-t.runTool( "restore" , "--dir" , t.ext );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-assert.eq( 22 , c.findOne().a , "after restore 2" );
-
-// ensure that --collection is used with --db. See SERVER-7721
-var ret = t.runTool( "dump" , "--collection" , "col" );
-assert.neq( ret, 0, "mongodump should return failure code" );
-t.stop();
-
-// Ensure that --db and --collection are provided when filename is "-" (stdin).
-ret = t.runTool( "restore" , "--collection" , "coll", "--dir", "-" );
-assert.neq( ret, 0, "mongorestore should return failure code" );
-t.stop();
-ret = t.runTool( "restore" , "--db" , "db", "--dir", "-" );
-assert.neq( ret, 0, "mongorestore should return failure code" );
-t.stop();
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore10.js b/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore10.js
deleted file mode 100644
index 5a9426dd7c4..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore10.js
+++ /dev/null
@@ -1,64 +0,0 @@
-// simple test to ensure write concern functions as expected
-
-
-var name = "dumprestore10";
-
-function step(msg) {
- msg = msg || "";
- this.x = (this.x || 0) + 1;
- print('\n' + name + ".js step " + this.x + ' ' + msg);
-}
-
-step();
-
-var replTest = new ReplSetTest( {name: name, nodes: 2} );
-var nodes = replTest.startSet();
-replTest.initiate();
-var master = replTest.getPrimary();
-var total = 1000;
-
-{
- step("store data");
- var foo = master.getDB("foo");
- for (i = 0; i < total; i++) {
- foo.bar.insert({ x: i, y: "abc" });
- }
-}
-
-{
- step("wait");
- replTest.awaitReplication();
-}
-
-step("mongodump from replset");
-
-var data = MongoRunner.dataDir + "/dumprestore10-dump1/";
-
-runMongoProgram( "mongodump", "--host", "127.0.0.1:"+replTest.ports[0], "--out", data );
-
-
-{
- step("remove data after dumping");
- master.getDB("foo").getCollection("bar").drop();
-}
-
-{
- step("wait");
- replTest.awaitReplication();
-}
-
-step("try mongorestore with write concern");
-
-runMongoProgram( "mongorestore", "--writeConcern", "2", "--host", "127.0.0.1:"+replTest.ports[0], "--dir", data );
-
-var x = 0;
-
-// no waiting for replication
-x = master.getDB("foo").getCollection("bar").count();
-
-assert.eq(x, total, "mongorestore should have successfully restored the collection");
-
-step("stopSet");
-replTest.stopSet();
-
-step("SUCCESS");
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore3.js b/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore3.js
deleted file mode 100644
index 45067c7ff06..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore3.js
+++ /dev/null
@@ -1,61 +0,0 @@
-// dumprestore3.js
-
-
-var name = "dumprestore3";
-
-function step(msg) {
- msg = msg || "";
- this.x = (this.x || 0) + 1;
- print('\n' + name + ".js step " + this.x + ' ' + msg);
-}
-
-step();
-
-var replTest = new ReplSetTest( {name: name, nodes: 2} );
-var nodes = replTest.startSet();
-replTest.initiate();
-var master = replTest.getPrimary();
-
-{
- step("populate master");
- var foo = master.getDB("foo");
- for (i = 0; i < 20; i++) {
- foo.bar.insert({ x: i, y: "abc" });
- }
-}
-
-{
- step("wait for slaves");
- replTest.awaitReplication();
-}
-
-{
- step("dump & restore a db into a slave");
- var port = 30020;
- var conn = startMongodTest(port, name + "-other");
- var c = conn.getDB("foo").bar;
- c.save({ a: 22 });
- assert.eq(1, c.count(), "setup2");
-}
-
-step("try mongorestore to slave");
-
-var data = MongoRunner.dataDir + "/dumprestore3-other1/";
-resetDbpath(data);
-runMongoProgram( "mongodump", "--host", "127.0.0.1:"+port, "--out", data );
-
-var x = runMongoProgram( "mongorestore", "--host", "127.0.0.1:"+replTest.ports[1], "--dir", data );
-assert.eq(x, 1, "mongorestore should exit w/ 1 on slave");
-
-step("try mongoimport to slave");
-
-dataFile = MongoRunner.dataDir + "/dumprestore3-other2.json";
-runMongoProgram( "mongoexport", "--host", "127.0.0.1:"+port, "--out", dataFile, "--db", "foo", "--collection", "bar" );
-
-x = runMongoProgram( "mongoimport", "--host", "127.0.0.1:"+replTest.ports[1], "--file", dataFile );
-assert.eq(x, 1, "mongoreimport should exit w/ 1 on slave"); // windows return is signed
-
-step("stopSet");
-replTest.stopSet();
-
-step("SUCCESS");
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore4.js b/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore4.js
deleted file mode 100644
index 337d9c34265..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore4.js
+++ /dev/null
@@ -1,43 +0,0 @@
-// dumprestore4.js -- see SERVER-2186
-
-
-// The point of this test is to ensure that mongorestore successfully
-// constructs indexes when the database being restored into has a
-// different name than the database dumped from. There are 2
-// issues here: (1) if you dumped from database "A" and restore into
-// database "B", B should have exactly the right indexes; (2) if for
-// some reason you have another database called "A" at the time of the
-// restore, mongorestore shouldn't touch it.
-
-t = new ToolTest( "dumprestore4" );
-
-c = t.startDB( "dumprestore4" );
-
-db=t.db
-
-dbname = db.getName();
-dbname2 = "NOT_"+dbname;
-
-db2=db.getSisterDB( dbname2 );
-
-db.dropDatabase(); // make sure it's empty
-db2.dropDatabase(); // make sure everybody's empty
-
-assert.eq( 0 , c.getIndexes().length , "setup1" );
-c.ensureIndex({ x : 1} );
-assert.eq( 2 , c.getIndexes().length , "setup2" ); // _id and x_1
-
-assert.eq( 0, t.runTool( "dump" , "-d" , dbname, "--out", t.ext ), "dump")
-
-// to ensure issue (2), we have to clear out the first db.
-// By inspection, db.dropIndexes() doesn't get rid of the _id index on c,
-// so we have to drop the collection.
-c.drop();
-assert.eq( 0, t.runTool( "restore" , "--dir" , t.ext + "/" + dbname, "-d", dbname2 ), "restore" );
-
-// issue (1)
-assert.eq( 2 , db2.dumprestore4.getIndexes().length , "after restore 1" );
-// issue (2)
-assert.eq( 0 , db.dumprestore4.getIndexes().length , "after restore 2" );
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore6.js b/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore6.js
deleted file mode 100644
index 1ea55e40f5c..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore6.js
+++ /dev/null
@@ -1,54 +0,0 @@
-// dumprestore6.js
-// Test restoring from a dump with an old index version
-
-
-t = new ToolTest( "dumprestore6" );
-
-c = t.startDB( "foo" );
-db = t.db
-if(db.serverStatus().storageEngine.name == "mmapv1") {
- assert.eq( 0 , c.count() , "setup1" );
-
- t.runTool("restore", "--dir", "jstests/tool/data/dumprestore6", "--db", "jstests_tool_dumprestore6")
-
- assert.soon( "c.findOne()" , "no data after sleep" );
- assert.eq( 1 , c.count() , "after restore" );
-
- var indexes = c.getIndexes();
- assert.eq( 2, indexes.length, "there aren't the correct number of indexes" );
- var aIndex = null;
- indexes.forEach(function(index) {
- if (index.name === "a_1") {
- aIndex = index;
- }
- });
- assert.neq(null, aIndex, "index doesn't exist" );
- assert.eq( 1 , aIndex.v, "index version wasn't updated");
-
- assert.eq( 1, c.count({v:0}), "dropped the 'v' field from a non-index collection")
-
- db.dropDatabase()
- assert.eq( 0 , c.count() , "after drop" );
-
- t.runTool("restore", "--dir", "jstests/tool/data/dumprestore6", "--db", "jstests_tool_dumprestore6", "--keepIndexVersion")
-
- assert.soon( "c.findOne()" , "no data after sleep2" );
- assert.eq( 1 , c.count() , "after restore2" );
-
- indexes = c.getIndexes();
- assert.eq( 2, indexes.length, "there aren't the correct number of indexes" );
- aIndex = null;
- indexes.forEach(function(index) {
- if (index.name === "a_1") {
- aIndex = index;
- }
- });
- assert.neq(null, aIndex, "index doesn't exist" );
- assert.eq( 0 , aIndex.v, "index version wasn't maintained")
-
- assert.eq( 1, c.count({v:0}), "dropped the 'v' field from a non-index collection")
-}else{
- print("skipping index version test on non-mmapv1 storage engine")
-}
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore7.js b/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore7.js
deleted file mode 100644
index a71725f434b..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore7.js
+++ /dev/null
@@ -1,66 +0,0 @@
-var name = "dumprestore7";
-
-
-function step(msg) {
- msg = msg || "";
- this.x = (this.x || 0) + 1;
- print('\n' + name + ".js step " + this.x + ' ' + msg);
-}
-
-step();
-
-var replTest = new ReplSetTest( {name: name, nodes: 1} );
-var nodes = replTest.startSet();
-replTest.initiate();
-var master = replTest.getPrimary();
-
-{
- step("first chunk of data");
- var foo = master.getDB("foo");
- for (i = 0; i < 20; i++) {
- foo.bar.insert({ x: i, y: "abc" });
- }
-}
-
-{
- step("wait");
- replTest.awaitReplication();
- var time = replTest.getPrimary().getDB("local").getCollection("oplog.rs").find().limit(1).sort({$natural:-1}).next();
- step(time.ts.t);
-}
-
-{
- step("second chunk of data");
- var foo = master.getDB("foo");
- for (i = 30; i < 50; i++) {
- foo.bar.insert({ x: i, y: "abc" });
- }
-}
-{
- var port = 30020;
- var conn = startMongodTest(port, name + "-other");
-}
-
-step("try mongodump with $timestamp");
-
-var data = MongoRunner.dataDir + "/dumprestore7-dump1/";
-var query = "{\"ts\":{\"$gt\":{\"$timestamp\":{\"t\":"+ time.ts.t + ",\"i\":" + time.ts.i +"}}}}";
-
-MongoRunner.runMongoTool( "mongodump",
- { "host": "127.0.0.1:"+replTest.ports[0],
- "db": "local", "collection": "oplog.rs",
- "query": query, "out": data });
-
-step("try mongorestore from $timestamp");
-
-runMongoProgram( "mongorestore", "--host", "127.0.0.1:"+port, "--dir", data );
-var x = 9;
-x = conn.getDB("local").getCollection("oplog.rs").count();
-
-assert.eq(x, 20, "mongorestore should only have the latter 20 entries");
-
-step("stopSet");
-replTest.stopSet();
-
-step("SUCCESS");
-
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore8.js b/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore8.js
deleted file mode 100644
index edc1a874343..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore8.js
+++ /dev/null
@@ -1,107 +0,0 @@
-// dumprestore8.js
-
-
-// This file tests that indexes and capped collection options get properly dumped and restored.
-// It checks that this works both when doing a full database dump/restore and when doing it just for a single db or collection
-
-t = new ToolTest( "dumprestore8" );
-
-t.startDB( "foo" );
-db = t.db;
-
-dbname = db.getName();
-dbname2 = "NOT_"+dbname;
-
-db.dropDatabase();
-
-assert.eq( 0 , db.foo.count() , "setup1" );
-db.foo.save( { a : 1, b : 1 } );
-db.foo.ensureIndex({a:1});
-db.foo.ensureIndex({b:1, _id:-1});
-assert.eq( 1 , db.foo.count() , "setup2" );
-
-
-assert.eq( 0 , db.bar.count() , "setup3" );
-db.createCollection("bar", {capped:true, size:1000, max:10});
-
-for (var i = 0; i < 1000; i++) {
- db.bar.save( { x : i } );
-}
-db.bar.ensureIndex({x:1});
-
-barDocCount = db.bar.count();
-assert.gt( barDocCount, 0 , "No documents inserted" );
-assert.lt( db.bar.count(), 1000 , "Capped collection didn't evict documents" );
-assert.eq( 5 , db.foo.getIndexes().length + db.bar.getIndexes().length, "Indexes weren't created right" );
-
-
-// Full dump/restore
-
-t.runTool( "dump" , "--out" , t.ext );
-
-db.dropDatabase();
-assert.eq( 0 , db.foo.count() , "foo not dropped" );
-assert.eq( 0 , db.bar.count() , "bar not dropped" );
-assert.eq( 0 , db.bar.getIndexes().length , "indexes on bar not dropped" );
-assert.eq( 0 , db.foo.getIndexes().length , "indexes on foo not dropped" );
-
-t.runTool( "restore" , "--dir" , t.ext );
-
-assert.soon( "db.foo.findOne()" , "no data after sleep" );
-assert.eq( 1 , db.foo.count() , "wrong number of docs restored to foo" );
-assert.eq( barDocCount, db.bar.count(), "wrong number of docs restored to bar" );
-for (var i = 0; i < 10; i++) {
- db.bar.save({x:i});
-}
-assert.eq( barDocCount, db.bar.count(), "Capped collection didn't evict documents after restore." );
-assert.eq( 5 , db.foo.getIndexes().length + db.bar.getIndexes().length, "Indexes weren't created correctly by restore");
-
-// Dump/restore single DB
-
-dumppath = t.ext + "singledbdump/";
-mkdir(dumppath);
-t.runTool( "dump" , "-d", dbname, "--out" , dumppath );
-
-db.dropDatabase();
-assert.eq( 0 , db.foo.count() , "foo not dropped2" );
-assert.eq( 0 , db.bar.count() , "bar not dropped2" );
-assert.eq( 0 , db.foo.getIndexes().length , "indexes on foo not dropped2" );
-assert.eq( 0 , db.bar.getIndexes().length , "indexes on bar not dropped2" );
-
-t.runTool( "restore" , "-d", dbname2, "--dir" , dumppath + dbname );
-
-db = db.getSiblingDB(dbname2);
-
-assert.soon( "db.foo.findOne()" , "no data after sleep 2" );
-assert.eq( 1 , db.foo.count() , "wrong number of docs restored to foo 2" );
-assert.eq( barDocCount, db.bar.count(), "wrong number of docs restored to bar 2" );
-for (var i = 0; i < 10; i++) {
- db.bar.save({x:i});
-}
-assert.eq( barDocCount, db.bar.count(), "Capped collection didn't evict documents after restore 2." );
-assert.eq( 5 , db.foo.getIndexes().length + db.bar.getIndexes().length, "Indexes weren't created correctly by restore 2");
-
-
-// Dump/restore single collection
-
-dumppath = t.ext + "singlecolldump/";
-mkdir(dumppath);
-t.runTool( "dump" , "-d", dbname2, "-c", "bar", "--out" , dumppath );
-
-db.dropDatabase();
-assert.eq( 0 , db.bar.count() , "bar not dropped3" );
-assert.eq( 0 , db.bar.getIndexes().length , "indexes not dropped3" );
-
-t.runTool( "restore" , "-d", dbname, "-c", "baz", "--dir" , dumppath + dbname2 + "/bar.bson" );
-
-db = db.getSiblingDB(dbname);
-
-assert.soon( "db.baz.findOne()" , "no data after sleep 2" );
-assert.eq( barDocCount, db.baz.count(), "wrong number of docs restored to bar 2" );
-for (var i = 0; i < 10; i++) {
- db.baz.save({x:i});
-}
-assert.eq( barDocCount, db.baz.count(), "Capped collection didn't evict documents after restore 3." );
-assert.eq( 2 , db.baz.getIndexes().length , "Indexes weren't created correctly by restore 3" );
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore9.js b/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore9.js
deleted file mode 100644
index cef9a623cf1..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore9.js
+++ /dev/null
@@ -1,79 +0,0 @@
-if (0) { // Test disabled until SERVER-3853 is finished.
-var name = "dumprestore9";
-function step(msg) {
- msg = msg || "";
- this.x = (this.x || 0) + 1;
- print('\n' + name + ".js step " + this.x + ' ' + msg);
-}
-
-s = new ShardingTest( "dumprestore9a", 2, 0, 3, { chunksize : 1, enableBalancer : 1 } );
-
-step("Shard collection");
-
-s.adminCommand( { enablesharding : "aaa" } ); // Make this db alphabetically before 'config' so it gets restored first
-s.adminCommand( { shardcollection : "aaa.foo" , key : { x : 1 } } );
-
-db = s.getDB( "aaa" );
-coll = db.foo;
-
-step("insert data");
-
-str = 'a';
-while (str.length < 1024*512) {
- str += str;
-}
-
-numDocs = 20;
-for (var i = 0; i < numDocs; i++) {
- coll.insert({x:i, str:str});
-}
-
-step("Wait for balancing");
-
-assert.soon( function(){ var x = s.chunkDiff( "foo" , "aaa" ); print( "chunk diff: " + x ); return x < 2; } , "no balance happened" , 8 * 60 * 1000 , 2000 );
-
-assert.eq(numDocs, coll.count(), "Documents weren't inserted correctly");
-
-step("dump cluster");
-
-dumpdir = MongoRunner.dataDir + "/dumprestore9-dump1/";
-resetDbpath(dumpdir);
-runMongoProgram( "mongodump", "--host", s._mongos[0].host, "--out", dumpdir );
-
-step("Shutting down cluster");
-
-s.stop();
-
-step("Starting up clean cluster");
-s = new ShardingTest( "dumprestore9b", 2, 0, 3, {chunksize:1} );
-
-db = s.getDB( "aaa" );
-coll = db.foo;
-
-assert.eq(0, coll.count(), "Data wasn't cleaned up by restarting sharding test");
-
-step("Restore data and config");
-
-runMongoProgram( "mongorestore", dumpdir, "--host", s._mongos[1].host, "--restoreShardingConfig", "--forceConfigRestore");
-
-config = s.getDB("config");
-assert(config.databases.findOne({_id:'aaa'}).partitioned, "Config data wasn't restored properly");
-
-assert( s.chunkDiff( "foo" , "aaa" ) < 2, "Chunk data wasn't restored properly");
-
-assert.eq(numDocs, coll.count(), "Didn't restore all documents properly2");
-assert.eq(numDocs, coll.find().itcount(), "Didn't restore all documents properly");
-
-for (var i = 0; i < numDocs; i++) {
- doc = coll.findOne({x:i});
- assert.eq(i, doc.x, "Doc missing from the shard it should be on");
-}
-
-for (var i = 0; i < s._connections.length; i++) {
- assert(s._connections[i].getDB("aaa").foo.count() > 0, "No data on shard: " + s._connections[i].host);
-}
-
-step("Stop cluster");
-s.stop();
-step("SUCCESS");
-} \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestoreWithNoOptions.js b/src/mongo/gotools/test/legacy28/jstests/tool/dumprestoreWithNoOptions.js
deleted file mode 100644
index bfd6f4fa579..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestoreWithNoOptions.js
+++ /dev/null
@@ -1,114 +0,0 @@
-// SERVER-6366
-// relates to SERVER-808
-//
-// This file tests that options are not restored upon
-// mongorestore with --noOptionsRestore
-//
-// It checks that this works both when doing a full
-// database dump/restore and when doing it just for a
-// single db or collection.
-
-
-t = new ToolTest( "dumprestoreWithNoOptions" );
-
-t.startDB( "foo" );
-db = t.db;
-
-// We turn this off to prevent the server from touching the 'options' field in system.namespaces.
-// This is important because we check exact values of the 'options' field in this test.
-db.adminCommand({setParameter:1, newCollectionsUsePowerOf2Sizes: false});
-
-dbname = db.getName();
-dbname2 = "NOT_"+dbname;
-
-db.dropDatabase();
-
-var defaultFlags = {}
-
-var options = { capped: true, size: 4096, autoIndexId: true };
-db.createCollection('capped', options);
-assert.eq( 1, db.capped.getIndexes().length, "auto index not created" );
-var cappedOptions = db.capped.exists().options;
-for ( var opt in options ) {
- assert.eq(options[opt], cappedOptions[opt],
- 'invalid option:' + tojson(options) + " " + tojson(cappedOptions));
-}
-assert.writeOK(db.capped.insert({ x: 1 }));
-
-// Full dump/restore
-
-t.runTool( "dump" , "--out" , t.ext );
-
-db.dropDatabase();
-assert.eq( 0, db.capped.count(), "capped not dropped");
-assert.eq( 0, db.capped.getIndexes().length, "indexes not dropped" );
-
-t.runTool( "restore" , "--dir" , t.ext , "--noOptionsRestore");
-
-assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
-assert(true !== db.capped.stats().capped, "restore options were not ignored");
-assert.eq( defaultFlags, db.capped.exists().options,
- "restore options not ignored: " + tojson( db.capped.exists() ) );
-
-// Dump/restore single DB
-
-db.dropDatabase();
-var options = { capped: true, size: 4096, autoIndexId: true };
-db.createCollection('capped', options);
-assert.eq( 1, db.capped.getIndexes().length, "auto index not created" );
-var cappedOptions = db.capped.exists().options;
-for ( var opt in options ) {
- assert.eq(options[opt], cappedOptions[opt], 'invalid option')
-}
-assert.writeOK(db.capped.insert({ x: 1 }));
-
-dumppath = t.ext + "noOptionsSingleDump/";
-mkdir(dumppath);
-t.runTool( "dump" , "-d", dbname, "--out" , dumppath );
-
-db.dropDatabase();
-assert.eq( 0, db.capped.count(), "capped not dropped");
-assert.eq( 0, db.capped.getIndexes().length, "indexes not dropped" );
-
-t.runTool( "restore" , "-d", dbname2, "--dir" , dumppath + dbname, "--noOptionsRestore");
-
-db = db.getSiblingDB(dbname2);
-
-assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
-assert(true !== db.capped.stats().capped, "restore options were not ignored");
-assert.eq( defaultFlags, db.capped.exists().options,
- "restore options not ignored: " + tojson( db.capped.exists() ) );
-
-// Dump/restore single collection
-
-db.dropDatabase();
-var options = { capped: true, size: 4096, autoIndexId: true };
-db.createCollection('capped', options);
-assert.eq( 1, db.capped.getIndexes().length, "auto index not created" );
-var cappedOptions = db.capped.exists().options;
-for ( var opt in options ) {
- assert.eq(options[opt], cappedOptions[opt], 'invalid option')
-}
-
-assert.writeOK(db.capped.insert({ x: 1 }));
-
-dumppath = t.ext + "noOptionsSingleColDump/";
-mkdir(dumppath);
-dbname = db.getName();
-t.runTool( "dump" , "-d", dbname, "-c", "capped", "--out" , dumppath );
-
-db.dropDatabase();
-
-assert.eq( 0, db.capped.count(), "capped not dropped");
-assert.eq( 0, db.capped.getIndexes().length, "indexes not dropped" );
-
-t.runTool( "restore", "-d", dbname, "--drop", "--noOptionsRestore", dumppath + dbname );
-
-db = db.getSiblingDB(dbname);
-
-assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
-assert( true !== db.capped.stats().capped, "restore options were not ignored" );
-assert.eq( defaultFlags, db.capped.exists().options,
- "restore options not ignored: " + tojson( db.capped.exists() ) );
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore_auth.js b/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore_auth.js
deleted file mode 100644
index 4bda54a5bdc..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore_auth.js
+++ /dev/null
@@ -1,117 +0,0 @@
-// dumprestore_auth.js
-
-
-t = new ToolTest("dumprestore_auth", { auth : "" });
-
-c = t.startDB("foo");
-var dbName = c.getDB().toString();
-print("DB is ",dbName);
-
-adminDB = c.getDB().getSiblingDB('admin');
-adminDB.createUser({user: 'admin', pwd: 'password', roles: ['root']});
-adminDB.auth('admin','password');
-adminDB.createUser({user: 'backup', pwd: 'password', roles: ['backup']});
-adminDB.createUser({user: 'restore', pwd: 'password', roles: ['restore']});
-
-// Add user defined roles & users with those roles
-var testUserAdmin = c.getDB().getSiblingDB(dbName);
-var backupActions = ["find","listCollections", "listIndexes"];
-testUserAdmin.createRole({role: "backupFoo",
- privileges: [{resource: {db: dbName, collection: "foo"}, actions:backupActions},
- {resource: {db: dbName, collection: "" },
- actions: backupActions}],
- roles: []});
-testUserAdmin.createUser({user: 'backupFoo', pwd: 'password', roles: ['backupFoo']});
-
-var restoreActions = ["collMod", "createCollection","createIndex","dropCollection","insert","listCollections","listIndexes"];
-var restoreActionsFind = restoreActions;
-restoreActionsFind.push("find");
-testUserAdmin.createRole({role: "restoreChester",
- privileges: [{resource: {db: dbName, collection: "chester"}, actions: restoreActions},
- {resource: {db: dbName, collection: ""}, actions:["listCollections","listIndexes"]},
- ],
- roles: []});
-testUserAdmin.createRole({role: "restoreFoo",
- privileges: [{resource: {db: dbName, collection: "foo"}, actions:restoreActions},
- {resource: {db: dbName, collection: ""}, actions:["listCollections","listIndexes"]},
- ],
- roles: []});
-testUserAdmin.createUser({user: 'restoreChester', pwd: 'password', roles: ['restoreChester']});
-testUserAdmin.createUser({user: 'restoreFoo', pwd: 'password', roles: ['restoreFoo']});
-
-var sysUsers = adminDB.system.users.count();
-assert.eq(0 , c.count() , "setup1");
-c.save({ a : 22 });
-assert.eq(1 , c.count() , "setup2");
-
-assert.commandWorked(c.runCommand("collMod", {usePowerOf2Sizes: false}));
-
-var collections = c.getDB().getCollectionInfos();
-var fooColl = null;
-collections.forEach(function(coll) {
- if (coll.name === "foo") {
- fooColl = coll;
- }
-});
-assert.neq(null, fooColl, "foo collection doesn't exist");
-assert(!fooColl.options.flags, "find namespaces 1");
-
-t.runTool("dump" , "--out" , t.ext, "--username", "backup", "--password", "password");
-
-c.drop();
-assert.eq(0 , c.count() , "after drop");
-
-// Restore should fail without user & pass
-t.runTool("restore" , "--dir" , t.ext, "--writeConcern" ,"0");
-assert.eq(0 , c.count() , "after restore without auth");
-
-// Restore should pass with authorized user
-t.runTool("restore" , "--dir" , t.ext, "--username", "restore", "--password", "password", "--writeConcern", "0");
-assert.soon("c.findOne()" , "no data after sleep");
-assert.eq(1 , c.count() , "after restore 2");
-assert.eq(22 , c.findOne().a , "after restore 2");
-
-collections = c.getDB().getCollectionInfos();
-fooColl = null;
-collections.forEach(function(coll) {
- if (coll.name === "foo") {
- fooColl = coll;
- }
-});
-assert.neq(null, fooColl, "foo collection doesn't exist");
-assert(!fooColl.options.flags, "find namespaces 2");
-
-assert.eq(sysUsers, adminDB.system.users.count());
-
-// Dump & restore DB/colection with user defined roles
-t.runTool("dump" , "--out" , t.ext, "--username", "backupFoo", "--password", "password",
- "--db", dbName, "--collection", "foo");
-
-c.drop();
-assert.eq(0 , c.count() , "after drop");
-
-// Restore with wrong user
-t.runTool("restore" , "--username", "restoreChester", "--password", "password",
- "--db", dbName, "--collection", "foo", t.ext+dbName+"/foo.bson", "--writeConcern", "0");
-assert.eq(0 , c.count() , "after restore with wrong user");
-
-// Restore with proper user
-t.runTool("restore" , "--username", "restoreFoo", "--password", "password",
- "--db", dbName, "--collection", "foo", t.ext+dbName+"/foo.bson", "--writeConcern", "0");
-assert.soon("c.findOne()" , "no data after sleep");
-assert.eq(1 , c.count() , "after restore 3");
-assert.eq(22 , c.findOne().a , "after restore 3");
-
-collections = c.getDB().getCollectionInfos();
-fooColl = null;
-collections.forEach(function(coll) {
- if (coll.name === "foo") {
- fooColl = coll;
- }
-});
-assert.neq(null, fooColl, "foo collection doesn't exist");
-assert(!fooColl.options.flags, "find namespaces 3");
-
-assert.eq(sysUsers, adminDB.system.users.count());
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore_auth2.js b/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore_auth2.js
deleted file mode 100644
index 0392d1be3db..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore_auth2.js
+++ /dev/null
@@ -1,98 +0,0 @@
-// dumprestore_auth2.js
-// Tests that mongodump and mongorestore properly handle access control information
-// Tests that the default auth roles of backup and restore work properly.
-
-t = new ToolTest("dumprestore_auth2", {auth: ""});
-
-coll = t.startDB("foo");
-admindb = coll.getDB().getSiblingDB("admin")
-
-// Create the relevant users and roles.
-admindb.createUser({user: "root", pwd: "pass", roles: ["root"]});
-admindb.auth("root", "pass");
-
-admindb.createUser({user: "backup", pwd: "pass", roles: ["backup"]});
-admindb.createUser({user: "restore", pwd: "pass", roles: ["restore"]});
-
-admindb.createRole({role: "customRole",
- privileges:[{resource: {db: "jstests_tool_dumprestore_auth2",
- collection: "foo"},
- actions: ["find"]}],
- roles:[]});
-admindb.createUser({user: "test", pwd: "pass", roles: ["customRole"]});
-
-coll.insert({word: "tomato"});
-assert.eq(1, coll.count());
-
-assert.eq(4, admindb.system.users.count(), "setup users")
-assert.eq(2, admindb.system.users.getIndexes().length,
- "setup2: " + tojson( admindb.system.users.getIndexes() ) );
-assert.eq(1, admindb.system.roles.count(), "setup3")
-assert.eq(2, admindb.system.roles.getIndexes().length, "setup4")
-assert.eq(1, admindb.system.version.count());
-var versionDoc = admindb.system.version.findOne();
-
-// Logout root user.
-admindb.logout();
-
-// Verify that the custom role works as expected.
-admindb.auth("test", "pass");
-assert.eq("tomato", coll.findOne().word);
-admindb.logout();
-
-// Dump the database.
-t.runTool("dump", "--out", t.ext, "--username", "backup", "--password", "pass");
-
-// Drop the relevant data in the database.
-admindb.auth("root", "pass");
-coll.getDB().dropDatabase();
-admindb.dropUser("backup");
-admindb.dropUser("test");
-admindb.dropRole("customRole");
-
-assert.eq(2, admindb.system.users.count(), "didn't drop backup and test users");
-assert.eq(0, admindb.system.roles.count(), "didn't drop roles");
-assert.eq(0, coll.count(), "didn't drop foo coll");
-
-// This test depends on W=0 to mask unique index violations.
-// This should be fixed once we implement TOOLS-341
-t.runTool("restore", "--dir", t.ext, "--username", "restore", "--password", "pass", "--writeConcern", "0");
-
-assert.soon("admindb.system.users.findOne()", "no data after restore");
-assert.eq(4, admindb.system.users.count(), "didn't restore users");
-assert.eq(2, admindb.system.users.getIndexes().length,
- "didn't restore user indexes");
-assert.eq(1, admindb.system.roles.find({role:'customRole'}).count(), "didn't restore roles");
-assert.eq(2, admindb.system.roles.getIndexes().length,
- "didn't restore role indexes");
-
-admindb.logout();
-
-// Login as user with customRole to verify privileges are restored.
-admindb.auth("test", "pass");
-assert.eq("tomato", coll.findOne().word);
-admindb.logout();
-
-admindb.auth("root", "pass");
-admindb.createUser({user: "root2", pwd: "pass", roles: ["root"]});
-admindb.dropRole("customRole");
-admindb.createRole({role: "customRole2", roles: [], privileges:[]});
-admindb.dropUser("root");
-admindb.logout();
-
-t.runTool("restore", "--dir", t.ext, "--username", "restore", "--password", "pass", "--drop", "--writeConcern", "0");
-
-admindb.auth("root", "pass");
-assert.soon("1 == admindb.system.users.find({user:'root'}).count()", "didn't restore users 2");
-assert.eq(0, admindb.system.users.find({user:'root2'}).count(), "didn't drop users");
-assert.eq(0, admindb.system.roles.find({role:'customRole2'}).count(), "didn't drop roles");
-assert.eq(1, admindb.system.roles.find({role:'customRole'}).count(), "didn't restore roles");
-assert.eq(2, admindb.system.users.getIndexes().length,
- "didn't maintain user indexes");
-assert.eq(2, admindb.system.roles.getIndexes().length,
- "didn't maintain role indexes");
-assert.eq(1, admindb.system.version.count(), "didn't restore version");
-assert.docEq(versionDoc, admindb.system.version.findOne(), "version doc wasn't restored properly");
-admindb.logout();
-
-t.stop(); \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore_auth3.js b/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore_auth3.js
deleted file mode 100644
index f65bed7abff..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore_auth3.js
+++ /dev/null
@@ -1,200 +0,0 @@
-// dumprestore_auth3.js
-// Tests that mongodump and mongorestore properly handle access control information when doing
-// single-db dumps and restores
-
-// Runs the tool with the given name against the given mongod.
-function runTool(toolName, mongod, options) {
- var opts = {host: mongod.host};
- Object.extend(opts, options);
- MongoRunner.runMongoTool(toolName, opts);
-}
-
-var mongod = MongoRunner.runMongod();
-var admindb = mongod.getDB("admin");
-var db = mongod.getDB("foo");
-
-jsTestLog("Creating Admin user & initial data");
-admindb.createUser({user: 'root', pwd: 'pass', roles: ['root']});
-admindb.createUser({user: 'backup', pwd: 'pass', roles: ['backup']});
-admindb.createUser({user: 'restore', pwd: 'pass', roles: ['restore']});
-admindb.createRole({role: "dummyRole", roles: [], privileges:[]});
-db.createUser({user: 'user', pwd: 'pass', roles: jsTest.basicUserRoles});
-db.createRole({role: 'role', roles: [], privileges:[]});
-var backupActions = ['find'];
-db.createRole({role: 'backupFooChester',
- privileges: [{resource: {db: 'foo', collection: 'chester'}, actions: backupActions}],
- roles: []});
-db.createUser({user: 'backupFooChester', pwd: 'pass', roles: ['backupFooChester']});
-
-var userCount = db.getUsers().length;
-var rolesCount = db.getRoles().length;
-var adminUsersCount = admindb.getUsers().length;
-var adminRolesCount = admindb.getRoles().length;
-var systemUsersCount = admindb.system.users.count();
-var systemVersionCount = admindb.system.version.count();
-
-db.bar.insert({a:1});
-
-assert.eq(1, db.bar.findOne().a);
-assert.eq(userCount, db.getUsers().length, "setup");
-assert.eq(rolesCount, db.getRoles().length, "setup2");
-assert.eq(adminUsersCount, admindb.getUsers().length, "setup3");
-assert.eq(adminRolesCount, admindb.getRoles().length, "setup4");
-assert.eq(systemUsersCount, admindb.system.users.count(), "setup5");
-assert.eq(systemVersionCount, admindb.system.version.count(),"system version");
-assert.eq(1, admindb.system.users.count({user: "restore"}), "Restore user is missing");
-assert.eq(1, admindb.system.users.count({user: "backup"}), "Backup user is missing");
-var versionDoc = admindb.system.version.findOne();
-
-jsTestLog("Dump foo database without dumping user data");
-var dumpDir = MongoRunner.getAndPrepareDumpDirectory("dumprestore_auth3");
-runTool("mongodump", mongod, {out: dumpDir, db: "foo"});
-db = mongod.getDB('foo');
-
-db.dropDatabase();
-db.dropAllUsers();
-db.dropAllRoles();
-
-jsTestLog("Restore foo database from dump that doesn't contain user data ");
-// This test depends on W=0 to mask unique index violations.
-// This should be fixed once we implement TOOLS-341
-runTool("mongorestore", mongod, {dir: dumpDir + "foo/", db: 'foo', restoreDbUsersAndRoles: "", writeConcern: "0"});
-
-db = mongod.getDB('foo');
-
-assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
-assert.eq(1, db.bar.findOne().a);
-assert.eq(0, db.getUsers().length, "Restore created users somehow");
-assert.eq(0, db.getRoles().length, "Restore created roles somehow");
-
-// Re-create user data
-db.createUser({user: 'user', pwd: 'password', roles: jsTest.basicUserRoles});
-db.createRole({role: 'role', roles: [], privileges:[]});
-userCount = 1;
-rolesCount = 1;
-
-assert.eq(1, db.bar.findOne().a);
-assert.eq(userCount, db.getUsers().length, "didn't create user");
-assert.eq(rolesCount, db.getRoles().length, "didn't create role");
-
-jsTestLog("Dump foo database *with* user data");
-runTool("mongodump", mongod, {out: dumpDir, db: "foo", dumpDbUsersAndRoles: ""});
-db = mongod.getDB('foo');
-
-db.dropDatabase();
-db.dropAllUsers();
-db.dropAllRoles();
-
-assert.eq(0, db.getUsers().length, "didn't drop users");
-assert.eq(0, db.getRoles().length, "didn't drop roles");
-assert.eq(0, db.bar.count(), "didn't drop 'bar' collection");
-
-jsTestLog("Restore foo database without restoring user data, even though it's in the dump");
-runTool("mongorestore", mongod, {dir: dumpDir + "foo/", db: 'foo', writeConcern: "0"});
-db = mongod.getDB('foo');
-
-assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
-assert.eq(1, db.bar.findOne().a);
-assert.eq(0, db.getUsers().length, "Restored users even though it shouldn't have");
-assert.eq(0, db.getRoles().length, "Restored roles even though it shouldn't have");
-
-jsTestLog("Restore foo database *with* user data");
-runTool("mongorestore", mongod, {dir: dumpDir + "foo/", db: 'foo', restoreDbUsersAndRoles: "", writeConcern: "0"});
-db = mongod.getDB('foo');
-admindb = mongod.getDB('admin');
-
-assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
-assert.eq(1, db.bar.findOne().a);
-assert.eq(userCount, db.getUsers().length, "didn't restore users");
-assert.eq(rolesCount, db.getRoles().length, "didn't restore roles");
-assert.eq(1, admindb.system.users.count({user: "restore", db: "admin"}), "Restore user is missing");
-assert.docEq(versionDoc,
- db.getSiblingDB('admin').system.version.findOne(),
- "version doc was changed by restore");
-
-jsTestLog("Make modifications to user data that should be overridden by the restore");
-db.dropUser('user')
-db.createUser({user: 'user2', pwd: 'password2', roles: jsTest.basicUserRoles});
-db.dropRole('role')
-db.createRole({role: 'role2', roles: [], privileges:[]});
-
-jsTestLog("Restore foo database (and user data) with --drop so it overrides the changes made");
-// Restore with --drop to override the changes to user data
-runTool("mongorestore", mongod,
- {dir: dumpDir + "foo/", db: 'foo', drop: "", restoreDbUsersAndRoles: "", writeConcern: "0"});
-db = mongod.getDB('foo');
-admindb = mongod.getDB('admin');
-
-assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
-assert.eq(adminUsersCount, admindb.getUsers().length, "Admin users were dropped");
-assert.eq(adminRolesCount, admindb.getRoles().length, "Admin roles were dropped");
-assert.eq(1, db.bar.findOne().a);
-assert.eq(userCount, db.getUsers().length, "didn't restore users");
-assert.eq("user", db.getUser('user').user, "didn't update user");
-assert.eq(rolesCount, db.getRoles().length, "didn't restore roles");
-assert.eq("role", db.getRole('role').role, "didn't update role");
-assert.docEq(versionDoc,
- db.getSiblingDB('admin').system.version.findOne(),
- "version doc was changed by restore");
-
-
-jsTestLog("Dump just the admin database. User data should be dumped by default");
-// Make a user in another database to make sure it is properly captured
-db.getSiblingDB('bar').createUser({user: "user", pwd: 'pwd', roles: []});
-db.getSiblingDB('admin').createUser({user: "user", pwd: 'pwd', roles: []});
-adminUsersCount += 1;
-runTool("mongodump", mongod, {out: dumpDir, db: "admin"});
-db = mongod.getDB('foo');
-
-// Change user data a bit.
-db.dropAllUsers();
-db.getSiblingDB('bar').createUser({user: "user2", pwd: 'pwd', roles: []});
-db.getSiblingDB('admin').dropAllUsers();
-
-jsTestLog("Restore just the admin database. User data should be restored by default");
-runTool("mongorestore", mongod, {dir: dumpDir + "admin/", db: 'admin', drop: "", writeConcern: "0"});
-db = mongod.getDB('foo');
-var otherdb = db.getSiblingDB('bar');
-var admindb = db.getSiblingDB('admin');
-
-assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
-assert.eq(1, db.bar.findOne().a);
-assert.eq(userCount, db.getUsers().length, "didn't restore users");
-assert.eq("user", db.getUser('user').user, "didn't restore user");
-assert.eq(rolesCount, db.getRoles().length, "didn't restore roles");
-assert.eq("role", db.getRole('role').role, "didn't restore role");
-assert.eq(1, otherdb.getUsers().length, "didn't restore users for bar database");
-assert.eq("user", otherdb.getUsers()[0].user, "didn't restore user for bar database");
-assert.eq(adminUsersCount, admindb.getUsers().length, "didn't restore users for admin database");
-assert.eq("user", admindb.getUser("user").user, "didn't restore user for admin database");
-assert.eq(6, admindb.system.users.count(), "has the wrong # of users for the whole server");
-assert.eq(2, admindb.system.roles.count(), "has the wrong # of roles for the whole server");
-assert.docEq(versionDoc,
- db.getSiblingDB('admin').system.version.findOne(),
- "version doc was changed by restore");
-
-jsTestLog("Dump all databases");
-runTool("mongodump", mongod, {out: dumpDir});
-db = mongod.getDB('foo');
-
-db.dropDatabase();
-db.dropAllUsers();
-db.dropAllRoles();
-
-assert.eq(0, db.getUsers().length, "didn't drop users");
-assert.eq(0, db.getRoles().length, "didn't drop roles");
-assert.eq(0, db.bar.count(), "didn't drop 'bar' collection");
-
-jsTestLog("Restore all databases");
-runTool("mongorestore", mongod, {dir: dumpDir, writeConcern: "0"});
-db = mongod.getDB('foo');
-
-assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
-assert.eq(1, db.bar.findOne().a);
-assert.eq(1, db.getUsers().length, "didn't restore users");
-assert.eq(1, db.getRoles().length, "didn't restore roles");
-assert.docEq(versionDoc,
- db.getSiblingDB('admin').system.version.findOne(),
- "version doc was changed by restore");
-
-MongoRunner.stopMongod(mongod); \ No newline at end of file
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore_excludecollections.js b/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore_excludecollections.js
deleted file mode 100644
index dcfab742053..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/dumprestore_excludecollections.js
+++ /dev/null
@@ -1,112 +0,0 @@
-// Tests for mongodump options for excluding collections
-
-
-var testBaseName = "jstests_tool_dumprestore_excludecollections";
-
-var dumpDir = MongoRunner.dataPath + testBaseName + "_dump_external/";
-
-var mongodSource = MongoRunner.runMongod();
-var sourceDB = mongodSource.getDB(testBaseName);
-var mongodDest = MongoRunner.runMongod();
-var destDB = mongodDest.getDB(testBaseName);
-
-jsTest.log("Inserting documents into source mongod");
-sourceDB.test.insert({x:1});
-sourceDB.test2.insert({x:2});
-sourceDB.test3.insert({x:3});
-sourceDB.foo.insert({f:1});
-sourceDB.foo2.insert({f:2});
-
-jsTest.log("Testing incompabible option combinations");
-resetDbpath(dumpDir);
-ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
- excludeCollection : "test",
- host : mongodSource.host });
-assert.neq(ret, 0, "mongodump started successfully with --excludeCollection but no --db option");
-
-resetDbpath(dumpDir);
-ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
- db : testBaseName,
- collection : "foo",
- excludeCollection : "test",
- host : mongodSource.host });
-assert.neq(ret, 0, "mongodump started successfully with --excludeCollection and --collection");
-
-resetDbpath(dumpDir);
-ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
- excludeCollectionsWithPrefix : "test",
- host : mongodSource.host });
-assert.neq(ret, 0, "mongodump started successfully with --excludeCollectionsWithPrefix but " +
- "no --db option");
-
-resetDbpath(dumpDir);
-ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
- db : testBaseName,
- collection : "foo",
- excludeCollectionsWithPrefix : "test",
- host : mongodSource.host });
-assert.neq(ret, 0, "mongodump started successfully with --excludeCollectionsWithPrefix and " +
- "--collection");
-
-jsTest.log("Testing proper behavior of collection exclusion");
-resetDbpath(dumpDir);
-ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
- db : testBaseName,
- excludeCollection : "test",
- host : mongodSource.host });
-
-ret = MongoRunner.runMongoTool("mongorestore", { dir : dumpDir, host : mongodDest.host });
-assert.eq(ret, 0, "failed to run mongodump on expected successful call");
-assert.eq(destDB.test.count(), 0, "Found documents in collection that we excluded");
-assert.eq(destDB.test2.count(), 1, "Did not find document in collection that we did not exclude");
-assert.eq(destDB.test2.findOne().x, 2, "Wrong value in document");
-assert.eq(destDB.test3.count(), 1, "Did not find document in collection that we did not exclude");
-assert.eq(destDB.test3.findOne().x, 3, "Wrong value in document");
-assert.eq(destDB.foo.count(), 1, "Did not find document in collection that we did not exclude");
-assert.eq(destDB.foo.findOne().f, 1, "Wrong value in document");
-assert.eq(destDB.foo2.count(), 1, "Did not find document in collection that we did not exclude");
-assert.eq(destDB.foo2.findOne().f, 2, "Wrong value in document");
-destDB.dropDatabase();
-
-resetDbpath(dumpDir);
-ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
- db : testBaseName,
- excludeCollectionsWithPrefix : "test",
- host : mongodSource.host });
-
-ret = MongoRunner.runMongoTool("mongorestore", { dir : dumpDir, host : mongodDest.host });
-assert.eq(ret, 0, "failed to run mongodump on expected successful call");
-assert.eq(destDB.test.count(), 0, "Found documents in collection that we excluded");
-assert.eq(destDB.test2.count(), 0, "Found documents in collection that we excluded");
-assert.eq(destDB.test3.count(), 0, "Found documents in collection that we excluded");
-assert.eq(destDB.foo.count(), 1, "Did not find document in collection that we did not exclude");
-assert.eq(destDB.foo.findOne().f, 1, "Wrong value in document");
-assert.eq(destDB.foo2.count(), 1, "Did not find document in collection that we did not exclude");
-assert.eq(destDB.foo2.findOne().f, 2, "Wrong value in document");
-destDB.dropDatabase();
-
-resetDbpath(dumpDir);
-ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
- db : testBaseName,
- excludeCollection : "foo",
- excludeCollectionsWithPrefix : "test",
- host : mongodSource.host });
-
-ret = MongoRunner.runMongoTool("mongorestore", { dir : dumpDir, host : mongodDest.host });
-assert.eq(ret, 0, "failed to run mongodump on expected successful call");
-assert.eq(destDB.test.count(), 0, "Found documents in collection that we excluded");
-assert.eq(destDB.test2.count(), 0, "Found documents in collection that we excluded");
-assert.eq(destDB.test3.count(), 0, "Found documents in collection that we excluded");
-assert.eq(destDB.foo.count(), 0, "Found documents in collection that we excluded");
-assert.eq(destDB.foo2.count(), 1, "Did not find document in collection that we did not exclude");
-assert.eq(destDB.foo2.findOne().f, 2, "Wrong value in document");
-destDB.dropDatabase();
-
-// The --excludeCollection and --excludeCollectionsWithPrefix options can be specified multiple
-// times, but that is not tested here because right now MongoRunners can only be configured using
-// javascript objects which do not allow duplicate keys. See SERVER-14220.
-
-MongoRunner.stopMongod(mongodDest.port);
-MongoRunner.stopMongod(mongodSource.port);
-
-print(testBaseName + " success!");
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/dumpsecondary.js b/src/mongo/gotools/test/legacy28/jstests/tool/dumpsecondary.js
deleted file mode 100644
index 92cd6b9fff1..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/dumpsecondary.js
+++ /dev/null
@@ -1,39 +0,0 @@
-
-var replTest = new ReplSetTest( {name: 'testSet', nodes: 2} );
-
-var nodes = replTest.startSet();
-replTest.initiate();
-
-var master = replTest.getPrimary();
-db = master.getDB("foo")
-db.foo.save({a: 1000});
-replTest.awaitReplication();
-replTest.awaitSecondaryNodes();
-
-assert.eq( 1 , db.foo.count() , "setup" );
-
-var slaves = replTest.liveNodes.slaves;
-assert( slaves.length == 1, "Expected 1 slave but length was " + slaves.length );
-slave = slaves[0];
-
-var args = ['mongodump', '-h', slave.host, '--out', MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external/'];
-var authargs = ['--username', jsTest.options().authUser, '--password', jsTest.options().authPassword];
-if (jsTest.options().keyFile) {
- args = args.concat(authargs);
-}
-runMongoProgram.apply(null, args);
-db.foo.drop()
-
-assert.eq( 0 , db.foo.count() , "after drop" );
-args = ['mongorestore', '-h', master.host, MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external/'];
-if (jsTest.options().keyFile) {
- args = args.concat(authargs);
-}
-runMongoProgram.apply(null, args)
-assert.soon( "db.foo.findOne()" , "no data after sleep" );
-assert.eq( 1 , db.foo.count() , "after restore" );
-assert.eq( 1000 , db.foo.findOne().a , "after restore 2" );
-
-resetDbpath(MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external')
-
-replTest.stopSet(15)
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/exportimport1.js b/src/mongo/gotools/test/legacy28/jstests/tool/exportimport1.js
deleted file mode 100644
index 5e206d8c40b..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/exportimport1.js
+++ /dev/null
@@ -1,67 +0,0 @@
-// exportimport1.js
-
-
-t = new ToolTest( "exportimport1" );
-
-c = t.startDB( "foo" );
-assert.eq( 0 , c.count() , "setup1" );
-var arr = ["x", undefined, "y", undefined];
-c.save( { a : 22 , b : arr} );
-assert.eq( 1 , c.count() , "setup2" );
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-var doc = c.findOne();
-assert.eq( 22 , doc.a , "after restore 2" );
-for (var i=0; i<arr.length; i++) {
- if (typeof arr[i] == 'undefined') {
- // null should be { "$undefined" : true }, but this is a workaround for SERVER-6102
- assert.eq( null, doc.b[i] , "after restore array: "+i );
- } else {
- assert.eq( arr[i], doc.b[i] , "after restore array: "+i );
- }
-}
-
-// now with --jsonArray
-
-t.runTool( "export" , "--jsonArray" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
-
-t.runTool( "import" , "--jsonArray" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-assert.eq( 22 , c.findOne().a , "after restore 2" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-arr = ["a", undefined, "c"];
-c.save({a : arr});
-assert.eq( 1 , c.count() , "setup2" );
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-var doc = c.findOne();
-for (var i=0; i<arr.length; i++) {
- if (typeof arr[i] == 'undefined') {
- // null should be { "$undefined" : true }, but this is a workaround for SERVER-6102
- assert.eq( null, doc.a[i] , "after restore array: "+i );
- } else {
- assert.eq( arr[i], doc.a[i] , "after restore array: "+i );
- }
-}
-
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/exportimport3.js b/src/mongo/gotools/test/legacy28/jstests/tool/exportimport3.js
deleted file mode 100644
index 4f0fdd46609..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/exportimport3.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// exportimport3.js
-
-
-t = new ToolTest( "exportimport3" );
-
-c = t.startDB( "foo" );
-assert.eq( 0 , c.count() , "setup1" );
-c.save({a:1})
-c.save({a:2})
-c.save({a:3})
-c.save({a:4})
-c.save({a:5})
-
-assert.eq( 5 , c.count() , "setup2" );
-
-
-t.runTool( "export" , "--jsonArray" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
-
-t.runTool( "import" , "--jsonArray" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 5 , c.count() , "after restore 2" );
-
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/exportimport4.js b/src/mongo/gotools/test/legacy28/jstests/tool/exportimport4.js
deleted file mode 100644
index c0d82a135bc..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/exportimport4.js
+++ /dev/null
@@ -1,57 +0,0 @@
-// exportimport4.js
-
-
-t = new ToolTest( "exportimport4" );
-c = t.startDB( "foo" );
-
-install_test_data = function() {
- c.drop();
-
- assert.eq( 0 , c.count() , "setup1" );
-
- c.save( { a : [1, 2, 3, NaN, 4, null, 5] } );
- c.save( { a : [1, 2, 3, 4, 5] } );
- c.save( { a : [ NaN ] } );
- c.save( { a : [1, 2, 3, 4, NaN, NaN, 5, NaN] } );
- c.save( { a : [1, 2, 3, 4, null, null, 5, null] } );
-
- assert.eq( 5 , c.count() , "setup2" );
-};
-
-// attempt to export fields without NaN
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[NaN]}}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 2 , c.count() , "after restore 1" );
-
-// attempt to export fields with NaN
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:NaN}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 3 , c.count() , "after restore 2" );
-
-// attempt to export everything
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 5 , c.count() , "after restore 3" );
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/exportimport5.js b/src/mongo/gotools/test/legacy28/jstests/tool/exportimport5.js
deleted file mode 100644
index 47dd98c2553..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/exportimport5.js
+++ /dev/null
@@ -1,82 +0,0 @@
-// exportimport4.js
-
-
-t = new ToolTest( "exportimport5" );
-c = t.startDB( "foo" );
-
-install_test_data = function() {
- c.drop();
-
- assert.eq( 0 , c.count() , "setup1" );
-
- c.save( { a : [1, 2, 3, Infinity, 4, null, 5] } );
- c.save( { a : [1, 2, 3, 4, 5] } );
- c.save( { a : [ Infinity ] } );
- c.save( { a : [1, 2, 3, 4, Infinity, Infinity, 5, -Infinity] } );
- c.save( { a : [1, 2, 3, 4, null, null, 5, null] } );
- c.save( { a : [ -Infinity ] } );
-
- assert.eq( 6 , c.count() , "setup2" );
-};
-
-// attempt to export fields without Infinity
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[Infinity]}}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 3 , c.count() , "after restore 1" );
-
-// attempt to export fields with Infinity
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:Infinity}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 3 , c.count() , "after restore 2" );
-
-// attempt to export fields without -Infinity
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[-Infinity]}}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 4 , c.count() , "after restore 3" );
-
-// attempt to export fields with -Infinity
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:-Infinity}" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 2 , c.count() , "after restore 4" );
-
-// attempt to export everything
-install_test_data();
-
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-
-c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
-
-assert.eq( 6 , c.count() , "after restore 5" );
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/exportimport6.js b/src/mongo/gotools/test/legacy28/jstests/tool/exportimport6.js
deleted file mode 100644
index a6406dfa880..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/exportimport6.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// exportimport6.js
-// test export with skip, limit and sort
-
-
-t = new ToolTest("exportimport6");
-
-c = t.startDB("foo");
-assert.eq(0, c.count(), "setup1");
-c.save({a:1, b:1})
-c.save({a:1, b:2})
-c.save({a:2, b:3})
-c.save({a:2, b:3})
-c.save({a:3, b:4})
-c.save({a:3, b:5})
-
-assert.eq(6, c.count(), "setup2");
-
-t.runTool("export", "--out", t.extFile, "-d", t.baseName, "-c", "foo",
- "--sort", "{a:1, b:-1}", "--skip", "4", "--limit", "1");
-
-c.drop();
-assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
-t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo");
-assert.eq(1, c.count(), "count should be 1");
-assert.eq(5, c.findOne().b, printjson(c.findOne()));
-
-t.stop();
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/exportimport_bigarray.js b/src/mongo/gotools/test/legacy28/jstests/tool/exportimport_bigarray.js
deleted file mode 100644
index e8bd4a468b4..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/exportimport_bigarray.js
+++ /dev/null
@@ -1,59 +0,0 @@
-// Test importing collections represented as a single line array above the maximum document size
-
-
-var tt = new ToolTest('exportimport_bigarray_test');
-
-var exportimport_db = tt.startDB();
-
-var src = exportimport_db.src;
-var dst = exportimport_db.dst;
-
-src.drop();
-dst.drop();
-
-// Calculate the number of documents it takes to get above 16MB (here using 20MB just to be safe)
-var bigString = new Array(1025).toString();
-var doc = {_id: new ObjectId(), x:bigString};
-var docSize = Object.bsonsize(doc);
-var numDocs = Math.floor(20*1024*1024 / docSize);
-
-print('Size of one document: ' + docSize)
-print('Number of documents to exceed maximum BSON size: ' + numDocs)
-
-print('About to insert ' + numDocs + ' documents into ' +
- exportimport_db.getName() + '.' + src.getName());
-var i;
-var bulk = src.initializeUnorderedBulkOp();
-for (i = 0; i < numDocs; ++i) {
- bulk.insert({ x: bigString });
-}
-assert.writeOK(bulk.execute());
-
-data = 'data/exportimport_array_test.json';
-
-print('About to call mongoexport on: ' + exportimport_db.getName() + '.' + src.getName() +
- ' with file: ' + data);
-tt.runTool('export', '--out' , data, '-d', exportimport_db.getName(), '-c', src.getName(),
- '--jsonArray');
-
-print('About to call mongoimport on: ' + exportimport_db.getName() + '.' + dst.getName() +
- ' with file: ' + data);
-tt.runTool('import', '--file', data, '-d', exportimport_db.getName(), '-c', dst.getName(),
- '--jsonArray');
-
-print('About to verify that source and destination collections match');
-
-src_cursor = src.find().sort({ _id : 1 });
-dst_cursor = dst.find().sort({ _id : 1 });
-
-var documentCount = 0;
-while (src_cursor.hasNext()) {
- assert(dst_cursor.hasNext(), 'Source has more documents than destination. ' +
- 'Destination has ' + documentCount + ' documents.');
- assert.eq(src_cursor.next(), dst_cursor.next(), 'Mismatch on document ' + documentCount);
- ++documentCount;
-}
-assert(!dst_cursor.hasNext(), 'Destination has more documents than source. ' +
- 'Source has ' + documentCount + ' documents.');
-
-print('Verified that source and destination collections match');
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/exportimport_date.js b/src/mongo/gotools/test/legacy28/jstests/tool/exportimport_date.js
deleted file mode 100644
index 9dc6c275a96..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/exportimport_date.js
+++ /dev/null
@@ -1,50 +0,0 @@
-
-var tt = new ToolTest('exportimport_date_test');
-
-var exportimport_db = tt.startDB();
-
-var src = exportimport_db.src;
-var dst = exportimport_db.dst;
-
-src.drop();
-dst.drop();
-
-// Insert a date that we can format
-var formatable = ISODate("1970-01-02T05:00:00Z");
-assert.eq(formatable.valueOf(), 104400000);
-src.insert({ "_id" : formatable });
-
-// Insert a date that we cannot format as an ISODate string
-var nonformatable = ISODate("3001-01-01T00:00:00Z");
-assert.eq(nonformatable.valueOf(), 32535216000000);
-src.insert({ "_id" : nonformatable });
-
-// Verify number of documents inserted
-assert.eq(2, src.find().itcount());
-
-data = 'data/exportimport_date_test.json';
-
-print('About to call mongoexport on: ' + exportimport_db.getName() + '.' + src.getName() +
- ' with file: ' + data);
-tt.runTool('export', '--out' , data, '-d', exportimport_db.getName(), '-c', src.getName());
-
-print('About to call mongoimport on: ' + exportimport_db.getName() + '.' + dst.getName() +
- ' with file: ' + data);
-tt.runTool('import', '--file', data, '-d', exportimport_db.getName(), '-c', dst.getName());
-
-print('About to verify that source and destination collections match');
-
-src_cursor = src.find().sort({ _id : 1 });
-dst_cursor = dst.find().sort({ _id : 1 });
-
-var documentCount = 0;
-while (src_cursor.hasNext()) {
- assert(dst_cursor.hasNext(), 'Source has more documents than destination. ' +
- 'Destination has ' + documentCount + ' documents.');
- assert.eq(src_cursor.next(), dst_cursor.next(), 'Mismatch on document ' + documentCount);
- ++documentCount;
-}
-assert(!dst_cursor.hasNext(), 'Destination has more documents than source. ' +
- 'Source has ' + documentCount + ' documents.');
-
-print('Verified that source and destination collections match');
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/exportimport_minkey_maxkey.js b/src/mongo/gotools/test/legacy28/jstests/tool/exportimport_minkey_maxkey.js
deleted file mode 100644
index a4705dc3ceb..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/exportimport_minkey_maxkey.js
+++ /dev/null
@@ -1,38 +0,0 @@
-
-var tt = new ToolTest('exportimport_minkey_maxkey_test');
-
-var exportimport_db = tt.startDB();
-
-var src = exportimport_db.src;
-var dst = exportimport_db.dst;
-
-src.drop();
-dst.drop();
-
-src.insert({ "_id" : MaxKey });
-src.insert({ "_id" : MinKey });
-
-print('About to call mongoexport on: ' + exportimport_db.getName() + '.' + src.getName() +
- ' with file: ' + tt.extFile);
-tt.runTool('export', '--out' , tt.extFile, '-d', exportimport_db.getName(), '-c', src.getName());
-
-print('About to call mongoimport on: ' + exportimport_db.getName() + '.' + dst.getName() +
- ' with file: ' + tt.extFile);
-tt.runTool('import', '--file', tt.extFile, '-d', exportimport_db.getName(), '-c', dst.getName());
-
-print('About to verify that source and destination collections match');
-
-src_cursor = src.find().sort({ _id : 1 });
-dst_cursor = dst.find().sort({ _id : 1 });
-
-var documentCount = 0;
-while (src_cursor.hasNext()) {
- assert(dst_cursor.hasNext(), 'Source has more documents than destination. ' +
- 'Destination has ' + documentCount + ' documents.');
- assert.eq(src_cursor.next(), dst_cursor.next(), 'Mismatch on document ' + documentCount);
- ++documentCount;
-}
-assert(!dst_cursor.hasNext(), 'Destination has more documents than source. ' +
- 'Source has ' + documentCount + ' documents.');
-
-print('Verified that source and destination collections match');
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/files1.js b/src/mongo/gotools/test/legacy28/jstests/tool/files1.js
deleted file mode 100644
index 3db783df19f..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/files1.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// files1.js
-
-
-t = new ToolTest( "files1" )
-
-db = t.startDB();
-
-filename = 'mongod'
-if ( _isWindows() )
- filename += '.exe'
-
-t.runTool( "files" , "-d" , t.baseName , "put" , filename );
-md5 = md5sumFile(filename);
-
-file_obj = db.fs.files.findOne()
-assert( file_obj , "A 0" );
-md5_stored = file_obj.md5;
-md5_computed = db.runCommand({filemd5: file_obj._id}).md5;
-assert.eq( md5 , md5_stored , "A 1" );
-assert.eq( md5 , md5_computed, "A 2" );
-
-mkdir(t.ext);
-
-t.runTool( "files" , "-d" , t.baseName , "get" , filename , '-l' , t.extFile );
-md5 = md5sumFile(t.extFile);
-assert.eq( md5 , md5_stored , "B" );
-
-t.stop()
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/oplog1.js b/src/mongo/gotools/test/legacy28/jstests/tool/oplog1.js
deleted file mode 100644
index 5beb1d697e0..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/oplog1.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// oplog1.js
-
-
-// very basic test for mongooplog
-// need a lot more, but test that it functions at all
-
-t = new ToolTest( "oplog1" );
-
-db = t.startDB();
-
-output = db.output
-
-doc = { _id : 5 , x : 17 };
-
-assert.commandWorked(db.createCollection(output.getName()));
-
-db.oplog.insert( { ts : new Timestamp() , "op" : "i" , "ns" : output.getFullName() , "o" : doc } );
-
-assert.eq( 0 , output.count() , "before" )
-
-t.runTool( "oplog" , "--oplogns" , db.getName() + ".oplog" , "--from" , "127.0.0.1:" + t.port , "-vv" );
-
-assert.eq( 1 , output.count() , "after" );
-
-assert.eq( doc , output.findOne() , "after check" );
-
-t.stop();
-
-
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/oplog_all_ops.js b/src/mongo/gotools/test/legacy28/jstests/tool/oplog_all_ops.js
deleted file mode 100644
index a0eb3e34dc9..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/oplog_all_ops.js
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * Performs a simple test on mongooplog by doing different types of operations
- * that will show up in the oplog then replaying it on another replica set.
- * Correctness is verified using the dbhash command.
- */
-
-
-var repl1 = new ReplSetTest({ name: 'rs1', nodes: [{ nopreallocj: '' },
- { arbiter: true }, { arbiter: true }]});
-
-repl1.startSet({ oplogSize: 10 });
-repl1.initiate();
-repl1.awaitSecondaryNodes();
-
-var repl1Conn = new Mongo(repl1.getURL());
-var testDB = repl1Conn.getDB('test');
-var testColl = testDB.user;
-
-// op i
-testColl.insert({ x: 1 });
-testColl.insert({ x: 2 });
-
-// op c
-testDB.dropDatabase();
-
-testColl.insert({ y: 1 });
-testColl.insert({ y: 2 });
-testColl.insert({ y: 3 });
-
-// op u
-testColl.update({}, { $inc: { z: 1 }}, true, true);
-
-// op d
-testColl.remove({ y: 2 });
-
-// op n
-var oplogColl = repl1Conn.getCollection('local.oplog.rs');
-oplogColl.insert({ ts: new Timestamp(), op: 'n', ns: testColl.getFullName(), 'o': { x: 'noop' }});
-
-var repl2 = new ReplSetTest({ name: 'rs2', startPort: 31100, nodes: [{ nopreallocj: '' },
- { arbiter: true }, { arbiter: true }]});
-
-repl2.startSet({ oplogSize: 10 });
-repl2.initiate();
-repl2.awaitSecondaryNodes();
-
-var srcConn = repl1.getPrimary();
-runMongoProgram('mongooplog', '--from', repl1.getPrimary().host,
- '--host', repl2.getPrimary().host);
-
-var repl1Hash = testDB.runCommand({ dbhash: 1 });
-
-var repl2Conn = new Mongo(repl2.getURL());
-var testDB2 = repl2Conn.getDB(testDB.getName());
-var repl2Hash = testDB2.runCommand({ dbhash: 1 });
-
-assert(repl1Hash.md5);
-assert.eq(repl1Hash.md5, repl2Hash.md5);
-
-repl1.stopSet();
-repl2.stopSet();
-
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/restorewithauth.js b/src/mongo/gotools/test/legacy28/jstests/tool/restorewithauth.js
deleted file mode 100644
index d17769cf396..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/restorewithauth.js
+++ /dev/null
@@ -1,117 +0,0 @@
-/* SERVER-4972
- * Test for mongorestore on server with --auth allows restore without credentials of colls
- * with no index
- */
-/*
- * 1) Start mongo without auth.
- * 2) Write to collection
- * 3) Take dump of the collection using mongodump.
- * 4) Drop the collection.
- * 5) Stop mongod from step 1.
- * 6) Restart mongod with auth.
- * 7) Add admin user to kick authentication
- * 8) Try restore without auth credentials. The restore should fail
- * 9) Try restore with correct auth credentials. The restore should succeed this time.
- */
-
-
-var port = allocatePorts(1)[0];
-baseName = "jstests_restorewithauth";
-var conn = startMongod( "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface",
- "--nojournal", "--bind_ip", "127.0.0.1" );
-
-// write to ns foo.bar
-var foo = conn.getDB( "foo" );
-for( var i = 0; i < 4; i++ ) {
- foo["bar"].save( { "x": i } );
- foo["baz"].save({"x": i});
-}
-
-// make sure the collection exists
-var collNames = foo.getCollectionNames();
-assert.neq(-1, collNames.indexOf("bar"), "bar collection doesn't exist");
-
-//make sure it has no index except _id
-assert.eq(foo.bar.getIndexes().length, 1);
-assert.eq(foo.baz.getIndexes().length, 1);
-
-foo.bar.createIndex({x:1});
-assert.eq(foo.bar.getIndexes().length, 2);
-assert.eq(foo.baz.getIndexes().length, 1);
-
-// get data dump
-var dumpdir = MongoRunner.dataDir + "/restorewithauth-dump1/";
-resetDbpath( dumpdir );
-x = runMongoProgram("mongodump", "--db", "foo", "-h", "127.0.0.1:"+port, "--out", dumpdir);
-
-// now drop the db
-foo.dropDatabase();
-
-// stop mongod
-stopMongod( port );
-
-// start mongod with --auth
-conn = startMongod( "--auth", "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface",
- "--nojournal", "--bind_ip", "127.0.0.1" );
-
-// admin user
-var admin = conn.getDB( "admin" )
-admin.createUser({user: "admin" , pwd: "admin", roles: jsTest.adminUserRoles});
-admin.auth( "admin" , "admin" );
-
-var foo = conn.getDB( "foo" )
-
-// make sure no collection with the same name exists
-collNames = foo.getCollectionNames();
-assert.eq(-1, collNames.indexOf("bar"), "bar collection already exists");
-assert.eq(-1, collNames.indexOf("baz"), "baz collection already exists");
-
-// now try to restore dump
-x = runMongoProgram( "mongorestore", "-h", "127.0.0.1:" + port, "--dir" , dumpdir, "-vvvvv" );
-
-// make sure that the collection isn't restored
-collNames = foo.getCollectionNames();
-assert.eq(-1, collNames.indexOf("bar"), "bar collection was restored");
-assert.eq(-1, collNames.indexOf("baz"), "baz collection was restored");
-
-// now try to restore dump with correct credentials
-x = runMongoProgram( "mongorestore",
- "-h", "127.0.0.1:" + port,
- "-d", "foo",
- "--authenticationDatabase=admin",
- "-u", "admin",
- "-p", "admin",
- "--dir", dumpdir + "foo/",
- "-vvvvv");
-
-// make sure that the collection was restored
-collNames = foo.getCollectionNames();
-assert.neq(-1, collNames.indexOf("bar"), "bar collection was not restored");
-assert.neq(-1, collNames.indexOf("baz"), "baz collection was not restored");
-
-// make sure the collection has 4 documents
-assert.eq(foo.bar.count(), 4);
-assert.eq(foo.baz.count(), 4);
-
-foo.dropDatabase();
-
-foo.createUser({user: 'user', pwd: 'password', roles: jsTest.basicUserRoles});
-
-// now try to restore dump with foo database credentials
-x = runMongoProgram("mongorestore",
- "-h", "127.0.0.1:" + port,
- "-d", "foo",
- "-u", "user",
- "-p", "password",
- "--dir", dumpdir + "foo/",
- "-vvvvv");
-
-// make sure that the collection was restored
-collNames = foo.getCollectionNames();
-assert.neq(-1, collNames.indexOf("bar"), "bar collection was not restored");
-assert.neq(-1, collNames.indexOf("baz"), "baz collection was not restored");
-assert.eq(foo.bar.count(), 4);
-assert.eq(foo.baz.count(), 4);
-assert.eq(foo.bar.getIndexes().length + foo.baz.getIndexes().length, 3); // _id on foo, _id on bar, x on foo
-
-stopMongod( port );
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/stat1.js b/src/mongo/gotools/test/legacy28/jstests/tool/stat1.js
deleted file mode 100644
index 3855d6c13c6..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/stat1.js
+++ /dev/null
@@ -1,18 +0,0 @@
-// stat1.js
-// test mongostat with authentication SERVER-3875
-
-
-port = allocatePorts( 1 )[ 0 ];
-baseName = "tool_stat1";
-
-m = startMongod( "--auth", "--port", port, "--dbpath", MongoRunner.dataPath + baseName, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
-db = m.getDB( "admin" );
-
-db.createUser({user: "eliot" , pwd: "eliot", roles: jsTest.adminUserRoles});
-assert( db.auth( "eliot" , "eliot" ) , "auth failed" );
-
-x = runMongoProgram( "mongostat", "--host", "127.0.0.1:"+port, "--username", "eliot", "--password", "eliot", "--rowcount", "1", "--authenticationDatabase=admin" );
-assert.eq(x, 0, "mongostat should exit successfully with eliot:eliot");
-
-x = runMongoProgram( "mongostat", "--host", "127.0.0.1:"+port, "--username", "eliot", "--password", "wrong", "--rowcount", "1", "--authenticationDatabase=admin" );
-assert.eq(x, 1, "mongostat should exit with -1 with eliot:wrong");
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/tool1.js b/src/mongo/gotools/test/legacy28/jstests/tool/tool1.js
deleted file mode 100644
index f7c6f769e72..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/tool1.js
+++ /dev/null
@@ -1,44 +0,0 @@
-// mongo tool tests, very basic to start with
-
-
-baseName = "jstests_tool_tool1";
-dbPath = MongoRunner.dataPath + baseName + "/";
-externalPath = MongoRunner.dataPath + baseName + "_external/";
-externalBaseName = "export.json";
-externalFile = externalPath + externalBaseName;
-
-function fileSize(){
- var l = listFiles( externalPath );
- for ( var i=0; i<l.length; i++ ){
- if ( l[i].baseName == externalBaseName )
- return l[i].size;
- }
- return -1;
-}
-
-
-port = allocatePorts( 1 )[ 0 ];
-resetDbpath( externalPath );
-
-m = startMongod( "--port", port, "--dbpath", dbPath, "--nohttpinterface", "--noprealloc" , "--bind_ip", "127.0.0.1" );
-c = m.getDB( baseName ).getCollection( baseName );
-c.save( { a: 1 } );
-assert( c.findOne() );
-
-runMongoProgram( "mongodump", "--host", "127.0.0.1:" + port, "--out", externalPath );
-c.drop();
-runMongoProgram( "mongorestore", "--host", "127.0.0.1:" + port, "--dir", externalPath );
-assert.soon( "c.findOne()" , "mongodump then restore has no data w/sleep" );
-assert( c.findOne() , "mongodump then restore has no data" );
-assert.eq( 1 , c.findOne().a , "mongodump then restore has no broken data" );
-
-resetDbpath( externalPath );
-
-assert.eq( -1 , fileSize() , "mongoexport prep invalid" );
-runMongoProgram( "mongoexport", "--host", "127.0.0.1:" + port, "-d", baseName, "-c", baseName, "--out", externalFile );
-assert.lt( 10 , fileSize() , "file size changed" );
-
-c.drop();
-runMongoProgram( "mongoimport", "--host", "127.0.0.1:" + port, "-d", baseName, "-c", baseName, "--file", externalFile );
-assert.soon( "c.findOne()" , "mongo import json A" );
-assert( c.findOne() && 1 == c.findOne().a , "mongo import json B" );
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/tool_replset.js b/src/mongo/gotools/test/legacy28/jstests/tool/tool_replset.js
deleted file mode 100644
index 934b380c464..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/tool_replset.js
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Test to ensure that (dump/restore/export/import/oplog) works with a replica set connection string
- * 1. Start a replica set.
- * 2. Add data to a collection.
- * 3. Take a dump of the database.
- * 4. Drop the db.
- * 5. Restore the db.
- * 6. Export a collection.
- * 7. Drop the collection.
- * 8. Import the collection.
- * 9. Add data to the oplog.rs collection.
- * 10. Ensure that the document doesn't exist yet.
- * 11. Now play the mongooplog tool.
- * 12. Make sure that the oplog was played
-*/
-
-// Load utility methods for replica set tests
-load("jstests/replsets/rslib.js");
-
-print("starting the replica set")
-
-var replTest = new ReplSetTest({ name: 'tool_replset', nodes: 2, oplogSize: 5 });
-var nodes = replTest.startSet();
-replTest.initiate();
-var master = replTest.getPrimary();
-for (var i = 0; i < 100; i++) {
- master.getDB("foo").bar.insert({ a: i });
-}
-replTest.awaitReplication();
-
-var replSetConnString = "tool_replset/127.0.0.1:" + replTest.ports[0] +
- ",127.0.0.1:" + replTest.ports[1];
-
-// Test with mongodump/mongorestore
-print("dump the db");
-var data = MongoRunner.dataDir + "/tool_replset-dump1/";
-runMongoProgram("mongodump", "--host", replSetConnString, "--out", data);
-
-print("db successfully dumped, dropping now");
-master.getDB("foo").dropDatabase();
-replTest.awaitReplication();
-
-print("restore the db");
-runMongoProgram("mongorestore", "--host", replSetConnString, "--dir", data);
-
-print("db successfully restored, checking count")
-var x = master.getDB("foo").getCollection("bar").count();
-assert.eq(x, 100, "mongorestore should have successfully restored the collection");
-
-replTest.awaitReplication();
-
-// Test with mongoexport/mongoimport
-print("export the collection");
-var extFile = MongoRunner.dataDir + "/tool_replset/export";
-runMongoProgram("mongoexport", "--host", replSetConnString, "--out", extFile,
- "-d", "foo", "-c", "bar");
-
-print("collection successfully exported, dropping now");
-master.getDB("foo").getCollection("bar").drop();
-replTest.awaitReplication();
-
-print("import the collection");
-runMongoProgram("mongoimport", "--host", replSetConnString, "--file", extFile,
- "-d", "foo", "-c", "bar");
-
-var x = master.getDB("foo").getCollection("bar").count();
-assert.eq(x, 100, "mongoimport should have successfully imported the collection");
-
-// Test with mongooplog
-var doc = { _id : 5, x : 17 };
-master.getDB("local").oplog.rs.insert({ ts : new Timestamp(), "op" : "i", "ns" : "foo.bar",
- "o" : doc, "v" : NumberInt(2) });
-
-assert.eq(100, master.getDB("foo").getCollection("bar").count(), "count before running mongooplog " +
- "was not 100 as expected");
-
-runMongoProgram("mongooplog" , "--from", "127.0.0.1:" + replTest.ports[0],
- "--host", replSetConnString);
-
-print("running mongooplog to replay the oplog")
-
-assert.eq(101, master.getDB("foo").getCollection("bar").count(), "count after running mongooplog " +
- "was not 101 as expected")
-
-print("all tests successful, stopping replica set")
-
-replTest.stopSet();
-
-print("replica set stopped, test complete")
diff --git a/src/mongo/gotools/test/legacy28/jstests/tool/tsv1.js b/src/mongo/gotools/test/legacy28/jstests/tool/tsv1.js
deleted file mode 100644
index 677bec2af9c..00000000000
--- a/src/mongo/gotools/test/legacy28/jstests/tool/tsv1.js
+++ /dev/null
@@ -1,33 +0,0 @@
-// tsv1.js
-
-
-t = new ToolTest( "tsv1" )
-
-c = t.startDB( "foo" );
-
-base = { a : "", b : 1 , c : "foobar" , d: 5, e: -6 };
-
-t.runTool( "import" , "--file" , "jstests/tool/data/a.tsv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "tsv" , "-f" , "a,b,c,d,e" );
-assert.soon( "2 == c.count()" , "restore 2" );
-
-a = c.find().sort( { a : 1 } ).toArray();
-delete a[0]._id
-delete a[1]._id
-
-assert.docEq( { a : "a" , b : "b" , c : "c" , d: "d", e: "e"} , a[1] , "tsv parse 1" );
-assert.docEq( base , a[0] , "tsv parse 0" )
-
-c.drop()
-assert.eq( 0 , c.count() , "after drop 2" )
-
-t.runTool( "import" , "--file" , "jstests/tool/data/a.tsv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "tsv" , "--headerline" )
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-
-x = c.findOne()
-delete x._id;
-assert.docEq( base , x , "tsv parse 2" )
-
-
-
-t.stop()
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/__init__.py b/src/mongo/gotools/test/qa-tests/buildscripts/__init__.py
deleted file mode 100644
index 8b137891791..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/buildlogger.py b/src/mongo/gotools/test/qa-tests/buildscripts/buildlogger.py
deleted file mode 100644
index d2466e495c0..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/buildlogger.py
+++ /dev/null
@@ -1,479 +0,0 @@
-"""
-buildlogger.py
-
-Wrap a command (specified on the command line invocation of buildlogger.py)
-and send output in batches to the buildlogs web application via HTTP POST.
-
-The script configures itself from environment variables:
-
- required env vars:
- MONGO_BUILDER_NAME (e.g. "Nightly Linux 64-bit")
- MONGO_BUILD_NUMBER (an integer)
- MONGO_TEST_FILENAME (not required when invoked with -g)
-
- optional env vars:
- MONGO_PHASE (e.g. "core", "slow nightly", etc)
- MONGO_* (any other environment vars are passed to the web app)
- BUILDLOGGER_CREDENTIALS (see below)
-
-This script has two modes: a "test" mode, intended to wrap the invocation of
-an individual test file, and a "global" mode, intended to wrap the mongod
-instances that run throughout the duration of a mongo test phase (the logs
-from "global" invocations are displayed interspersed with the logs of each
-test, in order to let the buildlogs web app display the full output sensibly.)
-
-If the BUILDLOGGER_CREDENTIALS environment variable is set, it should be a
-path to a valid Python file containing "username" and "password" variables,
-which should be valid credentials for authenticating to the buildlogger web
-app. For example:
-
- username = "hello"
- password = "world"
-
-If BUILDLOGGER_CREDENTIALS is a relative path, then the working directory
-and the directories one, two, and three levels up, are searched, in that
-order.
-"""
-
-import functools
-import os
-import os.path
-import re
-import signal
-import socket
-import subprocess
-import sys
-import time
-import traceback
-import urllib2
-import utils
-
-# suppress deprecation warnings that happen when
-# we import the 'buildbot.tac' file below
-import warnings
-warnings.simplefilter('ignore', DeprecationWarning)
-
-try:
- import json
-except:
- try:
- import simplejson as json
- except:
- json = None
-
-# try to load the shared secret from settings.py
-# which will be one, two, or three directories up
-# from this file's location
-credentials_file = os.environ.get('BUILDLOGGER_CREDENTIALS', 'buildbot.tac')
-credentials_loc, credentials_name = os.path.split(credentials_file)
-if not credentials_loc:
- here = os.path.abspath(os.path.dirname(__file__))
- possible_paths = [
- os.path.abspath(os.path.join(here, '..')),
- os.path.abspath(os.path.join(here, '..', '..')),
- os.path.abspath(os.path.join(here, '..', '..', '..')),
- ]
-else:
- possible_paths = [credentials_loc]
-
-username, password = None, None
-for path in possible_paths:
- credentials_path = os.path.join(path, credentials_name)
- if os.path.isfile(credentials_path):
- credentials = {}
- try:
- execfile(credentials_path, credentials, credentials)
- username = credentials.get('slavename', credentials.get('username'))
- password = credentials.get('passwd', credentials.get('password'))
- break
- except:
- pass
-
-
-URL_ROOT = os.environ.get('BUILDLOGGER_URL', 'http://buildlogs.mongodb.org/')
-TIMEOUT_SECONDS = 10
-socket.setdefaulttimeout(TIMEOUT_SECONDS)
-
-digest_handler = urllib2.HTTPDigestAuthHandler()
-digest_handler.add_password(
- realm='buildlogs',
- uri=URL_ROOT,
- user=username,
- passwd=password)
-
-# This version of HTTPErrorProcessor is copied from
-# Python 2.7, and allows REST response codes (e.g.
-# "201 Created") which are treated as errors by
-# older versions.
-class HTTPErrorProcessor(urllib2.HTTPErrorProcessor):
- def http_response(self, request, response):
- code, msg, hdrs = response.code, response.msg, response.info()
-
- # According to RFC 2616, "2xx" code indicates that the client's
- # request was successfully received, understood, and accepted.
- if not (200 <= code < 300):
- response = self.parent.error(
- 'http', request, response, code, msg, hdrs)
-
- return response
-
-url_opener = urllib2.build_opener(digest_handler, HTTPErrorProcessor())
-
-def url(endpoint):
- if not endpoint.endswith('/'):
- endpoint = '%s/' % endpoint
-
- return '%s/%s' % (URL_ROOT.rstrip('/'), endpoint)
-
-def post(endpoint, data, headers=None):
- data = json.dumps(data, encoding='utf-8')
-
- headers = headers or {}
- headers.update({'Content-Type': 'application/json; charset=utf-8'})
-
- req = urllib2.Request(url=url(endpoint), data=data, headers=headers)
- try:
- response = url_opener.open(req)
- except urllib2.URLError:
- import traceback
- traceback.print_exc(file=sys.stderr)
- sys.stderr.flush()
- # indicate that the request did not succeed
- return None
-
- response_headers = dict(response.info())
-
- # eg "Content-Type: application/json; charset=utf-8"
- content_type = response_headers.get('content-type')
- match = re.match(r'(?P<mimetype>[^;]+).*(?:charset=(?P<charset>[^ ]+))?$', content_type)
- if match and match.group('mimetype') == 'application/json':
- encoding = match.group('charset') or 'utf-8'
- return json.load(response, encoding=encoding)
-
- return response.read()
-
-def traceback_to_stderr(func):
- """
- decorator which logs any exceptions encountered to stderr
- and returns none.
- """
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- try:
- return func(*args, **kwargs)
- except urllib2.HTTPError, err:
- sys.stderr.write('error: HTTP code %d\n----\n' % err.code)
- if hasattr(err, 'hdrs'):
- for k, v in err.hdrs.items():
- sys.stderr.write("%s: %s\n" % (k, v))
- sys.stderr.write('\n')
- sys.stderr.write(err.read())
- sys.stderr.write('\n----\n')
- sys.stderr.flush()
- except:
- sys.stderr.write('Traceback from buildlogger:\n')
- traceback.print_exc(file=sys.stderr)
- sys.stderr.flush()
- return None
- return wrapper
-
-
-@traceback_to_stderr
-def get_or_create_build(builder, buildnum, extra={}):
- data = {'builder': builder, 'buildnum': buildnum}
- data.update(extra)
- response = post('build', data)
- if response is None:
- return None
- return response['id']
-
-@traceback_to_stderr
-def create_test(build_id, test_filename, test_command, test_phase):
- response = post('build/%s/test' % build_id, {
- 'test_filename': test_filename,
- 'command': test_command,
- 'phase': test_phase,
- })
- if response is None:
- return None
- return response['id']
-
-@traceback_to_stderr
-def append_test_logs(build_id, test_id, log_lines):
- response = post('build/%s/test/%s' % (build_id, test_id), data=log_lines)
- if response is None:
- return False
- return True
-
-@traceback_to_stderr
-def append_global_logs(build_id, log_lines):
- """
- "global" logs are for the mongod(s) started by smoke.py
- that last the duration of a test phase -- since there
- may be output in here that is important but spans individual
- tests, the buildlogs webapp handles these logs specially.
- """
- response = post('build/%s' % build_id, data=log_lines)
- if response is None:
- return False
- return True
-
-@traceback_to_stderr
-def finish_test(build_id, test_id, failed=False):
- response = post('build/%s/test/%s' % (build_id, test_id), data=[], headers={
- 'X-Sendlogs-Test-Done': 'true',
- 'X-Sendlogs-Test-Failed': failed and 'true' or 'false',
- })
- if response is None:
- return False
- return True
-
-def run_and_echo(command):
- """
- this just calls the command, and returns its return code,
- allowing stdout and stderr to work as normal. it is used
- as a fallback when environment variables or python
- dependencies cannot be configured, or when the logging
- webapp is unavailable, etc
- """
- proc = subprocess.Popen(command)
-
- def handle_sigterm(signum, frame):
- try:
- proc.send_signal(signum)
- except AttributeError:
- os.kill(proc.pid, signum)
- orig_handler = signal.signal(signal.SIGTERM, handle_sigterm)
-
- proc.wait()
-
- signal.signal(signal.SIGTERM, orig_handler)
- return proc.returncode
-
-class LogAppender(object):
- def __init__(self, callback, args, send_after_lines=2000, send_after_seconds=10):
- self.callback = callback
- self.callback_args = args
-
- self.send_after_lines = send_after_lines
- self.send_after_seconds = send_after_seconds
-
- self.buf = []
- self.retrybuf = []
- self.last_sent = time.time()
-
- def __call__(self, line):
- self.buf.append((time.time(), line))
-
- delay = time.time() - self.last_sent
- if len(self.buf) >= self.send_after_lines or delay >= self.send_after_seconds:
- self.submit()
-
- # no return value is expected
-
- def submit(self):
- if len(self.buf) + len(self.retrybuf) == 0:
- return True
-
- args = list(self.callback_args)
- args.append(list(self.buf) + self.retrybuf)
-
- self.last_sent = time.time()
-
- if self.callback(*args):
- self.buf = []
- self.retrybuf = []
- return True
- else:
- self.retrybuf += self.buf
- self.buf = []
- return False
-
-
-def wrap_test(command):
- """
- call the given command, intercept its stdout and stderr,
- and send results in batches of 100 lines or 10s to the
- buildlogger webapp
- """
-
- # get builder name and build number from environment
- builder = os.environ.get('MONGO_BUILDER_NAME')
- buildnum = os.environ.get('MONGO_BUILD_NUMBER')
-
- if builder is None or buildnum is None:
- return run_and_echo(command)
-
- try:
- buildnum = int(buildnum)
- except ValueError:
- sys.stderr.write('buildlogger: build number ("%s") was not an int\n' % buildnum)
- sys.stderr.flush()
- return run_and_echo(command)
-
- # test takes some extra info
- phase = os.environ.get('MONGO_PHASE', 'unknown')
- test_filename = os.environ.get('MONGO_TEST_FILENAME', 'unknown')
-
- build_info = dict((k, v) for k, v in os.environ.items() if k.startswith('MONGO_'))
- build_info.pop('MONGO_BUILDER_NAME', None)
- build_info.pop('MONGO_BUILD_NUMBER', None)
- build_info.pop('MONGO_PHASE', None)
- build_info.pop('MONGO_TEST_FILENAME', None)
-
- build_id = get_or_create_build(builder, buildnum, extra=build_info)
- if not build_id:
- return run_and_echo(command)
-
- test_id = create_test(build_id, test_filename, ' '.join(command), phase)
- if not test_id:
- return run_and_echo(command)
-
- # the peculiar formatting here matches what is printed by
- # smoke.py when starting tests
- output_url = '%s/build/%s/test/%s/' % (URL_ROOT.rstrip('/'), build_id, test_id)
- sys.stdout.write(' (output suppressed; see %s)\n' % output_url)
- sys.stdout.flush()
-
- callback = LogAppender(callback=append_test_logs, args=(build_id, test_id))
- returncode = loop_and_callback(command, callback)
- failed = bool(returncode != 0)
-
- # this will append any remaining unsubmitted logs, or
- # return True if there are none left to submit
- tries = 5
- while not callback.submit() and tries > 0:
- sys.stderr.write('failed to finish sending test logs, retrying in 1s\n')
- sys.stderr.flush()
- time.sleep(1)
- tries -= 1
-
- tries = 5
- while not finish_test(build_id, test_id, failed) and tries > 5:
- sys.stderr.write('failed to mark test finished, retrying in 1s\n')
- sys.stderr.flush()
- time.sleep(1)
- tries -= 1
-
- return returncode
-
-def wrap_global(command):
- """
- call the given command, intercept its stdout and stderr,
- and send results in batches of 100 lines or 10s to the
- buildlogger webapp. see :func:`append_global_logs` for the
- difference between "global" and "test" log output.
- """
-
- # get builder name and build number from environment
- builder = os.environ.get('MONGO_BUILDER_NAME')
- buildnum = os.environ.get('MONGO_BUILD_NUMBER')
-
- if builder is None or buildnum is None:
- return run_and_echo(command)
-
- try:
- buildnum = int(buildnum)
- except ValueError:
- sys.stderr.write('int(os.environ["MONGO_BUILD_NUMBER"]):\n')
- sys.stderr.write(traceback.format_exc())
- sys.stderr.flush()
- return run_and_echo(command)
-
- build_info = dict((k, v) for k, v in os.environ.items() if k.startswith('MONGO_'))
- build_info.pop('MONGO_BUILDER_NAME', None)
- build_info.pop('MONGO_BUILD_NUMBER', None)
-
- build_id = get_or_create_build(builder, buildnum, extra=build_info)
- if not build_id:
- return run_and_echo(command)
-
- callback = LogAppender(callback=append_global_logs, args=(build_id, ))
- returncode = loop_and_callback(command, callback)
-
- # this will append any remaining unsubmitted logs, or
- # return True if there are none left to submit
- tries = 5
- while not callback.submit() and tries > 0:
- sys.stderr.write('failed to finish sending global logs, retrying in 1s\n')
- sys.stderr.flush()
- time.sleep(1)
- tries -= 1
-
- return returncode
-
-def loop_and_callback(command, callback):
- """
- run the given command (a sequence of arguments, ordinarily
- from sys.argv), and call the given callback with each line
- of stdout or stderr encountered. after the command is finished,
- callback is called once more with None instead of a string.
- """
- proc = subprocess.Popen(
- command,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- )
-
- def handle_sigterm(signum, frame):
- try:
- proc.send_signal(signum)
- except AttributeError:
- os.kill(proc.pid, signum)
-
- # register a handler to delegate SIGTERM
- # to the child process
- orig_handler = signal.signal(signal.SIGTERM, handle_sigterm)
-
- while proc.poll() is None:
- try:
- line = proc.stdout.readline().strip('\r\n')
- line = utils.unicode_dammit(line)
- callback(line)
- except IOError:
- # if the signal handler is called while
- # we're waiting for readline() to return,
- # don't show a traceback
- break
-
- # There may be additional buffered output
- for line in proc.stdout.readlines():
- callback(line.strip('\r\n'))
-
- # restore the original signal handler, if any
- signal.signal(signal.SIGTERM, orig_handler)
- return proc.returncode
-
-
-if __name__ == '__main__':
- # argv[0] is 'buildlogger.py'
- del sys.argv[0]
-
- if sys.argv[0] in ('-g', '--global'):
- # then this is wrapping a "global" command, and should
- # submit global logs to the build, not test logs to a
- # test within the build
- del sys.argv[0]
- wrapper = wrap_global
-
- else:
- wrapper = wrap_test
-
- # if we are missing credentials or the json module, then
- # we can't use buildlogger; so just echo output, but also
- # log why we can't work.
- if json is None:
- sys.stderr.write('buildlogger: could not import a json module\n')
- sys.stderr.flush()
- wrapper = run_and_echo
-
- elif username is None or password is None:
- sys.stderr.write('buildlogger: could not find or import %s for authentication\n' % credentials_file)
- sys.stderr.flush()
- wrapper = run_and_echo
-
- # otherwise wrap a test command as normal; the
- # wrapper functions return the return code of
- # the wrapped command, so that should be our
- # exit code as well.
- sys.exit(wrapper(sys.argv))
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/cleanbb.py b/src/mongo/gotools/test/qa-tests/buildscripts/cleanbb.py
deleted file mode 100644
index fee7efdc0c1..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/cleanbb.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/env python
-
-import re
-import sys
-import os, os.path
-import utils
-import time
-from optparse import OptionParser
-
-def shouldKill( c, root=None ):
-
- if "smoke.py" in c:
- return False
-
- if "emr.py" in c:
- return False
-
- if "java" in c:
- return False
-
- # if root directory is provided, see if command line matches mongod process running
- # with the same data directory
-
- if root and re.compile("(\W|^)mongod(.exe)?\s+.*--dbpath(\s+|=)%s(\s+|$)" % root).search( c ):
- return True
-
- if ( c.find( "buildbot" ) >= 0 or c.find( "slave" ) >= 0 ) and c.find( "/mongo/" ) >= 0:
- return True
-
- if c.find( "xml-data/build-dir" ) >= 0: # for bamboo
- return True
-
- return False
-
-def killprocs( signal="", root=None ):
- killed = 0
-
- if sys.platform == 'win32':
- return killed
-
- l = utils.getprocesslist()
- print( "num procs:" + str( len( l ) ) )
- if len(l) == 0:
- print( "no procs" )
- try:
- print( execsys( "/sbin/ifconfig -a" ) )
- except Exception,e:
- print( "can't get interfaces" + str( e ) )
-
- for x in l:
- x = x.lstrip()
- if not shouldKill( x, root=root ):
- continue
-
- pid = x.split( " " )[0]
- print( "killing: " + x )
- utils.execsys( "/bin/kill " + signal + " " + pid )
- killed = killed + 1
-
- return killed
-
-
-def tryToRemove(path):
- for _ in range(60):
- try:
- os.remove(path)
- return True
- except OSError, e:
- errno = getattr(e, 'winerror', None)
- # check for the access denied and file in use WindowsErrors
- if errno in (5, 32):
- print("os.remove(%s) failed, retrying in one second." % path)
- time.sleep(1)
- else:
- raise e
- return False
-
-
-def cleanup( root , nokill ):
- if nokill:
- print "nokill requested, not killing anybody"
- else:
- if killprocs( root=root ) > 0:
- time.sleep(3)
- killprocs( "-9", root=root )
-
- # delete all regular files, directories can stay
- # NOTE: if we delete directories later, we can't delete diskfulltest
- for ( dirpath , dirnames , filenames ) in os.walk( root , topdown=False ):
- for x in filenames:
- foo = dirpath + "/" + x
- if os.path.exists(foo):
- if not tryToRemove(foo):
- raise Exception("Couldn't remove file '%s' after 60 seconds" % foo)
-
-if __name__ == "__main__":
- parser = OptionParser(usage="read the script")
- parser.add_option("--nokill", dest='nokill', default=False, action='store_true')
- (options, args) = parser.parse_args()
-
- root = "/data/db/"
- if len(args) > 0:
- root = args[0]
-
- cleanup( root , options.nokill )
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmoke.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmoke.py
deleted file mode 100755
index a6cb03cb620..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmoke.py
+++ /dev/null
@@ -1,216 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Command line utility for executing MongoDB tests of all kinds.
-"""
-
-from __future__ import absolute_import
-
-import json
-import os.path
-import random
-import signal
-import sys
-import time
-import traceback
-
-# Get relative imports to work when the package is not installed on the PYTHONPATH.
-if __name__ == "__main__" and __package__ is None:
- sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
- from buildscripts import resmokelib
-
-
-def _execute_suite(suite, logging_config):
- """
- Executes each test group of 'suite', failing fast if requested.
-
- Returns true if the execution of the suite was interrupted by the
- user, and false otherwise.
- """
-
- logger = resmokelib.logging.loggers.EXECUTOR
-
- for group in suite.test_groups:
- if resmokelib.config.SHUFFLE:
- logger.info("Shuffling order of tests for %ss in suite %s. The seed is %d.",
- group.test_kind, suite.get_name(), resmokelib.config.RANDOM_SEED)
- random.seed(resmokelib.config.RANDOM_SEED)
- random.shuffle(group.tests)
-
- if resmokelib.config.DRY_RUN == "tests":
- sb = []
- sb.append("Tests that would be run for %ss in suite %s:"
- % (group.test_kind, suite.get_name()))
- if len(group.tests) > 0:
- for test in group.tests:
- sb.append(test)
- else:
- sb.append("(no tests)")
- logger.info("\n".join(sb))
-
- # Set a successful return code on the test group because we want to output the tests
- # that would get run by any other suites the user specified.
- group.return_code = 0
- continue
-
- if len(group.tests) == 0:
- logger.info("Skipping %ss, no tests to run", group.test_kind)
- continue
-
- group_config = suite.get_executor_config().get(group.test_kind, {})
- executor = resmokelib.testing.executor.TestGroupExecutor(logger,
- group,
- logging_config,
- **group_config)
-
- try:
- executor.run()
- if resmokelib.config.FAIL_FAST and group.return_code != 0:
- suite.return_code = group.return_code
- return False
- except resmokelib.errors.UserInterrupt:
- suite.return_code = 130 # Simulate SIGINT as exit code.
- return True
- except:
- logger.exception("Encountered an error when running %ss of suite %s.",
- group.test_kind, suite.get_name())
- suite.return_code = 2
- return False
-
-
-def _log_summary(logger, suites, time_taken):
- if len(suites) > 1:
- sb = []
- sb.append("Summary of all suites: %d suites ran in %0.2f seconds"
- % (len(suites), time_taken))
- for suite in suites:
- suite_sb = []
- suite.summarize(suite_sb)
- sb.append(" %s: %s" % (suite.get_name(), "\n ".join(suite_sb)))
-
- logger.info("=" * 80)
- logger.info("\n".join(sb))
-
-
-def _summarize_suite(suite):
- sb = []
- suite.summarize(sb)
- return "\n".join(sb)
-
-
-def _dump_suite_config(suite, logging_config):
- """
- Returns a string that represents the YAML configuration of a suite.
-
- TODO: include the "options" key in the result
- """
-
- sb = []
- sb.append("YAML configuration of suite %s" % (suite.get_name()))
- sb.append(resmokelib.utils.dump_yaml({"selector": suite.get_selector_config()}))
- sb.append("")
- sb.append(resmokelib.utils.dump_yaml({"executor": suite.get_executor_config()}))
- sb.append("")
- sb.append(resmokelib.utils.dump_yaml({"logging": logging_config}))
- return "\n".join(sb)
-
-
-def _write_report_file(suites, pathname):
- """
- Writes the report.json file if requested.
- """
-
- reports = []
- for suite in suites:
- for group in suite.test_groups:
- reports.extend(group.get_reports())
-
- combined_report_dict = resmokelib.testing.report.TestReport.combine(*reports).as_dict()
- with open(pathname, "w") as fp:
- json.dump(combined_report_dict, fp)
-
-
-def main():
- start_time = time.time()
-
- values, args = resmokelib.parser.parse_command_line()
-
- logging_config = resmokelib.parser.get_logging_config(values)
- resmokelib.logging.config.apply_config(logging_config)
- resmokelib.logging.flush.start_thread()
-
- resmokelib.parser.update_config_vars(values)
-
- exec_logger = resmokelib.logging.loggers.EXECUTOR
- resmoke_logger = resmokelib.logging.loggers.new_logger("resmoke", parent=exec_logger)
-
- if values.list_suites:
- suite_names = resmokelib.parser.get_named_suites()
- resmoke_logger.info("Suites available to execute:\n%s", "\n".join(suite_names))
- sys.exit(0)
-
- interrupted = False
- suites = resmokelib.parser.get_suites(values, args)
- try:
- for suite in suites:
- resmoke_logger.info(_dump_suite_config(suite, logging_config))
-
- suite.record_start()
- interrupted = _execute_suite(suite, logging_config)
- suite.record_end()
-
- resmoke_logger.info("=" * 80)
- resmoke_logger.info("Summary of %s suite: %s",
- suite.get_name(), _summarize_suite(suite))
-
- if interrupted or (resmokelib.config.FAIL_FAST and suite.return_code != 0):
- time_taken = time.time() - start_time
- _log_summary(resmoke_logger, suites, time_taken)
- sys.exit(suite.return_code)
-
- time_taken = time.time() - start_time
- _log_summary(resmoke_logger, suites, time_taken)
-
- # Exit with a nonzero code if any of the suites failed.
- exit_code = max(suite.return_code for suite in suites)
- sys.exit(exit_code)
- finally:
- if not interrupted:
- resmokelib.logging.flush.stop_thread()
-
- if resmokelib.config.REPORT_FILE is not None:
- _write_report_file(suites, resmokelib.config.REPORT_FILE)
-
-
-if __name__ == "__main__":
-
- def _dump_stacks(signum, frame):
- """
- Signal handler that will dump the stacks of all threads.
- """
-
- header_msg = "Dumping stacks due to SIGUSR1 signal"
-
- sb = []
- sb.append("=" * len(header_msg))
- sb.append(header_msg)
- sb.append("=" * len(header_msg))
-
- frames = sys._current_frames()
- sb.append("Total threads: %d" % (len(frames)))
- sb.append("")
-
- for thread_id in frames:
- stack = frames[thread_id]
- sb.append("Thread %d:" % (thread_id))
- sb.append("".join(traceback.format_stack(stack)))
-
- sb.append("=" * len(header_msg))
- print "\n".join(sb)
-
- try:
- signal.signal(signal.SIGUSR1, _dump_stacks)
- except AttributeError:
- print "Cannot catch signals on Windows"
-
- main()
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/__init__.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/__init__.py
deleted file mode 100644
index 37f5a889956..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from __future__ import absolute_import
-
-from .suites import NAMED_SUITES
-from .loggers import NAMED_LOGGERS
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/__init__.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/__init__.py
deleted file mode 100644
index 6511d496364..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/__init__.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
-Defines a mapping of shortened names for logger configuration files to
-their full path.
-"""
-
-from __future__ import absolute_import
-
-import os
-import os.path
-
-
-def _get_named_loggers():
- """
- Explores this directory for any YAML configuration files.
-
- Returns a mapping of basenames without the file extension to their
- full path.
- """
-
- dirname = os.path.dirname(__file__)
- named_loggers = {}
-
- try:
- (root, _dirs, files) = os.walk(dirname).next()
- for filename in files:
- (short_name, ext) = os.path.splitext(filename)
- if ext in (".yml", ".yaml"):
- pathname = os.path.join(root, filename)
- named_loggers[short_name] = os.path.relpath(pathname)
- except StopIteration:
- # 'dirname' does not exist, which should be impossible because it contains __file__.
- raise IOError("Directory '%s' does not exist" % (dirname))
-
- return named_loggers
-
-NAMED_LOGGERS = _get_named_loggers()
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/buildlogger.yml b/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/buildlogger.yml
deleted file mode 100644
index 302d2677491..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/buildlogger.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-logging:
- executor:
- format: '[%(name)s] %(asctime)s %(message)s'
- handlers:
- - class: logging.StreamHandler
- fixture:
- format: '[%(name)s] %(message)s'
- handlers:
- - class: buildlogger
- tests:
- format: '[%(name)s] %(asctime)s %(message)s'
- handlers:
- - class: buildlogger
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/console.yml b/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/console.yml
deleted file mode 100644
index b233de409b3..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/console.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-logging:
- executor:
- format: '[%(name)s] %(asctime)s %(message)s'
- handlers:
- - class: logging.StreamHandler
- fixture:
- format: '[%(name)s] %(message)s'
- handlers:
- - class: logging.StreamHandler
- tests:
- format: '[%(name)s] %(asctime)s %(message)s'
- handlers:
- - class: logging.StreamHandler
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/file.yml b/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/file.yml
deleted file mode 100644
index 3d2d15cd5bc..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/file.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-logging:
- executor:
- format: '[%(name)s] %(asctime)s %(message)s'
- handlers:
- - class: logging.FileHandler
- filename: executor.log
- mode: w
- fixture:
- format: '[%(name)s] %(message)s'
- handlers:
- - class: logging.FileHandler
- filename: fixture.log
- mode: w
- tests:
- format: '[%(name)s] %(asctime)s %(message)s'
- handlers:
- - class: logging.FileHandler
- filename: tests.log
- mode: w
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/suppress.yml b/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/suppress.yml
deleted file mode 100644
index c69bb793b0b..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/loggers/suppress.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-logging:
- executor:
- handlers:
- - class: logging.NullHandler
- fixture:
- handlers:
- - class: logging.NullHandler
- tests:
- handlers:
- - class: logging.NullHandler
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/__init__.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/__init__.py
deleted file mode 100644
index e075dd22e0d..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/__init__.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
-Defines a mapping of shortened names for suite configuration files to
-their full path.
-"""
-
-from __future__ import absolute_import
-
-import os
-import os.path
-
-
-def _get_named_suites():
- """
- Explores this directory for any YAML configuration files.
-
- Returns a mapping of basenames without the file extension to their
- full path.
- """
-
- dirname = os.path.dirname(__file__)
- named_suites = {}
-
- try:
- (root, _dirs, files) = os.walk(dirname).next()
- for filename in files:
- (short_name, ext) = os.path.splitext(filename)
- if ext in (".yml", ".yaml"):
- pathname = os.path.join(root, filename)
- named_suites[short_name] = os.path.relpath(pathname)
- except StopIteration:
- # 'dirname' does not exist, which should be impossible because it contains __file__.
- raise IOError("Directory '%s' does not exist" % (dirname))
-
- return named_suites
-
-NAMED_SUITES = _get_named_suites()
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/core.yml b/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/core.yml
deleted file mode 100644
index bc094c1f549..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/core.yml
+++ /dev/null
@@ -1,27 +0,0 @@
-selector:
- js_test:
- roots:
- - jstests/bson/*.js
- - jstests/dump/*.js
- - jstests/export/*.js
- - jstests/files/*.js
- - jstests/import/*.js
- - jstests/oplog/*.js
- - jstests/restore/*.js
- - jstests/stat/*.js
- - jstests/top/*.js
-
-executor:
- js_test:
- config:
- shell_options:
- eval_prepend: "load('jstests/libs/servers.js'); load('jstests/libs/servers_misc.js');"
- readMode: commands
- hooks:
- - class: CleanEveryN
- n: 20
- fixture:
- class: MongoDFixture
- mongod_options:
- set_parameters:
- enableTestCommands: 1
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/core_ssl.yml b/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/core_ssl.yml
deleted file mode 100644
index 2a9330e2856..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/core_ssl.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-selector:
- js_test:
- roots:
- - jstests/bson/*.js
- - jstests/dump/*.js
- - jstests/export/*.js
- - jstests/files/*.js
- - jstests/import/*.js
- - jstests/oplog/*.js
- - jstests/restore/*.js
- - jstests/stat/*.js
- - jstests/top/*.js
-
-executor:
- js_test:
- config:
- shell_options:
- global_vars:
- TestData:
- useSSL: true
- eval_prepend: "load('jstests/libs/servers.js'); load('jstests/libs/servers_misc.js');"
- readMode: commands
- ssl: ''
- sslAllowInvalidCertificates: ''
- sslCAFile: jstests/libs/ca.pem
- sslPEMKeyFile: jstests/libs/client.pem
- hooks:
- - class: CleanEveryN
- n: 20
- fixture:
- class: MongoDFixture
- mongod_options:
- set_parameters:
- enableTestCommands: 1
- sslMode: allowSSL
- sslPEMKeyFile: jstests/libs/server.pem
- sslCAFile: jstests/libs/ca.pem
- sslWeakCertificateValidation: ''
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/native_cert_ssl.yml b/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/native_cert_ssl.yml
deleted file mode 100644
index 175d6ac9941..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/native_cert_ssl.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-selector:
- js_test:
- roots:
- - jstests/ssl/*.js
-
-# ssl tests start their own mongod's.
-executor:
- js_test:
- config:
- shell_options:
- nodb: ''
- ssl: ''
- sslAllowInvalidCertificates: ''
- readMode: commands
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/restore_archive.yml b/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/restore_archive.yml
deleted file mode 100644
index 8c51a3b2f46..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/restore_archive.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-selector:
- js_test:
- roots:
- - jstests/dump/*.js
- - jstests/restore/*.js
-
-executor:
- js_test:
- config:
- shell_options:
- eval_prepend: "load('jstests/libs/servers.js'); load('jstests/libs/servers_misc.js');"
- eval: "load('jstests/configs/archive_targets.js');"
- readMode: commands
- hooks:
- - class: CleanEveryN
- n: 20
- fixture:
- class: MongoDFixture
- mongod_options:
- set_parameters:
- enableTestCommands: 1
-
-
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/restore_gzip.yml b/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/restore_gzip.yml
deleted file mode 100644
index 768b88ca6dd..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokeconfig/suites/restore_gzip.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-selector:
- js_test:
- roots:
- - jstests/dump/*.js
- - jstests/restore/*.js
-
-executor:
- js_test:
- config:
- shell_options:
- eval_prepend: "load('jstests/libs/servers.js'); load('jstests/libs/servers_misc.js');"
- eval: "load('jstests/configs/gzip_targets.js');"
- hooks:
- - class: CleanEveryN
- n: 20
- fixture:
- class: MongoDFixture
- mongod_options:
- set_parameters:
- enableTestCommands: 1
-
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/__init__.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/__init__.py
deleted file mode 100644
index 06b0539e25b..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from __future__ import absolute_import
-
-from . import errors
-from . import logging
-from . import parser
-from . import testing
-from . import utils
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/config.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/config.py
deleted file mode 100644
index ecb7fec7fa3..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/config.py
+++ /dev/null
@@ -1,165 +0,0 @@
-"""
-Configuration options for resmoke.py.
-"""
-
-from __future__ import absolute_import
-
-import os
-import os.path
-import time
-
-
-##
-# Default values.
-##
-
-# Default path for where to look for executables.
-DEFAULT_DBTEST_EXECUTABLE = os.path.join(os.curdir, "dbtest")
-DEFAULT_MONGO_EXECUTABLE = os.path.join(os.curdir, "mongo")
-DEFAULT_MONGOD_EXECUTABLE = os.path.join(os.curdir, "mongod")
-DEFAULT_MONGOS_EXECUTABLE = os.path.join(os.curdir, "mongos")
-
-# Default root directory for where resmoke.py puts directories containing data files of mongod's it
-# starts, as well as those started by individual tests.
-DEFAULT_DBPATH_PREFIX = os.path.normpath("/data/db")
-
-# Subdirectory under the dbpath prefix that contains directories with data files of mongod's started
-# by resmoke.py.
-FIXTURE_SUBDIR = "resmoke"
-
-# Subdirectory under the dbpath prefix that contains directories with data files of mongod's started
-# by individual tests.
-MONGO_RUNNER_SUBDIR = "mongorunner"
-
-# Names below correspond to how they are specified via the command line or in the options YAML file.
-DEFAULTS = {
- "basePort": 20000,
- "buildloggerUrl": "https://logkeeper.mongodb.org",
- "continueOnFailure": False,
- "dbpathPrefix": None,
- "dbtest": None,
- "dryRun": None,
- "excludeWithAllTags": None,
- "excludeWithAnyTags": None,
- "includeWithAllTags": None,
- "includeWithAnyTags": None,
- "jobs": 1,
- "mongo": None,
- "mongod": None,
- "mongodSetParameters": None,
- "mongos": None,
- "mongosSetParameters": None,
- "nojournal": False,
- "repeat": 1,
- "reportFile": None,
- "seed": long(time.time() * 256), # Taken from random.py code in Python 2.7.
- "shellReadMode": None,
- "shellWriteMode": None,
- "shuffle": False,
- "storageEngine": None,
- "wiredTigerCollectionConfigString": None,
- "wiredTigerEngineConfigString": None,
- "wiredTigerIndexConfigString": None
-}
-
-
-##
-# Variables that are set by the user at the command line or with --options.
-##
-
-# The starting port number to use for mongod and mongos processes spawned by resmoke.py and the
-# mongo shell.
-BASE_PORT = None
-
-# The root url of the buildlogger server.
-BUILDLOGGER_URL = None
-
-# Root directory for where resmoke.py puts directories containing data files of mongod's it starts,
-# as well as those started by individual tests.
-DBPATH_PREFIX = None
-
-# The path to the dbtest executable used by resmoke.py.
-DBTEST_EXECUTABLE = None
-
-# If set to "tests", then resmoke.py will output the tests that would be run by each suite (without
-# actually running them).
-DRY_RUN = None
-
-# If set, then any jstests that have all of the specified tags will be excluded from the suite(s).
-EXCLUDE_WITH_ALL_TAGS = None
-
-# If set, then any jstests that have any of the specified tags will be excluded from the suite(s).
-EXCLUDE_WITH_ANY_TAGS = None
-
-# If true, then a test failure or error will cause resmoke.py to exit and not run any more tests.
-FAIL_FAST = None
-
-# If set, then only jstests that have all of the specified tags will be run during the jstest
-# portion of the suite(s).
-INCLUDE_WITH_ALL_TAGS = None
-
-# If set, then only jstests that have at least one of the specified tags will be run during the
-# jstest portion of the suite(s).
-INCLUDE_WITH_ANY_TAGS = None
-
-# If set, then resmoke.py starts the specified number of Job instances to run tests.
-JOBS = None
-
-# The path to the mongo executable used by resmoke.py.
-MONGO_EXECUTABLE = None
-
-# The path to the mongod executable used by resmoke.py.
-MONGOD_EXECUTABLE = None
-
-# The --setParameter options passed to mongod.
-MONGOD_SET_PARAMETERS = None
-
-# The path to the mongos executable used by resmoke.py.
-MONGOS_EXECUTABLE = None
-
-# The --setParameter options passed to mongos.
-MONGOS_SET_PARAMETERS = None
-
-# If true, then all mongod's started by resmoke.py and by the mongo shell will not have journaling
-# enabled.
-NO_JOURNAL = None
-
-# If true, then all mongod's started by resmoke.py and by the mongo shell will not preallocate
-# journal files.
-NO_PREALLOC_JOURNAL = None
-
-# If set, then the RNG is seeded with the specified value. Otherwise uses a seed based on the time
-# this module was loaded.
-RANDOM_SEED = None
-
-# If set, then each suite is repeated the specified number of times.
-REPEAT = None
-
-# If set, then resmoke.py will write out a report file with the status of each test that ran.
-REPORT_FILE = None
-
-# If set, then mongo shells started by resmoke.py will use the specified read mode.
-SHELL_READ_MODE = None
-
-# If set, then mongo shells started by resmoke.py will use the specified write mode.
-SHELL_WRITE_MODE = None
-
-# If true, then the order the tests run in is randomized. Otherwise the tests will run in
-# alphabetical (case-insensitive) order.
-SHUFFLE = None
-
-# If set, then all mongod's started by resmoke.py and by the mongo shell will use the specified
-# storage engine.
-STORAGE_ENGINE = None
-
-# If set, then all mongod's started by resmoke.py and by the mongo shell will use the specified
-# WiredTiger collection configuration settings.
-WT_COLL_CONFIG = None
-
-# If set, then all mongod's started by resmoke.py and by the mongo shell will use the specified
-# WiredTiger storage engine configuration settings.
-WT_ENGINE_CONFIG = None
-
-# If set, then all mongod's started by resmoke.py and by the mongo shell will use the specified
-# WiredTiger index configuration settings.
-WT_INDEX_CONFIG = None
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/__init__.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/__init__.py
deleted file mode 100644
index 29a19a52500..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from __future__ import absolute_import
-
-from . import process
-from . import programs
-from . import network
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/network.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/network.py
deleted file mode 100644
index 44e54667a67..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/network.py
+++ /dev/null
@@ -1,114 +0,0 @@
-"""
-Class used to allocate ports for use by various mongod and mongos
-processes involved in running the tests.
-"""
-
-from __future__ import absolute_import
-
-import collections
-import functools
-import threading
-
-from .. import config
-from .. import errors
-
-
-def _check_port(func):
- """
- A decorator that verifies the port returned by the wrapped function
- is in the valid range.
-
- Returns the port if it is valid, and raises a PortAllocationError
- otherwise.
- """
-
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- port = func(*args, **kwargs)
-
- if port < 0:
- raise errors.PortAllocationError("Attempted to use a negative port")
-
- if port > PortAllocator.MAX_PORT:
- raise errors.PortAllocationError("Exhausted all available ports. Consider decreasing"
- " the number of jobs, or using a lower base port")
-
- return port
-
- return wrapper
-
-
-class PortAllocator(object):
- """
- This class is responsible for allocating ranges of ports.
-
- It reserves a range of ports for each job with the first part of
- that range used for the fixture started by that job, and the second
- part of the range used for mongod and mongos processes started by
- tests run by that job.
- """
-
- # A PortAllocator will not return any port greater than this number.
- MAX_PORT = 2 ** 16 - 1
-
- # Each job gets a contiguous range of _PORTS_PER_JOB ports, with job 0 getting the first block
- # of ports, job 1 getting the second block, and so on.
- _PORTS_PER_JOB = 250
-
- # The first _PORTS_PER_FIXTURE ports of each range are reserved for the fixtures, the remainder
- # of the port range is used by tests.
- _PORTS_PER_FIXTURE = 10
-
- _NUM_USED_PORTS_LOCK = threading.Lock()
-
- # Used to keep track of how many ports a fixture has allocated.
- _NUM_USED_PORTS = collections.defaultdict(int)
-
- @classmethod
- @_check_port
- def next_fixture_port(cls, job_num):
- """
- Returns the next port for a fixture to use.
-
- Raises a PortAllocationError if the fixture has requested more
- ports than are reserved per job, or if the next port is not a
- valid port number.
- """
- with cls._NUM_USED_PORTS_LOCK:
- start_port = config.BASE_PORT + (job_num * cls._PORTS_PER_JOB)
- num_used_ports = cls._NUM_USED_PORTS[job_num]
- next_port = start_port + num_used_ports
-
- cls._NUM_USED_PORTS[job_num] += 1
-
- if next_port >= start_port + cls._PORTS_PER_FIXTURE:
- raise errors.PortAllocationError(
- "Fixture has requested more than the %d ports reserved per fixture"
- % cls._PORTS_PER_FIXTURE)
-
- return next_port
-
- @classmethod
- @_check_port
- def min_test_port(cls, job_num):
- """
- For the given job, returns the lowest port that is reserved for
- use by tests.
-
- Raises a PortAllocationError if that port is higher than the
- maximum port.
- """
- return config.BASE_PORT + (job_num * cls._PORTS_PER_JOB) + cls._PORTS_PER_FIXTURE
-
- @classmethod
- @_check_port
- def max_test_port(cls, job_num):
- """
- For the given job, returns the highest port that is reserved
- for use by tests.
-
- Raises a PortAllocationError if that port is higher than the
- maximum port.
- """
- next_range_start = config.BASE_PORT + ((job_num + 1) * cls._PORTS_PER_JOB)
- return next_range_start - 1
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/pipe.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/pipe.py
deleted file mode 100644
index bb080721b2d..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/pipe.py
+++ /dev/null
@@ -1,87 +0,0 @@
-"""
-Helper class to read output of a subprocess. Used to avoid deadlocks
-from the pipe buffer filling up and blocking the subprocess while it's
-being waited on.
-"""
-
-from __future__ import absolute_import
-
-import threading
-
-
-class LoggerPipe(threading.Thread):
- """
- Asynchronously reads the output of a subprocess and sends it to a
- logger.
- """
-
- # The start() and join() methods are not intended to be called directly on the LoggerPipe
- # instance. Since we override them for that effect, the super's version are preserved here.
- __start = threading.Thread.start
- __join = threading.Thread.join
-
- def __init__(self, logger, level, pipe_out):
- """
- Initializes the LoggerPipe with the specified logger, logging
- level to use, and pipe to read from.
- """
-
- threading.Thread.__init__(self)
- # Main thread should not call join() when exiting
- self.daemon = True
-
- self.__logger = logger
- self.__level = level
- self.__pipe_out = pipe_out
-
- self.__lock = threading.Lock()
- self.__condition = threading.Condition(self.__lock)
-
- self.__started = False
- self.__finished = False
-
- LoggerPipe.__start(self)
-
- def start(self):
- raise NotImplementedError("start should not be called directly")
-
- def run(self):
- """
- Reads the output from 'pipe_out' and logs each line to 'logger'.
- """
-
- with self.__lock:
- self.__started = True
- self.__condition.notify_all()
-
- # Close the pipe when finished reading all of the output.
- with self.__pipe_out:
- # Avoid buffering the output from the pipe.
- for line in iter(self.__pipe_out.readline, b""):
- # Convert the output of the process from a bytestring to a UTF-8 string, and replace
- # any characters that cannot be decoded with the official Unicode replacement
- # character, U+FFFD. The log messages of MongoDB processes are not always valid
- # UTF-8 sequences. See SERVER-7506.
- line = line.decode("utf-8", "replace")
- self.__logger.log(self.__level, line.rstrip())
-
- with self.__lock:
- self.__finished = True
- self.__condition.notify_all()
-
- def join(self, timeout=None):
- raise NotImplementedError("join should not be called directly")
-
- def wait_until_started(self):
- with self.__lock:
- while not self.__started:
- self.__condition.wait()
-
- def wait_until_finished(self):
- with self.__lock:
- while not self.__finished:
- self.__condition.wait()
-
- # No need to pass a timeout to join() because the thread should already be done after
- # notifying us it has finished reading output from the pipe.
- LoggerPipe.__join(self) # Tidy up the started thread.
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/process.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/process.py
deleted file mode 100644
index f54b0f0a640..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/process.py
+++ /dev/null
@@ -1,234 +0,0 @@
-"""
-A more reliable way to create and destroy processes.
-
-Uses job objects when running on Windows to ensure that all created
-processes are terminated.
-"""
-
-from __future__ import absolute_import
-
-import atexit
-import logging
-import os
-import os.path
-import sys
-import threading
-
-# The subprocess32 module resolves the thread-safety issues of the subprocess module in Python 2.x
-# when the _posixsubprocess C extension module is also available. Additionally, the _posixsubprocess
-# C extension module avoids triggering invalid free() calls on Python's internal data structure for
-# thread-local storage by skipping the PyOS_AfterFork() call when the 'preexec_fn' parameter isn't
-# specified to subprocess.Popen(). See SERVER-22219 for more details.
-#
-# The subprocess32 module is untested on Windows and thus isn't recommended for use, even when it's
-# installed. See https://github.com/google/python-subprocess32/blob/3.2.7/README.md#usage.
-if os.name == "posix" and sys.version_info[0] == 2:
- try:
- import subprocess32 as subprocess
- except ImportError:
- import warnings
- warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't"
- " available. When using the subprocess module, a child process may trigger"
- " an invalid free(). See SERVER-22219 for more details."),
- RuntimeWarning)
- import subprocess
-else:
- import subprocess
-
-from . import pipe
-from .. import utils
-
-# Attempt to avoid race conditions (e.g. hangs caused by a file descriptor being left open) when
-# starting subprocesses concurrently from multiple threads by guarding calls to subprocess.Popen()
-# with a lock. See https://bugs.python.org/issue2320 and https://bugs.python.org/issue12739 as
-# reports of such hangs.
-#
-# This lock probably isn't necessary when both the subprocess32 module and its _posixsubprocess C
-# extension module are available because either
-# (a) the pipe2() syscall is available on the platform we're using, so pipes are atomically
-# created with the FD_CLOEXEC flag set on them, or
-# (b) the pipe2() syscall isn't available, but the GIL isn't released during the
-# _posixsubprocess.fork_exec() call or the _posixsubprocess.cloexec_pipe() call.
-# See https://bugs.python.org/issue7213 for more details.
-_POPEN_LOCK = threading.Lock()
-
-# Job objects are the only reliable way to ensure that processes are terminated on Windows.
-if sys.platform == "win32":
- import win32api
- import win32con
- import win32job
- import win32process
- import winerror
-
- def _init_job_object():
- job_object = win32job.CreateJobObject(None, "")
-
- # Get the limit and job state information of the newly-created job object.
- job_info = win32job.QueryInformationJobObject(job_object,
- win32job.JobObjectExtendedLimitInformation)
-
- # Set up the job object so that closing the last handle to the job object
- # will terminate all associated processes and destroy the job object itself.
- job_info["BasicLimitInformation"]["LimitFlags"] |= \
- win32job.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE
-
- # Update the limits of the job object.
- win32job.SetInformationJobObject(job_object,
- win32job.JobObjectExtendedLimitInformation,
- job_info)
-
- return job_object
-
- # Don't create a job object if the current process is already inside one.
- if win32job.IsProcessInJob(win32process.GetCurrentProcess(), None):
- _JOB_OBJECT = None
- else:
- _JOB_OBJECT = _init_job_object()
- atexit.register(win32api.CloseHandle, _JOB_OBJECT)
-
-
-class Process(object):
- """
- Wrapper around subprocess.Popen class.
- """
-
- def __init__(self, logger, args, env=None, env_vars=None):
- """
- Initializes the process with the specified logger, arguments,
- and environment.
- """
-
- # Ensure that executable files on Windows have a ".exe" extension.
- if sys.platform == "win32" and os.path.splitext(args[0])[1] != ".exe":
- args[0] += ".exe"
-
- self.logger = logger
- self.args = args
- self.env = utils.default_if_none(env, os.environ.copy())
- if env_vars is not None:
- self.env.update(env_vars)
-
- self.pid = None
-
- self._process = None
- self._stdout_pipe = None
- self._stderr_pipe = None
-
- def start(self):
- """
- Starts the process and the logger pipes for its stdout and
- stderr.
- """
-
- creation_flags = 0
- if sys.platform == "win32" and _JOB_OBJECT is not None:
- creation_flags |= win32process.CREATE_BREAKAWAY_FROM_JOB
-
- # Use unbuffered I/O pipes to avoid adding delay between when the subprocess writes output
- # and when the LoggerPipe thread reads it.
- buffer_size = 0
-
- # Close file descriptors in the child process before executing the program. This prevents
- # file descriptors that were inherited due to multiple calls to fork() -- either within one
- # thread, or concurrently from multiple threads -- from causing another subprocess to wait
- # for the completion of the newly spawned child process. Closing other file descriptors
- # isn't supported on Windows when stdout and stderr are redirected.
- close_fds = (sys.platform != "win32")
-
- with _POPEN_LOCK:
- self._process = subprocess.Popen(self.args,
- bufsize=buffer_size,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- close_fds=close_fds,
- env=self.env,
- creationflags=creation_flags)
- self.pid = self._process.pid
-
- self._stdout_pipe = pipe.LoggerPipe(self.logger, logging.INFO, self._process.stdout)
- self._stderr_pipe = pipe.LoggerPipe(self.logger, logging.ERROR, self._process.stderr)
-
- self._stdout_pipe.wait_until_started()
- self._stderr_pipe.wait_until_started()
-
- if sys.platform == "win32" and _JOB_OBJECT is not None:
- try:
- win32job.AssignProcessToJobObject(_JOB_OBJECT, self._process._handle)
- except win32job.error as err:
- # ERROR_ACCESS_DENIED (winerror=5) is received when the process has already died.
- if err.winerror != winerror.ERROR_ACCESS_DENIED:
- raise
- return_code = win32process.GetExitCodeProcess(self._process._handle)
- if return_code == win32con.STILL_ACTIVE:
- raise
-
- def stop(self):
- """
- Terminates the process.
- """
-
- if sys.platform == "win32":
- # Adapted from implementation of Popen.terminate() in subprocess.py of Python 2.7
- # because earlier versions do not catch exceptions.
- try:
- # Have the process exit with code 0 if it is terminated by us to simplify the
- # success-checking logic later on.
- win32process.TerminateProcess(self._process._handle, 0)
- except win32process.error as err:
- # ERROR_ACCESS_DENIED (winerror=5) is received when the process
- # has already died.
- if err.winerror != winerror.ERROR_ACCESS_DENIED:
- raise
- return_code = win32process.GetExitCodeProcess(self._process._handle)
- if return_code == win32con.STILL_ACTIVE:
- raise
- else:
- try:
- self._process.terminate()
- except OSError as err:
- # ESRCH (errno=3) is received when the process has already died.
- if err.errno != 3:
- raise
-
- def poll(self):
- return self._process.poll()
-
- def wait(self):
- """
- Waits until the process has terminated and all output has been
- consumed by the logger pipes.
- """
-
- return_code = self._process.wait()
-
- if self._stdout_pipe:
- self._stdout_pipe.wait_until_finished()
- if self._stderr_pipe:
- self._stderr_pipe.wait_until_finished()
-
- return return_code
-
- def as_command(self):
- """
- Returns an equivalent command line invocation of the process.
- """
-
- default_env = os.environ
- env_diff = self.env.copy()
-
- # Remove environment variables that appear in both 'os.environ' and 'self.env'.
- for env_var in default_env:
- if env_var in env_diff and env_diff[env_var] == default_env[env_var]:
- del env_diff[env_var]
-
- sb = [] # String builder.
- for env_var in env_diff:
- sb.append("%s=%s" % (env_var, env_diff[env_var]))
- sb.extend(self.args)
-
- return " ".join(sb)
-
- def __str__(self):
- if self.pid is None:
- return self.as_command()
- return "%s (%d)" % (self.as_command(), self.pid)
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/programs.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/programs.py
deleted file mode 100644
index cdffcdf7bca..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/core/programs.py
+++ /dev/null
@@ -1,311 +0,0 @@
-"""
-Utility functions to create MongoDB processes.
-
-Handles all the nitty-gritty parameter conversion.
-"""
-
-from __future__ import absolute_import
-
-import json
-import os
-import os.path
-import stat
-
-from . import process as _process
-from .. import utils
-from .. import config
-
-
-def mongod_program(logger, executable=None, process_kwargs=None, **kwargs):
- """
- Returns a Process instance that starts a mongod executable with
- arguments constructed from 'kwargs'.
- """
-
- executable = utils.default_if_none(executable, config.DEFAULT_MONGOD_EXECUTABLE)
- args = [executable]
-
- # Apply the --setParameter command line argument. Command line options to resmoke.py override
- # the YAML configuration.
- suite_set_parameters = kwargs.pop("set_parameters", {})
-
- if config.MONGOD_SET_PARAMETERS is not None:
- suite_set_parameters.update(utils.load_yaml(config.MONGOD_SET_PARAMETERS))
-
- _apply_set_parameters(args, suite_set_parameters)
-
- shortcut_opts = {
- "nojournal": config.NO_JOURNAL,
- "nopreallocj": config.NO_PREALLOC_JOURNAL,
- "storageEngine": config.STORAGE_ENGINE,
- "wiredTigerCollectionConfigString": config.WT_COLL_CONFIG,
- "wiredTigerEngineConfigString": config.WT_ENGINE_CONFIG,
- "wiredTigerIndexConfigString": config.WT_INDEX_CONFIG,
- }
-
- # These options are just flags, so they should not take a value.
- opts_without_vals = ("nojournal", "nopreallocj")
-
- # Have the --nojournal command line argument to resmoke.py unset the journal option.
- if shortcut_opts["nojournal"] and "journal" in kwargs:
- del kwargs["journal"]
-
- # Ensure that config servers run with journaling enabled.
- if "configsvr" in kwargs:
- shortcut_opts["nojournal"] = False
- kwargs["journal"] = ""
-
- # Command line options override the YAML configuration.
- for opt_name in shortcut_opts:
- opt_value = shortcut_opts[opt_name]
- if opt_name in opts_without_vals:
- # Options that are specified as --flag on the command line are represented by a boolean
- # value where True indicates that the flag should be included in 'kwargs'.
- if opt_value:
- kwargs[opt_name] = ""
- else:
- # Options that are specified as --key=value on the command line are represented by a
- # value where None indicates that the key-value pair shouldn't be included in 'kwargs'.
- if opt_value is not None:
- kwargs[opt_name] = opt_value
-
- # Override the storage engine specified on the command line with "wiredTiger" if running a
- # config server replica set.
- if "replSet" in kwargs and "configsvr" in kwargs:
- kwargs["storageEngine"] = "wiredTiger"
-
- # Apply the rest of the command line arguments.
- _apply_kwargs(args, kwargs)
-
- _set_keyfile_permissions(kwargs)
-
- process_kwargs = utils.default_if_none(process_kwargs, {})
- return _process.Process(logger, args, **process_kwargs)
-
-
-def mongos_program(logger, executable=None, process_kwargs=None, **kwargs):
- """
- Returns a Process instance that starts a mongos executable with
- arguments constructed from 'kwargs'.
- """
-
- executable = utils.default_if_none(executable, config.DEFAULT_MONGOS_EXECUTABLE)
- args = [executable]
-
- # Apply the --setParameter command line argument. Command line options to resmoke.py override
- # the YAML configuration.
- suite_set_parameters = kwargs.pop("set_parameters", {})
-
- if config.MONGOS_SET_PARAMETERS is not None:
- suite_set_parameters.update(utils.load_yaml(config.MONGOS_SET_PARAMETERS))
-
- _apply_set_parameters(args, suite_set_parameters)
-
- # Apply the rest of the command line arguments.
- _apply_kwargs(args, kwargs)
-
- _set_keyfile_permissions(kwargs)
-
- process_kwargs = utils.default_if_none(process_kwargs, {})
- return _process.Process(logger, args, **process_kwargs)
-
-
-def mongo_shell_program(logger, executable=None, filename=None, process_kwargs=None, **kwargs):
- """
- Returns a Process instance that starts a mongo shell with arguments
- constructed from 'kwargs'.
- """
-
- executable = utils.default_if_none(executable, config.DEFAULT_MONGO_EXECUTABLE)
- args = [executable]
-
- eval_sb = [] # String builder.
- global_vars = kwargs.pop("global_vars", {}).copy()
-
- shortcut_opts = {
- "noJournal": (config.NO_JOURNAL, False),
- "noJournalPrealloc": (config.NO_PREALLOC_JOURNAL, False),
- "storageEngine": (config.STORAGE_ENGINE, ""),
- "testName": (os.path.splitext(os.path.basename(filename))[0], ""),
- "wiredTigerCollectionConfigString": (config.WT_COLL_CONFIG, ""),
- "wiredTigerEngineConfigString": (config.WT_ENGINE_CONFIG, ""),
- "wiredTigerIndexConfigString": (config.WT_INDEX_CONFIG, ""),
- }
-
- test_data = global_vars.get("TestData", {}).copy()
- for opt_name in shortcut_opts:
- (opt_value, opt_default) = shortcut_opts[opt_name]
- if opt_value is not None:
- test_data[opt_name] = opt_value
- elif opt_name not in test_data:
- # Only use 'opt_default' if the property wasn't set in the YAML configuration.
- test_data[opt_name] = opt_default
- global_vars["TestData"] = test_data
-
- # Pass setParameters for mongos and mongod through TestData. The setParameter parsing in
- # servers.js is very primitive (just splits on commas), so this may break for non-scalar
- # setParameter values.
- if config.MONGOD_SET_PARAMETERS is not None:
- if "setParameters" in test_data:
- raise ValueError("setParameters passed via TestData can only be set from either the"
- " command line or the suite YAML, not both")
- mongod_set_parameters = utils.load_yaml(config.MONGOD_SET_PARAMETERS)
- test_data["setParameters"] = _format_test_data_set_parameters(mongod_set_parameters)
-
- if config.MONGOS_SET_PARAMETERS is not None:
- if "setParametersMongos" in test_data:
- raise ValueError("setParametersMongos passed via TestData can only be set from either"
- " the command line or the suite YAML, not both")
- mongos_set_parameters = utils.load_yaml(config.MONGOS_SET_PARAMETERS)
- test_data["setParametersMongos"] = _format_test_data_set_parameters(mongos_set_parameters)
-
- if "eval_prepend" in kwargs:
- eval_sb.append(str(kwargs.pop("eval_prepend")))
-
- for var_name in global_vars:
- _format_shell_vars(eval_sb, var_name, global_vars[var_name])
-
- if "eval" in kwargs:
- eval_sb.append(str(kwargs.pop("eval")))
-
- eval_str = "; ".join(eval_sb)
- args.append("--eval")
- args.append(eval_str)
-
- if config.SHELL_READ_MODE is not None:
- kwargs["readMode"] = config.SHELL_READ_MODE
-
- if config.SHELL_WRITE_MODE is not None:
- kwargs["writeMode"] = config.SHELL_WRITE_MODE
-
- # Apply the rest of the command line arguments.
- _apply_kwargs(args, kwargs)
-
- # Have the mongos shell run the specified file.
- args.append(filename)
-
- _set_keyfile_permissions(test_data)
-
- process_kwargs = utils.default_if_none(process_kwargs, {})
- return _process.Process(logger, args, **process_kwargs)
-
-
-def _format_shell_vars(sb, path, value):
- """
- Formats 'value' in a way that can be passed to --eval.
-
- If 'value' is a dictionary, then it is unrolled into the creation of
- a new JSON object with properties assigned for each key of the
- dictionary.
- """
-
- # Only need to do special handling for JSON objects.
- if not isinstance(value, dict):
- sb.append("%s = %s" % (path, json.dumps(value)))
- return
-
- # Avoid including curly braces and colons in output so that the command invocation can be
- # copied and run through bash.
- sb.append("%s = new Object()" % (path))
- for subkey in value:
- _format_shell_vars(sb, ".".join((path, subkey)), value[subkey])
-
-
-def dbtest_program(logger, executable=None, suites=None, process_kwargs=None, **kwargs):
- """
- Returns a Process instance that starts a dbtest executable with
- arguments constructed from 'kwargs'.
- """
-
- executable = utils.default_if_none(executable, config.DEFAULT_DBTEST_EXECUTABLE)
- args = [executable]
-
- if suites is not None:
- args.extend(suites)
-
- if config.STORAGE_ENGINE is not None:
- kwargs["storageEngine"] = config.STORAGE_ENGINE
-
- return generic_program(logger, args, process_kwargs=process_kwargs, **kwargs)
-
-def generic_program(logger, args, process_kwargs=None, **kwargs):
- """
- Returns a Process instance that starts an arbitrary executable with
- arguments constructed from 'kwargs'. The args parameter is an array
- of strings containing the command to execute.
- """
-
- if not utils.is_string_list(args):
- raise ValueError("The args parameter must be a list of command arguments")
-
- _apply_kwargs(args, kwargs)
-
- process_kwargs = utils.default_if_none(process_kwargs, {})
- return _process.Process(logger, args, **process_kwargs)
-
-
-def _format_test_data_set_parameters(set_parameters):
- """
- Converts key-value pairs from 'set_parameters' into the comma
- delimited list format expected by the parser in servers.js.
-
- WARNING: the parsing logic in servers.js is very primitive.
- Non-scalar options such as logComponentVerbosity will not work
- correctly.
- """
- params = []
- for param_name in set_parameters:
- param_value = set_parameters[param_name]
- if isinstance(param_value, bool):
- # Boolean valued setParameters are specified as lowercase strings.
- param_value = "true" if param_value else "false"
- elif isinstance(param_value, dict):
- raise TypeError("Non-scalar setParameter values are not currently supported.")
- params.append("%s=%s" % (param_name, param_value))
- return ",".join(params)
-
-def _apply_set_parameters(args, set_parameter):
- """
- Converts key-value pairs from 'kwargs' into --setParameter key=value
- arguments to an executable and appends them to 'args'.
- """
-
- for param_name in set_parameter:
- param_value = set_parameter[param_name]
- # --setParameter takes boolean values as lowercase strings.
- if isinstance(param_value, bool):
- param_value = "true" if param_value else "false"
- args.append("--setParameter")
- args.append("%s=%s" % (param_name, param_value))
-
-
-def _apply_kwargs(args, kwargs):
- """
- Converts key-value pairs from 'kwargs' into --key value arguments
- to an executable and appends them to 'args'.
-
- A --flag without a value is represented with the empty string.
- """
-
- for arg_name in kwargs:
- arg_value = str(kwargs[arg_name])
- args.append("--%s" % (arg_name))
- if arg_value:
- args.append(arg_value)
-
-
-def _set_keyfile_permissions(opts):
- """
- Change the permissions of keyfiles in 'opts' to 600, i.e. only the
- user can read and write the file.
-
- This necessary to avoid having the mongod/mongos fail to start up
- because "permissions on the keyfiles are too open".
-
- We can't permanently set the keyfile permissions because git is not
- aware of them.
- """
- if "keyFile" in opts:
- os.chmod(opts["keyFile"], stat.S_IRUSR | stat.S_IWUSR)
- if "encryptionKeyFile" in opts:
- os.chmod(opts["encryptionKeyFile"], stat.S_IRUSR | stat.S_IWUSR)
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/errors.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/errors.py
deleted file mode 100644
index 6d2a704e390..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/errors.py
+++ /dev/null
@@ -1,52 +0,0 @@
-"""
-Exceptions raised by resmoke.py.
-"""
-
-
-class ResmokeError(Exception):
- """
- Base class for all resmoke.py exceptions.
- """
- pass
-
-
-class StopExecution(ResmokeError):
- """
- Exception that is raised when resmoke.py should stop executing tests
- if failing fast is enabled.
- """
- pass
-
-
-class UserInterrupt(StopExecution):
- """
- Exception that is raised when a user signals resmoke.py to
- unconditionally stop executing tests.
- """
- pass
-
-
-class TestFailure(ResmokeError):
- """
- Exception that is raised by a hook in the after_test method if it
- determines the the previous test should be marked as a failure.
- """
- pass
-
-
-class ServerFailure(TestFailure):
- """
- Exception that is raised by a hook in the after_test method if it
- detects that the fixture did not exit cleanly and should be marked
- as a failure.
- """
- pass
-
-
-class PortAllocationError(ResmokeError):
- """
- Exception that is raised by the PortAllocator if a port is requested
- outside of the range of valid ports, or if a fixture requests more
- ports than were reserved for that job.
- """
- pass
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/__init__.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/__init__.py
deleted file mode 100644
index 54609ad861f..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-"""
-Extension to the logging package to support buildlogger.
-"""
-
-from __future__ import absolute_import
-
-# Alias the built-in logging.Logger class for type checking arguments. Those interested in
-# constructing a new Logger instance should use the loggers.new_logger() function instead.
-from logging import Logger
-
-from . import config
-from . import buildlogger
-from . import flush
-from . import loggers
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/buildlogger.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/buildlogger.py
deleted file mode 100644
index c5f5d40401b..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/buildlogger.py
+++ /dev/null
@@ -1,284 +0,0 @@
-"""
-Defines handlers for communicating with a buildlogger server.
-"""
-
-from __future__ import absolute_import
-
-import functools
-import urllib2
-
-from . import handlers
-from . import loggers
-from .. import config as _config
-
-
-CREATE_BUILD_ENDPOINT = "/build"
-APPEND_GLOBAL_LOGS_ENDPOINT = "/build/%(build_id)s"
-CREATE_TEST_ENDPOINT = "/build/%(build_id)s/test"
-APPEND_TEST_LOGS_ENDPOINT = "/build/%(build_id)s/test/%(test_id)s"
-
-_BUILDLOGGER_REALM = "buildlogs"
-_BUILDLOGGER_CONFIG = "mci.buildlogger"
-
-_SEND_AFTER_LINES = 2000
-_SEND_AFTER_SECS = 10
-
-
-def _log_on_error(func):
- """
- A decorator that causes any exceptions to be logged by the
- "buildlogger" Logger instance.
-
- Returns the wrapped function's return value, or None if an error
- was encountered.
- """
-
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- try:
- return func(*args, **kwargs)
- except urllib2.HTTPError as err:
- sb = [] # String builder.
- sb.append("HTTP Error %s: %s" % (err.code, err.msg))
- sb.append("POST %s" % (err.filename))
-
- for name in err.hdrs:
- value = err.hdrs[name]
- sb.append(" %s: %s" % (name, value))
-
- # Try to read the response back from the server.
- if hasattr(err, "read"):
- sb.append(err.read())
-
- loggers._BUILDLOGGER_FALLBACK.exception("\n".join(sb))
- except:
- loggers._BUILDLOGGER_FALLBACK.exception("Encountered an error.")
- return None
-
- return wrapper
-
-@_log_on_error
-def get_config():
- """
- Returns the buildlogger configuration as evaluated from the
- _BUILDLOGGER_CONFIG file.
- """
-
- tmp_globals = {} # Avoid conflicts with variables defined in 'config_file'.
- config = {}
- execfile(_BUILDLOGGER_CONFIG, tmp_globals, config)
-
- # Rename "slavename" to "username" if present.
- if "slavename" in config and "username" not in config:
- config["username"] = config["slavename"]
- del config["slavename"]
- # Rename "passwd" to "password" if present.
- if "passwd" in config and "password" not in config:
- config["password"] = config["passwd"]
- del config["passwd"]
-
- return config
-
-@_log_on_error
-def new_build_id(config):
- """
- Returns a new build id for sending global logs to.
- """
-
- if config is None:
- return None
-
- username = config["username"]
- password = config["password"]
- builder = config["builder"]
- build_num = int(config["build_num"])
-
- handler = handlers.HTTPHandler(
- realm=_BUILDLOGGER_REALM,
- url_root=_config.BUILDLOGGER_URL,
- username=username,
- password=password)
-
- response = handler.post(CREATE_BUILD_ENDPOINT, data={
- "builder": builder,
- "buildnum": build_num,
- })
-
- return response["id"]
-
-@_log_on_error
-def new_test_id(build_id, build_config, test_filename, test_command):
- """
- Returns a new test id for sending test logs to.
- """
-
- if build_id is None or build_config is None:
- return None
-
- handler = handlers.HTTPHandler(
- realm=_BUILDLOGGER_REALM,
- url_root=_config.BUILDLOGGER_URL,
- username=build_config["username"],
- password=build_config["password"])
-
- endpoint = CREATE_TEST_ENDPOINT % {"build_id": build_id}
- response = handler.post(endpoint, data={
- "test_filename": test_filename,
- "command": test_command,
- "phase": build_config.get("build_phase", "unknown"),
- })
-
- return response["id"]
-
-
-class _BaseBuildloggerHandler(handlers.BufferedHandler):
- """
- Base class of the buildlogger handler for the global logs and the
- handler for the test logs.
- """
-
- def __init__(self,
- build_id,
- build_config,
- capacity=_SEND_AFTER_LINES,
- interval_secs=_SEND_AFTER_SECS):
- """
- Initializes the buildlogger handler with the build id and
- credentials.
- """
-
- handlers.BufferedHandler.__init__(self, capacity, interval_secs)
-
- username = build_config["username"]
- password = build_config["password"]
-
- self.http_handler = handlers.HTTPHandler(_BUILDLOGGER_REALM,
- _config.BUILDLOGGER_URL,
- username,
- password)
-
- self.build_id = build_id
- self.retry_buffer = []
-
- def process_record(self, record):
- """
- Returns a tuple of the time the log record was created, and the
- message because the buildlogger expects the log messages
- formatted in JSON as:
-
- [ [ <log-time-1>, <log-message-1> ],
- [ <log-time-2>, <log-message-2> ],
- ... ]
- """
- msg = self.format(record)
- return (record.created, msg)
-
- def post(self, *args, **kwargs):
- """
- Convenience method for subclasses to use when making POST requests.
- """
-
- return self.http_handler.post(*args, **kwargs)
-
- def _append_logs(self, log_lines):
- raise NotImplementedError("_append_logs must be implemented by _BaseBuildloggerHandler"
- " subclasses")
-
- def flush_with_lock(self, close_called):
- """
- Ensures all logging output has been flushed to the buildlogger
- server.
-
- If _append_logs() returns false, then the log messages are added
- to a separate buffer and retried the next time flush() is
- called.
- """
-
- self.retry_buffer.extend(self.buffer)
-
- if self._append_logs(self.retry_buffer):
- self.retry_buffer = []
- elif close_called:
- # Request to the buildlogger server returned an error, so use the fallback logger to
- # avoid losing the log messages entirely.
- for (_, message) in self.retry_buffer:
- # TODO: construct an LogRecord instance equivalent to the one passed to the
- # process_record() method if we ever decide to log the time when the
- # LogRecord was created, e.g. using %(asctime)s in
- # _fallback_buildlogger_handler().
- loggers._BUILDLOGGER_FALLBACK.info(message)
- self.retry_buffer = []
-
- self.buffer = []
-
-
-class BuildloggerTestHandler(_BaseBuildloggerHandler):
- """
- Buildlogger handler for the test logs.
- """
-
- def __init__(self, build_id, build_config, test_id, **kwargs):
- """
- Initializes the buildlogger handler with the build id, test id,
- and credentials.
- """
-
- _BaseBuildloggerHandler.__init__(self, build_id, build_config, **kwargs)
-
- self.test_id = test_id
-
- @_log_on_error
- def _append_logs(self, log_lines):
- """
- Sends a POST request to the APPEND_TEST_LOGS_ENDPOINT with the
- logs that have been captured.
- """
- endpoint = APPEND_TEST_LOGS_ENDPOINT % {
- "build_id": self.build_id,
- "test_id": self.test_id,
- }
-
- response = self.post(endpoint, data=log_lines)
- return response is not None
-
- @_log_on_error
- def _finish_test(self, failed=False):
- """
- Sends a POST request to the APPEND_TEST_LOGS_ENDPOINT with the
- test status.
- """
- endpoint = APPEND_TEST_LOGS_ENDPOINT % {
- "build_id": self.build_id,
- "test_id": self.test_id,
- }
-
- self.post(endpoint, headers={
- "X-Sendlogs-Test-Done": "true",
- "X-Sendlogs-Test-Failed": "true" if failed else "false",
- })
-
- def close(self):
- """
- Closes the buildlogger handler.
- """
-
- _BaseBuildloggerHandler.close(self)
-
- # TODO: pass the test status (success/failure) to this method
- self._finish_test()
-
-
-class BuildloggerGlobalHandler(_BaseBuildloggerHandler):
- """
- Buildlogger handler for the global logs.
- """
-
- @_log_on_error
- def _append_logs(self, log_lines):
- """
- Sends a POST request to the APPEND_GLOBAL_LOGS_ENDPOINT with
- the logs that have been captured.
- """
- endpoint = APPEND_GLOBAL_LOGS_ENDPOINT % {"build_id": self.build_id}
- response = self.post(endpoint, data=log_lines)
- return response is not None
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/config.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/config.py
deleted file mode 100644
index c3960bbafd3..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/config.py
+++ /dev/null
@@ -1,161 +0,0 @@
-"""
-Configuration functions for the logging package.
-"""
-
-from __future__ import absolute_import
-
-import logging
-import sys
-
-from . import buildlogger
-from . import formatters
-from . import loggers
-
-
-_DEFAULT_FORMAT = "[%(name)s] %(message)s"
-
-
-def using_buildlogger(logging_config):
- """
- Returns true if buildlogger is set as a handler on the "fixture" or
- "tests" loggers, and false otherwise.
- """
- for logger_name in (loggers.FIXTURE_LOGGER_NAME, loggers.TESTS_LOGGER_NAME):
- logger_info = logging_config[logger_name]
- if _get_buildlogger_handler_info(logger_info) is not None:
- return True
- return False
-
-
-def apply_config(logging_config):
- """
- Adds all handlers specified by the configuration to the "executor",
- "fixture", and "tests" loggers.
- """
-
- logging_components = (loggers.EXECUTOR_LOGGER_NAME,
- loggers.FIXTURE_LOGGER_NAME,
- loggers.TESTS_LOGGER_NAME)
-
- if not all(component in logging_config for component in logging_components):
- raise ValueError("Logging configuration should contain %s, %s, and %s components"
- % logging_components)
-
- # Configure the executor, fixture, and tests loggers.
- for component in logging_components:
- logger = loggers.LOGGERS_BY_NAME[component]
- logger_info = logging_config[component]
- _configure_logger(logger, logger_info)
-
- # Configure the buildlogger logger.
- loggers._BUILDLOGGER_FALLBACK.addHandler(_fallback_buildlogger_handler())
-
-
-def apply_buildlogger_global_handler(logger, logging_config, build_id=None, build_config=None):
- """
- Adds a buildlogger.BuildloggerGlobalHandler to 'logger' if specified
- to do so by the configuration.
- """
-
- logger_info = logging_config[loggers.FIXTURE_LOGGER_NAME]
- handler_info = _get_buildlogger_handler_info(logger_info)
- if handler_info is None:
- # Not configured to use buildlogger.
- return
-
- if all(x is not None for x in (build_id, build_config)):
- log_format = logger_info.get("format", _DEFAULT_FORMAT)
- formatter = formatters.ISO8601Formatter(fmt=log_format)
-
- handler = buildlogger.BuildloggerGlobalHandler(build_id,
- build_config,
- **handler_info)
- handler.setFormatter(formatter)
- else:
- handler = _fallback_buildlogger_handler()
- # Fallback handler already has formatting configured.
-
- logger.addHandler(handler)
-
-
-def apply_buildlogger_test_handler(logger,
- logging_config,
- build_id=None,
- build_config=None,
- test_id=None):
- """
- Adds a buildlogger.BuildloggerTestHandler to 'logger' if specified
- to do so by the configuration.
- """
-
- logger_info = logging_config[loggers.TESTS_LOGGER_NAME]
- handler_info = _get_buildlogger_handler_info(logger_info)
- if handler_info is None:
- # Not configured to use buildlogger.
- return
-
- if all(x is not None for x in (build_id, build_config, test_id)):
- log_format = logger_info.get("format", _DEFAULT_FORMAT)
- formatter = formatters.ISO8601Formatter(fmt=log_format)
-
- handler = buildlogger.BuildloggerTestHandler(build_id,
- build_config,
- test_id,
- **handler_info)
- handler.setFormatter(formatter)
- else:
- handler = _fallback_buildlogger_handler()
- # Fallback handler already has formatting configured.
-
- logger.addHandler(handler)
-
-
-def _configure_logger(logger, logger_info):
- """
- Adds the handlers specified by the configuration to 'logger'.
- """
-
- log_format = logger_info.get("format", _DEFAULT_FORMAT)
- formatter = formatters.ISO8601Formatter(fmt=log_format)
-
- for handler_info in logger_info.get("handlers", []):
- handler_class = handler_info["class"]
- if handler_class == "logging.FileHandler":
- handler = logging.FileHandler(filename=handler_info["filename"],
- mode=handler_info.get("mode", "w"))
- elif handler_class == "logging.NullHandler":
- handler = logging.NullHandler()
- elif handler_class == "logging.StreamHandler":
- handler = logging.StreamHandler(sys.stdout)
- elif handler_class == "buildlogger":
- continue # Buildlogger handlers are applied when running tests.
- else:
- raise ValueError("Unknown handler class '%s'" % (handler_class))
- handler.setFormatter(formatter)
- logger.addHandler(handler)
-
-
-def _fallback_buildlogger_handler():
- """
- Returns a handler that writes to stderr.
- """
-
- log_format = "[buildlogger:%(name)s] %(message)s"
- formatter = formatters.ISO8601Formatter(fmt=log_format)
-
- handler = logging.StreamHandler(sys.stderr)
- handler.setFormatter(formatter)
-
- return handler
-
-def _get_buildlogger_handler_info(logger_info):
- """
- Returns the buildlogger handler information if it exists, and None
- otherwise.
- """
-
- for handler_info in logger_info["handlers"]:
- handler_info = handler_info.copy()
- if handler_info.pop("class") == "buildlogger":
- return handler_info
- return None
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/flush.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/flush.py
deleted file mode 100644
index c45533f1e13..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/flush.py
+++ /dev/null
@@ -1,97 +0,0 @@
-"""
-Workaround for having too many threads running on 32-bit systems when
-logging to buildlogger that still allows periodically flushing messages
-to the buildlogger server.
-
-This is because a utils.timer.AlarmClock instance is used for each
-buildlogger.BuildloggerTestHandler, but only dismiss()ed when the Python
-process is about to exit.
-"""
-
-from __future__ import absolute_import
-
-import threading
-
-from ..utils import queue
-
-
-_LOGGER_QUEUE = queue.Queue()
-
-_FLUSH_THREAD_LOCK = threading.Lock()
-_FLUSH_THREAD = None
-
-
-def start_thread():
- """
- Starts the flush thread.
- """
-
- global _FLUSH_THREAD
- with _FLUSH_THREAD_LOCK:
- if _FLUSH_THREAD is not None:
- raise ValueError("FlushThread has already been started")
-
- _FLUSH_THREAD = _FlushThread()
- _FLUSH_THREAD.start()
-
-
-def stop_thread():
- """
- Signals the flush thread to stop and waits until it does.
- """
-
- with _FLUSH_THREAD_LOCK:
- if _FLUSH_THREAD is None:
- raise ValueError("FlushThread hasn't been started")
-
- # Add sentinel value to indicate when there are no more loggers to process.
- _LOGGER_QUEUE.put(None)
- _FLUSH_THREAD.join()
-
-
-def close_later(logger):
- """
- Adds 'logger' to the queue so that it is closed later by the flush
- thread.
- """
- _LOGGER_QUEUE.put(logger)
-
-
-class _FlushThread(threading.Thread):
- """
- Asynchronously flushes and closes logging handlers.
- """
-
- def __init__(self):
- """
- Initializes the flush thread.
- """
-
- threading.Thread.__init__(self, name="FlushThread")
- # Do not wait to flush the logs if interrupted by the user.
- self.daemon = True
-
- def run(self):
- """
- Continuously shuts down loggers from the queue.
- """
-
- while True:
- logger = _LOGGER_QUEUE.get()
- try:
- if logger is None:
- # Sentinel value received, so exit.
- break
- _FlushThread._shutdown_logger(logger)
- finally:
- _LOGGER_QUEUE.task_done()
-
- @staticmethod
- def _shutdown_logger(logger):
- """
- Flushes and closes all handlers of 'logger'.
- """
-
- for handler in logger.handlers:
- handler.flush()
- handler.close()
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/formatters.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/formatters.py
deleted file mode 100644
index 4cc36da32d4..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/formatters.py
+++ /dev/null
@@ -1,50 +0,0 @@
-"""
-Custom formatters for the logging handlers.
-"""
-
-from __future__ import absolute_import
-
-import logging
-import sys
-import time
-
-
-class ISO8601Formatter(logging.Formatter):
- """
- An ISO 8601 compliant formatter for log messages. It formats the
- timezone as an hour/minute offset and uses a period as the
- millisecond separator in order to match the log messages of MongoDB.
- """
-
- def formatTime(self, record, datefmt=None):
- converted_time = self.converter(record.created)
-
- if datefmt is not None:
- return time.strftime(datefmt, converted_time)
-
- formatted_time = time.strftime("%Y-%m-%dT%H:%M:%S", converted_time)
- timezone = ISO8601Formatter._format_timezone_offset(converted_time)
- return "%s.%03d%s" % (formatted_time, record.msecs, timezone)
-
- @staticmethod
- def _format_timezone_offset(converted_time):
- """
- Returns the timezone as an hour/minute offset in the form
- "+HHMM" or "-HHMM".
- """
-
- # Windows treats %z in the format string as %Z, so we compute the hour/minute offset
- # manually.
- if converted_time.tm_isdst == 1 and time.daylight:
- utc_offset_secs = time.altzone
- else:
- utc_offset_secs = time.timezone
-
- # The offset is positive if the local timezone is behind (east of) UTC, and negative if it
- # is ahead (west) of UTC.
- utc_offset_prefix = "-" if utc_offset_secs > 0 else "+"
- utc_offset_secs = abs(utc_offset_secs)
-
- utc_offset_mins = (utc_offset_secs / 60) % 60
- utc_offset_hours = utc_offset_secs / 3600
- return "%s%02d%02d" % (utc_offset_prefix, utc_offset_hours, utc_offset_mins)
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/handlers.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/handlers.py
deleted file mode 100644
index b688a1da68a..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/handlers.py
+++ /dev/null
@@ -1,178 +0,0 @@
-"""
-Additional handlers that are used as the base classes of the buildlogger
-handler.
-"""
-
-from __future__ import absolute_import
-
-import json
-import logging
-import threading
-import urllib2
-
-from .. import utils
-from ..utils import timer
-
-_TIMEOUT_SECS = 10
-
-class BufferedHandler(logging.Handler):
- """
- A handler class that buffers logging records in memory. Whenever
- each record is added to the buffer, a check is made to see if the
- buffer should be flushed. If it should, then flush() is expected to
- do what's needed.
- """
-
- def __init__(self, capacity, interval_secs):
- """
- Initializes the handler with the buffer size and timeout after
- which the buffer is flushed regardless.
- """
-
- logging.Handler.__init__(self)
-
- if not isinstance(capacity, int):
- raise TypeError("capacity must be an integer")
- elif capacity <= 0:
- raise ValueError("capacity must be a positive integer")
-
- if not isinstance(interval_secs, (int, float)):
- raise TypeError("interval_secs must be a number")
- elif interval_secs <= 0.0:
- raise ValueError("interval_secs must be a positive number")
-
- self.capacity = capacity
- self.interval_secs = interval_secs
- self.buffer = []
-
- self._lock = threading.Lock()
- self._timer = None # Defer creation until actually begin to log messages.
-
- def _new_timer(self):
- """
- Returns a new timer.AlarmClock instance that will call the
- flush() method after 'interval_secs' seconds.
- """
-
- return timer.AlarmClock(self.interval_secs, self.flush, args=[self])
-
- def process_record(self, record):
- """
- Applies a transformation to the record before it gets added to
- the buffer.
-
- The default implementation returns 'record' unmodified.
- """
-
- return record
-
- def emit(self, record):
- """
- Emits a record.
-
- Append the record to the buffer after it has been transformed by
- process_record(). If the length of the buffer is greater than or
- equal to its capacity, then flush() is called to process the
- buffer.
-
- After flushing the buffer, the timer is restarted so that it
- will expire after another 'interval_secs' seconds.
- """
-
- with self._lock:
- self.buffer.append(self.process_record(record))
- if len(self.buffer) >= self.capacity:
- if self._timer is not None:
- self._timer.snooze()
- self.flush_with_lock(False)
- if self._timer is not None:
- self._timer.reset()
-
- if self._timer is None:
- self._timer = self._new_timer()
- self._timer.start()
-
- def flush(self, close_called=False):
- """
- Ensures all logging output has been flushed.
- """
-
- with self._lock:
- if self.buffer:
- self.flush_with_lock(close_called)
-
- def flush_with_lock(self, close_called):
- """
- Ensures all logging output has been flushed.
-
- This version resets the buffers back to an empty list and is
- intended to be overridden by subclasses.
- """
-
- self.buffer = []
-
- def close(self):
- """
- Tidies up any resources used by the handler.
-
- Stops the timer and flushes the buffer.
- """
-
- if self._timer is not None:
- self._timer.dismiss()
- self.flush(close_called=True)
-
- logging.Handler.close(self)
-
-
-class HTTPHandler(object):
- """
- A class which sends data to a web server using POST requests.
- """
-
- def __init__(self, realm, url_root, username, password):
- """
- Initializes the handler with the necessary authenticaton
- credentials.
- """
-
- digest_handler = urllib2.HTTPDigestAuthHandler()
- digest_handler.add_password(
- realm=realm,
- uri=url_root,
- user=username,
- passwd=password)
-
- self.url_root = url_root
- self.url_opener = urllib2.build_opener(digest_handler, urllib2.HTTPErrorProcessor())
-
- def _make_url(self, endpoint):
- return "%s/%s/" % (self.url_root.rstrip("/"), endpoint.strip("/"))
-
- def post(self, endpoint, data=None, headers=None, timeout_secs=_TIMEOUT_SECS):
- """
- Sends a POST request to the specified endpoint with the supplied
- data.
-
- Returns the response, either as a string or a JSON object based
- on the content type.
- """
-
- data = utils.default_if_none(data, [])
- data = json.dumps(data, encoding="utf-8")
-
- headers = utils.default_if_none(headers, {})
- headers["Content-Type"] = "application/json; charset=utf-8"
-
- url = self._make_url(endpoint)
- request = urllib2.Request(url=url, data=data, headers=headers)
-
- response = self.url_opener.open(request, timeout=timeout_secs)
- headers = response.info()
-
- content_type = headers.gettype()
- if content_type == "application/json":
- encoding = headers.getparam("charset") or "utf-8"
- return json.load(response, encoding=encoding)
-
- return response.read()
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/loggers.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/loggers.py
deleted file mode 100644
index 35f41512425..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/logging/loggers.py
+++ /dev/null
@@ -1,37 +0,0 @@
-"""
-Module to hold the logger instances themselves.
-"""
-
-from __future__ import absolute_import
-
-import logging
-
-EXECUTOR_LOGGER_NAME = "executor"
-FIXTURE_LOGGER_NAME = "fixture"
-TESTS_LOGGER_NAME = "tests"
-
-def new_logger(logger_name, parent=None):
- """
- Returns a new logging.Logger instance with the specified name.
- """
-
- # Set up the logger to handle all messages it receives.
- logger = logging.Logger(logger_name, level=logging.DEBUG)
-
- if parent is not None:
- logger.parent = parent
- logger.propagate = True
-
- return logger
-
-EXECUTOR = new_logger(EXECUTOR_LOGGER_NAME)
-FIXTURE = new_logger(FIXTURE_LOGGER_NAME)
-TESTS = new_logger(TESTS_LOGGER_NAME)
-
-LOGGERS_BY_NAME = {
- EXECUTOR_LOGGER_NAME: EXECUTOR,
- FIXTURE_LOGGER_NAME: FIXTURE,
- TESTS_LOGGER_NAME: TESTS,
-}
-
-_BUILDLOGGER_FALLBACK = new_logger("fallback")
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/parser.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/parser.py
deleted file mode 100644
index 4bcc7bfb137..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/parser.py
+++ /dev/null
@@ -1,368 +0,0 @@
-"""
-Parser for command line arguments.
-"""
-
-from __future__ import absolute_import
-
-import os
-import os.path
-import optparse
-
-from . import config as _config
-from . import testing
-from . import utils
-from .. import resmokeconfig
-
-
-# Mapping of the attribute of the parsed arguments (dest) to its key as it appears in the options
-# YAML configuration file. Most should only be converting from snake_case to camelCase.
-DEST_TO_CONFIG = {
- "base_port": "basePort",
- "buildlogger_url": "buildloggerUrl",
- "continue_on_failure": "continueOnFailure",
- "dbpath_prefix": "dbpathPrefix",
- "dbtest_executable": "dbtest",
- "dry_run": "dryRun",
- "exclude_with_all_tags": "excludeWithAllTags",
- "exclude_with_any_tags": "excludeWithAnyTags",
- "include_with_all_tags": "includeWithAllTags",
- "include_with_any_tags": "includeWithAnyTags",
- "jobs": "jobs",
- "mongo_executable": "mongo",
- "mongod_executable": "mongod",
- "mongod_parameters": "mongodSetParameters",
- "mongos_executable": "mongos",
- "mongos_parameters": "mongosSetParameters",
- "no_journal": "nojournal",
- "prealloc_journal": "preallocJournal",
- "repeat": "repeat",
- "report_file": "reportFile",
- "seed": "seed",
- "shell_read_mode": "shellReadMode",
- "shell_write_mode": "shellWriteMode",
- "shuffle": "shuffle",
- "storage_engine": "storageEngine",
- "wt_coll_config": "wiredTigerCollectionConfigString",
- "wt_engine_config": "wiredTigerEngineConfigString",
- "wt_index_config": "wiredTigerIndexConfigString"
-}
-
-
-def parse_command_line():
- """
- Parses the command line arguments passed to resmoke.py.
- """
-
- parser = optparse.OptionParser()
-
- parser.add_option("--suites", dest="suite_files", metavar="SUITE1,SUITE2",
- help=("Comma separated list of YAML files that each specify the configuration"
- " of a suite. If the file is located in the resmokeconfig/suites/"
- " directory, then the basename without the .yml extension can be"
- " specified, e.g. 'core'."))
-
- parser.add_option("--executor", dest="executor_file", metavar="EXECUTOR",
- help=("A YAML file that specifies the executor configuration. If the file is"
- " located in the resmokeconfig/suites/ directory, then the basename"
- " without the .yml extension can be specified, e.g. 'core_small_oplog'."
- " If specified in combination with the --suites option, then the suite"
- " configuration takes precedence."))
-
- parser.add_option("--log", dest="logger_file", metavar="LOGGER",
- help=("A YAML file that specifies the logging configuration. If the file is"
- " located in the resmokeconfig/suites/ directory, then the basename"
- " without the .yml extension can be specified, e.g. 'console'."))
-
- parser.add_option("--options", dest="options_file", metavar="OPTIONS",
- help="A YAML file that specifies global options to resmoke.py.")
-
- parser.add_option("--basePort", dest="base_port", metavar="PORT",
- help=("The starting port number to use for mongod and mongos processes"
- " spawned by resmoke.py or the tests themselves. Each fixture and Job"
- " allocates a contiguous range of ports."))
-
- parser.add_option("--buildloggerUrl", action="store", dest="buildlogger_url", metavar="URL",
- help="The root url of the buildlogger server.")
-
- parser.add_option("--continueOnFailure", action="store_true", dest="continue_on_failure",
- help="Executes all tests in all suites, even if some of them fail.")
-
- parser.add_option("--dbpathPrefix", dest="dbpath_prefix", metavar="PATH",
- help=("The directory which will contain the dbpaths of any mongod's started"
- " by resmoke.py or the tests themselves."))
-
- parser.add_option("--dbtest", dest="dbtest_executable", metavar="PATH",
- help="The path to the dbtest executable for resmoke to use.")
-
- parser.add_option("--excludeWithAllTags", dest="exclude_with_all_tags", metavar="TAG1,TAG2",
- help=("Comma separated list of tags. Any jstest that contains all of the"
- " specified tags will be excluded from any suites that are run."))
-
- parser.add_option("--excludeWithAnyTags", dest="exclude_with_any_tags", metavar="TAG1,TAG2",
- help=("Comma separated list of tags. Any jstest that contains any of the"
- " specified tags will be excluded from any suites that are run."))
-
- parser.add_option("--includeWithAllTags", dest="include_with_all_tags", metavar="TAG1,TAG2",
- help=("Comma separated list of tags. For the jstest portion of the suite(s),"
- " only tests which have all of the specified tags will be run."))
-
- parser.add_option("--includeWithAnyTags", dest="include_with_any_tags", metavar="TAG1,TAG2",
- help=("Comma separated list of tags. For the jstest portion of the suite(s),"
- " only tests which have at least one of the specified tags will be"
- " run."))
-
- parser.add_option("-n", action="store_const", const="tests", dest="dry_run",
- help=("Output the tests that would be run."))
-
- # TODO: add support for --dryRun=commands
- parser.add_option("--dryRun", type="choice", action="store", dest="dry_run",
- choices=("off", "tests"), metavar="MODE",
- help=("Instead of running the tests, output the tests that would be run"
- " (if MODE=tests). Defaults to MODE=%default."))
-
- parser.add_option("-j", "--jobs", type="int", dest="jobs", metavar="JOBS",
- help=("The number of Job instances to use. Each instance will receive its own"
- " MongoDB deployment to dispatch tests to."))
-
- parser.add_option("-l", "--listSuites", action="store_true", dest="list_suites",
- help="List the names of the suites available to execute.")
-
- parser.add_option("--mongo", dest="mongo_executable", metavar="PATH",
- help="The path to the mongo shell executable for resmoke.py to use.")
-
- parser.add_option("--mongod", dest="mongod_executable", metavar="PATH",
- help="The path to the mongod executable for resmoke.py to use.")
-
- parser.add_option("--mongodSetParameters", dest="mongod_parameters",
- metavar="{key1: value1, key2: value2, ..., keyN: valueN}",
- help=("Pass one or more --setParameter options to all mongod processes"
- " started by resmoke.py. The argument is specified as bracketed YAML -"
- " i.e. JSON with support for single quoted and unquoted keys."))
-
- parser.add_option("--mongos", dest="mongos_executable", metavar="PATH",
- help="The path to the mongos executable for resmoke.py to use.")
-
- parser.add_option("--mongosSetParameters", dest="mongos_parameters",
- metavar="{key1: value1, key2: value2, ..., keyN: valueN}",
- help=("Pass one or more --setParameter options to all mongos processes"
- " started by resmoke.py. The argument is specified as bracketed YAML -"
- " i.e. JSON with support for single quoted and unquoted keys."))
-
- parser.add_option("--nojournal", action="store_true", dest="no_journal",
- help="Disable journaling for all mongod's.")
-
- parser.add_option("--nopreallocj", action="store_const", const="off", dest="prealloc_journal",
- help="Disable preallocation of journal files for all mongod processes.")
-
- parser.add_option("--preallocJournal", type="choice", action="store", dest="prealloc_journal",
- choices=("on", "off"), metavar="ON|OFF",
- help=("Enable or disable preallocation of journal files for all mongod"
- " processes. Defaults to %default."))
-
- parser.add_option("--repeat", type="int", dest="repeat", metavar="N",
- help="Repeat the given suite(s) N times, or until one fails.")
-
- parser.add_option("--reportFile", dest="report_file", metavar="REPORT",
- help="Write a JSON file with test status and timing information.")
-
- parser.add_option("--seed", type="int", dest="seed", metavar="SEED",
- help=("Seed for the random number generator. Useful in combination with the"
- " --shuffle option for producing a consistent test execution order."))
-
- parser.add_option("--shellReadMode", type="choice", action="store", dest="shell_read_mode",
- choices=("commands", "compatibility", "legacy"), metavar="READ_MODE",
- help="The read mode used by the mongo shell.")
-
- parser.add_option("--shellWriteMode", type="choice", action="store", dest="shell_write_mode",
- choices=("commands", "compatibility", "legacy"), metavar="WRITE_MODE",
- help="The write mode used by the mongo shell.")
-
- parser.add_option("--shuffle", action="store_true", dest="shuffle",
- help="Randomize the order in which tests are executed.")
-
- parser.add_option("--storageEngine", dest="storage_engine", metavar="ENGINE",
- help="The storage engine used by dbtests and jstests.")
-
- parser.add_option("--wiredTigerCollectionConfigString", dest="wt_coll_config", metavar="CONFIG",
- help="Set the WiredTiger collection configuration setting for all mongod's.")
-
- parser.add_option("--wiredTigerEngineConfigString", dest="wt_engine_config", metavar="CONFIG",
- help="Set the WiredTiger engine configuration setting for all mongod's.")
-
- parser.add_option("--wiredTigerIndexConfigString", dest="wt_index_config", metavar="CONFIG",
- help="Set the WiredTiger index configuration setting for all mongod's.")
-
- parser.set_defaults(executor_file="with_server",
- logger_file="console",
- dry_run="off",
- list_suites=False,
- prealloc_journal="off")
-
- return parser.parse_args()
-
-
-def get_logging_config(values):
- return _get_logging_config(values.logger_file)
-
-
-def update_config_vars(values):
- options = _get_options_config(values.options_file)
-
- config = _config.DEFAULTS.copy()
- config.update(options)
-
- values = vars(values)
- for dest in values:
- if dest not in DEST_TO_CONFIG:
- continue
- config_var = DEST_TO_CONFIG[dest]
- if values[dest] is not None:
- config[config_var] = values[dest]
-
- _config.BASE_PORT = int(config.pop("basePort"))
- _config.BUILDLOGGER_URL = config.pop("buildloggerUrl")
- _config.DBPATH_PREFIX = _expand_user(config.pop("dbpathPrefix"))
- _config.DBTEST_EXECUTABLE = _expand_user(config.pop("dbtest"))
- _config.DRY_RUN = config.pop("dryRun")
- _config.EXCLUDE_WITH_ALL_TAGS = config.pop("excludeWithAllTags")
- _config.EXCLUDE_WITH_ANY_TAGS = config.pop("excludeWithAnyTags")
- _config.FAIL_FAST = not config.pop("continueOnFailure")
- _config.INCLUDE_WITH_ALL_TAGS = config.pop("includeWithAllTags")
- _config.INCLUDE_WITH_ANY_TAGS = config.pop("includeWithAnyTags")
- _config.JOBS = config.pop("jobs")
- _config.MONGO_EXECUTABLE = _expand_user(config.pop("mongo"))
- _config.MONGOD_EXECUTABLE = _expand_user(config.pop("mongod"))
- _config.MONGOD_SET_PARAMETERS = config.pop("mongodSetParameters")
- _config.MONGOS_EXECUTABLE = _expand_user(config.pop("mongos"))
- _config.MONGOS_SET_PARAMETERS = config.pop("mongosSetParameters")
- _config.NO_JOURNAL = config.pop("nojournal")
- _config.NO_PREALLOC_JOURNAL = config.pop("preallocJournal") == "off"
- _config.RANDOM_SEED = config.pop("seed")
- _config.REPEAT = config.pop("repeat")
- _config.REPORT_FILE = config.pop("reportFile")
- _config.SHELL_READ_MODE = config.pop("shellReadMode")
- _config.SHELL_WRITE_MODE = config.pop("shellWriteMode")
- _config.SHUFFLE = config.pop("shuffle")
- _config.STORAGE_ENGINE = config.pop("storageEngine")
- _config.WT_COLL_CONFIG = config.pop("wiredTigerCollectionConfigString")
- _config.WT_ENGINE_CONFIG = config.pop("wiredTigerEngineConfigString")
- _config.WT_INDEX_CONFIG = config.pop("wiredTigerIndexConfigString")
-
- if config:
- raise optparse.OptionValueError("Unknown option(s): %s" % (config.keys()))
-
-
-def get_suites(values, args):
- if (values.suite_files is None and not args) or (values.suite_files is not None and args):
- raise optparse.OptionValueError("Must specify either --suites or a list of tests")
-
- # If there are no suites specified, but there are args, assume they are jstests.
- if args:
- # No specified config, just use the following, and default the logging and executor.
- suite_config = _make_jstests_config(args)
- _ensure_executor(suite_config, values.executor_file)
- suite = testing.suite.Suite("<jstests>", suite_config)
- return [suite]
-
- suite_files = values.suite_files.split(",")
-
- suites = []
- for suite_filename in suite_files:
- suite_config = _get_suite_config(suite_filename)
- _ensure_executor(suite_config, values.executor_file)
- suite = testing.suite.Suite(suite_filename, suite_config)
- suites.append(suite)
- return suites
-
-
-def get_named_suites():
- """
- Returns the list of suites available to execute.
- """
-
- # Skip "with_server" and "no_server" because they do not define any test files to run.
- executor_only = set(["with_server", "no_server"])
- suite_names = [suite for suite in resmokeconfig.NAMED_SUITES if suite not in executor_only]
- suite_names.sort()
- return suite_names
-
-
-def _get_logging_config(pathname):
- """
- Attempts to read a YAML configuration from 'pathname' that describes
- how resmoke.py should log the tests and fixtures.
- """
-
- # Named loggers are specified as the basename of the file, without the .yml extension.
- if not utils.is_yaml_file(pathname) and not os.path.dirname(pathname):
- if pathname not in resmokeconfig.NAMED_LOGGERS:
- raise optparse.OptionValueError("Unknown logger '%s'" % (pathname))
- pathname = resmokeconfig.NAMED_LOGGERS[pathname] # Expand 'pathname' to full path.
-
- if not utils.is_yaml_file(pathname) or not os.path.isfile(pathname):
- raise optparse.OptionValueError("Expected a logger YAML config, but got '%s'" % (pathname))
-
- return utils.load_yaml_file(pathname).pop("logging")
-
-
-def _get_options_config(pathname):
- """
- Attempts to read a YAML configuration from 'pathname' that describes
- any modifications to global options.
- """
-
- if pathname is None:
- return {}
-
- return utils.load_yaml_file(pathname).pop("options")
-
-
-def _get_suite_config(pathname):
- """
- Attempts to read a YAML configuration from 'pathname' that describes
- what tests to run and how to run them.
- """
-
- # Named suites are specified as the basename of the file, without the .yml extension.
- if not utils.is_yaml_file(pathname) and not os.path.dirname(pathname):
- if pathname not in resmokeconfig.NAMED_SUITES:
- raise optparse.OptionValueError("Unknown suite '%s'" % (pathname))
- pathname = resmokeconfig.NAMED_SUITES[pathname] # Expand 'pathname' to full path.
-
- if not utils.is_yaml_file(pathname) or not os.path.isfile(pathname):
- raise optparse.OptionValueError("Expected a suite YAML config, but got '%s'" % (pathname))
-
- return utils.load_yaml_file(pathname)
-
-
-def _make_jstests_config(js_files):
- for pathname in js_files:
- if not utils.is_js_file(pathname) or not os.path.isfile(pathname):
- raise optparse.OptionValueError("Expected a list of JS files, but got '%s'"
- % (pathname))
-
- return {"selector": {"js_test": {"roots": js_files}}}
-
-
-def _ensure_executor(suite_config, executor_pathname):
- if "executor" not in suite_config:
- # Named executors are specified as the basename of the file, without the .yml extension.
- if not utils.is_yaml_file(executor_pathname) and not os.path.dirname(executor_pathname):
- if executor_pathname not in resmokeconfig.NAMED_SUITES:
- raise optparse.OptionValueError("Unknown executor '%s'" % (executor_pathname))
- executor_pathname = resmokeconfig.NAMED_SUITES[executor_pathname]
-
- if not utils.is_yaml_file(executor_pathname) or not os.path.isfile(executor_pathname):
- raise optparse.OptionValueError("Expected an executor YAML config, but got '%s'"
- % (executor_pathname))
-
- suite_config["executor"] = utils.load_yaml_file(executor_pathname).pop("executor")
-
-
-def _expand_user(pathname):
- """
- Wrapper around os.path.expanduser() to do nothing when given None.
- """
- if pathname is None:
- return None
- return os.path.expanduser(pathname)
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/selector.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/selector.py
deleted file mode 100644
index c2dc0fca41b..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/selector.py
+++ /dev/null
@@ -1,291 +0,0 @@
-"""
-Test selection utility.
-
-Defines filtering rules for what tests to include in a suite depending
-on whether they apply to C++ unit tests, dbtests, or JS tests.
-"""
-
-from __future__ import absolute_import
-
-import fnmatch
-import os.path
-import subprocess
-import sys
-
-from . import config
-from . import errors
-from . import utils
-from .utils import globstar
-from .utils import jscomment
-
-def _filter_cpp_tests(kind, root, include_files, exclude_files):
- """
- Generic filtering logic for C++ tests that are sourced from a list
- of test executables.
- """
- include_files = utils.default_if_none(include_files, [])
- exclude_files = utils.default_if_none(exclude_files, [])
-
- tests = []
- with open(root, "r") as fp:
- for test_path in fp:
- test_path = test_path.rstrip()
- tests.append(test_path)
-
- (remaining, included, _) = _filter_by_filename(kind,
- tests,
- include_files,
- exclude_files)
-
- if include_files:
- return list(included)
- elif exclude_files:
- return list(remaining)
- return tests
-
-def filter_cpp_unit_tests(root="build/unittests.txt", include_files=None, exclude_files=None):
- """
- Filters out what C++ unit tests to run.
- """
- return _filter_cpp_tests("C++ unit test", root, include_files, exclude_files)
-
-
-def filter_cpp_integration_tests(root="build/integration_tests.txt",
- include_files=None,
- exclude_files=None):
- """
- Filters out what C++ integration tests to run.
- """
- return _filter_cpp_tests("C++ integration test", root, include_files, exclude_files)
-
-
-def filter_dbtests(binary=None, include_suites=None):
- """
- Filters out what dbtests to run.
- """
-
- # Command line option overrides the YAML configuration.
- binary = utils.default_if_none(config.DBTEST_EXECUTABLE, binary)
- # Use the default if nothing specified.
- binary = utils.default_if_none(binary, config.DEFAULT_DBTEST_EXECUTABLE)
-
- include_suites = utils.default_if_none(include_suites, [])
-
- if not utils.is_string_list(include_suites):
- raise TypeError("include_suites must be a list of strings")
-
- # Ensure that executable files on Windows have a ".exe" extension.
- if sys.platform == "win32" and os.path.splitext(binary)[1] != ".exe":
- binary += ".exe"
-
- program = subprocess.Popen([binary, "--list"], stdout=subprocess.PIPE)
- stdout = program.communicate()[0]
-
- if program.returncode != 0:
- raise errors.ResmokeError("Getting list of dbtest suites failed")
-
- dbtests = stdout.splitlines()
-
- if not include_suites:
- return dbtests
-
- dbtests = set(dbtests)
-
- (verbatim, globbed) = _partition(include_suites, normpath=False)
- included = _pop_all("dbtest suite", dbtests, verbatim)
-
- for suite_pattern in globbed:
- for suite_name in dbtests:
- if fnmatch.fnmatchcase(suite_name, suite_pattern):
- included.add(suite_name)
-
- return list(included)
-
-
-def filter_jstests(roots,
- include_files=None,
- include_with_all_tags=None,
- include_with_any_tags=None,
- exclude_files=None,
- exclude_with_all_tags=None,
- exclude_with_any_tags=None):
- """
- Filters out what jstests to run.
- """
-
- include_files = utils.default_if_none(include_files, [])
- exclude_files = utils.default_if_none(exclude_files, [])
-
- # Command line options override the YAML options, and all should be defaulted to an empty list
- # if not specified.
- tags = {
- "exclude_with_all_tags": exclude_with_all_tags,
- "exclude_with_any_tags": exclude_with_any_tags,
- "include_with_all_tags": include_with_all_tags,
- "include_with_any_tags": include_with_any_tags,
- }
- cmd_line_values = (
- ("exclude_with_all_tags", config.EXCLUDE_WITH_ALL_TAGS),
- ("exclude_with_any_tags", config.EXCLUDE_WITH_ANY_TAGS),
- ("include_with_all_tags", config.INCLUDE_WITH_ALL_TAGS),
- ("include_with_any_tags", config.INCLUDE_WITH_ANY_TAGS),
- )
- for (tag_category, cmd_line_val) in cmd_line_values:
- if cmd_line_val is not None:
- # Ignore the empty string when it is used as a tag. Specifying an empty string on the
- # command line allows a user to unset the list of tags specified in the YAML
- # configuration.
- tags[tag_category] = set([tag for tag in cmd_line_val.split(",") if tag != ""])
- else:
- tags[tag_category] = set(utils.default_if_none(tags[tag_category], []))
-
- using_tags = 0
- for name in tags:
- if not utils.is_string_set(tags[name]):
- raise TypeError("%s must be a list of strings" % (name))
- if len(tags[name]) > 0:
- using_tags += 1
-
- if using_tags > 1:
- raise ValueError("Can only specify one of 'include_with_all_tags', 'include_with_any_tags',"
- " 'exclude_with_all_tags', and 'exclude_with_any_tags'. If you wish to"
- " unset one of these options, use --includeWithAllTags='' or similar")
-
- jstests = []
- for root in roots:
- jstests.extend(globstar.iglob(root))
-
- (remaining, included, _) = _filter_by_filename("jstest",
- jstests,
- include_files,
- exclude_files)
-
- # Skip parsing comments if not using tags
- if not using_tags:
- if include_files:
- return list(included)
- elif exclude_files:
- return list(remaining)
- return jstests
-
- jstests = set(remaining)
- excluded = set()
-
- for filename in jstests:
- file_tags = set(jscomment.get_tags(filename))
- if tags["include_with_all_tags"] and not tags["include_with_all_tags"] - file_tags:
- included.add(filename)
- elif tags["include_with_any_tags"] and tags["include_with_any_tags"] & file_tags:
- included.add(filename)
- elif tags["exclude_with_all_tags"] and not tags["exclude_with_all_tags"] - file_tags:
- excluded.add(filename)
- elif tags["exclude_with_any_tags"] and tags["exclude_with_any_tags"] & file_tags:
- excluded.add(filename)
-
- if tags["include_with_all_tags"] or tags["include_with_any_tags"]:
- if exclude_files:
- return list((included & jstests) - excluded)
- return list(included)
- else:
- if include_files:
- return list(included | (jstests - excluded))
- return list(jstests - excluded)
-
-
-def _filter_by_filename(kind, universe, include_files, exclude_files):
- """
- Filters out what tests to run solely by filename.
-
- Returns the triplet (remaining, included, excluded), where
- 'remaining' is 'universe' after 'included' and 'excluded' were
- removed from it.
- """
-
- if not utils.is_string_list(include_files):
- raise TypeError("include_files must be a list of strings")
- elif not utils.is_string_list(exclude_files):
- raise TypeError("exclude_files must be a list of strings")
- elif include_files and exclude_files:
- raise ValueError("Cannot specify both include_files and exclude_files")
-
- universe = set(universe)
- if include_files:
- (verbatim, globbed) = _partition(include_files)
- # Remove all matching files of 'verbatim' from 'universe'.
- included_verbatim = _pop_all(kind, universe, verbatim)
- included_globbed = set()
-
- for file_pattern in globbed:
- included_globbed.update(globstar.iglob(file_pattern))
-
- # Remove all matching files of 'included_globbed' from 'universe' without checking whether
- # the same file is expanded to multiple times. This implicitly takes an intersection
- # between 'included_globbed' and 'universe'.
- included_globbed = _pop_all(kind, universe, included_globbed, validate=False)
- return (universe, included_verbatim | included_globbed, set())
-
- elif exclude_files:
- (verbatim, globbed) = _partition(exclude_files)
-
- # Remove all matching files of 'verbatim' from 'universe'.
- excluded_verbatim = _pop_all(kind, universe, verbatim)
- excluded_globbed = set()
-
- for file_pattern in globbed:
- excluded_globbed.update(globstar.iglob(file_pattern))
-
- # Remove all matching files of 'excluded_globbed' from 'universe' without checking whether
- # the same file is expanded to multiple times. This implicitly takes an intersection
- # between 'excluded_globbed' and 'universe'.
- excluded_globbed = _pop_all(kind, universe, excluded_globbed, validate=False)
- return (universe, set(), excluded_verbatim | excluded_globbed)
-
- return (universe, set(), set())
-
-
-def _partition(pathnames, normpath=True):
- """
- Splits 'pathnames' into two separate lists based on whether they
- use a glob pattern.
-
- Returns the pair (non-globbed pathnames, globbed pathnames).
- """
-
- verbatim = []
- globbed = []
-
- for pathname in pathnames:
- if globstar.is_glob_pattern(pathname):
- globbed.append(pathname)
- continue
-
- # Normalize 'pathname' so exact string comparison can be used later.
- if normpath:
- pathname = os.path.normpath(pathname)
- verbatim.append(pathname)
-
- return (verbatim, globbed)
-
-
-def _pop_all(kind, universe, iterable, validate=True):
- """
- Removes all elements of 'iterable' from 'universe' and returns them.
-
- If 'validate' is true, then a ValueError is raised if a element
- would be removed multiple times, or if an element of 'iterable' does
- not appear in 'universe' at all.
- """
-
- members = set()
-
- for elem in iterable:
- if validate and elem in members:
- raise ValueError("%s '%s' specified multiple times" % (kind, elem))
-
- if elem in universe:
- universe.remove(elem)
- members.add(elem)
- elif validate:
- raise ValueError("Unrecognized %s '%s'" % (kind, elem))
-
- return members
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/__init__.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/__init__.py
deleted file mode 100644
index e4acff00521..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-"""
-Extension to the unittest package to support buildlogger and parallel
-test execution.
-"""
-
-from __future__ import absolute_import
-
-from . import executor
-from . import suite
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/executor.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/executor.py
deleted file mode 100644
index 5d79abd6ac6..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/executor.py
+++ /dev/null
@@ -1,307 +0,0 @@
-"""
-Driver of the test execution framework.
-"""
-
-from __future__ import absolute_import
-
-import threading
-
-from . import fixtures
-from . import hooks as _hooks
-from . import job as _job
-from . import report as _report
-from . import testcases
-from .. import config as _config
-from .. import errors
-from .. import logging
-from .. import utils
-from ..utils import queue as _queue
-
-
-class TestGroupExecutor(object):
- """
- Executes a test group.
-
- Responsible for setting up and tearing down the fixtures that the
- tests execute against.
- """
-
- _TIMEOUT = 24 * 60 * 60 # =1 day (a long time to have tests run)
-
- def __init__(self,
- exec_logger,
- test_group,
- logging_config,
- config=None,
- fixture=None,
- hooks=None):
- """
- Initializes the TestGroupExecutor with the test group to run.
- """
-
- # Build a logger for executing this group of tests.
- logger_name = "%s:%s" % (exec_logger.name, test_group.test_kind)
- self.logger = logging.loggers.new_logger(logger_name, parent=exec_logger)
-
- self.logging_config = logging_config
- self.fixture_config = fixture
- self.hooks_config = utils.default_if_none(hooks, [])
- self.test_config = utils.default_if_none(config, {})
-
- self._test_group = test_group
-
- self._using_buildlogger = logging.config.using_buildlogger(logging_config)
- self._build_config = None
-
- if self._using_buildlogger:
- self._build_config = logging.buildlogger.get_config()
-
- # Must be done after getting buildlogger configuration.
- self._jobs = [self._make_job(job_num) for job_num in xrange(_config.JOBS)]
-
- def run(self):
- """
- Executes the test group.
-
- Any exceptions that occur during setting up or tearing down a
- fixture are propagated.
- """
-
- self.logger.info("Starting execution of %ss...", self._test_group.test_kind)
-
- return_code = 0
- try:
- if not self._setup_fixtures():
- return_code = 2
- return
-
- num_repeats = _config.REPEAT
- while num_repeats > 0:
- test_queue = self._make_test_queue()
- self._test_group.record_start()
- (report, interrupted) = self._run_tests(test_queue)
- self._test_group.record_end(report)
-
- # If the user triggered a KeyboardInterrupt, then we should stop.
- if interrupted:
- raise errors.UserInterrupt("Received interrupt from user")
-
- sb = [] # String builder.
- self._test_group.summarize_latest(sb)
- self.logger.info("Summary: %s", "\n ".join(sb))
-
- if not report.wasSuccessful():
- return_code = 1
- if _config.FAIL_FAST:
- break
-
- # Clear the report so it can be reused for the next execution.
- for job in self._jobs:
- job.report.reset()
- num_repeats -= 1
- finally:
- if not self._teardown_fixtures():
- return_code = 2
- self._test_group.return_code = return_code
-
- def _setup_fixtures(self):
- """
- Sets up a fixture for each job.
- """
-
- for job in self._jobs:
- try:
- job.fixture.setup()
- except:
- self.logger.exception("Encountered an error while setting up %s.", job.fixture)
- return False
-
- # Once they have all been started, wait for them to become available.
- for job in self._jobs:
- try:
- job.fixture.await_ready()
- except:
- self.logger.exception("Encountered an error while waiting for %s to be ready",
- job.fixture)
- return False
-
- return True
-
- def _run_tests(self, test_queue):
- """
- Starts a thread for each Job instance and blocks until all of
- the tests are run.
-
- Returns a (combined report, user interrupted) pair, where the
- report contains the status and timing information of tests run
- by all of the threads.
- """
-
- threads = []
- interrupt_flag = threading.Event()
- user_interrupted = False
- try:
- # Run each Job instance in its own thread.
- for job in self._jobs:
- t = threading.Thread(target=job, args=(test_queue, interrupt_flag))
- # Do not wait for tests to finish executing if interrupted by the user.
- t.daemon = True
- t.start()
- threads.append(t)
-
- joined = False
- while not joined:
- # Need to pass a timeout to join() so that KeyboardInterrupt exceptions
- # are propagated.
- joined = test_queue.join(TestGroupExecutor._TIMEOUT)
- except (KeyboardInterrupt, SystemExit):
- interrupt_flag.set()
- user_interrupted = True
- else:
- # Only wait for all the Job instances if not interrupted by the user.
- for t in threads:
- t.join()
-
- reports = [job.report for job in self._jobs]
- combined_report = _report.TestReport.combine(*reports)
-
- # We cannot return 'interrupt_flag.is_set()' because the interrupt flag can be set by a Job
- # instance if a test fails and it decides to drain the queue. We only want to raise a
- # StopExecution exception in TestGroupExecutor.run() if the user triggered the interrupt.
- return (combined_report, user_interrupted)
-
- def _teardown_fixtures(self):
- """
- Tears down all of the fixtures.
-
- Returns true if all fixtures were torn down successfully, and
- false otherwise.
- """
-
- success = True
- for job in self._jobs:
- try:
- if not job.fixture.teardown():
- self.logger.warn("Teardown of %s was not successful.", job.fixture)
- success = False
- except:
- self.logger.exception("Encountered an error while tearing down %s.", job.fixture)
- success = False
-
- return success
-
- def _get_build_id(self, job_num):
- """
- Returns a unique build id for a job.
- """
-
- build_config = self._build_config
-
- if self._using_buildlogger:
- # Use a distinct "builder" for each job in order to separate their logs.
- if build_config is not None and "builder" in build_config:
- build_config = build_config.copy()
- build_config["builder"] = "%s_job%d" % (build_config["builder"], job_num)
-
- build_id = logging.buildlogger.new_build_id(build_config)
-
- if build_config is None or build_id is None:
- self.logger.info("Encountered an error configuring buildlogger for job #%d, falling"
- " back to stderr.", job_num)
-
- return build_id, build_config
-
- return None, build_config
-
- def _make_fixture(self, job_num, build_id, build_config):
- """
- Creates a fixture for a job.
- """
-
- fixture_config = {}
- fixture_class = fixtures.NOOP_FIXTURE_CLASS
-
- if self.fixture_config is not None:
- fixture_config = self.fixture_config.copy()
- fixture_class = fixture_config.pop("class")
-
- logger_name = "%s:job%d" % (fixture_class, job_num)
- logger = logging.loggers.new_logger(logger_name, parent=logging.loggers.FIXTURE)
- logging.config.apply_buildlogger_global_handler(logger,
- self.logging_config,
- build_id=build_id,
- build_config=build_config)
-
- return fixtures.make_fixture(fixture_class, logger, job_num, **fixture_config)
-
- def _make_hooks(self, job_num, fixture):
- """
- Creates the custom behaviors for the job's fixture.
- """
-
- behaviors = []
-
- for behavior_config in self.hooks_config:
- behavior_config = behavior_config.copy()
- behavior_class = behavior_config.pop("class")
-
- logger_name = "%s:job%d" % (behavior_class, job_num)
- logger = logging.loggers.new_logger(logger_name, parent=self.logger)
- behavior = _hooks.make_custom_behavior(behavior_class,
- logger,
- fixture,
- **behavior_config)
- behaviors.append(behavior)
-
- return behaviors
-
- def _make_job(self, job_num):
- """
- Returns a Job instance with its own fixture, hooks, and test
- report.
- """
-
- build_id, build_config = self._get_build_id(job_num)
- fixture = self._make_fixture(job_num, build_id, build_config)
- hooks = self._make_hooks(job_num, fixture)
-
- logger_name = "%s:job%d" % (self.logger.name, job_num)
- logger = logging.loggers.new_logger(logger_name, parent=self.logger)
-
- if build_id is not None:
- endpoint = logging.buildlogger.APPEND_GLOBAL_LOGS_ENDPOINT % {"build_id": build_id}
- url = "%s/%s/" % (_config.BUILDLOGGER_URL.rstrip("/"), endpoint.strip("/"))
- logger.info("Writing output of job #%d to %s.", job_num, url)
-
- report = _report.TestReport(logger,
- self.logging_config,
- build_id=build_id,
- build_config=build_config)
-
- return _job.Job(logger, fixture, hooks, report)
-
- def _make_test_queue(self):
- """
- Returns a queue of TestCase instances.
-
- Use a multi-consumer queue instead of a unittest.TestSuite so
- that the test cases can be dispatched to multiple threads.
- """
-
- test_kind_logger = logging.loggers.new_logger(self._test_group.test_kind,
- parent=logging.loggers.TESTS)
-
- # Put all the test cases in a queue.
- queue = _queue.Queue()
- for test_name in self._test_group.tests:
- test_case = testcases.make_test_case(self._test_group.test_kind,
- test_kind_logger,
- test_name,
- **self.test_config)
- queue.put(test_case)
-
- # Add sentinel value for each job to indicate when there are no more items to process.
- for _ in xrange(_config.JOBS):
- queue.put(None)
-
- return queue
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/__init__.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/__init__.py
deleted file mode 100644
index d68a66911d2..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/__init__.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""
-Fixtures for executing JSTests against.
-"""
-
-from __future__ import absolute_import
-
-from .interface import Fixture, ReplFixture
-from .standalone import MongoDFixture
-from .replicaset import ReplicaSetFixture
-from .masterslave import MasterSlaveFixture
-from .shardedcluster import ShardedClusterFixture
-
-
-NOOP_FIXTURE_CLASS = "Fixture"
-
-_FIXTURES = {
- "Fixture": Fixture,
- "MongoDFixture": MongoDFixture,
- "ReplicaSetFixture": ReplicaSetFixture,
- "MasterSlaveFixture": MasterSlaveFixture,
- "ShardedClusterFixture": ShardedClusterFixture,
-}
-
-
-def make_fixture(class_name, *args, **kwargs):
- """
- Factory function for creating Fixture instances.
- """
-
- if class_name not in _FIXTURES:
- raise ValueError("Unknown fixture class '%s'" % (class_name))
- return _FIXTURES[class_name](*args, **kwargs)
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/interface.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/interface.py
deleted file mode 100644
index 5fbf537c107..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/interface.py
+++ /dev/null
@@ -1,128 +0,0 @@
-"""
-Interface of the different fixtures for executing JSTests against.
-"""
-
-from __future__ import absolute_import
-
-import time
-
-import pymongo
-
-from ... import errors
-from ... import logging
-
-
-class Fixture(object):
- """
- Base class for all fixtures.
- """
-
- def __init__(self, logger, job_num):
- """
- Initializes the fixtures with a logger instance.
- """
-
- if not isinstance(logger, logging.Logger):
- raise TypeError("logger must be a Logger instance")
-
- if not isinstance(job_num, int):
- raise TypeError("job_num must be an integer")
- elif job_num < 0:
- raise ValueError("job_num must be a nonnegative integer")
-
- self.logger = logger
- self.job_num = job_num
-
- self.port = None # Port that the mongo shell should connect to.
-
- def setup(self):
- """
- Creates the fixture.
- """
- pass
-
- def await_ready(self):
- """
- Blocks until the fixture can be used for testing.
- """
- pass
-
- def teardown(self):
- """
- Destroys the fixture. Return true if was successful, and false otherwise.
- """
- return True
-
- def is_running(self):
- """
- Returns true if the fixture is still operating and more tests
- can be run, and false otherwise.
- """
- return True
-
- def get_connection_string(self):
- """
- Returns the connection string for this fixture. This is NOT a
- driver connection string, but a connection string of the format
- expected by the mongo::ConnectionString class.
- """
- raise NotImplementedError("get_connection_string must be implemented by Fixture subclasses")
-
- def __str__(self):
- return "%s (Job #%d)" % (self.__class__.__name__, self.job_num)
-
- def __repr__(self):
- return "%r(%r, %r)" % (self.__class__.__name__, self.logger, self.job_num)
-
-
-class ReplFixture(Fixture):
- """
- Base class for all fixtures that support replication.
- """
-
- AWAIT_REPL_TIMEOUT_MINS = 5
-
- def get_primary(self):
- """
- Returns the primary of a replica set, or the master of a
- master-slave deployment.
- """
- raise NotImplementedError("get_primary must be implemented by ReplFixture subclasses")
-
- def get_secondaries(self):
- """
- Returns a list containing the secondaries of a replica set, or
- the slave of a master-slave deployment.
- """
- raise NotImplementedError("get_secondaries must be implemented by ReplFixture subclasses")
-
- def await_repl(self):
- """
- Blocks until all operations on the primary/master have
- replicated to all other nodes.
- """
- raise NotImplementedError("await_repl must be implemented by ReplFixture subclasses")
-
- def retry_until_wtimeout(self, insert_fn):
- """
- Given a callback function representing an insert operation on
- the primary, handle any connection failures, and keep retrying
- the operation for up to 'AWAIT_REPL_TIMEOUT_MINS' minutes.
-
- The insert operation callback should take an argument for the
- number of remaining seconds to provide as the timeout for the
- operation.
- """
-
- deadline = time.time() + ReplFixture.AWAIT_REPL_TIMEOUT_MINS * 60
-
- while True:
- try:
- remaining = deadline - time.time()
- insert_fn(remaining)
- break
- except pymongo.errors.ConnectionFailure:
- remaining = deadline - time.time()
- if remaining <= 0.0:
- raise errors.ServerFailure("Failed to connect to the primary on port %d" %
- self.port)
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/masterslave.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/masterslave.py
deleted file mode 100644
index f3dbf87eb91..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/masterslave.py
+++ /dev/null
@@ -1,209 +0,0 @@
-"""
-Master/slave fixture for executing JSTests against.
-"""
-
-from __future__ import absolute_import
-
-import os.path
-
-import pymongo
-
-from . import interface
-from . import standalone
-from ... import config
-from ... import logging
-from ... import utils
-
-
-class MasterSlaveFixture(interface.ReplFixture):
- """
- Fixture which provides JSTests with a master/slave deployment to
- run against.
- """
-
- def __init__(self,
- logger,
- job_num,
- mongod_executable=None,
- mongod_options=None,
- master_options=None,
- slave_options=None,
- dbpath_prefix=None,
- preserve_dbpath=False):
-
- interface.ReplFixture.__init__(self, logger, job_num)
-
- if "dbpath" in mongod_options:
- raise ValueError("Cannot specify mongod_options.dbpath")
-
- self.mongod_executable = mongod_executable
- self.mongod_options = utils.default_if_none(mongod_options, {})
- self.master_options = utils.default_if_none(master_options, {})
- self.slave_options = utils.default_if_none(slave_options, {})
- self.preserve_dbpath = preserve_dbpath
-
- # Command line options override the YAML configuration.
- dbpath_prefix = utils.default_if_none(config.DBPATH_PREFIX, dbpath_prefix)
- dbpath_prefix = utils.default_if_none(dbpath_prefix, config.DEFAULT_DBPATH_PREFIX)
- self._dbpath_prefix = os.path.join(dbpath_prefix,
- "job%d" % (self.job_num),
- config.FIXTURE_SUBDIR)
-
- self.master = None
- self.slave = None
-
- def setup(self):
- if self.master is None:
- self.master = self._new_mongod_master()
- self.master.setup()
- self.port = self.master.port
-
- if self.slave is None:
- self.slave = self._new_mongod_slave()
- self.slave.setup()
-
- def await_ready(self):
- self.master.await_ready()
- self.slave.await_ready()
-
- # Do a replicated write to ensure that the slave has finished with its initial sync before
- # starting to run any tests.
- client = utils.new_mongo_client(self.port)
-
- # Keep retrying this until it times out waiting for replication.
- def insert_fn(remaining_secs):
- remaining_millis = int(round(remaining_secs * 1000))
- write_concern = pymongo.WriteConcern(w=2, wtimeout=remaining_millis)
- coll = client.resmoke.get_collection("await_ready", write_concern=write_concern)
- coll.insert_one({"awaiting": "ready"})
-
- try:
- self.retry_until_wtimeout(insert_fn)
- except pymongo.errors.WTimeoutError:
- self.logger.info("Replication of write operation timed out.")
- raise
-
- def teardown(self):
- running_at_start = self.is_running()
- success = True # Still a success if nothing is running.
-
- if not running_at_start:
- self.logger.info("Master-slave deployment was expected to be running in teardown(),"
- " but wasn't.")
-
- if self.slave is not None:
- if running_at_start:
- self.logger.info("Stopping slave...")
-
- success = self.slave.teardown()
-
- if running_at_start:
- self.logger.info("Successfully stopped slave.")
-
- if self.master is not None:
- if running_at_start:
- self.logger.info("Stopping master...")
-
- success = self.master.teardown() and success
-
- if running_at_start:
- self.logger.info("Successfully stopped master.")
-
- return success
-
- def is_running(self):
- return (self.master is not None and self.master.is_running() and
- self.slave is not None and self.slave.is_running())
-
- def get_primary(self):
- return self.master
-
- def get_secondaries(self):
- return [self.slave]
-
- def await_repl(self):
- """
- Inserts a document into each database on the master and waits
- for all write operations to be acknowledged by the master-slave
- deployment.
- """
-
- client = utils.new_mongo_client(self.port)
-
- # We verify that each database has replicated to the slave because in the case of an initial
- # sync, the slave may acknowledge writes to one database before it has finished syncing
- # others.
- db_names = client.database_names()
- self.logger.info("Awaiting replication of inserts to each of the following databases on"
- " master on port %d: %s",
- self.port,
- db_names)
-
- for db_name in db_names:
- if db_name == "local":
- continue # The local database is expected to differ, ignore.
-
- self.logger.info("Awaiting replication of insert to database %s (w=2, wtimeout=%d min)"
- " to master on port %d",
- db_name,
- interface.ReplFixture.AWAIT_REPL_TIMEOUT_MINS,
- self.port)
-
- # Keep retrying this until it times out waiting for replication.
- def insert_fn(remaining_secs):
- remaining_millis = int(round(remaining_secs * 1000))
- write_concern = pymongo.WriteConcern(w=2, wtimeout=remaining_millis)
- coll = client[db_name].get_collection("await_repl", write_concern=write_concern)
- coll.insert_one({"awaiting": "repl"})
-
- try:
- self.retry_until_wtimeout(insert_fn)
- except pymongo.errors.WTimeoutError:
- self.logger.info("Replication of write operation timed out.")
- raise
-
- self.logger.info("Replication of write operation completed for database %s.", db_name)
-
- self.logger.info("Finished awaiting replication.")
-
- def _new_mongod(self, mongod_logger, mongod_options):
- """
- Returns a standalone.MongoDFixture with the specified logger and
- options.
- """
- return standalone.MongoDFixture(mongod_logger,
- self.job_num,
- mongod_executable=self.mongod_executable,
- mongod_options=mongod_options,
- preserve_dbpath=self.preserve_dbpath)
-
- def _new_mongod_master(self):
- """
- Returns a standalone.MongoDFixture configured to be used as the
- master of a master-slave deployment.
- """
-
- logger_name = "%s:master" % (self.logger.name)
- mongod_logger = logging.loggers.new_logger(logger_name, parent=self.logger)
-
- mongod_options = self.mongod_options.copy()
- mongod_options.update(self.master_options)
- mongod_options["master"] = ""
- mongod_options["dbpath"] = os.path.join(self._dbpath_prefix, "master")
- return self._new_mongod(mongod_logger, mongod_options)
-
- def _new_mongod_slave(self):
- """
- Returns a standalone.MongoDFixture configured to be used as the
- slave of a master-slave deployment.
- """
-
- logger_name = "%s:slave" % (self.logger.name)
- mongod_logger = logging.loggers.new_logger(logger_name, parent=self.logger)
-
- mongod_options = self.mongod_options.copy()
- mongod_options.update(self.slave_options)
- mongod_options["slave"] = ""
- mongod_options["source"] = "localhost:%d" % (self.port)
- mongod_options["dbpath"] = os.path.join(self._dbpath_prefix, "slave")
- return self._new_mongod(mongod_logger, mongod_options)
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/replicaset.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/replicaset.py
deleted file mode 100644
index e9930627641..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/replicaset.py
+++ /dev/null
@@ -1,211 +0,0 @@
-"""
-Replica set fixture for executing JSTests against.
-"""
-
-from __future__ import absolute_import
-
-import os.path
-import time
-
-import pymongo
-
-from . import interface
-from . import standalone
-from ... import config
-from ... import logging
-from ... import utils
-
-
-class ReplicaSetFixture(interface.ReplFixture):
- """
- Fixture which provides JSTests with a replica set to run against.
- """
-
- def __init__(self,
- logger,
- job_num,
- mongod_executable=None,
- mongod_options=None,
- dbpath_prefix=None,
- preserve_dbpath=False,
- num_nodes=2,
- auth_options=None,
- replset_config_options=None):
-
- interface.ReplFixture.__init__(self, logger, job_num)
-
- self.mongod_executable = mongod_executable
- self.mongod_options = utils.default_if_none(mongod_options, {})
- self.preserve_dbpath = preserve_dbpath
- self.num_nodes = num_nodes
- self.auth_options = auth_options
- self.replset_config_options = utils.default_if_none(replset_config_options, {})
-
- # The dbpath in mongod_options is used as the dbpath prefix for replica set members and
- # takes precedence over other settings. The ShardedClusterFixture uses this parameter to
- # create replica sets and assign their dbpath structure explicitly.
- if "dbpath" in self.mongod_options:
- self._dbpath_prefix = self.mongod_options.pop("dbpath")
- else:
- # Command line options override the YAML configuration.
- dbpath_prefix = utils.default_if_none(config.DBPATH_PREFIX, dbpath_prefix)
- dbpath_prefix = utils.default_if_none(dbpath_prefix, config.DEFAULT_DBPATH_PREFIX)
- self._dbpath_prefix = os.path.join(dbpath_prefix,
- "job%d" % (self.job_num),
- config.FIXTURE_SUBDIR)
-
- self.nodes = []
- self.replset_name = None
-
- def setup(self):
- self.replset_name = self.mongod_options.get("replSet", "rs")
-
- if not self.nodes:
- for i in xrange(self.num_nodes):
- node = self._new_mongod(i, self.replset_name)
- self.nodes.append(node)
-
- for node in self.nodes:
- node.setup()
-
- self.port = self.get_primary().port
-
- # Call await_ready() on each of the nodes here because we want to start the election as
- # soon as possible.
- for node in self.nodes:
- node.await_ready()
-
- # Initiate the replica set.
- members = []
- for (i, node) in enumerate(self.nodes):
- member_info = {"_id": i, "host": node.get_connection_string()}
- if i > 0:
- member_info["priority"] = 0
- if i >= 7:
- # Only 7 nodes in a replica set can vote, so the other members must be non-voting.
- member_info["votes"] = 0
- members.append(member_info)
- initiate_cmd_obj = {"replSetInitiate": {"_id": self.replset_name, "members": members}}
-
- client = utils.new_mongo_client(port=self.port)
- if self.auth_options is not None:
- auth_db = client[self.auth_options["authenticationDatabase"]]
- auth_db.authenticate(self.auth_options["username"],
- password=self.auth_options["password"],
- mechanism=self.auth_options["authenticationMechanism"])
-
- if self.replset_config_options.get("configsvr", False):
- initiate_cmd_obj["replSetInitiate"]["configsvr"] = True
-
- self.logger.info("Issuing replSetInitiate command...")
- client.admin.command(initiate_cmd_obj)
-
- def await_ready(self):
- # Wait for the primary to be elected.
- client = utils.new_mongo_client(port=self.port)
- while True:
- is_master = client.admin.command("isMaster")["ismaster"]
- if is_master:
- break
- self.logger.info("Waiting for primary on port %d to be elected.", self.port)
- time.sleep(0.1) # Wait a little bit before trying again.
-
- # Wait for the secondaries to become available.
- for secondary in self.get_secondaries():
- client = utils.new_mongo_client(port=secondary.port,
- read_preference=pymongo.ReadPreference.SECONDARY)
- while True:
- is_secondary = client.admin.command("isMaster")["secondary"]
- if is_secondary:
- break
- self.logger.info("Waiting for secondary on port %d to become available.",
- secondary.port)
- time.sleep(0.1) # Wait a little bit before trying again.
-
- def teardown(self):
- running_at_start = self.is_running()
- success = True # Still a success even if nothing is running.
-
- if not running_at_start:
- self.logger.info("Replica set was expected to be running in teardown(), but wasn't.")
- else:
- self.logger.info("Stopping all members of the replica set...")
-
- # Terminate the secondaries first to reduce noise in the logs.
- for node in reversed(self.nodes):
- success = node.teardown() and success
-
- if running_at_start:
- self.logger.info("Successfully stopped all members of the replica set.")
-
- return success
-
- def is_running(self):
- return all(node.is_running() for node in self.nodes)
-
- def get_primary(self):
- # The primary is always the first element of the 'nodes' list because all other members of
- # the replica set are configured with priority=0.
- return self.nodes[0]
-
- def get_secondaries(self):
- return self.nodes[1:]
-
- def await_repl(self):
- self.logger.info("Awaiting replication of insert (w=%d, wtimeout=%d min) to primary on port"
- " %d", self.num_nodes, interface.ReplFixture.AWAIT_REPL_TIMEOUT_MINS,
- self.port)
- client = utils.new_mongo_client(port=self.port)
-
- # Keep retrying this until it times out waiting for replication.
- def insert_fn(remaining_secs):
- remaining_millis = int(round(remaining_secs * 1000))
- write_concern = pymongo.WriteConcern(w=self.num_nodes, wtimeout=remaining_millis)
- coll = client.resmoke.get_collection("await_repl", write_concern=write_concern)
- coll.insert_one({"awaiting": "repl"})
-
- try:
- self.retry_until_wtimeout(insert_fn)
- except pymongo.errors.WTimeoutError:
- self.logger.info("Replication of write operation timed out.")
- raise
-
- self.logger.info("Replication of write operation completed.")
-
- def _new_mongod(self, index, replset_name):
- """
- Returns a standalone.MongoDFixture configured to be used as a
- replica-set member of 'replset_name'.
- """
-
- mongod_logger = self._get_logger_for_mongod(index)
- mongod_options = self.mongod_options.copy()
- mongod_options["replSet"] = replset_name
- mongod_options["dbpath"] = os.path.join(self._dbpath_prefix, "node%d" % (index))
-
- return standalone.MongoDFixture(mongod_logger,
- self.job_num,
- mongod_executable=self.mongod_executable,
- mongod_options=mongod_options,
- preserve_dbpath=self.preserve_dbpath)
-
- def _get_logger_for_mongod(self, index):
- """
- Returns a new logging.Logger instance for use as the primary or
- secondary of a replica-set.
- """
-
- if index == 0:
- logger_name = "%s:primary" % (self.logger.name)
- else:
- suffix = str(index - 1) if self.num_nodes > 2 else ""
- logger_name = "%s:secondary%s" % (self.logger.name, suffix)
-
- return logging.loggers.new_logger(logger_name, parent=self.logger)
-
- def get_connection_string(self):
- if self.replset_name is None:
- raise ValueError("Must call setup() before calling get_connection_string()")
-
- conn_strs = [node.get_connection_string() for node in self.nodes]
- return self.replset_name + "/" + ",".join(conn_strs)
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/shardedcluster.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
deleted file mode 100644
index ab7b26bf372..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
+++ /dev/null
@@ -1,347 +0,0 @@
-"""
-Sharded cluster fixture for executing JSTests against.
-"""
-
-from __future__ import absolute_import
-
-import copy
-import os.path
-import time
-
-import pymongo
-
-from . import interface
-from . import standalone
-from . import replicaset
-from ... import config
-from ... import core
-from ... import errors
-from ... import logging
-from ... import utils
-
-
-class ShardedClusterFixture(interface.Fixture):
- """
- Fixture which provides JSTests with a sharded cluster to run
- against.
- """
-
- _CONFIGSVR_REPLSET_NAME = "config-rs"
-
- def __init__(self,
- logger,
- job_num,
- mongos_executable=None,
- mongos_options=None,
- mongod_executable=None,
- mongod_options=None,
- dbpath_prefix=None,
- preserve_dbpath=False,
- num_shards=1,
- separate_configsvr=True,
- enable_sharding=None,
- auth_options=None):
- """
- Initializes ShardedClusterFixture with the different options to
- the mongod and mongos processes.
- """
-
- interface.Fixture.__init__(self, logger, job_num)
-
- if "dbpath" in mongod_options:
- raise ValueError("Cannot specify mongod_options.dbpath")
-
- self.mongos_executable = mongos_executable
- self.mongos_options = utils.default_if_none(mongos_options, {})
- self.mongod_executable = mongod_executable
- self.mongod_options = utils.default_if_none(mongod_options, {})
- self.preserve_dbpath = preserve_dbpath
- self.num_shards = num_shards
- self.separate_configsvr = separate_configsvr
- self.enable_sharding = utils.default_if_none(enable_sharding, [])
- self.auth_options = auth_options
-
- # Command line options override the YAML configuration.
- dbpath_prefix = utils.default_if_none(config.DBPATH_PREFIX, dbpath_prefix)
- dbpath_prefix = utils.default_if_none(dbpath_prefix, config.DEFAULT_DBPATH_PREFIX)
- self._dbpath_prefix = os.path.join(dbpath_prefix,
- "job%d" % (self.job_num),
- config.FIXTURE_SUBDIR)
-
- self.configsvr = None
- self.mongos = None
- self.shards = []
-
- def setup(self):
- if self.separate_configsvr:
- if self.configsvr is None:
- self.configsvr = self._new_configsvr()
- self.configsvr.setup()
-
- if not self.shards:
- for i in xrange(self.num_shards):
- shard = self._new_shard(i)
- self.shards.append(shard)
-
- # Start up each of the shards
- for shard in self.shards:
- shard.setup()
-
- def await_ready(self):
- # Wait for the config server
- if self.configsvr is not None:
- self.configsvr.await_ready()
-
- # Wait for each of the shards
- for shard in self.shards:
- shard.await_ready()
-
- if self.mongos is None:
- self.mongos = self._new_mongos()
-
- # Start up the mongos
- self.mongos.setup()
-
- # Wait for the mongos
- self.mongos.await_ready()
- self.port = self.mongos.port
-
- client = utils.new_mongo_client(port=self.port)
- if self.auth_options is not None:
- auth_db = client[self.auth_options["authenticationDatabase"]]
- auth_db.authenticate(self.auth_options["username"],
- password=self.auth_options["password"],
- mechanism=self.auth_options["authenticationMechanism"])
-
- # Inform mongos about each of the shards
- for shard in self.shards:
- self._add_shard(client, shard)
-
- # Enable sharding on each of the specified databases
- for db_name in self.enable_sharding:
- self.logger.info("Enabling sharding for '%s' database...", db_name)
- client.admin.command({"enablesharding": db_name})
-
- def teardown(self):
- """
- Shuts down the sharded cluster.
- """
- running_at_start = self.is_running()
- success = True # Still a success even if nothing is running.
-
- if not running_at_start:
- self.logger.info("Sharded cluster was expected to be running in teardown(), but"
- " wasn't.")
-
- if self.configsvr is not None:
- if running_at_start:
- self.logger.info("Stopping config server...")
-
- success = self.configsvr.teardown() and success
-
- if running_at_start:
- self.logger.info("Successfully terminated the config server.")
-
- if self.mongos is not None:
- if running_at_start:
- self.logger.info("Stopping mongos...")
-
- success = self.mongos.teardown() and success
-
- if running_at_start:
- self.logger.info("Successfully terminated the mongos.")
-
- if running_at_start:
- self.logger.info("Stopping shards...")
- for shard in self.shards:
- success = shard.teardown() and success
- if running_at_start:
- self.logger.info("Successfully terminated all shards.")
-
- return success
-
- def is_running(self):
- """
- Returns true if the config server, all shards, and the mongos
- are all still operating, and false otherwise.
- """
- return (self.configsvr is not None and self.configsvr.is_running() and
- all(shard.is_running() for shard in self.shards) and
- self.mongos is not None and self.mongos.is_running())
-
- def _new_configsvr(self):
- """
- Returns a replicaset.ReplicaSetFixture configured to be used as
- the config server of a sharded cluster.
- """
-
- logger_name = "%s:configsvr" % (self.logger.name)
- mongod_logger = logging.loggers.new_logger(logger_name, parent=self.logger)
-
- mongod_options = copy.deepcopy(self.mongod_options)
- mongod_options["configsvr"] = ""
- mongod_options["dbpath"] = os.path.join(self._dbpath_prefix, "config")
- mongod_options["replSet"] = ShardedClusterFixture._CONFIGSVR_REPLSET_NAME
- mongod_options["storageEngine"] = "wiredTiger"
-
- return replicaset.ReplicaSetFixture(mongod_logger,
- self.job_num,
- mongod_executable=self.mongod_executable,
- mongod_options=mongod_options,
- preserve_dbpath=self.preserve_dbpath,
- num_nodes=3,
- auth_options=self.auth_options,
- replset_config_options={"configsvr": True})
-
- def _new_shard(self, index):
- """
- Returns a standalone.MongoDFixture configured to be used as a
- shard in a sharded cluster.
- """
-
- logger_name = "%s:shard%d" % (self.logger.name, index)
- mongod_logger = logging.loggers.new_logger(logger_name, parent=self.logger)
-
- mongod_options = copy.deepcopy(self.mongod_options)
- mongod_options["dbpath"] = os.path.join(self._dbpath_prefix, "shard%d" % (index))
-
- return standalone.MongoDFixture(mongod_logger,
- self.job_num,
- mongod_executable=self.mongod_executable,
- mongod_options=mongod_options,
- preserve_dbpath=self.preserve_dbpath)
-
- def _new_mongos(self):
- """
- Returns a _MongoSFixture configured to be used as the mongos for
- a sharded cluster.
- """
-
- logger_name = "%s:mongos" % (self.logger.name)
- mongos_logger = logging.loggers.new_logger(logger_name, parent=self.logger)
-
- mongos_options = copy.deepcopy(self.mongos_options)
- if self.separate_configsvr:
- configdb_replset = ShardedClusterFixture._CONFIGSVR_REPLSET_NAME
- configdb_port = self.configsvr.port
- mongos_options["configdb"] = "%s/localhost:%d" % (configdb_replset, configdb_port)
- else:
- mongos_options["configdb"] = "localhost:%d" % (self.shards[0].port)
-
- return _MongoSFixture(mongos_logger,
- self.job_num,
- mongos_executable=self.mongos_executable,
- mongos_options=mongos_options)
-
- def _add_shard(self, client, shard):
- """
- Add the specified program as a shard by executing the addShard
- command.
-
- See https://docs.mongodb.org/manual/reference/command/addShard
- for more details.
- """
-
- self.logger.info("Adding localhost:%d as a shard...", shard.port)
- client.admin.command({"addShard": "localhost:%d" % (shard.port)})
-
-
-class _MongoSFixture(interface.Fixture):
- """
- Fixture which provides JSTests with a mongos to connect to.
- """
-
- def __init__(self,
- logger,
- job_num,
- mongos_executable=None,
- mongos_options=None):
-
- interface.Fixture.__init__(self, logger, job_num)
-
- # Command line options override the YAML configuration.
- self.mongos_executable = utils.default_if_none(config.MONGOS_EXECUTABLE, mongos_executable)
-
- self.mongos_options = utils.default_if_none(mongos_options, {}).copy()
-
- self.mongos = None
-
- def setup(self):
- if "chunkSize" not in self.mongos_options:
- self.mongos_options["chunkSize"] = 50
-
- if "port" not in self.mongos_options:
- self.mongos_options["port"] = core.network.PortAllocator.next_fixture_port(self.job_num)
- self.port = self.mongos_options["port"]
-
- mongos = core.programs.mongos_program(self.logger,
- executable=self.mongos_executable,
- **self.mongos_options)
- try:
- self.logger.info("Starting mongos on port %d...\n%s", self.port, mongos.as_command())
- mongos.start()
- self.logger.info("mongos started on port %d with pid %d.", self.port, mongos.pid)
- except:
- self.logger.exception("Failed to start mongos on port %d.", self.port)
- raise
-
- self.mongos = mongos
-
- def await_ready(self):
- deadline = time.time() + standalone.MongoDFixture.AWAIT_READY_TIMEOUT_SECS
-
- # Wait until the mongos is accepting connections. The retry logic is necessary to support
- # versions of PyMongo <3.0 that immediately raise a ConnectionFailure if a connection cannot
- # be established.
- while True:
- # Check whether the mongos exited for some reason.
- exit_code = self.mongos.poll()
- if exit_code is not None:
- raise errors.ServerFailure("Could not connect to mongos on port %d, process ended"
- " unexpectedly with code %d." % (self.port, exit_code))
-
- try:
- # Use a shorter connection timeout to more closely satisfy the requested deadline.
- client = utils.new_mongo_client(self.port, timeout_millis=500)
- client.admin.command("ping")
- break
- except pymongo.errors.ConnectionFailure:
- remaining = deadline - time.time()
- if remaining <= 0.0:
- raise errors.ServerFailure(
- "Failed to connect to mongos on port %d after %d seconds"
- % (self.port, standalone.MongoDFixture.AWAIT_READY_TIMEOUT_SECS))
-
- self.logger.info("Waiting to connect to mongos on port %d.", self.port)
- time.sleep(0.1) # Wait a little bit before trying again.
-
- self.logger.info("Successfully contacted the mongos on port %d.", self.port)
-
- def teardown(self):
- running_at_start = self.is_running()
- success = True # Still a success even if nothing is running.
-
- if not running_at_start and self.port is not None:
- self.logger.info("mongos on port %d was expected to be running in teardown(), but"
- " wasn't." % (self.port))
-
- if self.mongos is not None:
- if running_at_start:
- self.logger.info("Stopping mongos on port %d with pid %d...",
- self.port,
- self.mongos.pid)
- self.mongos.stop()
-
- exit_code = self.mongos.wait()
- success = exit_code == 0
-
- if running_at_start:
- self.logger.info("Successfully terminated the mongos on port %d, exited with code"
- " %d",
- self.port,
- exit_code)
-
- return success
-
- def is_running(self):
- return self.mongos is not None and self.mongos.poll() is None
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/standalone.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/standalone.py
deleted file mode 100644
index a8c1dc597c5..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/fixtures/standalone.py
+++ /dev/null
@@ -1,151 +0,0 @@
-"""
-Standalone mongod fixture for executing JSTests against.
-"""
-
-from __future__ import absolute_import
-
-import os
-import os.path
-import shutil
-import time
-
-import pymongo
-
-from . import interface
-from ... import config
-from ... import core
-from ... import errors
-from ... import utils
-
-
-class MongoDFixture(interface.Fixture):
- """
- Fixture which provides JSTests with a standalone mongod to run
- against.
- """
-
- AWAIT_READY_TIMEOUT_SECS = 300
-
- def __init__(self,
- logger,
- job_num,
- mongod_executable=None,
- mongod_options=None,
- dbpath_prefix=None,
- preserve_dbpath=False):
-
- interface.Fixture.__init__(self, logger, job_num)
-
- if "dbpath" in mongod_options and dbpath_prefix is not None:
- raise ValueError("Cannot specify both mongod_options.dbpath and dbpath_prefix")
-
- # Command line options override the YAML configuration.
- self.mongod_executable = utils.default_if_none(config.MONGOD_EXECUTABLE, mongod_executable)
-
- self.mongod_options = utils.default_if_none(mongod_options, {}).copy()
- self.preserve_dbpath = preserve_dbpath
-
- # The dbpath in mongod_options takes precedence over other settings to make it easier for
- # users to specify a dbpath containing data to test against.
- if "dbpath" not in self.mongod_options:
- # Command line options override the YAML configuration.
- dbpath_prefix = utils.default_if_none(config.DBPATH_PREFIX, dbpath_prefix)
- dbpath_prefix = utils.default_if_none(dbpath_prefix, config.DEFAULT_DBPATH_PREFIX)
- self.mongod_options["dbpath"] = os.path.join(dbpath_prefix,
- "job%d" % (self.job_num),
- config.FIXTURE_SUBDIR)
- self._dbpath = self.mongod_options["dbpath"]
-
- self.mongod = None
-
- def setup(self):
- if not self.preserve_dbpath:
- shutil.rmtree(self._dbpath, ignore_errors=True)
-
- try:
- os.makedirs(self._dbpath)
- except os.error:
- # Directory already exists.
- pass
-
- if "port" not in self.mongod_options:
- self.mongod_options["port"] = core.network.PortAllocator.next_fixture_port(self.job_num)
- self.port = self.mongod_options["port"]
-
- mongod = core.programs.mongod_program(self.logger,
- executable=self.mongod_executable,
- **self.mongod_options)
- try:
- self.logger.info("Starting mongod on port %d...\n%s", self.port, mongod.as_command())
- mongod.start()
- self.logger.info("mongod started on port %d with pid %d.", self.port, mongod.pid)
- except:
- self.logger.exception("Failed to start mongod on port %d.", self.port)
- raise
-
- self.mongod = mongod
-
- def await_ready(self):
- deadline = time.time() + MongoDFixture.AWAIT_READY_TIMEOUT_SECS
-
- # Wait until the mongod is accepting connections. The retry logic is necessary to support
- # versions of PyMongo <3.0 that immediately raise a ConnectionFailure if a connection cannot
- # be established.
- while True:
- # Check whether the mongod exited for some reason.
- exit_code = self.mongod.poll()
- if exit_code is not None:
- raise errors.ServerFailure("Could not connect to mongod on port %d, process ended"
- " unexpectedly with code %d." % (self.port, exit_code))
-
- try:
- # Use a shorter connection timeout to more closely satisfy the requested deadline.
- client = utils.new_mongo_client(self.port, timeout_millis=500)
- client.admin.command("ping")
- break
- except pymongo.errors.ConnectionFailure:
- remaining = deadline - time.time()
- if remaining <= 0.0:
- raise errors.ServerFailure(
- "Failed to connect to mongod on port %d after %d seconds"
- % (self.port, MongoDFixture.AWAIT_READY_TIMEOUT_SECS))
-
- self.logger.info("Waiting to connect to mongod on port %d.", self.port)
- time.sleep(0.1) # Wait a little bit before trying again.
-
- self.logger.info("Successfully contacted the mongod on port %d.", self.port)
-
- def teardown(self):
- running_at_start = self.is_running()
- success = True # Still a success even if nothing is running.
-
- if not running_at_start and self.port is not None:
- self.logger.info("mongod on port %d was expected to be running in teardown(), but"
- " wasn't." % (self.port))
-
- if self.mongod is not None:
- if running_at_start:
- self.logger.info("Stopping mongod on port %d with pid %d...",
- self.port,
- self.mongod.pid)
- self.mongod.stop()
-
- exit_code = self.mongod.wait()
- success = exit_code == 0
-
- if running_at_start:
- self.logger.info("Successfully terminated the mongod on port %d, exited with code"
- " %d.",
- self.port,
- exit_code)
-
- return success
-
- def is_running(self):
- return self.mongod is not None and self.mongod.poll() is None
-
- def get_connection_string(self):
- if self.mongod is None:
- raise ValueError("Must call setup() before calling get_connection_string()")
-
- return "localhost:%d" % self.port
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/hooks.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/hooks.py
deleted file mode 100644
index 4c580fa8392..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/hooks.py
+++ /dev/null
@@ -1,704 +0,0 @@
-"""
-Customize the behavior of a fixture by allowing special code to be
-executed before or after each test, and before or after each suite.
-"""
-
-from __future__ import absolute_import
-
-import os
-import sys
-
-import bson
-import pymongo
-
-from . import fixtures
-from . import testcases
-from .. import errors
-from .. import logging
-from .. import utils
-
-
-def make_custom_behavior(class_name, *args, **kwargs):
- """
- Factory function for creating CustomBehavior instances.
- """
-
- if class_name not in _CUSTOM_BEHAVIORS:
- raise ValueError("Unknown custom behavior class '%s'" % (class_name))
- return _CUSTOM_BEHAVIORS[class_name](*args, **kwargs)
-
-
-class CustomBehavior(object):
- """
- The common interface all CustomBehaviors will inherit from.
- """
-
- @staticmethod
- def start_dynamic_test(test_case, test_report):
- """
- If a CustomBehavior wants to add a test case that will show up
- in the test report, it should use this method to add it to the
- report, since we will need to count it as a dynamic test to get
- the stats in the summary information right.
- """
- test_report.startTest(test_case, dynamic=True)
-
- def __init__(self, logger, fixture):
- """
- Initializes the CustomBehavior with the specified fixture.
- """
-
- if not isinstance(logger, logging.Logger):
- raise TypeError("logger must be a Logger instance")
-
- self.logger = logger
- self.fixture = fixture
-
- def before_suite(self, test_report):
- """
- The test runner calls this exactly once before they start
- running the suite.
- """
- pass
-
- def after_suite(self, test_report):
- """
- The test runner calls this exactly once after all tests have
- finished executing. Be sure to reset the behavior back to its
- original state so that it can be run again.
- """
- pass
-
- def before_test(self, test_report):
- """
- Each test will call this before it executes.
-
- Raises a TestFailure if the test should be marked as a failure,
- or a ServerFailure if the fixture exits uncleanly or
- unexpectedly.
- """
- pass
-
- def after_test(self, test_report):
- """
- Each test will call this after it executes.
-
- Raises a TestFailure if the test should be marked as a failure,
- or a ServerFailure if the fixture exits uncleanly or
- unexpectedly.
- """
- pass
-
-
-class CleanEveryN(CustomBehavior):
- """
-    Restarts the fixture after it has run 'n' tests.
- On mongod-related fixtures, this will clear the dbpath.
- """
-
- DEFAULT_N = 20
-
- def __init__(self, logger, fixture, n=DEFAULT_N):
- CustomBehavior.__init__(self, logger, fixture)
-
- # Try to isolate what test triggers the leak by restarting the fixture each time.
- if "detect_leaks=1" in os.getenv("ASAN_OPTIONS", ""):
- self.logger.info("ASAN_OPTIONS environment variable set to detect leaks, so restarting"
- " the fixture after each test instead of after every %d.", n)
- n = 1
-
- self.n = n
- self.tests_run = 0
-
- def after_test(self, test_report):
- self.tests_run += 1
- if self.tests_run >= self.n:
- self.logger.info("%d tests have been run against the fixture, stopping it...",
- self.tests_run)
- self.tests_run = 0
-
- teardown_success = self.fixture.teardown()
- self.logger.info("Starting the fixture back up again...")
- self.fixture.setup()
- self.fixture.await_ready()
-
- # Raise this after calling setup in case --continueOnFailure was specified.
- if not teardown_success:
- raise errors.TestFailure("%s did not exit cleanly" % (self.fixture))
-
-
-class CheckReplDBHash(CustomBehavior):
- """
-    Waits for replication after each test, then checks that the dbhashes
- of all databases other than "local" match on the primary and all of
- the secondaries. If any dbhashes do not match, logs information
- about what was different (e.g. Different numbers of collections,
- missing documents in a collection, mismatching documents, etc).
-
- Compatible only with ReplFixture subclasses.
- """
-
- def __init__(self, logger, fixture):
- if not isinstance(fixture, fixtures.ReplFixture):
- raise TypeError("%s does not support replication" % (fixture.__class__.__name__))
-
- CustomBehavior.__init__(self, logger, fixture)
-
- self.test_case = testcases.TestCase(self.logger, "Hook", "#dbhash#")
-
- self.started = False
-
- def after_test(self, test_report):
- """
- After each test, check that the dbhash of the test database is
- the same on all nodes in the replica set or master/slave
- fixture.
- """
-
- try:
- if not self.started:
- CustomBehavior.start_dynamic_test(self.test_case, test_report)
- self.started = True
-
- # Wait until all operations have replicated.
- self.fixture.await_repl()
-
- success = True
- sb = [] # String builder.
-
- primary = self.fixture.get_primary()
- primary_conn = utils.new_mongo_client(port=primary.port)
-
- for secondary in self.fixture.get_secondaries():
- read_preference = pymongo.ReadPreference.SECONDARY
- secondary_conn = utils.new_mongo_client(port=secondary.port,
- read_preference=read_preference)
- # Skip arbiters.
- if secondary_conn.admin.command("isMaster").get("arbiterOnly", False):
- continue
-
- all_matched = CheckReplDBHash._check_all_db_hashes(primary_conn,
- secondary_conn,
- sb)
- if not all_matched:
- sb.insert(0,
- "One or more databases were different between the primary on port %d"
- " and the secondary on port %d:"
- % (primary.port, secondary.port))
-
- success = all_matched and success
-
- if not success:
- # Adding failures to a TestReport requires traceback information, so we raise
- # a 'self.test_case.failureException' that we will catch ourselves.
- self.test_case.logger.info("\n ".join(sb))
- raise self.test_case.failureException("The dbhashes did not match")
- except self.test_case.failureException as err:
- self.test_case.logger.exception("The dbhashes did not match.")
- self.test_case.return_code = 1
- test_report.addFailure(self.test_case, sys.exc_info())
- test_report.stopTest(self.test_case)
- raise errors.ServerFailure(err.args[0])
- except pymongo.errors.WTimeoutError:
- self.test_case.logger.exception("Awaiting replication timed out.")
- self.test_case.return_code = 2
- test_report.addError(self.test_case, sys.exc_info())
- test_report.stopTest(self.test_case)
- raise errors.StopExecution("Awaiting replication timed out")
-
- def after_suite(self, test_report):
- """
- If we get to this point, the #dbhash# test must have been
- successful, so add it to the test report.
- """
-
- if self.started:
- self.test_case.logger.info("The dbhashes matched for all tests.")
- self.test_case.return_code = 0
- test_report.addSuccess(self.test_case)
- # TestReport.stopTest() has already been called if there was a failure.
- test_report.stopTest(self.test_case)
-
- self.started = False
-
- @staticmethod
- def _check_all_db_hashes(primary_conn, secondary_conn, sb):
- """
- Returns true if for each non-local database, the dbhash command
- returns the same MD5 hash on the primary as it does on the
- secondary. Returns false otherwise.
-
- Logs a message describing the differences if any database's
- dbhash did not match.
- """
-
- # Overview of how we'll check that everything replicated correctly between these two nodes:
- #
- # - Check whether they have the same databases.
- # - If not, log which databases are missing where, and dump the contents of any that are
- # missing.
- #
- # - Check whether each database besides "local" gives the same md5 field as the result of
- # running the dbhash command.
- # - If not, check whether they have the same collections.
- # - If not, log which collections are missing where, and dump the contents of any
- # that are missing.
- # - If so, check that the hash of each non-capped collection matches.
- # - If any do not match, log the diff of the collection between the two nodes.
-
- success = True
-
- if not CheckReplDBHash._check_dbs_present(primary_conn, secondary_conn, sb):
- return False
-
- for db_name in primary_conn.database_names():
- if db_name == "local":
- continue # We don't expect this to match across different nodes.
-
- matched = CheckReplDBHash._check_db_hash(primary_conn, secondary_conn, db_name, sb)
- success = matched and success
-
- return success
-
- @staticmethod
- def _check_dbs_present(primary_conn, secondary_conn, sb):
- """
- Returns true if the list of databases on the primary is
- identical to the list of databases on the secondary, and false
- otherwise.
- """
-
- success = True
- primary_dbs = primary_conn.database_names()
-
- # Can't run database_names() on secondary, so instead use the listDatabases command.
- # TODO: Use database_names() once PYTHON-921 is resolved.
- list_db_output = secondary_conn.admin.command("listDatabases")
- secondary_dbs = [db["name"] for db in list_db_output["databases"]]
-
- # There may be a difference in databases which is not considered an error, when
- # the database only contains system collections. This difference is only logged
- # when others are encountered, i.e., success = False.
- missing_on_primary, missing_on_secondary = CheckReplDBHash._check_difference(
- set(primary_dbs), set(secondary_dbs), "database")
-
- for missing_db in missing_on_secondary:
- db = primary_conn[missing_db]
- coll_names = db.collection_names()
- non_system_colls = [name for name in coll_names if not name.startswith("system.")]
-
- # It is only an error if there are any non-system collections in the database,
- # otherwise it's not well defined whether they should exist or not.
- if non_system_colls:
- sb.append("Database %s present on primary but not on secondary." % (missing_db))
- CheckReplDBHash._dump_all_collections(db, non_system_colls, sb)
- success = False
-
- for missing_db in missing_on_primary:
- db = secondary_conn[missing_db]
-
- # Can't run collection_names() on secondary, so instead use the listCollections command.
- # TODO: Always use collection_names() once PYTHON-921 is resolved. Then much of the
- # logic that is duplicated here can be consolidated.
- list_coll_output = db.command("listCollections")["cursor"]["firstBatch"]
- coll_names = [coll["name"] for coll in list_coll_output]
- non_system_colls = [name for name in coll_names if not name.startswith("system.")]
-
- # It is only an error if there are any non-system collections in the database,
- # otherwise it's not well defined if it should exist or not.
- if non_system_colls:
- sb.append("Database %s present on secondary but not on primary." % (missing_db))
- CheckReplDBHash._dump_all_collections(db, non_system_colls, sb)
- success = False
-
- return success
-
- @staticmethod
- def _check_db_hash(primary_conn, secondary_conn, db_name, sb):
- """
- Returns true if the dbhash for 'db_name' matches on the primary
- and the secondary, and false otherwise.
-
- Appends a message to 'sb' describing the differences if the
- dbhashes do not match.
- """
-
- primary_hash = primary_conn[db_name].command("dbhash")
- secondary_hash = secondary_conn[db_name].command("dbhash")
-
- if primary_hash["md5"] == secondary_hash["md5"]:
- return True
-
- success = CheckReplDBHash._check_dbs_eq(
- primary_conn, secondary_conn, primary_hash, secondary_hash, db_name, sb)
-
- if not success:
- sb.append("Database %s has a different hash on the primary and the secondary"
- " ([ %s ] != [ %s ]):"
- % (db_name, primary_hash["md5"], secondary_hash["md5"]))
-
- return success
-
- @staticmethod
- def _check_dbs_eq(primary_conn, secondary_conn, primary_hash, secondary_hash, db_name, sb):
- """
- Returns true if all non-capped collections had the same hash in
- the dbhash response, and false otherwise.
-
- Appends information to 'sb' about the differences between the
- 'db_name' database on the primary and the 'db_name' database on
- the secondary, if any.
- """
-
- success = True
-
- primary_db = primary_conn[db_name]
- secondary_db = secondary_conn[db_name]
-
- primary_coll_hashes = primary_hash["collections"]
- secondary_coll_hashes = secondary_hash["collections"]
-
- primary_coll_names = set(primary_coll_hashes.keys())
- secondary_coll_names = set(secondary_coll_hashes.keys())
-
- missing_on_primary, missing_on_secondary = CheckReplDBHash._check_difference(
- primary_coll_names, secondary_coll_names, "collection", sb=sb)
-
- if missing_on_primary or missing_on_secondary:
-
- # 'sb' already describes which collections are missing where.
- for coll_name in missing_on_primary:
- CheckReplDBHash._dump_all_documents(secondary_db, coll_name, sb)
- for coll_name in missing_on_secondary:
- CheckReplDBHash._dump_all_documents(primary_db, coll_name, sb)
- return
-
- for coll_name in primary_coll_names & secondary_coll_names:
- primary_coll_hash = primary_coll_hashes[coll_name]
- secondary_coll_hash = secondary_coll_hashes[coll_name]
-
- if primary_coll_hash == secondary_coll_hash:
- continue
-
- # Ignore capped collections because they are not expected to match on all nodes.
- if primary_db.command({"collStats": coll_name})["capped"]:
- # Still fail if the collection is not capped on the secondary.
- if not secondary_db.command({"collStats": coll_name})["capped"]:
- success = False
- sb.append("%s.%s collection is capped on primary but not on secondary."
- % (primary_db.name, coll_name))
- sb.append("%s.%s collection is capped, ignoring." % (primary_db.name, coll_name))
- continue
- # Still fail if the collection is capped on the secondary, but not on the primary.
- elif secondary_db.command({"collStats": coll_name})["capped"]:
- success = False
- sb.append("%s.%s collection is capped on secondary but not on primary."
- % (primary_db.name, coll_name))
- continue
-
- success = False
- sb.append("Collection %s.%s has a different hash on the primary and the secondary"
- " ([ %s ] != [ %s ]):"
- % (db_name, coll_name, primary_coll_hash, secondary_coll_hash))
- CheckReplDBHash._check_colls_eq(primary_db, secondary_db, coll_name, sb)
-
- if success:
- sb.append("All collections that were expected to match did.")
- return success
-
- @staticmethod
- def _check_colls_eq(primary_db, secondary_db, coll_name, sb):
- """
-    Appends information to 'sb' about the differences between
- the 'coll_name' collection on the primary and the 'coll_name'
- collection on the secondary, if any.
- """
-
- codec_options = bson.CodecOptions(document_class=TypeSensitiveSON)
-
- primary_coll = primary_db.get_collection(coll_name, codec_options=codec_options)
- secondary_coll = secondary_db.get_collection(coll_name, codec_options=codec_options)
-
- primary_docs = CheckReplDBHash._extract_documents(primary_coll)
- secondary_docs = CheckReplDBHash._extract_documents(secondary_coll)
-
- CheckReplDBHash._get_collection_diff(primary_docs, secondary_docs, sb)
-
- @staticmethod
- def _extract_documents(collection):
- """
- Returns a list of all documents in the collection, sorted by
- their _id.
- """
-
- return [doc for doc in collection.find().sort("_id", pymongo.ASCENDING)]
-
- @staticmethod
- def _get_collection_diff(primary_docs, secondary_docs, sb):
- """
- Returns true if the documents in 'primary_docs' exactly match
- the documents in 'secondary_docs', and false otherwise.
-
- Appends information to 'sb' about what matched or did not match.
- """
-
- matched = True
-
- # These need to be lists instead of sets because documents aren't hashable.
- missing_on_primary = []
- missing_on_secondary = []
-
- p_idx = 0 # Keep track of our position in 'primary_docs'.
- s_idx = 0 # Keep track of our position in 'secondary_docs'.
-
- while p_idx < len(primary_docs) and s_idx < len(secondary_docs):
- primary_doc = primary_docs[p_idx]
- secondary_doc = secondary_docs[s_idx]
-
- if primary_doc == secondary_doc:
- p_idx += 1
- s_idx += 1
- continue
-
- # We have mismatching documents.
- matched = False
-
- if primary_doc["_id"] == secondary_doc["_id"]:
- sb.append("Mismatching document:")
- sb.append(" primary: %s" % (primary_doc))
- sb.append(" secondary: %s" % (secondary_doc))
- p_idx += 1
- s_idx += 1
-
- # One node was missing a document. Since the documents are sorted by _id, the doc with
- # the smaller _id was the one that was skipped.
- elif primary_doc["_id"] < secondary_doc["_id"]:
- missing_on_secondary.append(primary_doc)
-
- # Only move past the doc that we know was skipped.
- p_idx += 1
-
- else: # primary_doc["_id"] > secondary_doc["_id"]
- missing_on_primary.append(secondary_doc)
-
- # Only move past the doc that we know was skipped.
- s_idx += 1
-
- # Check if there are any unmatched documents left.
- while p_idx < len(primary_docs):
- matched = False
- missing_on_secondary.append(primary_docs[p_idx])
- p_idx += 1
- while s_idx < len(secondary_docs):
- matched = False
- missing_on_primary.append(secondary_docs[s_idx])
- s_idx += 1
-
- if not matched:
- CheckReplDBHash._append_differences(
- missing_on_primary, missing_on_secondary, "document", sb)
- else:
- sb.append("All documents matched.")
-
- @staticmethod
- def _check_difference(primary_set, secondary_set, item_type_name, sb=None):
- """
- Returns true if the contents of 'primary_set' and
- 'secondary_set' are identical, and false otherwise. The sets
- contain information about the primary and secondary,
- respectively, e.g. the database names that exist on each node.
-
- Appends information about anything that differed to 'sb'.
- """
-
- missing_on_primary = set()
- missing_on_secondary = set()
-
- for item in primary_set - secondary_set:
- missing_on_secondary.add(item)
-
- for item in secondary_set - primary_set:
- missing_on_primary.add(item)
-
- if sb is not None:
- CheckReplDBHash._append_differences(
- missing_on_primary, missing_on_secondary, item_type_name, sb)
-
- return (missing_on_primary, missing_on_secondary)
-
- @staticmethod
- def _append_differences(missing_on_primary, missing_on_secondary, item_type_name, sb):
- """
- Given two iterables representing items that were missing on the
- primary or the secondary respectively, append the information
- about which items were missing to 'sb', if any.
- """
-
- if missing_on_primary:
- sb.append("The following %ss were present on the secondary, but not on the"
- " primary:" % (item_type_name))
- for item in missing_on_primary:
- sb.append(str(item))
-
- if missing_on_secondary:
- sb.append("The following %ss were present on the primary, but not on the"
- " secondary:" % (item_type_name))
- for item in missing_on_secondary:
- sb.append(str(item))
-
- @staticmethod
- def _dump_all_collections(database, coll_names, sb):
- """
- Appends the contents of each of the collections in 'coll_names'
- to 'sb'.
- """
-
- if coll_names:
- sb.append("Database %s contains the following collections: %s"
- % (database.name, coll_names))
- for coll_name in coll_names:
- CheckReplDBHash._dump_all_documents(database, coll_name, sb)
- else:
- sb.append("No collections in database %s." % (database.name))
-
- @staticmethod
- def _dump_all_documents(database, coll_name, sb):
- """
- Appends the contents of 'coll_name' to 'sb'.
- """
-
- docs = CheckReplDBHash._extract_documents(database[coll_name])
- if docs:
- sb.append("Documents in %s.%s:" % (database.name, coll_name))
- for doc in docs:
- sb.append(" %s" % (doc))
- else:
- sb.append("No documents in %s.%s." % (database.name, coll_name))
-
-class TypeSensitiveSON(bson.SON):
- """
- Extends bson.SON to perform additional type-checking of document values
- to differentiate BSON types.
- """
-
- def items_with_types(self):
- """
- Returns a list of triples. Each triple consists of a field name, a
- field value, and a field type for each field in the document.
- """
-
- return [(key, self[key], type(self[key])) for key in self]
-
- def __eq__(self, other):
- """
- Comparison to another TypeSensitiveSON is order-sensitive and
- type-sensitive while comparison to a regular dictionary ignores order
- and type mismatches.
- """
-
- if isinstance(other, TypeSensitiveSON):
- return (len(self) == len(other) and
- self.items_with_types() == other.items_with_types())
-
- raise TypeError("TypeSensitiveSON objects cannot be compared to other types")
-
-class ValidateCollections(CustomBehavior):
- """
- Runs full validation (db.collection.validate(true)) on all collections
- in all databases on every standalone, or primary mongod. If validation
- fails (validate.valid), then the validate return object is logged.
-
- Compatible with all subclasses.
- """
- DEFAULT_FULL = True
- DEFAULT_SCANDATA = True
-
- def __init__(self, logger, fixture, full=DEFAULT_FULL, scandata=DEFAULT_SCANDATA):
- CustomBehavior.__init__(self, logger, fixture)
-
- if not isinstance(full, bool):
- raise TypeError("Fixture option full is not specified as type bool")
-
- if not isinstance(scandata, bool):
- raise TypeError("Fixture option scandata is not specified as type bool")
-
- self.test_case = testcases.TestCase(self.logger, "Hook", "#validate#")
- self.started = False
- self.full = full
- self.scandata = scandata
-
- def after_test(self, test_report):
- """
- After each test, run a full validation on all collections.
- """
-
- try:
- if not self.started:
- CustomBehavior.start_dynamic_test(self.test_case, test_report)
- self.started = True
-
- sb = [] # String builder.
-
- # The self.fixture.port can be used for client connection to a
- # standalone mongod, a replica-set primary, or mongos.
- # TODO: Run collection validation on all nodes in a replica-set.
- port = self.fixture.port
- conn = utils.new_mongo_client(port=port)
-
- success = ValidateCollections._check_all_collections(
- conn, sb, self.full, self.scandata)
-
- if not success:
- # Adding failures to a TestReport requires traceback information, so we raise
- # a 'self.test_case.failureException' that we will catch ourselves.
- self.test_case.logger.info("\n ".join(sb))
- raise self.test_case.failureException("Collection validation failed")
- except self.test_case.failureException as err:
- self.test_case.logger.exception("Collection validation failed")
- self.test_case.return_code = 1
- test_report.addFailure(self.test_case, sys.exc_info())
- test_report.stopTest(self.test_case)
- raise errors.ServerFailure(err.args[0])
-
- def after_suite(self, test_report):
- """
- If we get to this point, the #validate# test must have been
- successful, so add it to the test report.
- """
-
- if self.started:
- self.test_case.logger.info("Collection validation passed for all tests.")
- self.test_case.return_code = 0
- test_report.addSuccess(self.test_case)
- # TestReport.stopTest() has already been called if there was a failure.
- test_report.stopTest(self.test_case)
-
- self.started = False
-
- @staticmethod
- def _check_all_collections(conn, sb, full, scandata):
- """
- Returns true if for all databases and collections validate_collection
- succeeds. Returns false otherwise.
-
- Logs a message if any database's collection fails validate_collection.
- """
-
- success = True
-
- for db_name in conn.database_names():
- for coll_name in conn[db_name].collection_names():
- try:
- conn[db_name].validate_collection(coll_name, full=full, scandata=scandata)
- except pymongo.errors.CollectionInvalid as err:
- sb.append("Database %s, collection %s failed to validate:\n%s"
- % (db_name, coll_name, err.args[0]))
- success = False
- return success
-
-
-_CUSTOM_BEHAVIORS = {
- "CleanEveryN": CleanEveryN,
- "CheckReplDBHash": CheckReplDBHash,
- "ValidateCollections": ValidateCollections,
-}
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/job.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/job.py
deleted file mode 100644
index bc5705ffdfb..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/job.py
+++ /dev/null
@@ -1,195 +0,0 @@
-"""
-Enables support for running tests simultaneously by processing them
-from a multi-consumer queue.
-"""
-
-from __future__ import absolute_import
-
-import sys
-
-from .. import config
-from .. import errors
-from ..utils import queue as _queue
-
-
-class Job(object):
- """
- Runs tests from a queue.
- """
-
- def __init__(self, logger, fixture, hooks, report):
- """
- Initializes the job with the specified fixture and custom
- behaviors.
- """
-
- self.logger = logger
- self.fixture = fixture
- self.hooks = hooks
- self.report = report
-
- def __call__(self, queue, interrupt_flag):
- """
- Continuously executes tests from 'queue' and records their
- details in 'report'.
- """
-
- should_stop = False
- try:
- self._run(queue, interrupt_flag)
- except errors.StopExecution as err:
- # Stop running tests immediately.
- self.logger.error("Received a StopExecution exception: %s.", err)
- should_stop = True
- except:
- # Unknown error, stop execution.
- self.logger.exception("Encountered an error during test execution.")
- should_stop = True
-
- if should_stop:
- # Set the interrupt flag so that other jobs do not start running more tests.
- interrupt_flag.set()
- # Drain the queue to unblock the main thread.
- Job._drain_queue(queue)
-
- def _run(self, queue, interrupt_flag):
- """
- Calls the before/after suite hooks and continuously executes
- tests from 'queue'.
- """
-
- for hook in self.hooks:
- hook.before_suite(self.report)
-
- while not interrupt_flag.is_set():
- test = queue.get_nowait()
- try:
- if test is None:
- # Sentinel value received, so exit.
- break
- self._execute_test(test)
- finally:
- queue.task_done()
-
- for hook in self.hooks:
- hook.after_suite(self.report)
-
- def _execute_test(self, test):
- """
- Calls the before/after test hooks and executes 'test'.
- """
-
- test.configure(self.fixture)
- self._run_hooks_before_tests(test)
-
- test(self.report)
- if config.FAIL_FAST and not self.report.wasSuccessful():
- test.logger.info("%s failed, so stopping..." % (test.shortDescription()))
- raise errors.StopExecution("%s failed" % (test.shortDescription()))
-
- if not self.fixture.is_running():
- self.logger.error("%s marked as a failure because the fixture crashed during the test.",
- test.shortDescription())
- self.report.setFailure(test, return_code=2)
- # Always fail fast if the fixture fails.
- raise errors.StopExecution("%s not running after %s" %
- (self.fixture, test.shortDescription()))
-
- self._run_hooks_after_tests(test)
-
- def _run_hooks_before_tests(self, test):
- """
- Runs the before_test method on each of the hooks.
-
- Swallows any TestFailure exceptions if set to continue on
- failure, and reraises any other exceptions.
- """
-
- try:
- for hook in self.hooks:
- hook.before_test(self.report)
-
- except errors.StopExecution:
- raise
-
- except errors.ServerFailure:
- self.logger.exception("%s marked as a failure by a hook's before_test.",
- test.shortDescription())
- self._fail_test(test, sys.exc_info(), return_code=2)
- raise errors.StopExecution("A hook's before_test failed")
-
- except errors.TestFailure:
- self.logger.exception("%s marked as a failure by a hook's before_test.",
- test.shortDescription())
- self._fail_test(test, sys.exc_info(), return_code=1)
- if config.FAIL_FAST:
- raise errors.StopExecution("A hook's before_test failed")
-
- except:
- # Record the before_test() error in 'self.report'.
- self.report.startTest(test)
- self.report.addError(test, sys.exc_info())
- self.report.stopTest(test)
- raise
-
- def _run_hooks_after_tests(self, test):
- """
- Runs the after_test method on each of the hooks.
-
- Swallows any TestFailure exceptions if set to continue on
- failure, and reraises any other exceptions.
- """
- try:
- for hook in self.hooks:
- hook.after_test(self.report)
-
- except errors.StopExecution:
- raise
-
- except errors.ServerFailure:
- self.logger.exception("%s marked as a failure by a hook's after_test.",
- test.shortDescription())
- self.report.setFailure(test, return_code=2)
- raise errors.StopExecution("A hook's after_test failed")
-
- except errors.TestFailure:
- self.logger.exception("%s marked as a failure by a hook's after_test.",
- test.shortDescription())
- self.report.setFailure(test, return_code=1)
- if config.FAIL_FAST:
- raise errors.StopExecution("A hook's after_test failed")
-
- except:
- self.report.setError(test)
- raise
-
- def _fail_test(self, test, exc_info, return_code=1):
- """
- Helper to record a test as a failure with the provided return
- code.
-
- This method should not be used if 'test' has already been
- started, instead use TestReport.setFailure().
- """
-
- self.report.startTest(test)
- test.return_code = return_code
- self.report.addFailure(test, exc_info)
- self.report.stopTest(test)
-
- @staticmethod
- def _drain_queue(queue):
- """
- Removes all elements from 'queue' without actually doing
- anything to them. Necessary to unblock the main thread that is
- waiting for 'queue' to be empty.
- """
-
- try:
- while not queue.empty():
- queue.get_nowait()
- queue.task_done()
- except _queue.Empty:
- # Multiple threads may be draining the queue simultaneously, so just ignore the
- # exception from the race between queue.empty() being false and failing to get an item.
- pass
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/report.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/report.py
deleted file mode 100644
index 61468e1dd41..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/report.py
+++ /dev/null
@@ -1,330 +0,0 @@
-"""
-Extension to the unittest.TestResult to support additional test status
-and timing information for the report.json file.
-"""
-
-from __future__ import absolute_import
-
-import copy
-import time
-import unittest
-
-from .. import config
-from .. import logging
-
-
-class TestReport(unittest.TestResult):
- """
- Records test status and timing information.
- """
-
- def __init__(self, logger, logging_config, build_id=None, build_config=None):
- """
- Initializes the TestReport with the buildlogger configuration.
- """
-
- unittest.TestResult.__init__(self)
-
- self.logger = logger
- self.logging_config = logging_config
- self.build_id = build_id
- self.build_config = build_config
-
- self.reset()
-
- @classmethod
- def combine(cls, *reports):
- """
- Merges the results from multiple TestReport instances into one.
-
- If the same test is present in multiple reports, then one that
- failed or errored is more preferred over one that succeeded.
- This behavior is useful for when running multiple jobs that
- dynamically add a #dbhash# test case.
- """
-
- combined_report = cls(logging.loggers.EXECUTOR, {})
- combining_time = time.time()
-
- for report in reports:
- if not isinstance(report, TestReport):
- raise TypeError("reports must be a list of TestReport instances")
-
- for test_info in report.test_infos:
- # If the user triggers a KeyboardInterrupt exception while a test is running, then
- # it is possible for 'test_info' to be modified by a job thread later on. We make a
- # shallow copy in order to ensure 'num_failed' is consistent with the actual number
- # of tests that have status equal to "failed".
- test_info = copy.copy(test_info)
-
- # TestReport.addXX() may not have been called.
- if test_info.status is None or test_info.return_code is None:
- # Mark the test as having failed if it was interrupted. It might have passed if
- # the suite ran to completion, but we wouldn't know for sure.
- test_info.status = "fail"
- test_info.return_code = -2
-
- # TestReport.stopTest() may not have been called.
- if test_info.end_time is None:
- # Use the current time as the time that the test finished running.
- test_info.end_time = combining_time
-
- combined_report.test_infos.append(test_info)
-
- combined_report.num_dynamic += report.num_dynamic
-
- # Recompute number of success, failures, and errors.
- combined_report.num_succeeded = len(combined_report.get_successful())
- combined_report.num_failed = len(combined_report.get_failed())
- combined_report.num_errored = len(combined_report.get_errored())
-
- return combined_report
-
- def startTest(self, test, dynamic=False):
- """
- Called immediately before 'test' is run.
- """
-
- unittest.TestResult.startTest(self, test)
-
- test_info = _TestInfo(test.id(), dynamic)
- test_info.start_time = time.time()
- self.test_infos.append(test_info)
-
- basename = test.basename()
- if dynamic:
- command = "(dynamic test case)"
- self.num_dynamic += 1
- else:
- command = test.as_command()
- self.logger.info("Running %s...\n%s", basename, command)
-
- test_id = logging.buildlogger.new_test_id(self.build_id,
- self.build_config,
- basename,
- command)
-
- if self.build_id is not None:
- endpoint = logging.buildlogger.APPEND_TEST_LOGS_ENDPOINT % {
- "build_id": self.build_id,
- "test_id": test_id,
- }
-
- test_info.url_endpoint = "%s/%s/" % (config.BUILDLOGGER_URL.rstrip("/"),
- endpoint.strip("/"))
-
- self.logger.info("Writing output of %s to %s.",
- test.shortDescription(),
- test_info.url_endpoint)
-
- # Set up the test-specific logger.
- logger_name = "%s:%s" % (test.logger.name, test.short_name())
- logger = logging.loggers.new_logger(logger_name, parent=test.logger)
- logging.config.apply_buildlogger_test_handler(logger,
- self.logging_config,
- build_id=self.build_id,
- build_config=self.build_config,
- test_id=test_id)
-
- self.__original_loggers[test_info.test_id] = test.logger
- test.logger = logger
-
- def stopTest(self, test):
- """
- Called immediately after 'test' has run.
- """
-
- unittest.TestResult.stopTest(self, test)
-
- test_info = self._find_test_info(test)
- test_info.end_time = time.time()
-
- time_taken = test_info.end_time - test_info.start_time
- self.logger.info("%s ran in %0.2f seconds.", test.basename(), time_taken)
-
- # Asynchronously closes the buildlogger test handler to avoid having too many threads open
- # on 32-bit systems.
- logging.flush.close_later(test.logger)
-
- # Restore the original logger for the test.
- test.logger = self.__original_loggers.pop(test.id())
-
- def addError(self, test, err):
- """
- Called when a non-failureException was raised during the
- execution of 'test'.
- """
-
- unittest.TestResult.addError(self, test, err)
- self.num_errored += 1
-
- test_info = self._find_test_info(test)
- test_info.status = "error"
- test_info.return_code = test.return_code
-
- def setError(self, test):
- """
- Used to change the outcome of an existing test to an error.
- """
-
- test_info = self._find_test_info(test)
- if test_info.end_time is None:
- raise ValueError("stopTest was not called on %s" % (test.basename()))
-
- test_info.status = "error"
- test_info.return_code = 2
-
- # Recompute number of success, failures, and errors.
- self.num_succeeded = len(self.get_successful())
- self.num_failed = len(self.get_failed())
- self.num_errored = len(self.get_errored())
-
- def addFailure(self, test, err):
- """
- Called when a failureException was raised during the execution
- of 'test'.
- """
-
- unittest.TestResult.addFailure(self, test, err)
- self.num_failed += 1
-
- test_info = self._find_test_info(test)
- test_info.status = "fail"
- test_info.return_code = test.return_code
-
- def setFailure(self, test, return_code=1):
- """
- Used to change the outcome of an existing test to a failure.
- """
-
- test_info = self._find_test_info(test)
- if test_info.end_time is None:
- raise ValueError("stopTest was not called on %s" % (test.basename()))
-
- test_info.status = "fail"
- test_info.return_code = return_code
-
- # Recompute number of success, failures, and errors.
- self.num_succeeded = len(self.get_successful())
- self.num_failed = len(self.get_failed())
- self.num_errored = len(self.get_errored())
-
- def addSuccess(self, test):
- """
- Called when 'test' executed successfully.
- """
-
- unittest.TestResult.addSuccess(self, test)
- self.num_succeeded += 1
-
- test_info = self._find_test_info(test)
- test_info.status = "pass"
- test_info.return_code = test.return_code
-
- def wasSuccessful(self):
- """
- Returns true if all tests executed successfully.
- """
- return self.num_failed == self.num_errored == 0
-
- def get_successful(self):
- """
- Returns the status and timing information of the tests that
- executed successfully.
- """
- return [test_info for test_info in self.test_infos if test_info.status == "pass"]
-
- def get_failed(self):
- """
- Returns the status and timing information of the tests that
- raised a failureException during their execution.
- """
- return [test_info for test_info in self.test_infos if test_info.status == "fail"]
-
- def get_errored(self):
- """
- Returns the status and timing information of the tests that
- raised a non-failureException during their execution.
- """
- return [test_info for test_info in self.test_infos if test_info.status == "error"]
-
- def as_dict(self):
- """
- Return the test result information as a dictionary.
-
- Used to create the report.json file.
- """
-
- results = []
- for test_info in self.test_infos:
- # Don't distinguish between failures and errors.
- status = "pass" if test_info.status == "pass" else "fail"
-
- result = {
- "test_file": test_info.test_id,
- "status": status,
- "exit_code": test_info.return_code,
- "start": test_info.start_time,
- "end": test_info.end_time,
- "elapsed": test_info.end_time - test_info.start_time,
- }
-
- if test_info.url_endpoint is not None:
- result["url"] = test_info.url_endpoint
-
- results.append(result)
-
- return {
- "results": results,
- "failures": self.num_failed + self.num_errored,
- }
-
- def reset(self):
- """
- Resets the test report back to its initial state.
- """
-
- self.test_infos = []
-
- self.num_dynamic = 0
- self.num_succeeded = 0
- self.num_failed = 0
- self.num_errored = 0
-
- self.__original_loggers = {}
-
- def _find_test_info(self, test):
- """
- Returns the status and timing information associated with
- 'test'.
- """
-
- test_id = test.id()
-
- # Search the list backwards to efficiently find the status and timing information of a test
- # that was recently started.
- for test_info in reversed(self.test_infos):
- if test_info.test_id == test_id:
- return test_info
-
- raise ValueError("Details for %s not found in the report" % (test.basename()))
-
-
-class _TestInfo(object):
- """
- Holder for the test status and timing information.
- """
-
- def __init__(self, test_id, dynamic):
- """
- Initializes the _TestInfo instance.
- """
-
- self.test_id = test_id
- self.dynamic = dynamic
-
- self.start_time = None
- self.end_time = None
- self.status = None
- self.return_code = None
- self.url_endpoint = None
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/suite.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/suite.py
deleted file mode 100644
index 65503b85e8b..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/suite.py
+++ /dev/null
@@ -1,140 +0,0 @@
-"""
-Holder for a set of TestGroup instances.
-"""
-
-from __future__ import absolute_import
-
-import time
-
-from . import summary as _summary
-from . import testgroup
-from .. import selector as _selector
-
-
-class Suite(object):
- """
- A suite of tests.
- """
-
- TESTS_ORDER = ("cpp_unit_test", "cpp_integration_test", "db_test", "js_test", "mongos_test")
-
- def __init__(self, suite_name, suite_config):
- """
- Initializes the suite with the specified name and configuration.
- """
-
- self._suite_name = suite_name
- self._suite_config = suite_config
-
- self.test_groups = []
- for test_kind in Suite.TESTS_ORDER:
- if test_kind not in suite_config["selector"]:
- continue
- tests = self._get_tests_for_group(test_kind)
- test_group = testgroup.TestGroup(test_kind, tests)
- self.test_groups.append(test_group)
-
- self.return_code = None
-
- self._start_time = None
- self._end_time = None
-
- def _get_tests_for_group(self, test_kind):
- """
- Returns the tests to run based on the 'test_kind'-specific
- filtering policy.
- """
-
- test_info = self.get_selector_config()[test_kind]
-
- # The mongos_test doesn't have to filter anything, the test_info is just the arguments to
- # the mongos program to be used as the test case.
- if test_kind == "mongos_test":
- mongos_options = test_info # Just for easier reading.
- if not isinstance(mongos_options, dict):
- raise TypeError("Expected dictionary of arguments to mongos")
- return [mongos_options]
- elif test_kind == "cpp_integration_test":
- tests = _selector.filter_cpp_integration_tests(**test_info)
- elif test_kind == "cpp_unit_test":
- tests = _selector.filter_cpp_unit_tests(**test_info)
- elif test_kind == "db_test":
- tests = _selector.filter_dbtests(**test_info)
- else: # test_kind == "js_test":
- tests = _selector.filter_jstests(**test_info)
-
- return sorted(tests, key=str.lower)
-
- def get_name(self):
- """
- Returns the name of the test suite.
- """
- return self._suite_name
-
- def get_selector_config(self):
- """
- Returns the "selector" section of the YAML configuration.
- """
- return self._suite_config["selector"]
-
- def get_executor_config(self):
- """
- Returns the "executor" section of the YAML configuration.
- """
- return self._suite_config["executor"]
-
- def record_start(self):
- """
- Records the start time of the suite.
- """
- self._start_time = time.time()
-
- def record_end(self):
- """
- Records the end time of the suite.
-
- Sets the 'return_code' of the suite based on the record codes of
- each of the individual test groups.
- """
-
- self._end_time = time.time()
-
- # Only set 'return_code' if it hasn't been set already. It may have been set if there was
- # an exception that happened during the execution of the suite.
- if self.return_code is None:
- # The return code of the suite should be 2 if any test group has a return code of 2.
- # The return code of the suite should be 1 if any test group has a return code of 1,
- # and none have a return code of 2. Otherwise, the return code should be 0.
- self.return_code = max(test_group.return_code for test_group in self.test_groups)
-
- def summarize(self, sb):
- """
- Appends a summary of each individual test group onto the string
- builder 'sb'.
- """
-
- combined_summary = _summary.Summary(0, 0.0, 0, 0, 0, 0)
-
- summarized_groups = []
- for group in self.test_groups:
- group_sb = []
- summary = group.summarize(group_sb)
- summarized_groups.append(" %ss: %s" % (group.test_kind, "\n ".join(group_sb)))
-
- combined_summary = _summary.combine(combined_summary, summary)
-
- if combined_summary.num_run == 0:
- sb.append("Suite did not run any tests.")
- return
-
- # Override the 'time_taken' attribute of the summary if we have more accurate timing
- # information available.
- if self._start_time is not None and self._end_time is not None:
- time_taken = self._end_time - self._start_time
- combined_summary = combined_summary._replace(time_taken=time_taken)
-
- sb.append("%d test(s) ran in %0.2f seconds"
- " (%d succeeded, %d were skipped, %d failed, %d errored)" % combined_summary)
-
- for summary_text in summarized_groups:
- sb.append(summary_text)
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/summary.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/summary.py
deleted file mode 100644
index 1dae9ca81d6..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/summary.py
+++ /dev/null
@@ -1,22 +0,0 @@
-"""
-Holder for summary information about a test group or suite.
-"""
-
-from __future__ import absolute_import
-
-import collections
-
-
-
-Summary = collections.namedtuple("Summary", ["num_run", "time_taken", "num_succeeded",
- "num_skipped", "num_failed", "num_errored"])
-
-
-def combine(summary1, summary2):
- """
- Returns a summary representing the sum of 'summary1' and 'summary2'.
- """
- args = []
- for i in xrange(len(Summary._fields)):
- args.append(summary1[i] + summary2[i])
- return Summary._make(args)
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/testcases.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/testcases.py
deleted file mode 100644
index 3b068c3b80f..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/testcases.py
+++ /dev/null
@@ -1,407 +0,0 @@
-"""
-Subclasses of unittest.TestCase.
-"""
-
-from __future__ import absolute_import
-
-import os
-import os.path
-import shutil
-import unittest
-
-from .. import config
-from .. import core
-from .. import logging
-from .. import utils
-
-
-def make_test_case(test_kind, *args, **kwargs):
- """
- Factory function for creating TestCase instances.
- """
-
- if test_kind not in _TEST_CASES:
- raise ValueError("Unknown test kind '%s'" % (test_kind))
- return _TEST_CASES[test_kind](*args, **kwargs)
-
-
-class TestCase(unittest.TestCase):
- """
- A test case to execute.
- """
-
- def __init__(self, logger, test_kind, test_name):
- """
- Initializes the TestCase with the name of the test.
- """
-
- unittest.TestCase.__init__(self, methodName="run_test")
-
- if not isinstance(logger, logging.Logger):
- raise TypeError("logger must be a Logger instance")
-
- if not isinstance(test_kind, basestring):
- raise TypeError("test_kind must be a string")
-
- if not isinstance(test_name, basestring):
- raise TypeError("test_name must be a string")
-
- self.logger = logger
- self.test_kind = test_kind
- self.test_name = test_name
-
- self.fixture = None
- self.return_code = None
-
- def long_name(self):
- """
- Returns the path to the test, relative to the current working directory.
- """
- return os.path.relpath(self.test_name)
-
- def basename(self):
- """
- Returns the basename of the test.
- """
- return os.path.basename(self.test_name)
-
- def short_name(self):
- """
- Returns the basename of the test without the file extension.
- """
- return os.path.splitext(self.basename())[0]
-
- def id(self):
- return self.test_name
-
- def shortDescription(self):
- return "%s %s" % (self.test_kind, self.test_name)
-
- def configure(self, fixture):
- """
- Stores 'fixture' as an attribute for later use during execution.
- """
- self.fixture = fixture
-
- def run_test(self):
- """
- Runs the specified test.
- """
- raise NotImplementedError("run_test must be implemented by TestCase subclasses")
-
- def as_command(self):
- """
- Returns the command invocation used to run the test.
- """
- return self._make_process().as_command()
-
- def _execute(self, process):
- """
- Runs the specified process.
- """
-
- self.logger.info("Starting %s...\n%s", self.shortDescription(), process.as_command())
- process.start()
- self.logger.info("%s started with pid %s.", self.shortDescription(), process.pid)
-
- self.return_code = process.wait()
- if self.return_code != 0:
- raise self.failureException("%s failed" % (self.shortDescription()))
-
- self.logger.info("%s finished.", self.shortDescription())
-
- def _make_process(self):
- """
- Returns a new Process instance that could be used to run the
- test or log the command.
- """
- raise NotImplementedError("_make_process must be implemented by TestCase subclasses")
-
-
-class CPPUnitTestCase(TestCase):
- """
- A C++ unit test to execute.
- """
-
- def __init__(self,
- logger,
- program_executable,
- program_options=None):
- """
- Initializes the CPPUnitTestCase with the executable to run.
- """
-
- TestCase.__init__(self, logger, "Program", program_executable)
-
- self.program_executable = program_executable
- self.program_options = utils.default_if_none(program_options, {}).copy()
-
- def run_test(self):
- try:
- program = self._make_process()
- self._execute(program)
- except self.failureException:
- raise
- except:
- self.logger.exception("Encountered an error running C++ unit test %s.", self.basename())
- raise
-
- def _make_process(self):
- return core.process.Process(self.logger,
- [self.program_executable],
- **self.program_options)
-
-
-class CPPIntegrationTestCase(TestCase):
- """
- A C++ integration test to execute.
- """
-
- def __init__(self,
- logger,
- program_executable,
- program_options=None):
- """
- Initializes the CPPIntegrationTestCase with the executable to run.
- """
-
- TestCase.__init__(self, logger, "Program", program_executable)
-
- self.program_executable = program_executable
- self.program_options = utils.default_if_none(program_options, {}).copy()
-
- def configure(self, fixture):
- TestCase.configure(self, fixture)
-
- self.program_options["connectionString"] = self.fixture.get_connection_string()
-
- def run_test(self):
- try:
- program = self._make_process()
- self._execute(program)
- except self.failureException:
- raise
- except:
- self.logger.exception("Encountered an error running C++ integration test %s.",
- self.basename())
- raise
-
- def _make_process(self):
- return core.programs.generic_program(self.logger,
- [self.program_executable],
- **self.program_options)
-
-
-class DBTestCase(TestCase):
- """
- A dbtest to execute.
- """
-
- def __init__(self,
- logger,
- dbtest_suite,
- dbtest_executable=None,
- dbtest_options=None):
- """
- Initializes the DBTestCase with the dbtest suite to run.
- """
-
- TestCase.__init__(self, logger, "DBTest", dbtest_suite)
-
- # Command line options override the YAML configuration.
- self.dbtest_executable = utils.default_if_none(config.DBTEST_EXECUTABLE, dbtest_executable)
-
- self.dbtest_suite = dbtest_suite
- self.dbtest_options = utils.default_if_none(dbtest_options, {}).copy()
-
- def configure(self, fixture):
- TestCase.configure(self, fixture)
-
- # If a dbpath was specified, then use it as a container for all other dbpaths.
- dbpath_prefix = self.dbtest_options.pop("dbpath", DBTestCase._get_dbpath_prefix())
- dbpath = os.path.join(dbpath_prefix, "job%d" % (self.fixture.job_num), "unittest")
- self.dbtest_options["dbpath"] = dbpath
-
- shutil.rmtree(dbpath, ignore_errors=True)
-
- try:
- os.makedirs(dbpath)
- except os.error:
- # Directory already exists.
- pass
-
- def run_test(self):
- try:
- dbtest = self._make_process()
- self._execute(dbtest)
- except self.failureException:
- raise
- except:
- self.logger.exception("Encountered an error running dbtest suite %s.", self.basename())
- raise
-
- def _make_process(self):
- return core.programs.dbtest_program(self.logger,
- executable=self.dbtest_executable,
- suites=[self.dbtest_suite],
- **self.dbtest_options)
-
- @staticmethod
- def _get_dbpath_prefix():
- """
- Returns the prefix of the dbpath to use for the dbtest
- executable.
-
- Order of preference:
- 1. The --dbpathPrefix specified at the command line.
- 2. Value of the TMPDIR environment variable.
- 3. Value of the TEMP environment variable.
- 4. Value of the TMP environment variable.
- 5. The /tmp directory.
- """
-
- if config.DBPATH_PREFIX is not None:
- return config.DBPATH_PREFIX
-
- for env_var in ("TMPDIR", "TEMP", "TMP"):
- if env_var in os.environ:
- return os.environ[env_var]
- return os.path.normpath("/tmp")
-
-
-class JSTestCase(TestCase):
- """
- A jstest to execute.
- """
-
- def __init__(self,
- logger,
- js_filename,
- shell_executable=None,
- shell_options=None):
- "Initializes the JSTestCase with the JS file to run."
-
- TestCase.__init__(self, logger, "JSTest", js_filename)
-
- # Command line options override the YAML configuration.
- self.shell_executable = utils.default_if_none(config.MONGO_EXECUTABLE, shell_executable)
-
- self.js_filename = js_filename
- self.shell_options = utils.default_if_none(shell_options, {}).copy()
-
- def configure(self, fixture):
- TestCase.configure(self, fixture)
-
- if self.fixture.port is not None:
- self.shell_options["port"] = self.fixture.port
-
- global_vars = self.shell_options.get("global_vars", {}).copy()
- data_dir = self._get_data_dir(global_vars)
-
- # Set MongoRunner.dataPath if overridden at command line or not specified in YAML.
- if config.DBPATH_PREFIX is not None or "MongoRunner.dataPath" not in global_vars:
- # dataPath property is the dataDir property with a trailing slash.
- data_path = os.path.join(data_dir, "")
- else:
- data_path = global_vars["MongoRunner.dataPath"]
-
- global_vars["MongoRunner.dataDir"] = data_dir
- global_vars["MongoRunner.dataPath"] = data_path
-
- test_data = global_vars.get("TestData", {}).copy()
- test_data["minPort"] = core.network.PortAllocator.min_test_port(fixture.job_num)
- test_data["maxPort"] = core.network.PortAllocator.max_test_port(fixture.job_num)
-
- global_vars["TestData"] = test_data
- self.shell_options["global_vars"] = global_vars
-
- shutil.rmtree(data_dir, ignore_errors=True)
-
- try:
- os.makedirs(data_dir)
- except os.error:
- # Directory already exists.
- pass
-
- def _get_data_dir(self, global_vars):
- """
- Returns the value that the mongo shell should set for the
- MongoRunner.dataDir property.
- """
-
- # Command line options override the YAML configuration.
- data_dir_prefix = utils.default_if_none(config.DBPATH_PREFIX,
- global_vars.get("MongoRunner.dataDir"))
- data_dir_prefix = utils.default_if_none(data_dir_prefix, config.DEFAULT_DBPATH_PREFIX)
- return os.path.join(data_dir_prefix,
- "job%d" % (self.fixture.job_num),
- config.MONGO_RUNNER_SUBDIR)
-
- def run_test(self):
- try:
- shell = self._make_process()
- self._execute(shell)
- except self.failureException:
- raise
- except:
- self.logger.exception("Encountered an error running jstest %s.", self.basename())
- raise
-
- def _make_process(self):
- return core.programs.mongo_shell_program(self.logger,
- executable=self.shell_executable,
- filename=self.js_filename,
- **self.shell_options)
-
-
-class MongosTestCase(TestCase):
- """
- A TestCase which runs a mongos binary with the given parameters.
- """
-
- def __init__(self,
- logger,
- mongos_options):
- """
- Initializes the mongos test and saves the options.
- """
-
- self.mongos_executable = utils.default_if_none(config.MONGOS_EXECUTABLE,
- config.DEFAULT_MONGOS_EXECUTABLE)
- # Use the executable as the test name.
- TestCase.__init__(self, logger, "mongos", self.mongos_executable)
- self.options = mongos_options.copy()
-
- def configure(self, fixture):
- """
- Ensures the --test option is present in the mongos options.
- """
-
- TestCase.configure(self, fixture)
- # Always specify test option to ensure the mongos will terminate.
- if "test" not in self.options:
- self.options["test"] = ""
-
- def run_test(self):
- try:
- mongos = self._make_process()
- self._execute(mongos)
- except self.failureException:
- raise
- except:
- self.logger.exception("Encountered an error running %s.", mongos.as_command())
- raise
-
- def _make_process(self):
- return core.programs.mongos_program(self.logger,
- executable=self.mongos_executable,
- **self.options)
-
-
-_TEST_CASES = {
- "cpp_unit_test": CPPUnitTestCase,
- "cpp_integration_test": CPPIntegrationTestCase,
- "db_test": DBTestCase,
- "js_test": JSTestCase,
- "mongos_test": MongosTestCase,
-}
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/testgroup.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/testgroup.py
deleted file mode 100644
index 688d56c296d..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/testing/testgroup.py
+++ /dev/null
@@ -1,132 +0,0 @@
-"""
-Holder for the (test kind, list of tests) pair with additional metadata
-about when and how they execute.
-"""
-
-from __future__ import absolute_import
-
-import time
-
-from . import summary as _summary
-
-
-class TestGroup(object):
- """
- A class to encapsulate the results of running a group of tests
- of a particular kind (e.g. C++ unit tests, dbtests, jstests).
- """
-
- def __init__(self, test_kind, tests):
- """
- Initializes the TestGroup with a list of tests.
- """
-
- self.test_kind = test_kind
- self.tests = tests
-
- self.return_code = None # Set by the executor.
-
- self._start_times = []
- self._end_times = []
- self._reports = []
-
- def get_reports(self):
- """
- Returns the list of reports.
- """
- return self._reports
-
- def record_start(self):
- """
- Records the start time of an execution.
- """
- self._start_times.append(time.time())
-
- def record_end(self, report):
- """
- Records the end time of an execution.
- """
- self._end_times.append(time.time())
- self._reports.append(report)
-
- def summarize_latest(self, sb):
- """
- Returns a summary of the latest execution of the group and appends a
- summary of that execution onto the string builder 'sb'.
- """
- return self._summarize_execution(-1, sb)
-
- def summarize(self, sb):
- """
- Returns a summary of the execution(s) of the group and appends a
- summary of the execution(s) onto the string builder 'sb'.
- """
-
- if not self._reports:
- sb.append("No tests ran.")
- return _summary.Summary(0, 0.0, 0, 0, 0, 0)
-
- if len(self._reports) == 1:
- return self._summarize_execution(0, sb)
-
- return self._summarize_repeated(sb)
-
- def _summarize_repeated(self, sb):
- """
- Returns the summary information of all executions and appends
- each execution's summary onto the string builder 'sb'. Also
- appends information of how many repetitions there were.
- """
-
- num_iterations = len(self._reports)
- total_time_taken = self._end_times[-1] - self._start_times[0]
- sb.append("Executed %d times in %0.2f seconds:" % (num_iterations, total_time_taken))
-
- combined_summary = _summary.Summary(0, 0.0, 0, 0, 0, 0)
- for iteration in xrange(num_iterations):
- # Summarize each execution as a bulleted list of results.
- bulleter_sb = []
- summary = self._summarize_execution(iteration, bulleter_sb)
- combined_summary = _summary.combine(combined_summary, summary)
-
- for (i, line) in enumerate(bulleter_sb):
- # Only bullet first line, indent others.
- prefix = "* " if i == 0 else " "
- sb.append(prefix + line)
-
- return combined_summary
-
- def _summarize_execution(self, iteration, sb):
- """
- Returns the summary information of the execution given by
- 'iteration' and appends a summary of that execution onto the
- string builder 'sb'.
- """
-
- report = self._reports[iteration]
- time_taken = self._end_times[iteration] - self._start_times[iteration]
-
- num_run = report.num_succeeded + report.num_errored + report.num_failed
- num_skipped = len(self.tests) + report.num_dynamic - num_run
-
- if report.num_succeeded == num_run and num_skipped == 0:
- sb.append("All %d test(s) passed in %0.2f seconds." % (num_run, time_taken))
- return _summary.Summary(num_run, time_taken, num_run, 0, 0, 0)
-
- summary = _summary.Summary(num_run, time_taken, report.num_succeeded, num_skipped,
- report.num_failed, report.num_errored)
-
- sb.append("%d test(s) ran in %0.2f seconds"
- " (%d succeeded, %d were skipped, %d failed, %d errored)" % summary)
-
- if report.num_failed > 0:
- sb.append("The following tests failed (with exit code):")
- for test_info in report.get_failed():
- sb.append(" %s (%d)" % (test_info.test_id, test_info.return_code))
-
- if report.num_errored > 0:
- sb.append("The following tests had errors:")
- for test_info in report.get_errored():
- sb.append(" %s" % (test_info.test_id))
-
- return summary
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/__init__.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/__init__.py
deleted file mode 100644
index df387cc3323..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/__init__.py
+++ /dev/null
@@ -1,88 +0,0 @@
-"""
-Helper functions.
-"""
-
-from __future__ import absolute_import
-
-import os.path
-
-import pymongo
-import yaml
-
-
-def default_if_none(value, default):
- return value if value is not None else default
-
-
-def is_string_list(lst):
- """
- Returns true if 'lst' is a list of strings, and false otherwise.
- """
- return isinstance(lst, list) and all(isinstance(x, basestring) for x in lst)
-
-
-def is_string_set(value):
- """
- Returns true if 'value' is a set of strings, and false otherwise.
- """
- return isinstance(value, set) and all(isinstance(x, basestring) for x in value)
-
-
-def is_js_file(filename):
- """
- Returns true if 'filename' ends in .js, and false otherwise.
- """
- return os.path.splitext(filename)[1] == ".js"
-
-
-def is_yaml_file(filename):
- """
- Returns true if 'filename' ends in .yml or .yaml, and false
- otherwise.
- """
- return os.path.splitext(filename)[1] in (".yaml", ".yml")
-
-
-def load_yaml_file(filename):
- """
- Attempts to read 'filename' as YAML.
- """
- try:
- with open(filename, "r") as fp:
- return yaml.safe_load(fp)
- except yaml.YAMLError as err:
- raise ValueError("File '%s' contained invalid YAML: %s" % (filename, err))
-
-
-def dump_yaml(value):
- """
- Returns 'value' formatted as YAML.
- """
- # Use block (indented) style for formatting YAML.
- return yaml.safe_dump(value, default_flow_style=False).rstrip()
-
-def load_yaml(value):
- """
- Attempts to parse 'value' as YAML.
- """
- try:
- return yaml.safe_load(value)
- except yaml.YAMLError as err:
- raise ValueError("Attempted to parse invalid YAML value '%s': %s" % (value, err))
-
-
-def new_mongo_client(port, read_preference=pymongo.ReadPreference.PRIMARY, timeout_millis=30000):
- """
- Returns a pymongo.MongoClient connected on 'port' with a read
- preference of 'read_preference'.
-
- The PyMongo driver will wait up to 'timeout_millis' milliseconds
- before concluding that the server is unavailable.
- """
-
- kwargs = {"connectTimeoutMS": timeout_millis}
- if pymongo.version_tuple[0] >= 3:
- kwargs["serverSelectionTimeoutMS"] = timeout_millis
- kwargs["connect"] = True
-
- return pymongo.MongoClient(port=port, read_preference=read_preference, **kwargs)
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/globstar.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/globstar.py
deleted file mode 100644
index 644ebfe3e38..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/globstar.py
+++ /dev/null
@@ -1,202 +0,0 @@
-"""
-Filename globbing utility.
-"""
-
-from __future__ import absolute_import
-
-import glob as _glob
-import os
-import os.path
-import re
-
-
-_GLOBSTAR = "**"
-_CONTAINS_GLOB_PATTERN = re.compile("[*?[]")
-
-
-def is_glob_pattern(s):
- """
- Returns true if 's' represents a glob pattern, and false otherwise.
- """
-
- # Copied from glob.has_magic().
- return _CONTAINS_GLOB_PATTERN.search(s) is not None
-
-
-def glob(globbed_pathname):
- """
- Return a list of pathnames matching the 'globbed_pathname' pattern.
-
- In addition to containing simple shell-style wildcards a la fnmatch,
- the pattern may also contain globstars ("**"), which is recursively
- expanded to match zero or more subdirectories.
- """
-
- return list(iglob(globbed_pathname))
-
-
-def iglob(globbed_pathname):
- """
- Emit a list of pathnames matching the 'globbed_pathname' pattern.
-
- In addition to containing simple shell-style wildcards a la fnmatch,
- the pattern may also contain globstars ("**"), which is recursively
- expanded to match zero or more subdirectories.
- """
-
- parts = _split_path(globbed_pathname)
- parts = _canonicalize(parts)
-
- index = _find_globstar(parts)
- if index == -1:
- for pathname in _glob.iglob(globbed_pathname):
- # Normalize 'pathname' so exact string comparison can be used later.
- yield os.path.normpath(pathname)
- return
-
- # **, **/, or **/a
- if index == 0:
- expand = _expand_curdir
-
- # a/** or a/**/ or a/**/b
- else:
- expand = _expand
-
- prefix_parts = parts[:index]
- suffix_parts = parts[index + 1:]
-
- prefix = os.path.join(*prefix_parts) if prefix_parts else os.curdir
- suffix = os.path.join(*suffix_parts) if suffix_parts else ""
-
- for (kind, path) in expand(prefix):
- if not suffix_parts:
- yield path
-
- # Avoid following symlinks to avoid an infinite loop
- elif suffix_parts and kind == "dir" and not os.path.islink(path):
- path = os.path.join(path, suffix)
- for pathname in iglob(path):
- yield pathname
-
-
-def _split_path(pathname):
- """
- Return 'pathname' as a list of path components.
- """
-
- parts = []
-
- while True:
- (dirname, basename) = os.path.split(pathname)
- parts.append(basename)
- if pathname == dirname:
- parts.append(dirname)
- break
- if not dirname:
- break
- pathname = dirname
-
- parts.reverse()
- return parts
-
-
-def _canonicalize(parts):
- """
- Return a copy of 'parts' with consecutive "**"s coalesced.
- Raise a ValueError for unsupported uses of "**".
- """
-
- res = []
-
- prev_was_globstar = False
- for p in parts:
- if p == _GLOBSTAR:
- # Skip consecutive **'s
- if not prev_was_globstar:
- prev_was_globstar = True
- res.append(p)
- elif _GLOBSTAR in p: # a/b**/c or a/**b/c
- raise ValueError("Can only specify glob patterns of the form a/**/b")
- else:
- prev_was_globstar = False
- res.append(p)
-
- return res
-
-
-def _find_globstar(parts):
- """
- Return the index of the first occurrence of "**" in 'parts'.
- Return -1 if "**" is not found in the list.
- """
-
- for (i, p) in enumerate(parts):
- if p == _GLOBSTAR:
- return i
- return -1
-
-
-def _list_dir(pathname):
- """
- Return a pair of the subdirectory names and filenames immediately
- contained within the 'pathname' directory.
-
- If 'pathname' does not exist, then None is returned.
- """
-
- try:
- (_root, dirs, files) = os.walk(pathname).next()
- return (dirs, files)
- except StopIteration:
- return None # 'pathname' directory does not exist
-
-
-def _expand(pathname):
- """
- Emit tuples of the form ("dir", dirname) and ("file", filename)
- of all directories and files contained within the 'pathname' directory.
- """
-
- res = _list_dir(pathname)
- if res is None:
- return
-
- (dirs, files) = res
-
- # Zero expansion
- if os.path.basename(pathname):
- yield ("dir", os.path.join(pathname, ""))
-
- for f in files:
- path = os.path.join(pathname, f)
- yield ("file", path)
-
- for d in dirs:
- path = os.path.join(pathname, d)
- for x in _expand(path):
- yield x
-
-
-def _expand_curdir(pathname):
- """
- Emit tuples of the form ("dir", dirname) and ("file", filename)
- of all directories and files contained within the 'pathname' directory.
-
- The returned pathnames omit a "./" prefix.
- """
-
- res = _list_dir(pathname)
- if res is None:
- return
-
- (dirs, files) = res
-
- # Zero expansion
- yield ("dir", "")
-
- for f in files:
- yield ("file", f)
-
- for d in dirs:
- for x in _expand(d):
- yield x
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/jscomment.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/jscomment.py
deleted file mode 100644
index 18da7885820..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/jscomment.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""
-Utility for parsing JS comments.
-"""
-
-from __future__ import absolute_import
-
-import re
-
-import yaml
-
-
-# TODO: use a more robust regular expression for matching tags
-_JSTEST_TAGS_RE = re.compile(r".*@tags\s*:\s*(\[[^\]]*\])", re.DOTALL)
-
-
-def get_tags(pathname):
- """
- Returns the list of tags found in the (JS-style) comments of
- 'pathname'. The definition can span multiple lines, use unquoted,
- single-quoted, or double-quoted strings, and use the '#' character
- for inline commenting.
-
- e.g.
-
- /**
- * @tags: [ "tag1", # double quoted
- * 'tag2' # single quoted
- * # line with only a comment
- * , tag3 # no quotes
- * tag4, # trailing comma
- * ]
- */
- """
-
- with open(pathname) as fp:
- match = _JSTEST_TAGS_RE.match(fp.read())
- if match:
- try:
- # TODO: it might be worth supporting the block (indented) style of YAML lists in
- # addition to the flow (bracketed) style
- tags = yaml.safe_load(_strip_jscomments(match.group(1)))
- if not isinstance(tags, list) and all(isinstance(tag, basestring) for tag in tags):
- raise TypeError("Expected a list of string tags, but got '%s'" % (tags))
- return tags
- except yaml.YAMLError as err:
- raise ValueError("File '%s' contained invalid tags (expected YAML): %s"
- % (pathname, err))
-
- return []
-
-
-def _strip_jscomments(s):
- """
- Given a string 's' that represents the contents after the "@tags:"
- annotation in the JS file, this function returns a string that can
- be converted to YAML.
-
- e.g.
-
- [ "tag1", # double quoted
- * 'tag2' # single quoted
- * # line with only a comment
- * , tag3 # no quotes
- * tag4, # trailing comma
- * ]
-
- If the //-style JS comments were used, then the example remains the,
- same except with the '*' character is replaced by '//'.
- """
-
- yaml_lines = []
-
- for line in s.splitlines():
- # Remove leading whitespace and symbols that commonly appear in JS comments.
- line = line.lstrip("\t ").lstrip("*/")
- yaml_lines.append(line)
-
- return "\n".join(yaml_lines)
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/queue.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/queue.py
deleted file mode 100644
index 80da5e2cc66..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/queue.py
+++ /dev/null
@@ -1,52 +0,0 @@
-"""
-Extension to the Queue.Queue class.
-
-Added support for the join() method to take a timeout. This is necessary
-in order for KeyboardInterrupt exceptions to get propagated.
-
-See https://bugs.python.org/issue1167930 for more details.
-"""
-
-from __future__ import absolute_import
-
-import Queue
-import time
-
-
-# Exception that is raised when get_nowait() is called on an empty Queue.
-Empty = Queue.Empty
-
-
-class Queue(Queue.Queue):
- """
- A multi-producer, multi-consumer queue.
- """
-
- def join(self, timeout=None):
- """
- Wait until all items in the queue have been retrieved and processed,
- or until 'timeout' seconds have passed.
-
- The count of unfinished tasks is incremented whenever an item is added
- to the queue. The count is decremented whenever task_done() is called
- to indicate that all work on the retrieved item was completed.
-
- When the number of unfinished tasks reaches zero, True is returned.
- If the number of unfinished tasks remains nonzero after 'timeout'
- seconds have passed, then False is returned.
- """
- with self.all_tasks_done:
- if timeout is None:
- while self.unfinished_tasks:
- self.all_tasks_done.wait()
- elif timeout < 0:
- raise ValueError("timeout must be a nonnegative number")
- else:
- # Pass timeout down to lock acquisition
- deadline = time.time() + timeout
- while self.unfinished_tasks:
- remaining = deadline - time.time()
- if remaining <= 0.0:
- return False
- self.all_tasks_done.wait(remaining)
- return True
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/timer.py b/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/timer.py
deleted file mode 100644
index 80531d5db5c..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/resmokelib/utils/timer.py
+++ /dev/null
@@ -1,125 +0,0 @@
-"""
-Alternative to the threading.Timer class.
-
-Enables a timer to be restarted without needing to construct a new thread
-each time. This is necessary to execute periodic actions, e.g. flushing
-log messages to buildlogger, while avoiding errors related to "can't start
-new thread" that would otherwise occur on Windows.
-"""
-
-from __future__ import absolute_import
-
-import threading
-
-
-class AlarmClock(threading.Thread):
- """
- Calls a function after a specified number of seconds.
- """
-
- def __init__(self, interval, func, args=None, kwargs=None):
- """
- Initializes the timer with a function to periodically execute.
- """
-
- threading.Thread.__init__(self)
-
- # A non-dismissed timer should not prevent the program from exiting
- self.daemon = True
-
- self.interval = interval
- self.func = func
- self.args = args if args is not None else []
- self.kwargs = kwargs if kwargs is not None else {}
-
- self.lock = threading.Lock()
- self.cond = threading.Condition(self.lock)
-
- self.snoozed = False # canceled for one execution
- self.dismissed = False # canceled for all time
- self.restarted = False
-
- def dismiss(self):
- """
- Disables the timer.
- """
-
- with self.lock:
- self.dismissed = True
- self.cond.notify_all()
-
- self.join() # Tidy up the started thread.
-
- cancel = dismiss # Expose API compatible with that of threading.Timer.
-
- def snooze(self):
- """
- Skips the next execution of 'func' if it has not already started.
- """
-
- with self.lock:
- if self.dismissed:
- raise ValueError("Timer cannot be snoozed if it has been dismissed")
-
- self.snoozed = True
- self.restarted = False
- self.cond.notify_all()
-
- def reset(self):
- """
- Restarts the timer, causing it to wait 'interval' seconds before calling
- 'func' again.
- """
-
- with self.lock:
- if self.dismissed:
- raise ValueError("Timer cannot be reset if it has been dismissed")
-
- if not self.snoozed:
- raise ValueError("Timer cannot be reset if it has not been snoozed")
-
- self.restarted = True
- self.cond.notify_all()
-
- def run(self):
- """
- Repeatedly calls 'func' with a delay of 'interval' seconds between executions.
-
- If the timer is snoozed before 'func' is called, then it waits to be reset.
- After it has been reset, the timer will again wait 'interval' seconds and
- then try to call 'func'.
-
- If the timer is dismissed, then no subsequent executions of 'func' are made.
- """
-
- while True:
- with self.lock:
- if self.dismissed:
- return
-
- # Wait for the specified amount of time.
- self.cond.wait(self.interval)
-
- if self.dismissed:
- return
-
- # If the timer was snoozed, then it should wait to be reset.
- if self.snoozed:
- while not self.restarted:
- self.cond.wait()
-
- if self.dismissed:
- return
-
- self.restarted = False
- self.snoozed = False
- continue
-
- # Execute the function after the lock has been released to prevent potential deadlocks
- # with the invoked function.
- self.func(*self.args, **self.kwargs)
-
- # Reacquire the lock.
- with self.lock:
- # Ignore snoozes that took place while the function was being executed.
- self.snoozed = False
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/setup_multiversion_mongodb.py b/src/mongo/gotools/test/qa-tests/buildscripts/setup_multiversion_mongodb.py
deleted file mode 100755
index df6adc64ac2..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/setup_multiversion_mongodb.py
+++ /dev/null
@@ -1,291 +0,0 @@
-#!/usr/bin/python
-
-import re
-import sys
-import os
-import tempfile
-import urllib2
-import subprocess
-import tarfile
-import zipfile
-import shutil
-import errno
-# To ensure it exists on the system
-import gzip
-import argparse
-
-#
-# Useful script for installing multiple versions of MongoDB on a machine
-# Only really tested/works on Linux.
-#
-
-def version_tuple(version):
- """Returns a version tuple that can be used for numeric sorting
- of version strings such as '2.6.0-rc1' and '2.4.0'"""
-
- RC_OFFSET = -100
- version_parts = re.split(r'\.|-', version[0])
-
- if version_parts[-1].startswith("rc"):
- rc_part = version_parts.pop()
- rc_part = rc_part.split('rc')[1]
-
- # RC versions are weighted down to allow future RCs and general
- # releases to be sorted in ascending order (e.g., 2.6.0-rc1,
- # 2.6.0-rc2, 2.6.0).
- version_parts.append(int(rc_part) + RC_OFFSET)
- else:
- # Non-RC releases have an extra 0 appended so version tuples like
- # (2, 6, 0, -100) and (2, 6, 0, 0) sort in ascending order.
- version_parts.append(0)
-
- return tuple(map(int, version_parts))
-
-class MultiVersionDownloaderBase :
-
- def download_version(self, version):
-
- try:
- os.makedirs(self.install_dir)
- except OSError as exc:
- if exc.errno == errno.EEXIST and os.path.isdir(self.install_dir):
- pass
- else: raise
-
- url, full_version = self.gen_url(version)
-
- # this extracts the filename portion of the URL, without the extension.
- # for example: ttp://downloads.mongodb.org/osx/mongodb-osx-x86_64-2.4.12.tgz
- # extract_dir will become mongodb-osx-x86_64-2.4.12
- extract_dir = url.split("/")[-1][:-4]
-
- # only download if we don't already have the directory
- already_downloaded = os.path.isdir(os.path.join( self.install_dir, extract_dir))
- if already_downloaded:
- print "Skipping download for version %s (%s) since the dest already exists '%s'" \
- % (version, full_version, extract_dir)
- else:
- temp_dir = tempfile.mkdtemp()
- temp_file = tempfile.mktemp(suffix=".tgz")
-
- data = urllib2.urlopen(url)
-
- print "Downloading data for version %s (%s) from %s..." % (version, full_version, url)
-
- with open(temp_file, 'wb') as f:
- f.write(data.read())
- print "Uncompressing data for version %s (%s)..." % (version, full_version)
-
- try:
- tf = tarfile.open(temp_file, 'r:gz')
- tf.extractall(path=temp_dir)
- tf.close()
- except:
- # support for windows
- zfile = zipfile.ZipFile(temp_file)
- try:
- if not os.path.exists(temp_dir):
- os.makedirs(temp_dir)
- for name in zfile.namelist():
- _, filename = os.path.split(name)
- print "Decompressing " + filename + " on " + temp_dir
- zfile.extract(name, temp_dir)
- except:
- zfile.close()
- raise
- zfile.close()
- temp_install_dir = os.path.join(temp_dir, extract_dir)
- try:
- os.stat(temp_install_dir)
- except:
- dir = os.listdir(temp_dir)
- # TODO confirm that there is one and only one directory entry
- os.rename(os.path.join(temp_dir,dir[0]),temp_install_dir)
- shutil.move(temp_install_dir, self.install_dir)
- shutil.rmtree(temp_dir)
- try:
- os.remove(temp_file)
- except Exception as e:
- print e
- pass
- self.symlink_version(version, os.path.abspath(os.path.join(self.install_dir, extract_dir)))
-
- def symlink_version(self, version, installed_dir):
-
- try:
- os.makedirs(self.link_dir)
- except OSError as exc:
- if exc.errno == errno.EEXIST and os.path.isdir(self.link_dir):
- pass
- else: raise
-
- for executable in os.listdir(os.path.join(installed_dir, "bin")):
- link_name = "%s-%s" % (executable, version)
- # support for windows
- if executable.endswith(".exe") or executable.endswith(".pdb"):
- link_name = "%s-%s.%s" % (executable[:-4], version, executable[len(executable)-3:])
-
- try:
- os.symlink(os.path.join(installed_dir, "bin", executable),\
- os.path.join(self.link_dir, link_name))
- except Exception as exc:
- try:
- # support for windows
- shutil.copy2(os.path.join(installed_dir, "bin", executable),\
- os.path.join(self.link_dir, link_name))
- except:
- if exc.errno == errno.EEXIST:
- pass
- else:
- raise
-
-class MultiVersionDownloader(MultiVersionDownloaderBase) :
-
- def __init__(self, install_dir, link_dir, platform):
- self.install_dir = install_dir
- self.link_dir = link_dir
- match = re.compile("(.*)\/(.*)").match(platform)
- self.platform = match.group(1)
- self.arch = match.group(2)
- self._links = None
-
- @property
- def links(self):
- if self._links is None:
- self._links = self.download_links()
- return self._links
-
- def gen_url(self, version):
- urls = []
- for link_version, link_url in self.links.iteritems():
- if link_version.startswith(version):
- # If we have a "-" in our version, exact match only
- if version.find("-") >= 0:
- if link_version != version: continue
- elif link_version.find("-") >= 0:
- continue
-
- urls.append((link_version, link_url))
-
- if len(urls) == 0:
- raise Exception("Cannot find a link for version %s, versions %s found." \
- % (version, self.links))
-
- urls.sort(key=version_tuple)
- full_version = urls[-1][0]
- url = urls[-1][1]
- return url, full_version
-
- def download_links(self):
- href = "http://dl.mongodb.org/dl/%s/%s" \
- % (self.platform, self.arch)
-
- html = urllib2.urlopen(href).read()
- links = {}
- for line in html.split():
- match = None
- for ext in ["tgz", "zip"]:
- match = re.compile("http:\/\/downloads\.mongodb\.org\/%s/mongodb-%s-%s-([^\"]*)\.%s" \
- % (self.platform, self.platform, self.arch, ext)).search(line)
- if match != None:
- break
-
- if match == None:
- continue
- link = match.group(0)
- version = match.group(1)
- links[version] = link
-
- return links
-
-
-class LatestMultiVersionDownloader(MultiVersionDownloaderBase) :
-
- def __init__(self, install_dir, link_dir, platform, use_ssl, os):
- self.install_dir = install_dir
- self.link_dir = link_dir
- match = re.compile("(.*)\/(.*)").match(platform)
- self.platform = match.group(1)
- self.arch = match.group(2)
- self._links = None
- self.use_ssl = use_ssl
- self.os = os
-
- def gen_url(self, version):
- ext = "tgz"
- if "win" in self.platform:
- ext = "zip"
- if self.use_ssl:
- if version == "2.4":
- enterprise_string = "subscription"
- else:
- enterprise_string = "enterprise"
- full_version = self.os + "-v" + version + "-latest"
- url = "http://downloads.10gen.com/%s/mongodb-%s-%s-%s-%s.%s" % ( self.platform, self.platform, self.arch, enterprise_string, full_version, ext )
- else:
- full_version = "v" + version + "-latest"
- url = "http://downloads.mongodb.org/%s/mongodb-%s-%s-%s.%s" % ( self.platform, self.platform, self.arch, full_version, ext )
- return url, full_version
-
-CL_HELP_MESSAGE = \
-"""
-Downloads and installs particular mongodb versions (each binary is renamed to include its version)
-into an install directory and symlinks the binaries with versions to another directory.
-
-Usage: setup_multiversion_mongodb.py INSTALL_DIR LINK_DIR PLATFORM_AND_ARCH VERSION1 [VERSION2 VERSION3 ...]
-
-Ex: setup_multiversion_mongodb.py ./install ./link "Linux/x86_64" "2.0.6" "2.0.3-rc0" "2.0" "2.2" "2.3"
-Ex: setup_multiversion_mongodb.py ./install ./link "OSX/x86_64" "2.4" "2.2"
-
-After running the script you will have a directory structure like this:
-./install/[mongodb-osx-x86_64-2.4.9, mongodb-osx-x86_64-2.2.7]
-./link/[mongod-2.4.9, mongod-2.2.7, mongo-2.4.9...]
-
-You should then add ./link/ to your path so multi-version tests will work.
-
-Note: If "rc" is included in the version name, we'll use the exact rc, otherwise we'll pull the highest non-rc
-version compatible with the version specified.
-"""
-
-def parse_cl_args(args):
-
- parser = argparse.ArgumentParser(description=CL_HELP_MESSAGE)
-
- def raise_exception(msg):
- print CL_HELP_MESSAGE
- raise Exception(msg)
-
- parser.add_argument('install_dir', action="store" )
- parser.add_argument('link_dir', action="store" )
- parser.add_argument('platform_and_arch', action="store" )
- parser.add_argument('--latest', action="store_true" )
- parser.add_argument('--use-ssl', action="store_true" )
- parser.add_argument('--os', action="store" )
- parser.add_argument('version', action="store", nargs="+" )
-
- args = parser.parse_args()
-
- if re.compile(".*\/.*").match(args.platform_and_arch) == None:
- raise_exception("PLATFORM_AND_ARCH isn't of the correct format")
-
- if args.latest:
- if not args.os:
- raise_exception("using --use-ssl requires an --os parameter")
- return (LatestMultiVersionDownloader(args.install_dir, args.link_dir, args.platform_and_arch, args.use_ssl, args.os), args.version)
- else:
- if args.use_ssl:
- raise_exception("you can only use --use-ssl when using --latest")
- return (MultiVersionDownloader(args.install_dir, args.link_dir, args.platform_and_arch), args.version)
-
-def main():
-
- downloader, versions = parse_cl_args(sys.argv[1:])
-
- for version in versions:
- downloader.download_version(version)
-
-
-
-if __name__ == '__main__':
- main()
-
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/smoke.py b/src/mongo/gotools/test/qa-tests/buildscripts/smoke.py
deleted file mode 100755
index bbeec4b12b4..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/smoke.py
+++ /dev/null
@@ -1,1451 +0,0 @@
-#!/usr/bin/env python
-
-# smoke.py: run some mongo tests.
-
-# Bugs, TODOs:
-
-# 0 Some tests hard-code pathnames relative to the mongo repository,
-# so the smoke.py process and all its children must be run with the
-# mongo repo as current working directory. That's kinda icky.
-
-# 1 The tests that are implemented as standalone executables ("test"),
-# don't take arguments for the dbpath, but unconditionally use
-# "/tmp/unittest".
-
-# 2 mongod output gets intermingled with mongo output, and it's often
-# hard to find error messages in the slop. Maybe have smoke.py do
-# some fancier wrangling of child process output?
-
-# 3 Some test suites run their own mongods, and so don't need us to
-# run any mongods around their execution. (It's harmless to do so,
-# but adds noise in the output.)
-
-# 4 Running a separate mongo shell for each js file is slower than
-# loading js files into one mongo shell process. Maybe have runTest
-# queue up all filenames ending in ".js" and run them in one mongo
-# shell at the "end" of testing?
-
-# 5 Right now small-oplog implies master/slave replication. Maybe
-# running with replication should be an orthogonal concern. (And
-# maybe test replica set replication, too.)
-
-# 6 We use cleanbb.py to clear out the dbpath, but cleanbb.py kills
-# off all mongods on a box, which means you can't run two smoke.py
-# jobs on the same host at once. So something's gotta change.
-
-from datetime import datetime
-from itertools import izip
-import glob
-from optparse import OptionParser
-import os
-import pprint
-import re
-import shlex
-import signal
-import socket
-import stat
-from subprocess import (PIPE, Popen, STDOUT)
-import sys
-import time
-import threading
-import traceback
-
-from pymongo import MongoClient
-from pymongo.errors import OperationFailure
-from pymongo import ReadPreference
-
-import cleanbb
-import smoke
-import utils
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-
-try:
- from hashlib import md5 # new in 2.5
-except ImportError:
- from md5 import md5 # deprecated in 2.5
-
-try:
- import json
-except:
- try:
- import simplejson as json
- except:
- json = None
-
-
-# TODO clean this up so we don't need globals...
-mongo_repo = os.getcwd() #'./'
-failfile = os.path.join(mongo_repo, 'failfile.smoke')
-test_path = None
-mongod_executable = None
-mongod_port = None
-shell_executable = None
-continue_on_failure = None
-file_of_commands_mode = False
-start_mongod = True
-temp_path = None
-clean_every_n_tests = 1
-clean_whole_dbroot = False
-
-tests = []
-winners = []
-losers = {}
-fails = [] # like losers but in format of tests
-
-# For replication hash checking
-replicated_collections = []
-lost_in_slave = []
-lost_in_master = []
-screwy_in_slave = {}
-
-smoke_db_prefix = ''
-small_oplog = False
-small_oplog_rs = False
-
-test_report = { "results": [] }
-report_file = None
-
-# This class just implements the with statement API
-class NullMongod(object):
- def start(self):
- pass
-
- def stop(self):
- pass
-
- def __enter__(self):
- self.start()
- return self
-
- def __exit__(self, type, value, traceback):
- self.stop()
- return not isinstance(value, Exception)
-
-
-def dump_stacks(signal, frame):
- print "======================================"
- print "DUMPING STACKS due to SIGUSR1 signal"
- print "======================================"
- threads = threading.enumerate();
-
- print "Total Threads: " + str(len(threads))
-
- for id, stack in sys._current_frames().items():
- print "Thread %d" % (id)
- print "".join(traceback.format_stack(stack))
- print "======================================"
-
-
-def buildlogger(cmd, is_global=False):
- # if the environment variable MONGO_USE_BUILDLOGGER
- # is set to 'true', then wrap the command with a call
- # to buildlogger.py, which sends output to the buidlogger
- # machine; otherwise, return as usual.
- if os.environ.get('MONGO_USE_BUILDLOGGER', '').lower().strip() == 'true':
- if is_global:
- return [utils.find_python(), 'buildscripts/buildlogger.py', '-g'] + cmd
- else:
- return [utils.find_python(), 'buildscripts/buildlogger.py'] + cmd
- return cmd
-
-
-def clean_dbroot(dbroot="", nokill=False):
- # Clean entire /data/db dir if --with-cleanbb, else clean specific database path.
- if clean_whole_dbroot and not (small_oplog or small_oplog_rs):
- dbroot = os.path.normpath(smoke_db_prefix + "/data/db")
- if os.path.exists(dbroot):
- print("clean_dbroot: %s" % dbroot)
- cleanbb.cleanup(dbroot, nokill)
-
-
-class mongod(NullMongod):
- def __init__(self, **kwargs):
- self.kwargs = kwargs
- self.proc = None
- self.auth = False
-
- def ensure_test_dirs(self):
- utils.ensureDir(smoke_db_prefix + "/tmp/unittest/")
- utils.ensureDir(smoke_db_prefix + "/data/")
- utils.ensureDir(smoke_db_prefix + "/data/db/")
-
- def check_mongo_port(self, port=27017):
- sock = socket.socket()
- sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
- sock.settimeout(1)
- sock.connect(("localhost", int(port)))
- sock.close()
-
- def is_mongod_up(self, port=mongod_port):
- if not start_mongod:
- return False
- try:
- self.check_mongo_port(int(port))
- return True
- except Exception,e:
- print >> sys.stderr, e
- return False
-
- def did_mongod_start(self, port=mongod_port, timeout=300):
- while timeout > 0:
- time.sleep(1)
- is_up = self.is_mongod_up(port)
- if is_up:
- return True
- timeout = timeout - 1
- print >> sys.stderr, "timeout starting mongod"
- return False
-
- def start(self):
- global mongod_port
- global mongod
- if self.proc:
- print >> sys.stderr, "probable bug: self.proc already set in start()"
- return
- self.ensure_test_dirs()
- dir_name = smoke_db_prefix + "/data/db/sconsTests/"
- self.port = int(mongod_port)
- self.slave = False
- if 'slave' in self.kwargs:
- dir_name = smoke_db_prefix + '/data/db/sconsTestsSlave/'
- srcport = mongod_port
- self.port += 1
- self.slave = True
-
- clean_dbroot(dbroot=dir_name, nokill=self.slave)
- utils.ensureDir(dir_name)
-
- argv = [mongod_executable, "--port", str(self.port), "--dbpath", dir_name]
- # These parameters are always set for tests
- # SERVER-9137 Added httpinterface parameter to keep previous behavior
- argv += ['--setParameter', 'enableTestCommands=1', '--httpinterface']
- if self.kwargs.get('small_oplog'):
- if self.slave:
- argv += ['--slave', '--source', 'localhost:' + str(srcport)]
- else:
- argv += ["--master", "--oplogSize", "511"]
- if self.kwargs.get('storage_engine'):
- argv += ["--storageEngine", self.kwargs.get('storage_engine')]
- if self.kwargs.get('wiredtiger_engine_config_string'):
- argv += ["--wiredTigerEngineConfigString", self.kwargs.get('wiredtiger_engine_config_string')]
- if self.kwargs.get('wiredtiger_collection_config_string'):
- argv += ["--wiredTigerCollectionConfigString", self.kwargs.get('wiredtiger_collection_config_string')]
- if self.kwargs.get('wiredtiger_index_config_string'):
- argv += ["--wiredTigerIndexConfigString", self.kwargs.get('wiredtiger_index_config_string')]
- params = self.kwargs.get('set_parameters', None)
- if params:
- for p in params.split(','): argv += ['--setParameter', p]
- if self.kwargs.get('small_oplog_rs'):
- argv += ["--replSet", "foo", "--oplogSize", "511"]
- if self.kwargs.get('no_journal'):
- argv += ['--nojournal']
- if self.kwargs.get('no_preallocj'):
- argv += ['--nopreallocj']
- if self.kwargs.get('auth'):
- argv += ['--auth', '--setParameter', 'enableLocalhostAuthBypass=false']
- authMechanism = self.kwargs.get('authMechanism', 'SCRAM-SHA-1')
- if authMechanism != 'SCRAM-SHA-1':
- argv += ['--setParameter', 'authenticationMechanisms=' + authMechanism]
- self.auth = True
- if self.kwargs.get('keyFile'):
- argv += ['--keyFile', self.kwargs.get('keyFile')]
- if self.kwargs.get('use_ssl') or self.kwargs.get('use_x509'):
- argv += ['--sslMode', "requireSSL",
- '--sslPEMKeyFile', 'jstests/libs/server.pem',
- '--sslCAFile', 'jstests/libs/ca.pem',
- '--sslWeakCertificateValidation']
- if self.kwargs.get('use_x509'):
- argv += ['--clusterAuthMode','x509'];
- self.auth = True
- print "running " + " ".join(argv)
- self.proc = self._start(buildlogger(argv, is_global=True))
-
- if not self.did_mongod_start(self.port):
- raise Exception("Failed to start mongod")
-
- if self.slave:
- local = MongoClient(port=self.port,
- read_preference=ReadPreference.SECONDARY_PREFERRED).local
- synced = False
- while not synced:
- synced = True
- for source in local.sources.find({}, ["syncedTo"]):
- synced = synced and "syncedTo" in source and source["syncedTo"]
-
- def _start(self, argv):
- """In most cases, just call subprocess.Popen(). On windows,
- add the started process to a new Job Object, so that any
- child processes of this process can be killed with a single
- call to TerminateJobObject (see self.stop()).
- """
-
- if os.sys.platform == "win32":
- # Create a job object with the "kill on job close"
- # flag; this is inherited by child processes (ie
- # the mongod started on our behalf by buildlogger)
- # and lets us terminate the whole tree of processes
- # rather than orphaning the mongod.
- import win32job
-
- # Magic number needed to allow job reassignment in Windows 7
- # see: MSDN - Process Creation Flags - ms684863
- CREATE_BREAKAWAY_FROM_JOB = 0x01000000
-
- proc = Popen(argv, creationflags=CREATE_BREAKAWAY_FROM_JOB)
-
- self.job_object = win32job.CreateJobObject(None, '')
-
- job_info = win32job.QueryInformationJobObject(
- self.job_object, win32job.JobObjectExtendedLimitInformation)
- job_info['BasicLimitInformation']['LimitFlags'] |= win32job.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE
- win32job.SetInformationJobObject(
- self.job_object,
- win32job.JobObjectExtendedLimitInformation,
- job_info)
-
- win32job.AssignProcessToJobObject(self.job_object, proc._handle)
-
- else:
- proc = Popen(argv)
-
- return proc
-
- def stop(self):
- if not self.proc:
- print >> sys.stderr, "probable bug: self.proc unset in stop()"
- return
- try:
- if os.sys.platform == "win32":
- import win32job
- win32job.TerminateJobObject(self.job_object, -1)
- # Windows doesn't seem to kill the process immediately, so give it some time to die
- time.sleep(5)
- elif hasattr(self.proc, "terminate"):
- # This method added in Python 2.6
- self.proc.terminate()
- else:
- os.kill(self.proc.pid, 15)
- except Exception, e:
- print >> sys.stderr, "error shutting down mongod"
- print >> sys.stderr, e
- self.proc.wait()
- sys.stderr.flush()
- sys.stdout.flush()
-
- # Fail hard if mongod terminates with an error. That might indicate that an
- # instrumented build (e.g. LSAN) has detected an error. For now we aren't doing this on
- # windows because the exit code seems to be unpredictable. We don't have LSAN there
- # anyway.
- retcode = self.proc.returncode
- if os.sys.platform != "win32" and retcode != 0:
- raise(Exception('mongod process exited with non-zero code %d' % retcode))
-
- def wait_for_repl(self):
- print "Awaiting replicated (w:2, wtimeout:5min) insert (port:" + str(self.port) + ")"
- MongoClient(port=self.port).testing.smokeWait.insert({}, w=2, wtimeout=5*60*1000)
- print "Replicated write completed -- done wait_for_repl"
-
-class Bug(Exception):
- def __str__(self):
- return 'bug in smoke.py: ' + super(Bug, self).__str__()
-
-class TestFailure(Exception):
- pass
-
-class TestExitFailure(TestFailure):
- def __init__(self, *args):
- self.path = args[0]
- self.status=args[1]
-
- def __str__(self):
- return "test %s exited with status %d" % (self.path, self.status)
-
-class TestServerFailure(TestFailure):
- def __init__(self, *args):
- self.path = args[0]
- self.status = -1 # this is meaningless as an exit code, but
- # that's the point.
- def __str__(self):
- return 'mongod not running after executing test %s' % self.path
-
-def check_db_hashes(master, slave):
- # Need to pause a bit so a slave might catch up...
- if not slave.slave:
- raise(Bug("slave instance doesn't have slave attribute set"))
-
- master.wait_for_repl()
-
- # FIXME: maybe make this run dbhash on all databases?
- for mongod in [master, slave]:
- client = MongoClient(port=mongod.port, read_preference=ReadPreference.SECONDARY_PREFERRED)
- mongod.dbhash = client.test.command("dbhash")
- mongod.dict = mongod.dbhash["collections"]
-
- global lost_in_slave, lost_in_master, screwy_in_slave, replicated_collections
-
- replicated_collections += master.dict.keys()
-
- for coll in replicated_collections:
- if coll not in slave.dict and coll not in lost_in_slave:
- lost_in_slave.append(coll)
- mhash = master.dict[coll]
- shash = slave.dict[coll]
- if mhash != shash:
- mTestDB = MongoClient(port=master.port).test
- sTestDB = MongoClient(port=slave.port,
- read_preference=ReadPreference.SECONDARY_PREFERRED).test
- mCount = mTestDB[coll].count()
- sCount = sTestDB[coll].count()
- stats = {'hashes': {'master': mhash, 'slave': shash},
- 'counts':{'master': mCount, 'slave': sCount}}
- try:
- mDocs = list(mTestDB[coll].find().sort("_id", 1))
- sDocs = list(sTestDB[coll].find().sort("_id", 1))
- mDiffDocs = list()
- sDiffDocs = list()
- for left, right in izip(mDocs, sDocs):
- if left != right:
- mDiffDocs.append(left)
- sDiffDocs.append(right)
-
- stats["docs"] = {'master': mDiffDocs, 'slave': sDiffDocs }
- except Exception, e:
- stats["error-docs"] = e;
-
- screwy_in_slave[coll] = stats
- if mhash == "no _id _index":
- mOplog = mTestDB.connection.local["oplog.$main"];
- oplog_entries = list(mOplog.find({"$or": [{"ns":mTestDB[coll].full_name}, \
- {"op":"c"}]}).sort("$natural", 1))
- print "oplog for %s" % mTestDB[coll].full_name
- for doc in oplog_entries:
- pprint.pprint(doc, width=200)
-
-
- for db in slave.dict.keys():
- if db not in master.dict and db not in lost_in_master:
- lost_in_master.append(db)
-
-
-def ternary( b , l="true", r="false" ):
- if b:
- return l
- return r
-
-# Blech.
-def skipTest(path):
- basename = os.path.basename(path)
- parentPath = os.path.dirname(path)
- parentDir = os.path.basename(parentPath)
- if small_oplog or small_oplog_rs: # For tests running in parallel
- if basename in ["cursor8.js", "indexh.js", "dropdb.js", "dropdb_race.js",
- "connections_opened.js", "opcounters_write_cmd.js", "dbadmin.js"]:
- return True
- if use_ssl:
- # Skip tests using mongobridge since it does not support SSL
- # TODO: Remove when SERVER-10910 has been resolved.
- if basename in ["gridfs.js", "initial_sync3.js", "majority.js", "no_chaining.js",
- "rollback4.js", "slavedelay3.js", "sync2.js", "tags.js"]:
- return True
- # TODO: For now skip tests using MongodRunner, remove when SERVER-10909 has been resolved
- if basename in ["fastsync.js", "index_retry.js", "ttl_repl_maintenance.js",
- "unix_socket1.js"]:
- return True;
- if auth or keyFile or use_x509: # For tests running with auth
- # Skip any tests that run with auth explicitly
- if parentDir.lower() == "auth" or "auth" in basename.lower():
- return True
- if parentPath == mongo_repo: # Skip client tests
- return True
- if parentDir == "tool": # SERVER-6368
- return True
- if parentDir == "dur": # SERVER-7317
- return True
- if parentDir == "disk": # SERVER-7356
- return True
-
- authTestsToSkip = [("jstests", "drop2.js"), # SERVER-8589,
- ("jstests", "killop.js"), # SERVER-10128
- ("sharding", "sync3.js"), # SERVER-6388 for this and those below
- ("sharding", "sync6.js"),
- ("sharding", "parallel.js"),
- ("sharding", "copydb_from_mongos.js"), # SERVER-13080
- ("jstests", "bench_test1.js"),
- ("jstests", "bench_test2.js"),
- ("jstests", "bench_test3.js"),
- ("jstests", "bench_test_insert.js"),
- ("core", "bench_test1.js"),
- ("core", "bench_test2.js"),
- ("core", "bench_test3.js"),
- ("core", "bench_test_insert.js"),
- ]
-
- if os.path.join(parentDir,basename) in [ os.path.join(*test) for test in authTestsToSkip ]:
- return True
-
- return False
-
-legacyWriteRE = re.compile(r"jstests[/\\]multiVersion")
-def setShellWriteModeForTest(path, argv):
- swm = shell_write_mode
- if legacyWriteRE.search(path):
- swm = "legacy"
- argv += ["--writeMode", swm]
-
-def runTest(test, result):
- # result is a map containing test result details, like result["url"]
-
- # test is a tuple of ( filename , usedb<bool> )
- # filename should be a js file to run
- # usedb is true if the test expects a mongod to be running
-
- (path, usedb) = test
- (ignore, ext) = os.path.splitext(path)
- test_mongod = mongod()
- mongod_is_up = test_mongod.is_mongod_up(mongod_port)
- result["mongod_running_at_start"] = mongod_is_up;
-
- if file_of_commands_mode:
- # smoke.py was invoked like "--mode files --from-file foo",
- # so don't try to interpret the test path too much
- if os.sys.platform == "win32":
- argv = [path]
- else:
- argv = shlex.split(path)
- path = argv[0]
- # if the command is a python script, use the script name
- if os.path.basename(path) in ('python', 'python.exe'):
- path = argv[1]
- elif ext == ".js":
- argv = [shell_executable, "--port", mongod_port]
-
- setShellWriteModeForTest(path, argv)
-
- if not usedb:
- argv += ["--nodb"]
- if small_oplog or small_oplog_rs:
- argv += ["--eval", 'testingReplication = true;']
- if use_ssl:
- argv += ["--ssl",
- "--sslPEMKeyFile", "jstests/libs/client.pem",
- "--sslCAFile", "jstests/libs/ca.pem",
- "--sslAllowInvalidCertificates"]
- argv += [path]
- elif ext in ["", ".exe"]:
- # Blech.
- if os.path.basename(path) in ["dbtest", "dbtest.exe"]:
- argv = [path]
- # default data directory for dbtest is /tmp/unittest
- if smoke_db_prefix:
- dir_name = smoke_db_prefix + '/unittests'
- argv.extend(["--dbpath", dir_name] )
-
- if storage_engine:
- argv.extend(["--storageEngine", storage_engine])
- if wiredtiger_engine_config_string:
- argv.extend(["--wiredTigerEngineConfigString", wiredtiger_engine_config_string])
- if wiredtiger_collection_config_string:
- argv.extend(["--wiredTigerCollectionConfigString", wiredtiger_collection_config_string])
- if wiredtiger_index_config_string:
- argv.extend(["--wiredTigerIndexConfigString", wiredtiger_index_config_string])
-
- # more blech
- elif os.path.basename(path) in ['mongos', 'mongos.exe']:
- argv = [path, "--test"]
- else:
- argv = [test_path and os.path.abspath(os.path.join(test_path, path)) or path,
- "--port", mongod_port]
- else:
- raise Bug("fell off in extension case: %s" % path)
-
- mongo_test_filename = os.path.basename(path)
-
- # sys.stdout.write() is more atomic than print, so using it prevents
- # lines being interrupted by, e.g., child processes
- sys.stdout.write(" *******************************************\n")
- sys.stdout.write(" Test : %s ...\n" % mongo_test_filename)
- sys.stdout.flush()
-
- # FIXME: we don't handle the case where the subprocess
- # hangs... that's bad.
- if ( argv[0].endswith( 'mongo' ) or argv[0].endswith( 'mongo.exe' ) ) and not '--eval' in argv :
- evalString = 'load("jstests/libs/servers.js");load("jstests/libs/servers_misc.js");' +\
- 'TestData = new Object();' + \
- 'TestData.storageEngine = "' + ternary( storage_engine, storage_engine, "" ) + '";' + \
- 'TestData.wiredTigerEngineConfigString = "' + ternary( wiredtiger_engine_config_string, wiredtiger_engine_config_string, "" ) + '";' + \
- 'TestData.wiredTigerCollectionConfigString = "' + ternary( wiredtiger_collection_config_string, wiredtiger_collection_config_string, "" ) + '";' + \
- 'TestData.wiredTigerIndexConfigString = "' + ternary( wiredtiger_index_config_string, wiredtiger_index_config_string, "" ) + '";' + \
- 'TestData.testPath = "' + path + '";' + \
- 'TestData.testFile = "' + os.path.basename( path ) + '";' + \
- 'TestData.testName = "' + re.sub( ".js$", "", os.path.basename( path ) ) + '";' + \
- 'TestData.setParameters = "' + ternary( set_parameters, set_parameters, "" ) + '";' + \
- 'TestData.setParametersMongos = "' + ternary( set_parameters_mongos, set_parameters_mongos, "" ) + '";' + \
- 'TestData.noJournal = ' + ternary( no_journal ) + ";" + \
- 'TestData.noJournalPrealloc = ' + ternary( no_preallocj ) + ";" + \
- 'TestData.auth = ' + ternary( auth ) + ";" + \
- 'TestData.keyFile = ' + ternary( keyFile , '"' + str(keyFile) + '"' , 'null' ) + ";" + \
- 'TestData.keyFileData = ' + ternary( keyFile , '"' + str(keyFileData) + '"' , 'null' ) + ";" + \
- 'TestData.authMechanism = ' + ternary( authMechanism,
- '"' + str(authMechanism) + '"', 'null') + ";" + \
- 'TestData.useSSL = ' + ternary( use_ssl ) + ";" + \
- 'TestData.useX509 = ' + ternary( use_x509 ) + ";"
- # this updates the default data directory for mongod processes started through shell (src/mongo/shell/servers.js)
- evalString += 'MongoRunner.dataDir = "' + os.path.abspath(smoke_db_prefix + '/data/db') + '";'
- evalString += 'MongoRunner.dataPath = MongoRunner.dataDir + "/";'
- if temp_path:
- evalString += 'TestData.tmpPath = "' + temp_path + '";'
- if os.sys.platform == "win32":
- # double quotes in the evalString on windows; this
- # prevents the backslashes from being removed when
- # the shell (i.e. bash) evaluates this string. yuck.
- evalString = evalString.replace('\\', '\\\\')
-
- if auth and usedb:
- evalString += 'jsTest.authenticate(db.getMongo());'
-
- if os.getenv('SMOKE_EVAL') is not None:
- evalString += os.getenv('SMOKE_EVAL')
-
- argv = argv + [ '--eval', evalString]
-
-
- if argv[0].endswith( 'dbtest' ) or argv[0].endswith( 'dbtest.exe' ):
- if no_preallocj :
- argv = argv + [ '--nopreallocj' ]
- if temp_path:
- argv = argv + [ '--tempPath', temp_path ]
-
-
- sys.stdout.write(" Command : %s\n" % ' '.join(argv))
- sys.stdout.write(" Date : %s\n" % datetime.now().ctime())
- sys.stdout.flush()
-
- os.environ['MONGO_TEST_FILENAME'] = mongo_test_filename
- t1 = time.time()
-
- proc = Popen(buildlogger(argv), cwd=test_path, stdout=PIPE, stderr=STDOUT, bufsize=0)
- first_line = proc.stdout.readline() # Get suppressed output URL
- m = re.search(r"\s*\(output suppressed; see (?P<url>.*)\)" + os.linesep, first_line)
- if m:
- result["url"] = m.group("url")
- sys.stdout.write(first_line)
- sys.stdout.flush()
- while True:
- # print until subprocess's stdout closed.
- # Not using "for line in file" since that has unwanted buffering.
- line = proc.stdout.readline()
- if not line:
- break;
-
- sys.stdout.write(line)
- sys.stdout.flush()
-
- proc.wait() # wait if stdout is closed before subprocess exits.
- r = proc.returncode
-
- t2 = time.time()
- del os.environ['MONGO_TEST_FILENAME']
-
- timediff = t2 - t1
- # timediff is seconds by default
- scale = 1
- suffix = "seconds"
- # if timediff is less than 10 seconds use ms
- if timediff < 10:
- scale = 1000
- suffix = "ms"
- # if timediff is more than 60 seconds use minutes
- elif timediff > 60:
- scale = 1.0 / 60.0
- suffix = "minutes"
- sys.stdout.write(" %10.4f %s\n" % ((timediff) * scale, suffix))
- sys.stdout.flush()
-
- result["exit_code"] = r
-
-
- is_mongod_still_up = test_mongod.is_mongod_up(mongod_port)
- if start_mongod and not is_mongod_still_up:
- print "mongod is not running after test"
- result["mongod_running_at_end"] = is_mongod_still_up;
- raise TestServerFailure(path)
-
- result["mongod_running_at_end"] = is_mongod_still_up;
-
- if r != 0:
- raise TestExitFailure(path, r)
-
- print ""
-
-def run_tests(tests):
- # FIXME: some suites of tests start their own mongod, so don't
- # need this. (So long as there are no conflicts with port,
- # dbpath, etc., and so long as we shut ours down properly,
- # starting this mongod shouldn't break anything, though.)
-
- # The reason we want to use "with" is so that we get __exit__ semantics
- # but "with" is only supported on Python 2.5+
-
- master = NullMongod()
- slave = NullMongod()
-
- try:
- if start_mongod:
- master = mongod(small_oplog_rs=small_oplog_rs,
- small_oplog=small_oplog,
- no_journal=no_journal,
- storage_engine=storage_engine,
- wiredtiger_engine_config_string=wiredtiger_engine_config_string,
- wiredtiger_collection_config_string=wiredtiger_collection_config_string,
- wiredtiger_index_config_string=wiredtiger_index_config_string,
- set_parameters=set_parameters,
- no_preallocj=no_preallocj,
- auth=auth,
- authMechanism=authMechanism,
- keyFile=keyFile,
- use_ssl=use_ssl,
- use_x509=use_x509)
- master.start()
-
- if small_oplog:
- slave = mongod(slave=True,
- small_oplog=True,
- small_oplog_rs=False,
- storage_engine=storage_engine,
- wiredtiger_engine_config_string=wiredtiger_engine_config_string,
- wiredtiger_collection_config_string=wiredtiger_collection_config_string,
- wiredtiger_index_config_string=wiredtiger_index_config_string,
- set_parameters=set_parameters)
- slave.start()
- elif small_oplog_rs:
- slave = mongod(slave=True,
- small_oplog_rs=True,
- small_oplog=False,
- no_journal=no_journal,
- storage_engine=storage_engine,
- wiredtiger_engine_config_string=wiredtiger_engine_config_string,
- wiredtiger_collection_config_string=wiredtiger_collection_config_string,
- wiredtiger_index_config_string=wiredtiger_index_config_string,
- set_parameters=set_parameters,
- no_preallocj=no_preallocj,
- auth=auth,
- authMechanism=authMechanism,
- keyFile=keyFile,
- use_ssl=use_ssl,
- use_x509=use_x509)
- slave.start()
- primary = MongoClient(port=master.port);
-
- primary.admin.command({'replSetInitiate' : {'_id' : 'foo', 'members' : [
- {'_id': 0, 'host':'localhost:%s' % master.port},
- {'_id': 1, 'host':'localhost:%s' % slave.port,'priority':0}]}})
-
- # Wait for primary and secondary to finish initial sync and election
- ismaster = False
- while not ismaster:
- result = primary.admin.command("ismaster");
- ismaster = result["ismaster"]
- if not ismaster:
- print "waiting for primary to be available ..."
- time.sleep(.2)
-
- secondaryUp = False
- sConn = MongoClient(port=slave.port,
- read_preference=ReadPreference.SECONDARY_PREFERRED);
- while not secondaryUp:
- result = sConn.admin.command("ismaster");
- secondaryUp = result["secondary"]
- if not secondaryUp:
- print "waiting for secondary to be available ..."
- time.sleep(.2)
-
- if small_oplog or small_oplog_rs:
- master.wait_for_repl()
-
- for tests_run, test in enumerate(tests):
- tests_run += 1 # enumerate from 1, python 2.5 compatible
- test_result = { "start": time.time() }
-
- (test_path, use_db) = test
-
- if test_path.startswith(mongo_repo + os.path.sep):
- test_result["test_file"] = test_path[len(mongo_repo)+1:]
- else:
- # user could specify a file not in repo. leave it alone.
- test_result["test_file"] = test_path
-
- try:
- if skipTest(test_path):
- test_result["status"] = "skip"
-
- print "skipping " + test_path
- else:
- fails.append(test)
- runTest(test, test_result)
- fails.pop()
- winners.append(test)
-
- test_result["status"] = "pass"
-
- test_result["end"] = time.time()
- test_result["elapsed"] = test_result["end"] - test_result["start"]
- test_report["results"].append( test_result )
- if small_oplog or small_oplog_rs:
- master.wait_for_repl()
- # check the db_hashes
- if isinstance(slave, mongod):
- check_db_hashes(master, slave)
- check_and_report_replication_dbhashes()
-
- elif use_db: # reach inside test and see if "usedb" is true
- if clean_every_n_tests and (tests_run % clean_every_n_tests) == 0:
- # Restart mongod periodically to clean accumulated test data
- # clean_dbroot() is invoked by mongod.start()
- master.stop()
- master = mongod(small_oplog_rs=small_oplog_rs,
- small_oplog=small_oplog,
- no_journal=no_journal,
- storage_engine=storage_engine,
- wiredtiger_engine_config_string=wiredtiger_engine_config_string,
- wiredtiger_collection_config_string=wiredtiger_collection_config_string,
- wiredtiger_index_config_string=wiredtiger_index_config_string,
- set_parameters=set_parameters,
- no_preallocj=no_preallocj,
- auth=auth,
- authMechanism=authMechanism,
- keyFile=keyFile,
- use_ssl=use_ssl,
- use_x509=use_x509)
- master.start()
-
- except TestFailure, f:
- test_result["end"] = time.time()
- test_result["elapsed"] = test_result["end"] - test_result["start"]
- test_result["error"] = str(f)
- test_result["status"] = "fail"
- test_report["results"].append( test_result )
- try:
- print f
- # Record the failing test and re-raise.
- losers[f.path] = f.status
- raise f
- except TestServerFailure, f:
- return 2
- except TestFailure, f:
- if not continue_on_failure:
- return 1
- if isinstance(slave, mongod):
- check_db_hashes(master, slave)
-
- finally:
- slave.stop()
- master.stop()
- return 0
-
-
-def check_and_report_replication_dbhashes():
- def missing(lst, src, dst):
- if lst:
- print """The following collections were present in the %s but not the %s
-at the end of testing:""" % (src, dst)
- for db in lst:
- print db
-
- missing(lost_in_slave, "master", "slave")
- missing(lost_in_master, "slave", "master")
- if screwy_in_slave:
- print """The following collections has different hashes in master and slave
-at the end of testing:"""
- for coll in screwy_in_slave.keys():
- stats = screwy_in_slave[coll]
- # Counts are "approx" because they are collected after the dbhash runs and may not
- # reflect the states of the collections that were hashed. If the hashes differ, one
- # possibility is that a test exited with writes still in-flight.
- print "collection: %s\t (master/slave) hashes: %s/%s counts (approx): %i/%i" % (coll, stats['hashes']['master'], stats['hashes']['slave'], stats['counts']['master'], stats['counts']['slave'])
- if "docs" in stats:
- if (("master" in stats["docs"] and len(stats["docs"]["master"]) != 0) or
- ("slave" in stats["docs"] and len(stats["docs"]["slave"]) != 0)):
- print "All docs matched!"
- else:
- print "Different Docs"
- print "Master docs:"
- pprint.pprint(stats["docs"]["master"], indent=2)
- print "Slave docs:"
- pprint.pprint(stats["docs"]["slave"], indent=2)
- if "error-docs" in stats:
- print "Error getting docs to diff:"
- pprint.pprint(stats["error-docs"])
- return True
-
- if (small_oplog or small_oplog_rs) and not (lost_in_master or lost_in_slave or screwy_in_slave):
- print "replication ok for %d collections" % (len(replicated_collections))
-
- return False
-
-
-def report():
- print "%d tests succeeded" % len(winners)
- num_missed = len(tests) - (len(winners) + len(losers.keys()))
- if num_missed:
- print "%d tests didn't get run" % num_missed
- if losers:
- print "The following tests failed (with exit code):"
- for loser in losers:
- print "%s\t%d" % (loser, losers[loser])
-
- test_result = { "start": time.time() }
- if check_and_report_replication_dbhashes():
- test_result["end"] = time.time()
- test_result["elapsed"] = test_result["end"] - test_result["start"]
- test_result["test_file"] = "/#dbhash#"
- test_result["error"] = "dbhash mismatch"
- test_result["status"] = "fail"
- test_report["results"].append( test_result )
-
- if report_file:
- f = open( report_file, "wb" )
- f.write( json.dumps( test_report ) )
- f.close()
-
- if losers or lost_in_slave or lost_in_master or screwy_in_slave:
- raise Exception("Test failures")
-
-# Keys are the suite names (passed on the command line to smoke.py)
-# Values are pairs: (filenames, <start mongod before running tests>)
-
-suiteGlobalConfig = { "files": ("files/*.js", False),
- "restore": ("restore/*.js", False),
- "stat": ("stat/*.js", False),
- "top": ("top/*.js", False),
- "bson": ("bson/*.js", False),
- "export": ("export/*.js", False),
- "dump": ("dump/*.js", False),
- "oplog": ("oplog/*.js", False),
- "import": ("import/*.js", False),
- "ssl": ("ssl/*.js", False),
- "unstable": ("unstable/*.js", False),
- }
-
-def get_module_suites():
- """Attempts to discover and return information about module test suites
-
- Returns a dictionary of module suites in the format:
-
- {
- "<suite_name>" : "<full_path_to_suite_directory/[!_]*.js>",
- ...
- }
-
- This means the values of this dictionary can be used as "glob"s to match all jstests in the
- suite directory that don't start with an underscore
-
- The module tests should be put in 'src/mongo/db/modules/<module_name>/<suite_name>/*.js'
-
- NOTE: This assumes that if we have more than one module the suite names don't conflict
- """
- modules_directory = 'src/mongo/db/modules'
- test_suites = {}
-
- # Return no suites if we have no modules
- if not os.path.exists(modules_directory) or not os.path.isdir(modules_directory):
- return {}
-
- module_directories = os.listdir(modules_directory)
- for module_directory in module_directories:
-
- test_directory = os.path.join(modules_directory, module_directory, "jstests")
-
- # Skip this module if it has no "jstests" directory
- if not os.path.exists(test_directory) or not os.path.isdir(test_directory):
- continue
-
- # Get all suites for this module
- for test_suite in os.listdir(test_directory):
- test_suites[test_suite] = os.path.join(test_directory, test_suite, "[!_]*.js")
-
- return test_suites
-
-def expand_suites(suites,expandUseDB=True):
- """Takes a list of suites and expands to a list of tests according to a set of rules.
-
- Keyword arguments:
- suites -- list of suites specified by the user
- expandUseDB -- expand globs (such as [!_]*.js) for tests that are run against a database
- (default True)
-
- This function handles expansion of globs (such as [!_]*.js), aliases (such as "client" and
- "all"), detection of suites in the "modules" directory, and enumerating the test files in a
- given suite. It returns a list of tests of the form (path_to_test, usedb), where the second
- part of the tuple specifies whether the test is run against the database (see --nodb in the
- mongo shell)
-
- """
- globstr = None
- tests = []
- module_suites = get_module_suites()
- for suite in suites:
- if suite == 'all':
- return expand_suites(['dbtest',
- 'jsCore',
- 'jsPerf',
- 'mmap_v1',
- 'noPassthroughWithMongod',
- 'noPassthrough',
- 'clone',
- 'parallel',
- 'concurrency',
- 'repl',
- 'auth',
- 'sharding',
- 'slow1',
- 'slow2',
- 'tool'],
- expandUseDB=expandUseDB)
- if suite == 'dbtest' or suite == 'test':
- if os.sys.platform == "win32":
- program = 'dbtest.exe'
- else:
- program = 'dbtest'
- (globstr, usedb) = (program, False)
- elif suite == 'mongosTest':
- if os.sys.platform == "win32":
- program = 'mongos.exe'
- else:
- program = 'mongos'
- tests += [(os.path.join(mongo_repo, program), False)]
- elif os.path.exists( suite ):
- usedb = True
- for name in suiteGlobalConfig:
- if suite in glob.glob( "jstests/" + suiteGlobalConfig[name][0] ):
- usedb = suiteGlobalConfig[name][1]
- break
- tests += [ ( os.path.join( mongo_repo , suite ) , usedb ) ]
- elif suite in module_suites:
- # Currently we connect to a database in all module tests since there's no mechanism yet
- # to configure it independently
- usedb = True
- paths = glob.glob(module_suites[suite])
- paths.sort()
- tests += [(path, usedb) for path in paths]
- else:
- try:
- globstr, usedb = suiteGlobalConfig[suite]
- except KeyError:
- raise Exception('unknown test suite %s' % suite)
-
- if globstr:
- if usedb and not expandUseDB:
- tests += [ (suite,False) ]
- else:
- if globstr.endswith('.js'):
- loc = 'jstests/'
- else:
- loc = ''
- globstr = os.path.join(mongo_repo, (os.path.join(loc, globstr)))
- globstr = os.path.normpath(globstr)
- paths = glob.glob(globstr)
- paths.sort()
- tests += [(path, usedb) for path in paths]
-
- return tests
-
-
-def filter_tests_by_tag(tests, tag_query):
- """Selects tests from a list based on a query over the tags in the tests."""
-
- test_map = {}
- roots = []
- for test in tests:
- root = os.path.abspath(test[0])
- roots.append(root)
- test_map[root] = test
-
- new_style_tests = smoke.tests.build_tests(roots, extract_metadata=True)
- new_style_tests = smoke.suites.build_suite(new_style_tests, tag_query)
-
- print "\nTag query matches %s tests out of %s.\n" % (len(new_style_tests),
- len(tests))
-
- tests = []
- for new_style_test in new_style_tests:
- tests.append(test_map[os.path.abspath(new_style_test.filename)])
-
- return tests
-
-
-def add_exe(e):
- if os.sys.platform.startswith( "win" ) and not e.endswith( ".exe" ):
- e += ".exe"
- return e
-
-
-def set_globals(options, tests):
- global mongod_executable, mongod_port, shell_executable, continue_on_failure
- global small_oplog, small_oplog_rs
- global no_journal, set_parameters, set_parameters_mongos, no_preallocj, storage_engine, wiredtiger_engine_config_string, wiredtiger_collection_config_string, wiredtiger_index_config_string
- global auth, authMechanism, keyFile, keyFileData, smoke_db_prefix, test_path, start_mongod
- global use_ssl, use_x509
- global file_of_commands_mode
- global report_file, shell_write_mode, use_write_commands
- global temp_path
- global clean_every_n_tests
- global clean_whole_dbroot
-
- start_mongod = options.start_mongod
- if hasattr(options, 'use_ssl'):
- use_ssl = options.use_ssl
- if hasattr(options, 'use_x509'):
- use_x509 = options.use_x509
- use_ssl = use_ssl or use_x509
- #Careful, this can be called multiple times
- test_path = options.test_path
-
- mongod_executable = add_exe(options.mongod_executable)
- if not os.path.exists(mongod_executable):
- raise Exception("no mongod found in this directory.")
-
- mongod_port = options.mongod_port
-
- shell_executable = add_exe( options.shell_executable )
- if not os.path.exists(shell_executable):
- raise Exception("no mongo shell found in this directory.")
-
- continue_on_failure = options.continue_on_failure
- smoke_db_prefix = options.smoke_db_prefix
- small_oplog = options.small_oplog
- if hasattr(options, "small_oplog_rs"):
- small_oplog_rs = options.small_oplog_rs
- no_journal = options.no_journal
- storage_engine = options.storage_engine
- wiredtiger_engine_config_string = options.wiredtiger_engine_config_string
- wiredtiger_collection_config_string = options.wiredtiger_collection_config_string
- wiredtiger_index_config_string = options.wiredtiger_index_config_string
- set_parameters = options.set_parameters
- set_parameters_mongos = options.set_parameters_mongos
- no_preallocj = options.no_preallocj
- auth = options.auth
- authMechanism = options.authMechanism
- keyFile = options.keyFile
-
- clean_every_n_tests = options.clean_every_n_tests
- clean_whole_dbroot = options.with_cleanbb
-
- if auth and not keyFile:
- # if only --auth was given to smoke.py, load the
- # default keyFile from jstests/libs/authTestsKey
- keyFile = os.path.join(mongo_repo, 'jstests', 'libs', 'authTestsKey')
-
- if keyFile:
- f = open(keyFile, 'r')
- keyFileData = re.sub(r'\s', '', f.read()) # Remove all whitespace
- f.close()
- os.chmod(keyFile, stat.S_IRUSR | stat.S_IWUSR)
- else:
- keyFileData = None
-
- # if smoke.py is running a list of commands read from a
- # file (or stdin) rather than running a suite of js tests
- file_of_commands_mode = options.File and options.mode == 'files'
- # generate json report
- report_file = options.report_file
- temp_path = options.temp_path
-
- use_write_commands = options.use_write_commands
- shell_write_mode = options.shell_write_mode
-
-def file_version():
- return md5(open(__file__, 'r').read()).hexdigest()
-
-def clear_failfile():
- if os.path.exists(failfile):
- os.remove(failfile)
-
-def run_old_fails():
- global tests
-
- try:
- f = open(failfile, 'r')
- state = pickle.load(f)
- f.close()
- except Exception:
- try:
- f.close()
- except:
- pass
- clear_failfile()
- return # This counts as passing so we will run all tests
-
- if ('version' not in state or state['version'] != file_version()):
- print "warning: old version of failfile.smoke detected. skipping recent fails"
- clear_failfile()
- return
-
- testsAndOptions = state['testsAndOptions']
- tests = [x[0] for x in testsAndOptions]
- passed = []
- try:
- for (i, (test, options)) in enumerate(testsAndOptions):
- # SERVER-5102: until we can figure out a better way to manage
- # dependencies of the --only-old-fails build phase, just skip
- # tests which we can't safely run at this point
- path, usedb = test
-
- if not os.path.exists(path):
- passed.append(i)
- winners.append(test)
- continue
-
- filename = os.path.basename(path)
- if filename in ('dbtest', 'dbtest.exe') or filename.endswith('.js'):
- set_globals(options, [filename])
- oldWinners = len(winners)
- run_tests([test])
- if len(winners) != oldWinners: # can't use return value due to continue_on_failure
- passed.append(i)
- finally:
- for offset, i in enumerate(passed):
- testsAndOptions.pop(i - offset)
-
- if testsAndOptions:
- f = open(failfile, 'w')
- state = {'version':file_version(), 'testsAndOptions':testsAndOptions}
- pickle.dump(state, f)
- else:
- clear_failfile()
-
- report() # exits with failure code if there is an error
-
-def add_to_failfile(tests, options):
- try:
- f = open(failfile, 'r')
- testsAndOptions = pickle.load(f)["testsAndOptions"]
- except Exception:
- testsAndOptions = []
-
- for test in tests:
- if (test, options) not in testsAndOptions:
- testsAndOptions.append( (test, options) )
-
- state = {'version':file_version(), 'testsAndOptions':testsAndOptions}
- f = open(failfile, 'w')
- pickle.dump(state, f)
-
-
-
-def main():
- global mongod_executable, mongod_port, shell_executable, continue_on_failure, small_oplog
- global no_journal, set_parameters, set_parameters_mongos, no_preallocj, auth, storage_engine, wiredtiger_engine_config_string, wiredtiger_collection_config_string, wiredtiger_index_config_string
- global keyFile, smoke_db_prefix, test_path, use_write_commands
-
- try:
- signal.signal(signal.SIGUSR1, dump_stacks)
- except AttributeError:
- print "Cannot catch signals on Windows"
-
- parser = OptionParser(usage="usage: smoke.py [OPTIONS] ARGS*")
- parser.add_option('--mode', dest='mode', default='suite',
- help='If "files", ARGS are filenames; if "suite", ARGS are sets of tests (%default)')
- # Some of our tests hard-code pathnames e.g., to execute, so until
- # that changes we don't have the freedom to run from anyplace.
- # parser.add_option('--mongo-repo', dest='mongo_repo', default=None,
- parser.add_option('--test-path', dest='test_path', default=None,
- help="Path to the test executables to run, "
- "currently only used for 'client' (%default)")
- parser.add_option('--mongod', dest='mongod_executable', default=os.path.join(mongo_repo, 'mongod'),
- help='Path to mongod to run (%default)')
- parser.add_option('--port', dest='mongod_port', default="27999",
- help='Port the mongod will bind to (%default)')
- parser.add_option('--mongo', dest='shell_executable', default=os.path.join(mongo_repo, 'mongo'),
- help='Path to mongo, for .js test files (%default)')
- parser.add_option('--continue-on-failure', dest='continue_on_failure',
- action="store_true", default=False,
- help='If supplied, continue testing even after a test fails')
- parser.add_option('--from-file', dest='File',
- help="Run tests/suites named in FILE, one test per line, '-' means stdin")
- parser.add_option('--smoke-db-prefix', dest='smoke_db_prefix', default=smoke_db_prefix,
- help="Prefix to use for the mongods' dbpaths ('%default')")
- parser.add_option('--small-oplog', dest='small_oplog', default=False,
- action="store_true",
- help='Run tests with master/slave replication & use a small oplog')
- parser.add_option('--small-oplog-rs', dest='small_oplog_rs', default=False,
- action="store_true",
- help='Run tests with replica set replication & use a small oplog')
- parser.add_option('--storageEngine', dest='storage_engine', default=None,
- help='What storage engine to start mongod with')
- parser.add_option('--wiredTigerEngineConfig', dest='wiredtiger_engine_config_string', default=None,
- help='Wired Tiger configuration to pass through to mongod')
- parser.add_option('--wiredTigerCollectionConfig', dest='wiredtiger_collection_config_string', default=None,
- help='Wired Tiger collection configuration to pass through to mongod')
- parser.add_option('--wiredTigerIndexConfig', dest='wiredtiger_index_config_string', default=None,
- help='Wired Tiger index configuration to pass through to mongod')
- parser.add_option('--nojournal', dest='no_journal', default=False,
- action="store_true",
- help='Do not turn on journaling in tests')
- parser.add_option('--nopreallocj', dest='no_preallocj', default=False,
- action="store_true",
- help='Do not preallocate journal files in tests')
- parser.add_option('--auth', dest='auth', default=False,
- action="store_true",
- help='Run standalone mongods in tests with authentication enabled')
- parser.add_option('--use-x509', dest='use_x509', default=False,
- action="store_true",
- help='Use x509 auth for internal cluster authentication')
- parser.add_option('--authMechanism', dest='authMechanism', default='SCRAM-SHA-1',
- help='Use the given authentication mechanism, when --auth is used.')
- parser.add_option('--keyFile', dest='keyFile', default=None,
- help='Path to keyFile to use to run replSet and sharding tests with authentication enabled')
- parser.add_option('--ignore', dest='ignore_files', default=None,
- help='Pattern of files to ignore in tests')
- parser.add_option('--only-old-fails', dest='only_old_fails', default=False,
- action="store_true",
- help='Check the failfile and only run all tests that failed last time')
- parser.add_option('--reset-old-fails', dest='reset_old_fails', default=False,
- action="store_true",
- help='Clear the failfile. Do this if all tests pass')
- parser.add_option('--with-cleanbb', dest='with_cleanbb', action="store_true",
- default=False,
- help='Clear database files before first test')
- parser.add_option('--clean-every', dest='clean_every_n_tests', type='int',
- default=(1 if 'detect_leaks=1' in os.getenv("ASAN_OPTIONS", "") else 20),
- help='Clear database files every N tests [default %default]')
- parser.add_option('--dont-start-mongod', dest='start_mongod', default=True,
- action='store_false',
- help='Do not start mongod before commencing test running')
- parser.add_option('--use-ssl', dest='use_ssl', default=False,
- action='store_true',
- help='Run mongo shell and mongod instances with SSL encryption')
- parser.add_option('--set-parameters', dest='set_parameters', default="",
- help='Adds --setParameter to mongod for each passed in item in the csv list - ex. "param1=1,param2=foo" ')
- parser.add_option('--set-parameters-mongos', dest='set_parameters_mongos', default="",
- help='Adds --setParameter to mongos for each passed in item in the csv list - ex. "param1=1,param2=foo" ')
- parser.add_option('--temp-path', dest='temp_path', default=None,
- help='If present, passed as --tempPath to unittests and dbtests or TestData.tmpPath to mongo')
- # Buildlogger invocation from command line
- parser.add_option('--buildlogger-builder', dest='buildlogger_builder', default=None,
- action="store", help='Set the "builder name" for buildlogger')
- parser.add_option('--buildlogger-buildnum', dest='buildlogger_buildnum', default=None,
- action="store", help='Set the "build number" for buildlogger')
- parser.add_option('--buildlogger-url', dest='buildlogger_url', default=None,
- action="store", help='Set the url root for the buildlogger service')
- parser.add_option('--buildlogger-credentials', dest='buildlogger_credentials', default=None,
- action="store", help='Path to Python file containing buildlogger credentials')
- parser.add_option('--buildlogger-phase', dest='buildlogger_phase', default=None,
- action="store", help='Set the "phase" for buildlogger (e.g. "core", "auth") for display in the webapp (optional)')
- parser.add_option('--report-file', dest='report_file', default=None,
- action='store',
- help='Path to generate detailed json report containing all test details')
- parser.add_option('--use-write-commands', dest='use_write_commands', default=False,
- action='store_true',
- help='Deprecated(use --shell-write-mode): Sets the shell to use write commands by default')
- parser.add_option('--shell-write-mode', dest='shell_write_mode', default="commands",
- help='Sets the shell to use a specific write mode: commands/compatibility/legacy (default:legacy)')
-
- parser.add_option('--include-tags', dest='include_tags', default="", action='store',
- help='Filters jstests run by tag regex(es) - a tag in the test must match the regexes. ' +
- 'Specify single regex string or JSON array.')
-
- parser.add_option('--exclude-tags', dest='exclude_tags', default="", action='store',
- help='Filters jstests run by tag regex(es) - no tags in the test must match the regexes. ' +
- 'Specify single regex string or JSON array.')
-
- global tests
- (options, tests) = parser.parse_args()
-
- set_globals(options, tests)
-
- buildlogger_opts = (options.buildlogger_builder, options.buildlogger_buildnum, options.buildlogger_credentials)
- if all(buildlogger_opts):
- os.environ['MONGO_USE_BUILDLOGGER'] = 'true'
- os.environ['MONGO_BUILDER_NAME'] = options.buildlogger_builder
- os.environ['MONGO_BUILD_NUMBER'] = options.buildlogger_buildnum
- os.environ['BUILDLOGGER_CREDENTIALS'] = options.buildlogger_credentials
- if options.buildlogger_phase:
- os.environ['MONGO_PHASE'] = options.buildlogger_phase
- elif any(buildlogger_opts):
- # some but not all of the required options were sete
- raise Exception("you must set all of --buildlogger-builder, --buildlogger-buildnum, --buildlogger-credentials")
-
- if options.buildlogger_url: #optional; if None, defaults to const in buildlogger.py
- os.environ['BUILDLOGGER_URL'] = options.buildlogger_url
-
- if options.File:
- if options.File == '-':
- tests = sys.stdin.readlines()
- else:
- f = open(options.File)
- tests = f.readlines()
- tests = [t.rstrip('\n') for t in tests]
-
- if options.only_old_fails:
- run_old_fails()
- return
- elif options.reset_old_fails:
- clear_failfile()
- return
-
- # If we're in suite mode, tests is a list of names of sets of tests.
- if options.mode == 'suite':
- tests = expand_suites(tests)
- elif options.mode == 'files':
- tests = [(os.path.abspath(test), start_mongod) for test in tests]
-
- if options.ignore_files != None :
- ignore_patt = re.compile( options.ignore_files )
- print "Ignoring files with pattern: ", ignore_patt
-
- def ignore_test( test ):
- if ignore_patt.search( test[0] ) != None:
- print "Ignoring test ", test[0]
- return False
- else:
- return True
-
- tests = filter( ignore_test, tests )
-
- if options.include_tags or options.exclude_tags:
-
- def to_regex_array(tags_option):
- if not tags_option:
- return []
-
- tags_list = smoke.json_options.json_coerce(tags_option)
- if isinstance(tags_list, basestring):
- tags_list = [tags_list]
-
- return map(re.compile, tags_list)
-
- tests = filter_tests_by_tag(tests,
- smoke.suites.RegexQuery(include_res=to_regex_array(options.include_tags),
- exclude_res=to_regex_array(options.exclude_tags)))
-
- if not tests:
- print "warning: no tests specified"
- return
-
- if options.with_cleanbb:
- clean_dbroot(nokill=True)
-
- test_report["start"] = time.time()
- test_report["mongod_running_at_start"] = mongod().is_mongod_up(mongod_port)
- try:
- run_tests(tests)
- finally:
- add_to_failfile(fails, options)
-
- test_report["end"] = time.time()
- test_report["elapsed"] = test_report["end"] - test_report["start"]
- test_report["failures"] = len(losers.keys())
- test_report["mongod_running_at_end"] = mongod().is_mongod_up(mongod_port)
- if report_file:
- f = open( report_file, "wb" )
- f.write( json.dumps( test_report, indent=4, separators=(',', ': ')) )
- f.close()
-
- report()
-
-if __name__ == "__main__":
- main()
diff --git a/src/mongo/gotools/test/qa-tests/buildscripts/utils.py b/src/mongo/gotools/test/qa-tests/buildscripts/utils.py
deleted file mode 100644
index 0a46ef440d4..00000000000
--- a/src/mongo/gotools/test/qa-tests/buildscripts/utils.py
+++ /dev/null
@@ -1,235 +0,0 @@
-
-import codecs
-import re
-import socket
-import time
-import os
-import os.path
-import itertools
-import subprocess
-import sys
-import hashlib
-
-# various utilities that are handy
-
-def getAllSourceFiles( arr=None , prefix="." ):
- if arr is None:
- arr = []
-
- if not os.path.isdir( prefix ):
- # assume a file
- arr.append( prefix )
- return arr
-
- for x in os.listdir( prefix ):
- if x.startswith( "." ) or x.startswith( "pcre-" ) or x.startswith( "32bit" ) or x.startswith( "mongodb-" ) or x.startswith("debian") or x.startswith( "mongo-cxx-driver" ):
- continue
- # XXX: Avoid conflict between v8 and v8-3.25 source files in
- # src/mongo/scripting
- # Remove after v8-3.25 migration.
- if x.find("v8-3.25") != -1:
- continue
- full = prefix + "/" + x
- if os.path.isdir( full ) and not os.path.islink( full ):
- getAllSourceFiles( arr , full )
- else:
- if full.endswith( ".cpp" ) or full.endswith( ".h" ) or full.endswith( ".c" ):
- full = full.replace( "//" , "/" )
- arr.append( full )
-
- return arr
-
-
-def getGitBranch():
- if not os.path.exists( ".git" ) or not os.path.isdir(".git"):
- return None
-
- version = open( ".git/HEAD" ,'r' ).read().strip()
- if not version.startswith( "ref: " ):
- return version
- version = version.split( "/" )
- version = version[len(version)-1]
- return version
-
-def getGitBranchString( prefix="" , postfix="" ):
- t = re.compile( '[/\\\]' ).split( os.getcwd() )
- if len(t) > 2 and t[len(t)-1] == "mongo":
- par = t[len(t)-2]
- m = re.compile( ".*_([vV]\d+\.\d+)$" ).match( par )
- if m is not None:
- return prefix + m.group(1).lower() + postfix
- if par.find("Nightly") > 0:
- return ""
-
-
- b = getGitBranch()
- if b == None or b == "master":
- return ""
- return prefix + b + postfix
-
-def getGitVersion():
- if not os.path.exists( ".git" ) or not os.path.isdir(".git"):
- return "nogitversion"
-
- version = open( ".git/HEAD" ,'r' ).read().strip()
- if not version.startswith( "ref: " ):
- return version
- version = version[5:]
- f = ".git/" + version
- if not os.path.exists( f ):
- return version
- return open( f , 'r' ).read().strip()
-
-def execsys( args ):
- import subprocess
- if isinstance( args , str ):
- r = re.compile( "\s+" )
- args = r.split( args )
- p = subprocess.Popen( args , stdout=subprocess.PIPE , stderr=subprocess.PIPE )
- r = p.communicate()
- return r;
-
-def getprocesslist():
- raw = ""
- try:
- raw = execsys( "/bin/ps axww" )[0]
- except Exception,e:
- print( "can't get processlist: " + str( e ) )
-
- r = re.compile( "[\r\n]+" )
- return r.split( raw )
-
-def removeIfInList( lst , thing ):
- if thing in lst:
- lst.remove( thing )
-
-def findVersion( root , choices ):
- for c in choices:
- if ( os.path.exists( root + c ) ):
- return root + c
- raise "can't find a version of [" + root + "] choices: " + choices
-
-def choosePathExist( choices , default=None):
- for c in choices:
- if c != None and os.path.exists( c ):
- return c
- return default
-
-def filterExists(paths):
- return filter(os.path.exists, paths)
-
-def ensureDir( name ):
- d = os.path.dirname( name )
- if not os.path.exists( d ):
- print( "Creating dir: " + name );
- os.makedirs( d )
- if not os.path.exists( d ):
- raise "Failed to create dir: " + name
-
-
-def distinctAsString( arr ):
- s = set()
- for x in arr:
- s.add( str(x) )
- return list(s)
-
-def checkMongoPort( port=27017 ):
- sock = socket.socket()
- sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
- sock.settimeout(1)
- sock.connect(("localhost", port))
- sock.close()
-
-
-def didMongodStart( port=27017 , timeout=20 ):
- while timeout > 0:
- time.sleep( 1 )
- try:
- checkMongoPort( port )
- return True
- except Exception,e:
- print( e )
- timeout = timeout - 1
- return False
-
-def which(executable):
- if sys.platform == 'win32':
- paths = os.environ.get('Path', '').split(';')
- else:
- paths = os.environ.get('PATH', '').split(':')
-
- for path in paths:
- path = os.path.expandvars(path)
- path = os.path.expanduser(path)
- path = os.path.abspath(path)
- executable_path = os.path.join(path, executable)
- if os.path.exists(executable_path):
- return executable_path
-
- return executable
-
-def md5sum( file ):
- #TODO error handling, etc..
- return execsys( "md5sum " + file )[0].partition(" ")[0]
-
-def md5string( a_string ):
- return hashlib.md5(a_string).hexdigest()
-
-def find_python(min_version=(2, 5)):
- try:
- if sys.version_info >= min_version:
- return sys.executable
- except AttributeError:
- # In case the version of Python is somehow missing sys.version_info or sys.executable.
- pass
-
- version = re.compile(r'[Pp]ython ([\d\.]+)', re.MULTILINE)
- binaries = ('python27', 'python2.7', 'python26', 'python2.6', 'python25', 'python2.5', 'python')
- for binary in binaries:
- try:
- out, err = subprocess.Popen([binary, '-V'], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
- for stream in (out, err):
- match = version.search(stream)
- if match:
- versiontuple = tuple(map(int, match.group(1).split('.')))
- if versiontuple >= min_version:
- return which(binary)
- except:
- pass
-
- raise Exception('could not find suitable Python (version >= %s)' % '.'.join(str(v) for v in min_version))
-
-def smoke_command(*args):
- # return a list of arguments that comprises a complete
- # invocation of smoke.py
- here = os.path.dirname(__file__)
- smoke_py = os.path.abspath(os.path.join(here, 'smoke.py'))
- # the --with-cleanbb argument causes smoke.py to run
- # buildscripts/cleanbb.py before each test phase; this
- # prevents us from running out of disk space on slaves
- return [find_python(), smoke_py, '--with-cleanbb'] + list(args)
-
-def run_smoke_command(*args):
- # to run a command line script from a scons Alias (or any
- # Action), the command sequence must be enclosed in a list,
- # otherwise SCons treats it as a list of dependencies.
- return [smoke_command(*args)]
-
-# unicode is a pain. some strings cannot be unicode()'d
-# but we want to just preserve the bytes in a human-readable
-# fashion. this codec error handler will substitute the
-# repr() of the offending bytes into the decoded string
-# at the position they occurred
-def replace_with_repr(unicode_error):
- offender = unicode_error.object[unicode_error.start:unicode_error.end]
- return (unicode(repr(offender).strip("'").strip('"')), unicode_error.end)
-
-codecs.register_error('repr', replace_with_repr)
-
-def unicode_dammit(string, encoding='utf8'):
- # convert a string to a unicode, using the Python
- # representation of non-ascii bytes when necessary
- #
- # name inpsired by BeautifulSoup's "UnicodeDammit"
- return string.decode(encoding, 'repr')
-
diff --git a/src/mongo/gotools/test/qa-tests/jstests/bson/all_types.js b/src/mongo/gotools/test/qa-tests/jstests/bson/all_types.js
deleted file mode 100644
index 70d169685c8..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/bson/all_types.js
+++ /dev/null
@@ -1,33 +0,0 @@
-// This test runs bsondump on a .bson file containing non-deprecated BSON types
-// and makes sure their debug type values exist in the output.
-(function() {
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var x = _runMongoProgram("bsondump", "--type=debug", "jstests/bson/testdata/all_types.bson");
- assert.eq(x, 0, "bsondump should exit successfully with 0");
-
- var results;
- assert.eq.soon(22, function() {
- results = rawMongoProgramOutput();
- return (results.match(/--- new object ---/g) || []).length;
- }, "should see all documents from the test data");
-
- assert.strContains("type: 1", results, "bson type '1' should be present in the debug output");
- assert.strContains("type: 2", results, "bson type '2' should be present in the debug output");
- assert.strContains("type: 3", results, "bson type '3' should be present in the debug output");
- assert.strContains("type: 4", results, "bson type '4' should be present in the debug output");
- assert.strContains("type: 5", results, "bson type '5' should be present in the debug output");
- assert.strContains("type: 6", results, "bson type '6' should be present in the debug output");
- assert.strContains("type: 7", results, "bson type '7' should be present in the debug output");
- assert.strContains("type: 8", results, "bson type '8' should be present in the debug output");
- assert.strContains("type: 9", results, "bson type '9' should be present in the debug output");
- assert.strContains("type: 10", results, "bson type '10' should be present in the debug output");
- assert.strContains("type: 11", results, "bson type '11' should be present in the debug output");
- assert.strContains("type: 12", results, "bson type '12' should be present in the debug output");
- assert.strContains("type: 13", results, "bson type '13' should be present in the debug output");
- assert.strContains("type: 17", results, "bson type '17' should be present in the debug output");
- assert.strContains("type: 18", results, "bson type '18' should be present in the debug output");
- assert.strContains("type: -1", results, "bson type '-1' should be present in the debug output");
- assert.strContains("type: 127", results, "bson type '127' should be present in the debug output");
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/bson/all_types_json.js b/src/mongo/gotools/test/qa-tests/jstests/bson/all_types_json.js
deleted file mode 100644
index f64050cee28..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/bson/all_types_json.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// This test runs bsondump on a .bson file containing non-deprecated BSON types
-// and makes sure their JSON type representations exist in the output.
-(function() {
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var x = _runMongoProgram("bsondump", "--type=json", "jstests/bson/testdata/all_types.bson");
- assert.eq(x, 0, "bsondump should exit successfully with 0");
-
- assert.strContains.soon("20 objects found", rawMongoProgramOutput,
- "should print out all top-level documents from the test data");
-
- var results = rawMongoProgramOutput();
- assert.strContains("$binary", results, "bson type 'binary' should be present in the debug output");
- assert.strContains("$date", results, "bson type 'date' should be present in the debug output");
- assert.strContains("$timestamp", results, "bson type 'timestamp' should be present in the debug output");
- assert.strContains("$regex", results, "bson type 'regex' should be present in the debug output");
- assert.strContains("$oid", results, "bson type 'oid' should be present in the debug output");
- assert.strContains("$undefined", results, "bson type 'undefined' should be present in the debug output");
- assert.strContains("$minKey", results, "bson type 'min' should be present in the debug output");
- assert.strContains("$maxKey", results, "bson type 'max' should be present in the debug output");
- assert.strContains("$numberLong", results, "bson type 'long' should be present in the debug output");
- assert.strContains("$ref", results, "bson type 'dbref' should be present in the debug output");
- assert.strContains("$id", results, "bson type 'dbref' should be present in the debug output");
- assert.strContains("$code", results, "bson type 'javascript' should be present in the debug output");
- assert.strContains("null", results, "bson type 'null' should be present in the debug output");
- assert.strContains("true", results, "bson type 'true' should be present in the debug output");
- assert.strContains("false", results, "bson type 'false' should be present in the debug output");
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/bson/bad_files.js b/src/mongo/gotools/test/qa-tests/jstests/bson/bad_files.js
deleted file mode 100644
index 1728aa1e999..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/bson/bad_files.js
+++ /dev/null
@@ -1,41 +0,0 @@
-// This test makes sure that certain invalid BSON succeeds or fails
-// with both JSON and debug output types AND --objcheck
-(function() {
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var x = _runMongoProgram("bsondump", "--objcheck", "jstests/bson/testdata/random_bytes.bson");
- assert.neq(x, 0, "bsondump should exit with an error when given random bytes");
- x = _runMongoProgram("bsondump", "--objcheck", "jstests/bson/testdata/bad_cstring.bson");
- assert.neq(x, 0, "bsondump should exit with an error when given a non-terminated cstring");
- x = _runMongoProgram("bsondump", "--objcheck", "jstests/bson/testdata/bad_type.bson");
- assert.neq(x, 0, "bsondump should exit with an error when given a bad type value");
- x = _runMongoProgram("bsondump", "--objcheck", "jstests/bson/testdata/partial_file.bson");
- assert.neq(x, 0, "bsondump should exit with an error when given only the start of a file");
- x = _runMongoProgram("bsondump", "--objcheck", "jstests/bson/testdata/invalid_field_name.bson");
- assert.neq(x, 0, "bsondump should exit with an error given invalid field names");
- x = _runMongoProgram("bsondump", "--objcheck", "--type=debug", "jstests/bson/testdata/random_bytes.bson");
- assert.neq(x, 0, "bsondump should exit with an error when given random bytes");
- x = _runMongoProgram("bsondump", "--objcheck", "--type=debug", "jstests/bson/testdata/bad_cstring.bson");
- assert.neq(x, 0, "bsondump should exit with an error when given a non-terminated cstring");
- x = _runMongoProgram("bsondump", "--objcheck", "--type=debug", "jstests/bson/testdata/bad_type.bson");
- assert.neq(x, 0, "bsondump should exit with an error when given a bad type value");
- x = _runMongoProgram("bsondump", "--objcheck", "--type=debug", "jstests/bson/testdata/partial_file.bson");
- assert.neq(x, 0, "bsondump should exit with an error when given only the start of a file");
- x = _runMongoProgram("bsondump", "--objcheck", "--type=debug", "jstests/bson/testdata/invalid_field_name.bson");
- assert.neq(x, 0, "bsondump should exit with an error given invalid field names");
-
- // This should pass, but the content of the output might be erroneous
- x = _runMongoProgram("bsondump", "--objcheck", "jstests/bson/testdata/broken_array.bson");
- assert.eq(x, 0, "bsondump should exit with success when given a bad array document");
- x = _runMongoProgram("bsondump", "--objcheck", "--type=debug", "jstests/bson/testdata/broken_array.bson");
- assert.eq(x, 0, "bsondump should exit with success when given a bad array document");
-
- // Make sure recoverable cases do not return an error by default
- clearRawMongoProgramOutput();
- x = _runMongoProgram("bsondump", "jstests/bson/testdata/bad_cstring.bson");
- assert.eq(x, 0, "bsondump should not exit with an error when given a non-terminated cstring without --objcheck");
- assert.strContains.soon("corrupted", rawMongoProgramOutput,
- "one of the documents should have been labelled as corrupted");
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/bson/bsondump_broken_pipe.js b/src/mongo/gotools/test/qa-tests/jstests/bson/bsondump_broken_pipe.js
deleted file mode 100644
index e96733320e7..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/bson/bsondump_broken_pipe.js
+++ /dev/null
@@ -1,18 +0,0 @@
-(function() {
- var bsondumpArgs = ['bsondump', '--type=json', 'jstests/bson/testdata/all_types.bson'];
- var ddArgs = ['dd', 'count=1000000', 'bs=1024', 'of=/dev/null'];
- if (_isWindows()) {
- bsondumpArgs[0] += '.exe';
- }
- bsondumpArgs.unshift('set -o pipefail', '&&', 'PATH=.:$PATH');
-
- var ret = runProgram('bash', '-c', bsondumpArgs.concat('|', ddArgs).join(' '));
- assert.eq(0, ret, "bash execution should succeed");
-
- ddArgs = ['dd', 'count=0', 'bs=1', 'of=/dev/null'];
- ret = runProgram('bash', '-c', bsondumpArgs.concat('|', ddArgs).join(' '));
- assert.neq(0, ret, "bash execution should fail");
- assert.soon(function() {
- return rawMongoProgramOutput().search(/broken pipe|The pipe is being closed/);
- }, 'should print an error message');
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/bson/bsondump_options.js b/src/mongo/gotools/test/qa-tests/jstests/bson/bsondump_options.js
deleted file mode 100644
index 1b81c2a419e..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/bson/bsondump_options.js
+++ /dev/null
@@ -1,57 +0,0 @@
-// This test checks reasonable and unreasonable option configurations for bsondump
-(function() {
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var sampleFilepath = "jstests/bson/testdata/sample.bson";
- var x = _runMongoProgram("bsondump", "--type=fake", sampleFilepath);
- assert.neq(x, 0, "bsondump should exit with failure when given a non-existent type");
-
- x = _runMongoProgram("bsondump", "jstests/bson/testdata/asdfasdfasdf");
- assert.neq(x, 0, "bsondump should exit with failure when given a non-existent file");
-
- x = _runMongoProgram("bsondump", "--noobjcheck", sampleFilepath);
- assert.neq(x, 0, "bsondump should exit with failure when given --noobjcheck");
-
- x = _runMongoProgram("bsondump", "--collection", sampleFilepath);
- assert.neq(x, 0, "bsondump should exit with failure when given --collection");
-
- x = _runMongoProgram("bsondump", sampleFilepath, sampleFilepath);
- assert.neq(x, 0, "bsondump should exit with failure when given multiple files");
-
- x = _runMongoProgram("bsondump", '--bsonFile', sampleFilepath, sampleFilepath);
- assert.neq(x, 0, "bsondump should exit with failure when given both an out file and a positional argument");
-
- x = _runMongoProgram("bsondump", "-vvvv", sampleFilepath);
- assert.eq(x, 0, "bsondump should exit with success when given verbosity");
- x = _runMongoProgram("bsondump", "--verbose", sampleFilepath);
- assert.eq(x, 0, "bsondump should exit with success when given verbosity");
-
- clearRawMongoProgramOutput();
- var pid = _startMongoProgram("bsondump", "--quiet", sampleFilepath);
- assert.eq(waitProgram(pid), 0, "bsondump should exit with success when given --quiet");
- assert.strContains.soon("I am a string", rawMongoProgramOutput,
- "found docs should still be printed when --quiet is used");
- assert.eq.soon(-1, function() {
- return rawMongoProgramOutput()
- .split("\n")
- .filter(function(line) {
- return line.indexOf("sh"+pid+"| ") === 0;
- })
- .join("\n")
- .indexOf("found");
- }, "only the found docs should be printed when --quiet is used");
-
- clearRawMongoProgramOutput();
- x = _runMongoProgram("bsondump", "--help");
- assert.eq(x, 0, "bsondump should exit with success when given --help");
- assert.strContains.soon("Usage", rawMongoProgramOutput,
- "help text should be printed when given --help");
-
- clearRawMongoProgramOutput();
- x = _runMongoProgram("bsondump", "--version");
- assert.eq(x, 0, "bsondump should exit with success when given --version");
- assert.strContains.soon("version", rawMongoProgramOutput,
- "version info should be printed when given --version");
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/bson/deep_nested.js b/src/mongo/gotools/test/qa-tests/jstests/bson/deep_nested.js
deleted file mode 100644
index 1a226c81fc3..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/bson/deep_nested.js
+++ /dev/null
@@ -1,8 +0,0 @@
-// This test checks that bsondump can handle a deeply nested document without breaking
-
-(function() {
- var x = _runMongoProgram("bsondump", "--type=json", "jstests/bson/testdata/deep_nested.bson");
- assert.eq(x, 0, "bsondump should exit successfully with 0");
- x = _runMongoProgram("bsondump", "--type=debug", "jstests/bson/testdata/deep_nested.bson");
- assert.eq(x, 0, "bsondump should exit successfully with 0");
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/bson/output_file.js b/src/mongo/gotools/test/qa-tests/jstests/bson/output_file.js
deleted file mode 100644
index 1e339d6dcb1..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/bson/output_file.js
+++ /dev/null
@@ -1,71 +0,0 @@
-/**
- * output_file.js
- *
- * This file tests outputting bsondump to a file when the input is from a file.
- */
-
-(function() {
- 'use strict';
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- var toolTest = getToolTest('bson_output_file');
- var commonToolArgs = getCommonToolArguments();
-
- // The db and collections we'll use.
- var testDB = toolTest.db.getSiblingDB('test');
- var destColl = testDB.bsondump;
-
- // Test using a flag to specify the output file..
- var exportTarget = 'bson_dump.json';
- removeFile(exportTarget);
-
- var ret = _runMongoProgram("bsondump",
- "--type=json",
- "--bsonFile", "jstests/bson/testdata/sample.bson",
- "--outFile", exportTarget);
- assert.eq(ret, 0, "bsondump should exit successfully with 0");
-
- // Import the data into the destination collection to check correctness.
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'bsondump',
- '--type', 'json']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // Make sure everything was dumped.
- assert.eq(1, destColl.count({a: 1.0}));
- assert.eq(1, destColl.count({a: 2.5}));
- assert.eq(1, destColl.count({a: 4.0}));
- assert.eq(1, destColl.count({a: 4.01}));
-
-
- // Test using a positional argument to specify the output file.
- removeFile(exportTarget);
-
- ret = _runMongoProgram("bsondump",
- "--type=json",
- "--outFile", exportTarget,
- "jstests/bson/testdata/sample.bson");
- assert.eq(ret, 0, "bsondump should exit successfully with 0");
-
- // Import the data into the destination collection to check correctness.
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'bsondump',
- '--type', 'json']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // Make sure everything was dumped.
- assert.eq(1, destColl.count({a: 1.0}));
- assert.eq(1, destColl.count({a: 2.5}));
- assert.eq(1, destColl.count({a: 4.0}));
- assert.eq(1, destColl.count({a: 4.01}));
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/all_types.bson b/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/all_types.bson
deleted file mode 100644
index 1a1f3a923f9..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/all_types.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/bad_cstring.bson b/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/bad_cstring.bson
deleted file mode 100644
index 70e2f9c273a..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/bad_cstring.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/bad_type.bson b/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/bad_type.bson
deleted file mode 100644
index b21ed025bc2..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/bad_type.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/broken_array.bson b/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/broken_array.bson
deleted file mode 100644
index 35d9a4f6b5d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/broken_array.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/deep_nested.bson b/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/deep_nested.bson
deleted file mode 100644
index 68477b2aca3..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/deep_nested.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/invalid_field_name.bson b/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/invalid_field_name.bson
deleted file mode 100644
index 2a3aabd80a6..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/invalid_field_name.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/partial_file.bson b/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/partial_file.bson
deleted file mode 100644
index a571f10d06c..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/partial_file.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/random_bytes.bson b/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/random_bytes.bson
deleted file mode 100644
index d2558fd11c2..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/random_bytes.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/sample.bson b/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/sample.bson
deleted file mode 100644
index 35b3b843d66..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/bson/testdata/sample.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/common/check_version.js b/src/mongo/gotools/test/qa-tests/jstests/common/check_version.js
deleted file mode 100644
index cf151e41e75..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/common/check_version.js
+++ /dev/null
@@ -1,47 +0,0 @@
-/**
- * Given a MongoDB version, parses it into its major/minor/patch components,
- * discounting '-pre' and '-rcX'. Useful for parsing the output of
- * `db.version()` into an appropriate form for comparisons.
- *
- * Examples:
- * getVersionComponents('2.7.8'); // { major: 2, minor: 7, patch: 8 }
- * getVersionComponents('2.8.0-rc0'); // { major: 2, minor: 8, patch: 0 }
- */
-var getVersionComponents = function(version) {
- var splitVersion = version.split('.');
- assert.eq(3, splitVersion.length);
- var major = parseInt(splitVersion[0], 10);
- var minor = parseInt(splitVersion[1], 10);
-
- var patchEnd = splitVersion[2].indexOf('-') !== -1 ?
- splitVersion[2].indexOf('-') :
- undefined;
- var patch = parseInt(splitVersion[2].substr(0, patchEnd));
- return {
- major: major,
- minor: minor,
- patch: patch,
- };
-};
-
-/**
- * Given two versions, returns true if the first version is >= the second.
- *
- * Examples:
- * isAtLeastVersion('2.7.8', '2.7.8'); // true
- * isAtLeastVersion('2.8.0-rc0', '2.7.8'); // true
- * isAtLeastVersion('2.6.6', '2.7.8'); // false
- * isAtLeastVersion('1.8.5', '2.7.8'); // false
- */
-/* exported isAtLeastVersion */
-var isAtLeastVersion = function(serverVersion, checkVersion) {
- serverVersion = getVersionComponents(serverVersion);
- checkVersion = getVersionComponents(checkVersion);
-
- return (checkVersion.major < serverVersion.major) ||
- (checkVersion.major === serverVersion.major &&
- checkVersion.minor < serverVersion.minor) ||
- (checkVersion.major === serverVersion.major &&
- checkVersion.minor === serverVersion.minor &&
- checkVersion.patch <= serverVersion.patch);
-};
diff --git a/src/mongo/gotools/test/qa-tests/jstests/common/topology_helper.js b/src/mongo/gotools/test/qa-tests/jstests/common/topology_helper.js
deleted file mode 100644
index a3bd8773a9b..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/common/topology_helper.js
+++ /dev/null
@@ -1,164 +0,0 @@
-// topology_helper.js; contains utility functions to run tests
-
-// auth related variables
-var authUser = 'user';
-var authPassword = 'password';
-var authArgs = [
- '--authenticationDatabase', 'admin',
- '--authenticationMechanism', 'SCRAM-SHA-1',
- '-u', authUser,
- '-p', authPassword
-];
-var keyFile = 'jstests/libs/key1';
-
-// topology startup settings
-var auth = {
- name: 'auth',
- args: authArgs,
-};
-
-var plain = {
- name: 'plain',
- args: [],
-};
-
-/* exported passthroughs */
-// passthroughs while running all tests
-var passthroughs = [plain, auth];
-
-/* helper functions */
-
-// runAuthSetup creates a user with root role on the admin database
-var runAuthSetup = function(topology) {
- jsTest.log('Running auth setup');
-
- var conn = topology.connection();
- var db = conn.getDB('test');
-
- db.getSiblingDB('admin').createUser({
- user: authUser,
- pwd: authPassword,
- roles: ['root'],
- });
-
- assert.eq(db.getSiblingDB('admin').auth(authUser, authPassword), 1, 'authentication failed');
-};
-
-// buildStartupArgs constructs the proper object to be passed as arguments in
-// starting mongod
-var buildStartupArgs = function(passthrough) {
- var startupArgs = {};
- if (passthrough.name === auth.name) {
- startupArgs.auth = '';
- startupArgs.keyFile = keyFile;
- }
- return startupArgs;
-};
-
-// requiresAuth returns a boolean indicating if the passthrough requires authentication
-var requiresAuth = function(passthrough) {
- return passthrough.name === auth.name;
-};
-
-/* standalone topology */
-/* exported standaloneTopology */
-var standaloneTopology = {
- init: function(passthrough) {
- jsTest.log('Using standalone topology');
-
- passthrough = passthrough || [];
- var startupArgs = buildStartupArgs(passthrough);
- startupArgs.port = allocatePorts(1)[0];
- this.conn = MongoRunner.runMongod(startupArgs);
-
- // set up the auth user if needed
- if (requiresAuth(passthrough)) {
- runAuthSetup(this);
- }
- return this;
- },
- connection: function() {
- return this.conn;
- },
- stop: function() {
- MongoRunner.stopMongod(this.conn);
- },
-};
-
-
-/* replica set topology */
-/* exported replicaSetTopology */
-var replicaSetTopology = {
- init: function(passthrough) {
- jsTest.log('Using replica set topology');
-
- passthrough = passthrough || [];
- var startupArgs = buildStartupArgs(passthrough);
- startupArgs.name = testName;
- startupArgs.nodes = 2;
- this.replTest = new ReplSetTest(startupArgs);
-
- // start the replica set
- this.replTest.startSet();
- jsTest.log('Started replica set');
-
- // initiate the replica set with a default config
- this.replTest.initiate();
- jsTest.log('Initiated replica set');
-
- // block till the set is fully operational
- this.replTest.awaitSecondaryNodes();
- jsTest.log('Replica set fully operational');
-
- // set up the auth user if needed
- if (requiresAuth(passthrough)) {
- runAuthSetup(this);
- }
- return this;
- },
- connection: function() {
- return this.replTest.getPrimary();
- },
- stop: function() {
- this.replTest.stopSet();
- },
-};
-
-
-/* sharded cluster topology */
-/* exported shardedClusterTopology */
-var shardedClusterTopology = {
- init: function(passthrough) {
- jsTest.log('Using sharded cluster topology');
-
- passthrough = passthrough || [];
- var other = buildStartupArgs(passthrough);
- var startupArgs = {};
- startupArgs.name = testName;
- startupArgs.mongos = 1;
- startupArgs.shards = 1;
-
- // set up the auth user if needed
- if (requiresAuth(passthrough)) {
- startupArgs.keyFile = keyFile;
- startupArgs.other = {
- shardOptions: other,
- };
- this.shardingTest = new ShardingTest(startupArgs);
- runAuthSetup(this);
- } else {
- startupArgs.other = {
- shardOptions: other,
- };
- this.shardingTest = new ShardingTest(startupArgs);
- }
- return this;
- },
- connection: function() {
- return this.shardingTest.s;
- },
- stop: function() {
- this.shardingTest.stop();
- },
-};
-
diff --git a/src/mongo/gotools/test/qa-tests/jstests/configs/archive_targets.js b/src/mongo/gotools/test/qa-tests/jstests/configs/archive_targets.js
deleted file mode 100644
index 9bdf3b4c5b3..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/configs/archive_targets.js
+++ /dev/null
@@ -1,32 +0,0 @@
-var getDumpTarget;
-
-(function() {
- if (getDumpTarget === undefined) {
- getDumpTarget = function(target) {
- if (!target) {
- return ["--archive=dump.archive"];
- }
- return ["--archive="+target];
- };
- }
-}());
-
-var getRestoreTarget;
-
-/* exported dump_targets */
-var dump_targets = "archive";
-
-(function() {
- if (getRestoreTarget === undefined) {
- getRestoreTarget = function(target) {
- if (!target) {
- return ["--archive=dump.archive"];
- }
- targetParts = target.split("/");
- if (targetParts[0] === "dump") {
- return ["--archive=dump.archive"];
- }
- return ["--archive="+targetParts[0]];
- };
- }
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/configs/auth_28.config.js b/src/mongo/gotools/test/qa-tests/jstests/configs/auth_28.config.js
deleted file mode 100644
index 744bb237d79..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/configs/auth_28.config.js
+++ /dev/null
@@ -1,38 +0,0 @@
-/* exported getToolTest */
-var getToolTest;
-var AUTH_USER = 'passwordIsTaco';
-var AUTH_PASSWORD = 'Taco';
-
-(function() {
- var TOOLS_TEST_CONFIG = {
- binVersion: '',
- auth: '',
- };
-
- getToolTest = function(name) {
- var toolTest = new ToolTest(name, TOOLS_TEST_CONFIG);
- var db = toolTest.startDB();
-
- db.getSiblingDB('admin').createUser({
- user: AUTH_USER,
- pwd: AUTH_PASSWORD,
- roles: ['__system'],
- });
-
- db.getSiblingDB('admin').auth(AUTH_USER, AUTH_PASSWORD);
-
- toolTest.authCommand = "db.getSiblingDB('admin').auth('" + AUTH_USER
- + "', '" + AUTH_PASSWORD + "');";
-
- return toolTest;
- };
-}());
-
-/* exported getCommonToolArguments */
-var getCommonToolArguments = function() {
- return [
- '--username', AUTH_USER,
- '--password', AUTH_PASSWORD,
- '--authenticationDatabase', 'admin'
- ];
-};
diff --git a/src/mongo/gotools/test/qa-tests/jstests/configs/gzip_targets.js b/src/mongo/gotools/test/qa-tests/jstests/configs/gzip_targets.js
deleted file mode 100644
index d57b0c55954..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/configs/gzip_targets.js
+++ /dev/null
@@ -1,36 +0,0 @@
-var getDumpTarget;
-
-(function() {
- if (getDumpTarget === undefined) {
- getDumpTarget = function(target) {
- if (target === undefined) {
- return ["--gzip"];
- }
- if (target.indexOf(".bson", target.length - 5) !== -1) {
- return ["--gzip", "--out="+target+".gz"];
- }
- return ["--gzip", "--out="+target];
- };
- }
-}());
-
-var dump_targets;
-if (!dump_targets) {
- dump_targets = "gzip";
-}
-
-var getRestoreTarget;
-
-(function() {
- if (getRestoreTarget === undefined) {
- getRestoreTarget = function(target) {
- if (target === undefined) {
- return ["--gzip"];
- }
- if (target.indexOf(".bson", target.length - 5) !== -1) {
- return ["--gzip", "--dir="+target+".gz"];
- }
- return ["--gzip", "--dir="+target];
- };
- }
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/configs/kerberos.config.yml b/src/mongo/gotools/test/qa-tests/jstests/configs/kerberos.config.yml
deleted file mode 100644
index a2f7fdc5202..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/configs/kerberos.config.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-security:
- authorization: enabled
- sasl:
- serviceName: mockservice
- hostName: kdc.10gen.me
-setParameter:
- authenticationMechanisms: GSSAPI
diff --git a/src/mongo/gotools/test/qa-tests/jstests/configs/kerberos.linux.sh b/src/mongo/gotools/test/qa-tests/jstests/configs/kerberos.linux.sh
deleted file mode 100644
index d2f54971f1a..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/configs/kerberos.linux.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-echo "107.23.89.149 kdc.10gen.me" | sudo tee -a /etc/hosts
-echo "127.0.0.1 testserver.10gen.me" | sudo tee -a /etc/hosts
-sudo hostname "testserver.10gen.me"
-sudo cp jstests/libs/mockkrb5.conf /etc/krb5.conf
-kinit -p mockuser@10GEN.ME -k -t jstests/libs/mockuser.keytab
diff --git a/src/mongo/gotools/test/qa-tests/jstests/configs/kerberos_28.config.js b/src/mongo/gotools/test/qa-tests/jstests/configs/kerberos_28.config.js
deleted file mode 100644
index c00e8819e9a..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/configs/kerberos_28.config.js
+++ /dev/null
@@ -1,39 +0,0 @@
-/* exported getToolTest */
-var getToolTest;
-
-(function() {
- var AUTH_USER = 'mockuser@10GEN.ME';
-
- var TOOLS_TEST_CONFIG = {
- binVersion: '',
- config: 'jstests/configs/kerberos.config.yml',
- };
-
- getToolTest = function(name) {
- var toolTest = new ToolTest(name, TOOLS_TEST_CONFIG);
- var db = toolTest.startDB();
-
- db.getSiblingDB('$external').createUser({
- user: AUTH_USER,
- roles: [{role: '__system', db: 'admin'}],
- });
-
- db.getSiblingDB('$external').auth({user: AUTH_USER, mechanism: 'GSSAPI', serviceName: 'mockservice', serviceHostname: 'kdc.10gen.me'});
-
- toolTest.authCommand = "db.getSiblingDB('$external').auth({ user: '"
- + AUTH_USER + "', mechanism: 'GSSAPI', serviceName: 'mockservice', serviceHostname: 'kdc.10gen.me' });";
-
- return toolTest;
- };
-}());
-
-/* exported getCommonToolArguments */
-var getCommonToolArguments = function() {
- return [
- '--username', 'mockuser@10GEN.ME',
- '--authenticationDatabase', '$external',
- '--authenticationMechanism', 'GSSAPI',
- '--gssapiServiceName', 'mockservice',
- '--gssapiHostName', 'kdc.10gen.me'
- ];
-};
diff --git a/src/mongo/gotools/test/qa-tests/jstests/configs/kerberos_28_windows.config.js b/src/mongo/gotools/test/qa-tests/jstests/configs/kerberos_28_windows.config.js
deleted file mode 100644
index 481862f23f3..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/configs/kerberos_28_windows.config.js
+++ /dev/null
@@ -1,59 +0,0 @@
-/** NOTE: this config uses a static Kerberos instance running on an EC2
- * machine outside our security group. It should NOT be used for
- * automated tests, because its a single instance and there's no
- * automated way to generate more instances just yet. */
-
-/** NOTE: you need to add a registry entry for the MADHACKER.BIZ Kerberos
- * realm before using this:
- * cmd /c "REG ADD HKLM\SYSTEM\ControlSet001\Control\Lsa\Kerberos\Domains\MADHACKER.BIZ /v KdcNames /d karpov.madhacker.biz /t REG_MULTI_SZ /f"
- */
-
-/* exported getToolTest */
-var getToolTest;
-
-(function() {
- getToolTest = function(name) {
- var toolTest = new ToolTest(name, {});
- var db;
-
- db = toolTest.db = new Mongo(AUTH_HOSTNAME + ':27017').getDB('test');
-
- /** Overwrite so toolTest.runTool doesn't append --host */
- ToolTest.prototype.runTool = function() {
- arguments[0] = 'mongo' + arguments[0];
- return runMongoProgram.apply(null, arguments);
- };
-
- db.getSiblingDB('$external').auth({
- user: AUTH_USER,
- pwd: AUTH_PASSWORD,
- mechanism: 'GSSAPI',
- serviceName: 'mongodb',
- serviceHostname: AUTH_HOSTNAME,
- });
-
- toolTest.authCommand = "db.getSiblingDB('$external').auth({ user: '"
- + AUTH_USER + "', pwd: '" + AUTH_PASSWORD
- + "', mechanism: 'GSSAPI', serviceName: 'mongodb', serviceHostname: '"
- + AUTH_HOSTNAME + "' });";
-
- toolTest.stop = function() {
- print('No need to stop on Kerberos windows config. Test succeeded');
- };
-
- return toolTest;
- };
-}());
-
-/* exported getCommonToolArguments */
-var getCommonToolArguments = function() {
- return [
- '--username', AUTH_USER,
- '--password', AUTH_PASSWORD,
- '--host', AUTH_HOSTNAME,
- '--authenticationDatabase', '$external',
- '--authenticationMechanism', 'GSSAPI',
- '--gssapiServiceName', 'mongodb',
- '--gssapiHostName', AUTH_HOSTNAME
- ];
-};
diff --git a/src/mongo/gotools/test/qa-tests/jstests/configs/plain_26.config.js b/src/mongo/gotools/test/qa-tests/jstests/configs/plain_26.config.js
deleted file mode 100644
index 4f6cd93f8e3..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/configs/plain_26.config.js
+++ /dev/null
@@ -1,19 +0,0 @@
-/* exported getToolTest */
-var getToolTest;
-
-(function() {
- var TOOLS_TEST_CONFIG = {
- binVersion: '2.6',
- };
-
- getToolTest = function(name) {
- var toolTest = new ToolTest(name, TOOLS_TEST_CONFIG);
- toolTest.startDB();
- return toolTest;
- };
-}());
-
-/* exported getCommonToolArguments */
-var getCommonToolArguments = function() {
- return [];
-};
diff --git a/src/mongo/gotools/test/qa-tests/jstests/configs/plain_28.config.js b/src/mongo/gotools/test/qa-tests/jstests/configs/plain_28.config.js
deleted file mode 100644
index bf71e5ca079..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/configs/plain_28.config.js
+++ /dev/null
@@ -1,21 +0,0 @@
-load("jstests/configs/standard_dump_targets.config.js");
-
-/* exported getToolTest */
-var getToolTest;
-
-(function() {
- var TOOLS_TEST_CONFIG = {
- binVersion: '',
- };
-
- getToolTest = function(name) {
- var toolTest = new ToolTest(name, TOOLS_TEST_CONFIG);
- toolTest.startDB();
- return toolTest;
- };
-}());
-
-/* exported getCommonToolArguments */
-var getCommonToolArguments = function() {
- return [];
-};
diff --git a/src/mongo/gotools/test/qa-tests/jstests/configs/replset_28.config.js b/src/mongo/gotools/test/qa-tests/jstests/configs/replset_28.config.js
deleted file mode 100644
index 82e9a10891d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/configs/replset_28.config.js
+++ /dev/null
@@ -1,39 +0,0 @@
-load("jstests/configs/standard_dump_targets.config.js");
-
-/* exported getToolTest */
-var getToolTest;
-
-(function() {
- getToolTest = function(name) {
- var toolTest = new ToolTest(name, null);
-
- var replTest = new ReplSetTest({
- name: 'tool_replset',
- nodes: 3,
- oplogSize: 5,
- });
-
- replTest.startSet();
- replTest.initiate();
- var master = replTest.getPrimary();
-
- toolTest.m = master;
- toolTest.db = master.getDB(name);
- toolTest.port = replTest.getPort(master);
-
- var oldStop = toolTest.stop;
- toolTest.stop = function() {
- replTest.stopSet();
- oldStop.apply(toolTest, arguments);
- };
-
- toolTest.isReplicaSet = true;
-
- return toolTest;
- };
-}());
-
-/* exported getCommonToolArguments */
-var getCommonToolArguments = function() {
- return [];
-};
diff --git a/src/mongo/gotools/test/qa-tests/jstests/configs/replset_auth_28.config.js b/src/mongo/gotools/test/qa-tests/jstests/configs/replset_auth_28.config.js
deleted file mode 100644
index ed4f0c9b5e5..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/configs/replset_auth_28.config.js
+++ /dev/null
@@ -1,58 +0,0 @@
-/* exported getToolTest */
-var getToolTest;
-
-var AUTH_USER = 'passwordIsTaco';
-var AUTH_PASSWORD = 'Taco';
-
-(function() {
- getToolTest = function(name) {
- var toolTest = new ToolTest(name, null);
-
- var replTest = new ReplSetTest({
- name: 'tool_replset',
- nodes: 3,
- oplogSize: 5,
- auth: '',
- keyFile: 'jstests/libs/key1',
- });
-
- nodes = replTest.startSet();
- replTest.initiate();
- var master = replTest.getPrimary();
-
- toolTest.m = master;
- toolTest.db = master.getDB(name);
- toolTest.port = replTest.getPort(master);
-
- var db = toolTest.db;
- db.getSiblingDB('admin').createUser({
- user: AUTH_USER,
- pwd: AUTH_PASSWORD,
- roles: ['__system'],
- });
-
- db.getSiblingDB('admin').auth(AUTH_USER, AUTH_PASSWORD);
-
- var oldStop = toolTest.stop;
- toolTest.stop = function() {
- replTest.stopSet();
- oldStop.apply(toolTest, arguments);
- };
-
- toolTest.authCommand = 'db.getSiblingDB(\'admin\').auth(\'' +
- AUTH_USER + '\', \'' + AUTH_PASSWORD + '\');';
-
- toolTest.isReplicaSet = true;
-
- return toolTest;
- };
-}());
-
-/* exported getCommonToolArguments */
-var getCommonToolArguments = function() {
- return [
- '--username', AUTH_USER,
- '--password', AUTH_PASSWORD,
- '--authenticationDatabase', 'admin'
- ];
-};
diff --git a/src/mongo/gotools/test/qa-tests/jstests/configs/sharding_28.config.js b/src/mongo/gotools/test/qa-tests/jstests/configs/sharding_28.config.js
deleted file mode 100644
index 24577f44938..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/configs/sharding_28.config.js
+++ /dev/null
@@ -1,40 +0,0 @@
-load("jstests/configs/standard_dump_targets.config.js");
-
-/* exported getToolTest */
-var getToolTest;
-
-(function() {
- getToolTest = function(name) {
- var toolTest = new ToolTest(name, null);
-
- var shardingTest = new ShardingTest({name: name,
- shards: 2,
- verbose: 0,
- mongos: 3,
- other: {
- chunksize: 1,
- enableBalancer: 0
- }
- });
- shardingTest.adminCommand({enablesharding: name});
-
- toolTest.m = shardingTest.s0;
- toolTest.db = shardingTest.getDB(name);
- toolTest.port = shardingTest.s0.port;
-
- var oldStop = toolTest.stop;
- toolTest.stop = function() {
- shardingTest.stop();
- oldStop.apply(toolTest, arguments);
- };
-
- toolTest.isSharded = true;
-
- return toolTest;
- };
-}());
-
-/* exported getCommonToolArguments */
-var getCommonToolArguments = function() {
- return [];
-};
diff --git a/src/mongo/gotools/test/qa-tests/jstests/configs/ssl_28.config.js b/src/mongo/gotools/test/qa-tests/jstests/configs/ssl_28.config.js
deleted file mode 100644
index 17c2fb492a0..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/configs/ssl_28.config.js
+++ /dev/null
@@ -1,26 +0,0 @@
-/* exported getToolTest */
-var getToolTest;
-
-(function() {
- var TOOLS_TEST_CONFIG = {
- binVersion: '',
- sslMode: 'requireSSL',
- sslPEMKeyFile: 'jstests/libs/server.pem',
- sslCAFile: 'jstests/libs/ca.pem',
- };
-
- getToolTest = function(name) {
- var toolTest = new ToolTest(name, TOOLS_TEST_CONFIG);
- toolTest.startDB();
- toolTest.usesSSL = true;
- return toolTest;
- };
-}());
-
-/* exported getCommonToolArguments */
-var getCommonToolArguments = function() {
- return [
- '--ssl',
- '--sslPEMKeyFile', 'jstests/libs/client.pem'
- ];
-};
diff --git a/src/mongo/gotools/test/qa-tests/jstests/configs/standard_dump_targets.config.js b/src/mongo/gotools/test/qa-tests/jstests/configs/standard_dump_targets.config.js
deleted file mode 100644
index fe68b171246..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/configs/standard_dump_targets.config.js
+++ /dev/null
@@ -1,30 +0,0 @@
-var getDumpTarget;
-
-(function() {
- if (getDumpTarget === undefined) {
- getDumpTarget = function(target) {
- if (target === undefined) {
- return [];
- }
- return ["--out="+target];
- };
- }
-}());
-
-var dump_targets;
-if (!dump_targets) {
- dump_targets = "standard";
-}
-
-var getRestoreTarget;
-
-(function() {
- if (getRestoreTarget === undefined) {
- getRestoreTarget = function(target) {
- if (target === undefined) {
- return [];
- }
- return ["--dir="+target];
- };
- }
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/bad_options.js b/src/mongo/gotools/test/qa-tests/jstests/export/bad_options.js
deleted file mode 100644
index cbbeea4f346..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/bad_options.js
+++ /dev/null
@@ -1,47 +0,0 @@
-(function() {
-
- // Tests running mongoexport with bad command line options.
-
- jsTest.log('Testing running mongoexport with bad command line options');
-
- var toolTest = new ToolTest('bad_options');
- toolTest.startDB('foo');
-
- // run mongoexport with a missing --collection argument
- var ret = toolTest.runTool('export', '--db', 'test');
- assert.neq(0, ret);
-
- // run mongoexport with bad json as the --query
- ret = toolTest.runTool('export', '--db', 'test', '--collection', 'data',
- '--query', '{ hello }');
- assert.neq(0, ret);
-
- // run mongoexport with a bad argument to --skip
- ret = toolTest.runTool('export', '--db', 'test', '--collection', 'data',
- '--sort', '{a: 1}', '--skip', 'jamesearljones');
- assert.neq(0, ret);
-
- // run mongoexport with a bad argument to --sort
- ret = toolTest.runTool('export', '--db', 'test', '--collection', 'data',
- '--sort', '{ hello }');
- assert.neq(0, ret);
-
- // run mongoexport with a bad argument to --limit
- ret = toolTest.runTool('export', '--db', 'test', '--collection', 'data',
- '--sort', '{a: 1}', '--limit', 'jamesearljones');
- assert.neq(0, ret);
-
- // run mongoexport with --query and --queryFile
- ret = toolTest.runTool('export', '--db', 'test', '--collection', 'data',
- '--query', '{a:1}', '--queryFile', 'jstests/export/testdata/query.json');
- assert.neq(0, ret);
-
- // run mongoexport with a --queryFile that doesn't exist
- ret = toolTest.runTool('export', '--db', 'test', '--collection', 'data',
- '--queryFile', 'jstests/nope');
- assert.neq(0, ret);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/basic_data.js b/src/mongo/gotools/test/qa-tests/jstests/export/basic_data.js
deleted file mode 100644
index 7a000390de3..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/basic_data.js
+++ /dev/null
@@ -1,60 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests running mongoexport with some basic data, and bringing it back
- // in with import.
-
- jsTest.log('Testing exporting, then importing, some basic data');
-
- var toolTest = getToolTest('basic_data');
- var commonToolArgs = getCommonToolArguments();
-
- // the export target
- var exportTarget = 'basic_data_export.json';
- removeFile(exportTarget);
-
- // the db and collection we'll use
- var testDB = toolTest.db.getSiblingDB('test');
- var testColl = testDB.data;
-
- // insert some data
- var data = [];
- for (var i = 0; i < 50; i++) {
- data.push({_id: i});
- }
- testColl.insertMany(data);
- // sanity check the insertion worked
- assert.eq(50, testColl.count());
-
- // export the data
- var ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'data']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the database
- testDB.dropDatabase();
-
- // import the data back in
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'data']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the data is correct
- assert.eq(50, testColl.count());
- for (i = 0; i < 50; i++) {
- assert.eq(1, testColl.count({_id: i}));
- }
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/data_types.js b/src/mongo/gotools/test/qa-tests/jstests/export/data_types.js
deleted file mode 100644
index ce6a6ac8ed3..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/data_types.js
+++ /dev/null
@@ -1,70 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests running mongoexport with different data types, and bringing it back
- // in with import.
-
- jsTest.log('Testing exporting, then importing, different data types');
-
- var toolTest = getToolTest('data_types');
- var commonToolArgs = getCommonToolArguments();
-
- // the export target
- var exportTarget = 'data_types_export.json';
- removeFile(exportTarget);
-
- // the db and collection we'll use
- var testDB = toolTest.db.getSiblingDB('test');
- var testColl = testDB.data;
-
- // insert some data, of different types
- testColl.insert({num: 1});
- testColl.insert({flt: 1.0});
- testColl.insert({str: '1'});
- testColl.insert({obj: {a: 1}});
- testColl.insert({arr: [0, 1]});
- testColl.insert({bd: new BinData(0, '1234')});
- testColl.insert({date: ISODate('2009-08-27T12:34:56.789')});
- testColl.insert({ts: new Timestamp(1234, 5678)});
- testColl.insert({rx: /foo*"bar"/i});
- // sanity check the insertion worked
- assert.eq(9, testColl.count());
-
- // export the data
- var ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'data']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the database
- testDB.dropDatabase();
-
- // import the data back in
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'data']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the data is correct
- assert.eq(9, testColl.count());
- assert.eq(1, testColl.count({num: 1}));
- assert.eq(1, testColl.count({flt: 1.0}));
- assert.eq(1, testColl.count({str: '1'}));
- assert.eq(1, testColl.count({obj: {a: 1}}));
- assert.eq(1, testColl.count({arr: [0, 1]}));
- assert.eq(1, testColl.count({bd: new BinData(0, '1234')}));
- assert.eq(1, testColl.count({date: ISODate('2009-08-27T12:34:56.789')}));
- assert.eq(1, testColl.count({ts: new Timestamp(1234, 5678)}));
- assert.eq(1, testColl.count({rx: {$exists: true}}));
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/export_broken_pipe.js b/src/mongo/gotools/test/qa-tests/jstests/export/export_broken_pipe.js
deleted file mode 100644
index e1eb41d71cf..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/export_broken_pipe.js
+++ /dev/null
@@ -1,46 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var toolTest = getToolTest('export_broken_pipe');
- var baseArgs = getCommonToolArguments();
- baseArgs = baseArgs.concat('--port', toolTest.port);
-
- if (toolTest.useSSL) {
- baseArgs = baseArgs.concat([
- '--ssl',
- '--sslPEMKeyFile', 'jstests/libs/server.pem',
- '--sslCAFile', 'jstests/libs/ca.pem',
- '--sslAllowInvalidHostnames']);
- }
- var exportArgs = ['mongoexport', '-d', toolTest.db.getName(), '-c', 'foo'].concat(baseArgs);
- var ddArgs = ['dd', 'count=1000000', 'bs=1024', 'of=/dev/null'];
- if (_isWindows()) {
- exportArgs[0] += '.exe';
- }
- exportArgs.unshift('set -o pipefail', '&&', 'PATH=.:$PATH');
-
- var testDb = toolTest.db;
- testDb.dropDatabase();
- for (var i = 0; i < 500; i++) {
- testDb.foo.insert({i: i});
- }
- assert.eq(500, testDb.foo.count(), 'foo should have our test documents');
-
- var ret = runProgram('bash', '-c', exportArgs.concat('|', ddArgs).join(' '));
- assert.eq(0, ret, "bash execution should succeed");
- assert.strContains.soon('exported 500 records', rawMongoProgramOutput, 'should print the success message');
-
- ddArgs = ['dd', 'count=100', 'bs=1', 'of=/dev/null'];
- ret = runProgram('bash', '-c', exportArgs.concat('|', ddArgs).join(' '));
- assert.neq(0, ret, "bash execution should fail");
- assert.soon(function() {
- return rawMongoProgramOutput().search(/broken pipe|The pipe is being closed/);
- }, 'should print an error message');
-
- testDb.dropDatabase();
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/field_file.js b/src/mongo/gotools/test/qa-tests/jstests/export/field_file.js
deleted file mode 100644
index e7517690f06..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/field_file.js
+++ /dev/null
@@ -1,60 +0,0 @@
-(function() {
-
- if (typeof getToolTest === "undefined") {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests running mongoexport exporting to csv using the --fieldFile option
- jsTest.log('Testing exporting to csv using the --fieldFile option');
-
- var toolTest = getToolTest('field_file');
- var commonToolArgs = getCommonToolArguments();
-
- // the db and collections we'll use
- var testDB = toolTest.db.getSiblingDB('test');
- var sourceColl = testDB.source;
- var destColl = testDB.dest;
-
- // the export target
- var exportTarget = 'jstests/export/testdata/field_file_export.csv';
- removeFile(exportTarget);
-
- // insert some data
- sourceColl.insert({a: 1});
- sourceColl.insert({a: 1, b: 1});
- sourceColl.insert({a: 1, b: 2, c: 3});
- // sanity check the insertion worked
- assert.eq(3, sourceColl.count());
-
- // export the data, using a field file that specifies 'a' and 'b'
- var ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'source',
- '--type=csv',
- '--fieldFile', 'jstests/export/testdata/simple_field_file']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
-
- // import the data into the destination collection
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'dest',
- '--type=csv',
- '--fields', 'a,b,c']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
-
- // make sure only the specified fields were exported
- assert.eq(3, destColl.count({a: 1}));
- assert.eq(1, destColl.count({b: 1}));
- assert.eq(1, destColl.count({b: 2}));
- assert.eq(0, destColl.count({c: 3}));
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/fields_csv.js b/src/mongo/gotools/test/qa-tests/jstests/export/fields_csv.js
deleted file mode 100644
index 531fdb93c9e..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/fields_csv.js
+++ /dev/null
@@ -1,173 +0,0 @@
-(function() {
-
- if (typeof getToolTest === "undefined") {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests running mongoexport exporting to csv using the --fields option.
-
- jsTest.log('Testing exporting to csv using the --fields option');
-
- var toolTest = getToolTest('fields_csv');
- var commonToolArgs = getCommonToolArguments();
-
- // the db and collections we'll use
- var testDB = toolTest.db.getSiblingDB('test');
- var sourceColl = testDB.source;
- var destColl = testDB.dest;
-
- // the export target
- var exportTarget = 'fields_export.csv';
- removeFile(exportTarget);
-
- // insert some data
- sourceColl.insert({a: 1});
- sourceColl.insert({a: 1, b: 1});
- sourceColl.insert({a: 1, b: 2, c: 3});
- // sanity check the insertion worked
- assert.eq(3, sourceColl.count());
-
- // export the data, specifying only one field
- var ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'source',
- '--csv',
- '--fields', 'a']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // import the data into the destination collection
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'dest',
- '--type', 'csv',
- '--fields', 'a,b,c']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure only the specified field was exported
- assert.eq(3, destColl.count({a: 1}));
- assert.eq(0, destColl.count({b: 1}));
- assert.eq(0, destColl.count({b: 2}));
- assert.eq(0, destColl.count({c: 3}));
-
- // remove the export, clear the destination collection
- removeFile(exportTarget);
- destColl.remove({});
-
- // export the data, specifying all fields
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'source',
- '--csv',
- '--fields', 'a,b,c']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // import the data into the destination collection
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'dest',
- '--type', 'csv',
- '--fields', 'a,b,c']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure everything was exported
- assert.eq(3, destColl.count({a: 1}));
- assert.eq(1, destColl.count({b: 1}));
- assert.eq(1, destColl.count({b: 2}));
- assert.eq(1, destColl.count({c: 3}));
-
- // make sure the _id was NOT exported - the _id for the
- // corresponding documents in the two collections should
- // be different
- var fromSource = sourceColl.findOne({a: 1, b: 1});
- var fromDest = destColl.findOne({a: 1, b: 1});
- assert.neq(fromSource._id, fromDest._id);
-
-
- /* Test passing positional arguments to --fields */
-
- // outputMatchesExpected takes an output string and returns
- // a boolean indicating if any line of the output matched
- // the expected string.
- var outputMatchesExpected = function(output, expected) {
- var found = false;
- output.split('\n').forEach(function(line) {
- if (line.match(expected)) {
- found = true;
- }
- });
- return found;
- };
-
- // remove the export, clear the destination collection
- removeFile(exportTarget);
- sourceColl.remove({});
-
- // ensure source collection is empty
- assert.eq(0, sourceColl.count());
-
- // insert some data
- sourceColl.insert({a: [1, 2, 3, 4, 5], b: {c: [-1, -2, -3, -4]}});
- sourceColl.insert({a: 1, b: 2, c: 3, d: {e: [4, 5, 6]}});
- sourceColl.insert({a: 1, b: 2, c: 3, d: 5, e: {"0": ["foo", "bar", "baz"]}});
- sourceColl.insert({a: 1, b: 2, c: 3, d: [4, 5, 6], e: [{"0": 0, "1": 1}, {"2": 2, "3": 3}]});
-
- // ensure the insertion worked
- assert.eq(4, sourceColl.count());
-
- // use the following fields as filters:
- var cases = [
- {field: 'd.e.2', expected: /6/}, // specify nested field with array value
- {field: 'e.0.0', expected: /foo/}, // specify nested field with numeric array value
- {field: 'b,d.1,e.1.3', expected: /2,5,3/}, // specify varying levels of field nesting
- ];
-
- var output;
-
- for (var i = 0; i < cases.length; i++) {
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--fields', cases[i].field,
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'source',
- '--csv']
- .concat(commonToolArgs));
- assert.eq(0, ret);
- output = cat(exportTarget);
- jsTest.log("Fields Test " + (i + 1) + ": \n" + output);
- assert.eq(outputMatchesExpected(output, cases[i].expected), true);
- }
-
- // test with $ projection and query
- cases = [
- {query: '{ d: 4 }', field: 'd.$', expected: /[4]/},
- {query: '{ a: { $gt: 1 } }', field: 'a.$', expected: /[2]/},
- {query: '{ "b.c": -1 }', field: 'b.c.$', expected: /[-1]/},
- ];
-
- for (i = 0; i < cases.length; i++) {
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--query', cases[i].query,
- '--fields', cases[i].field,
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'source',
- '--csv']
- .concat(commonToolArgs));
- assert.eq(0, ret);
- output = cat(exportTarget);
- jsTest.log("Fields + Query Test " + (i + 1) + ": \n" + output);
- assert.eq(outputMatchesExpected(output, cases[i].expected), true);
- }
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/fields_json.js b/src/mongo/gotools/test/qa-tests/jstests/export/fields_json.js
deleted file mode 100644
index ff79f79f63a..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/fields_json.js
+++ /dev/null
@@ -1,92 +0,0 @@
-(function() {
-
- if (typeof getToolTest === "undefined") {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests running mongoexport exporting to json with the --fields option
-
- jsTest.log('Testing exporting to json using the --fields option');
-
- var toolTest = getToolTest('fields_json');
- var commonToolArgs = getCommonToolArguments();
-
- // the db and collections we'll use
- var testDB = toolTest.db.getSiblingDB('test');
- var sourceColl = testDB.source;
- var destColl = testDB.dest;
-
- // the export target
- var exportTarget = 'fields_export.json';
- removeFile(exportTarget);
-
- // insert some data
- sourceColl.insert({a: 1});
- sourceColl.insert({a: 1, b: 1});
- sourceColl.insert({a: 1, b: 2, c: 3});
- // sanity check the insertion worked
- assert.eq(3, sourceColl.count());
-
- // export the data, specifying only one field
- var ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'source',
- '--fields', 'a']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // import the data into the destination collection
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'dest',
- '--type', 'json']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure only the specified field was exported
- assert.eq(3, destColl.count({a: 1}));
- assert.eq(0, destColl.count({b: 1}));
- assert.eq(0, destColl.count({b: 2}));
- assert.eq(0, destColl.count({c: 3}));
-
- // remove the export, clear the destination collection
- removeFile(exportTarget);
- destColl.remove({});
-
- // export the data, specifying all fields
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'source',
- '--fields', 'a,b,c']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // import the data into the destination collection
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'dest',
- '--type', 'json']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure everything was exported
- assert.eq(3, destColl.count({a: 1}));
- assert.eq(1, destColl.count({b: 1}));
- assert.eq(1, destColl.count({b: 2}));
- assert.eq(1, destColl.count({c: 3}));
-
- // make sure the _id was exported - the _id for the
- // corresponding documents in the two collections should
- // be the same
- var fromSource = sourceColl.findOne({a: 1, b: 1});
- var fromDest = destColl.findOne({a: 1, b: 1});
- assert.eq(fromSource._id, fromDest._id);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/force_table_scan.js b/src/mongo/gotools/test/qa-tests/jstests/export/force_table_scan.js
deleted file mode 100644
index 8a79aa092ae..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/force_table_scan.js
+++ /dev/null
@@ -1,113 +0,0 @@
-(function() {
-
- if (typeof getToolTest === "undefined") {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests running mongoexport with --forceTableScan specified.
-
- jsTest.log('Testing exporting with --forceTableScan');
-
- var toolTest = getToolTest('force_table_scan');
- var commonToolArgs = getCommonToolArguments();
-
- // the export target
- var exportTarget = 'force_table_scan_export.json';
- removeFile(exportTarget);
-
- // the db and collection we'll use
- var testDB = toolTest.db.getSiblingDB('test');
- var testColl = testDB.data;
-
- // insert some data
- var data = [];
- for (var i = 0; i < 50; i++) {
- data.push({_id: i});
- }
- testColl.insertMany(data);
- // sanity check the insertion worked
- assert.eq(50, testColl.count());
-
- // set the profiling level to high, so that
- // we can inspect all queries
- assert.eq(1, testDB.setProfilingLevel(2).ok);
-
- // the profiling collection
- var profilingColl = testDB.system.profile;
-
- // run mongoexport without --forceTableScan
- var ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'data']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // grab the query from the profiling collection
- var queries = profilingColl.find({op: 'query', ns: 'test.data'}).toArray();
-
- // there should only be one query so far, and it should have snapshot set
- assert.eq(1, queries.length);
- assert.eq(true, queries[0].query.$snapshot || queries[0].query.snapshot);
-
- // remove the export file
- removeFile(exportTarget);
-
- // run mongoexport again, with --forceTableScan
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'data',
- '--forceTableScan']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // grab the queries again
- queries = profilingColl.find({op: 'query', ns: 'test.data'}).sort({ts: 1}).toArray();
-
- // there should be two queries, and the second one should not
- // have snapshot set
- assert.eq(2, queries.length);
- assert(!queries[1].query['$snapshot']);
-
- // wipe the collection
- testColl.remove({});
-
- // import the data back in
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'data']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure that the export with --forceTableScan exported the correct data
- assert.eq(50, testColl.count());
- for (i = 0; i < 50; i++) {
- assert.eq(1, testColl.count({_id: i}));
- }
-
- // remove the export file
- removeFile(exportTarget);
-
- // run mongoexport again, without --forceTableScan but with --sort. --forceTableScan
- // should be implicitly set
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'data',
- '--sort', '{_id:1}']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // grab the queries again
- queries = profilingColl.find({op: 'query', ns: 'test.data'}).sort({ts: 1}).toArray();
-
- // there should be 3 queries, and the last one should not have snapshot set
- assert.eq(3, queries.length);
- assert(!queries[2].query.$snapshot);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/json_array.js b/src/mongo/gotools/test/qa-tests/jstests/export/json_array.js
deleted file mode 100644
index 7d4f0849783..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/json_array.js
+++ /dev/null
@@ -1,57 +0,0 @@
-(function() {
-
- // Tests running mongoexport with the --jsonArray output option.
-
- jsTest.log('Testing exporting with --jsonArray specified');
-
- var toolTest = new ToolTest('json_array');
- toolTest.startDB('foo');
-
- // the db and collection we'll use
- var testDB = toolTest.db.getSiblingDB('test');
- var testColl = testDB.data;
-
- // the export target
- var exportTarget = 'json_array_export.json';
- removeFile(exportTarget);
-
- // insert some data
- var data = [];
- for (var i = 0; i < 20; i++) {
- data.push({_id: i});
- }
- testColl.insertMany(data);
- // sanity check the insertion worked
- assert.eq(20, testColl.count());
-
- // export the data
- var ret = toolTest.runTool('export', '--out', exportTarget,
- '--db', 'test', '--collection', 'data', '--jsonArray');
- assert.eq(0, ret);
-
- // drop the data
- testDB.dropDatabase();
-
- // make sure that mongoimport without --jsonArray does not work
- ret = toolTest.runTool('import', '--file', exportTarget,
- '--db', 'test', '--collection', 'data');
- assert.neq(0, ret);
-
- // make sure nothing was imported
- assert.eq(0, testColl.count());
-
- // run mongoimport again, with --jsonArray
- ret = toolTest.runTool('import', '--file', exportTarget,
- '--db', 'test', '--collection', 'data', '--jsonArray');
- assert.eq(0, ret);
-
- // make sure the data was imported
- assert.eq(20, testColl.count());
- for (i = 0; i < 20; i++) {
- assert.eq(1, testColl.count({_id: i}));
- }
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/limit.js b/src/mongo/gotools/test/qa-tests/jstests/export/limit.js
deleted file mode 100644
index 0c291222ff0..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/limit.js
+++ /dev/null
@@ -1,61 +0,0 @@
-(function() {
-
- if (typeof getToolTest === "undefined") {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests running mongoexport with --limit specified.
-
- jsTest.log('Testing exporting with --limit');
-
- var toolTest = getToolTest('limit');
- var commonToolArgs = getCommonToolArguments();
-
- // the export target
- var exportTarget = 'limit_export.json';
- removeFile(exportTarget);
-
- // the db and collection we'll use
- var testDB = toolTest.db.getSiblingDB('test');
- var testColl = testDB.data;
-
- // insert some data
- var data = [];
- for (var i = 0; i < 50; i++) {
- data.push({a: i});
- }
- testColl.insertMany(data);
- // sanity check the insertion worked
- assert.eq(50, testColl.count());
-
- // export the data, using --limit
- var ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'data',
- '--sort', '{a:1}',
- '--limit', '20']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the database
- testDB.dropDatabase();
-
- // import the data back in
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'data']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the limit was applied to the export
- assert.eq(20, testColl.count());
- for (i = 0; i < 20; i++) {
- assert.eq(1, testColl.count({a: i}));
- }
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/namespace_validation.js b/src/mongo/gotools/test/qa-tests/jstests/export/namespace_validation.js
deleted file mode 100644
index 410b8986223..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/namespace_validation.js
+++ /dev/null
@@ -1,25 +0,0 @@
-(function() {
-
- // Tests running mongoexport with bad command line options.
-
- jsTest.log('Testing exporting valid or invalid namespaces');
-
- var toolTest = new ToolTest('system_collection');
- toolTest.startDB('foo');
-
- // run mongoexport with an dot in the db name
- ret = toolTest.runTool('export', '--db', 'test.bar', '--collection', 'foo');
- assert.neq(0, ret);
-
- // run mongoexport with an " in the db name
- ret = toolTest.runTool('export', '--db', 'test"bar', '--collection', 'foo');
- assert.neq(0, ret);
-
- // run mongoexport with a system collection
- ret = toolTest.runTool('export', '--db', 'test', '--collection', 'system.foobar');
- assert.eq(0, ret);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/nested_fields_csv.js b/src/mongo/gotools/test/qa-tests/jstests/export/nested_fields_csv.js
deleted file mode 100644
index 8a1954ec5dd..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/nested_fields_csv.js
+++ /dev/null
@@ -1,65 +0,0 @@
-(function() {
-
- if (typeof getToolTest === "undefined") {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests exporting nested fields to csv.
-
- jsTest.log('Testing exporting nested fields to csv');
-
- var toolTest = getToolTest('nested_fields_csv');
- var commonToolArgs = getCommonToolArguments();
-
- // the db and collections we'll use
- var testDB = toolTest.db.getSiblingDB('test');
- var sourceColl = testDB.source;
- var destColl = testDB.dest;
-
- // the export target
- var exportTarget = 'nested_fields_export.csv';
- removeFile(exportTarget);
-
- // insert some data
- sourceColl.insert({a: 1});
- sourceColl.insert({a: 2, b: {c: 2}});
- sourceColl.insert({a: 3, b: {c: 3, d: {e: 3}}});
- sourceColl.insert({a: 4, x: null});
- // sanity check the insertion worked
- assert.eq(4, sourceColl.count());
-
- // export the data, specifying nested fields to export
- var ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'source',
- '--csv',
- '--fields', 'a,b.d.e,x.y']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // import the data
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'dest',
- '--type', 'csv',
- '--headerline']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure that the non-specified fields were ignored, and the
- // specified fields were added correctly
- assert.eq(0, destColl.count({'b.c': 2}));
- assert.eq(0, destColl.count({'b.c': 3}));
- assert.eq(1, destColl.count({'b.d.e': 3}));
- assert.eq(3, destColl.count({'b.d.e': ''}));
- assert.eq(1, destColl.count({a: 1}));
- assert.eq(1, destColl.count({a: 2}));
- assert.eq(1, destColl.count({a: 3}));
- assert.eq(4, destColl.count({'x.y': ''}));
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/no_data.js b/src/mongo/gotools/test/qa-tests/jstests/export/no_data.js
deleted file mode 100644
index cfc9248bb5f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/no_data.js
+++ /dev/null
@@ -1,21 +0,0 @@
-(function() {
-
- // Tests running mongoexport with no data in the target collection.
-
- jsTest.log('Testing exporting no data');
-
- var toolTest = new ToolTest('no_data');
- toolTest.startDB('foo');
-
- // run mongoexport with no data, make sure it doesn't error out
- var ret = toolTest.runTool('export', '--db', 'test', '--collection', 'data');
- assert.eq(0, ret);
-
- // but it should fail if --assertExists specified
- ret = toolTest.runTool('export', '--db', 'test', '--collection', 'data', '--assertExists');
- assert.neq(0, ret);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/pretty.js b/src/mongo/gotools/test/qa-tests/jstests/export/pretty.js
deleted file mode 100644
index db922794b6f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/pretty.js
+++ /dev/null
@@ -1,33 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- var toolTest = getToolTest('fields_json');
- var commonToolArgs = getCommonToolArguments();
- var testDB = toolTest.db.getSiblingDB('test');
- var sourceColl = testDB.source;
-
- // insert some data
- sourceColl.insert({a: 1});
- sourceColl.insert({a: 1, b: 1});
- sourceColl.insert({a: 1, b: 2, c: 3});
-
- // export it with pretty
- var ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', "pretty.json",
- '--db', 'test',
- '--collection', 'source',
- '--pretty',
- '--jsonArray']
- .concat(commonToolArgs));
- assert.eq(0, ret);
- parsed = JSON.parse(cat('pretty.json'));
- assert.eq(parsed[0].a, 1);
- assert.eq(parsed[1].b, 1);
- assert.eq(parsed[2].b, 2);
- assert.eq(parsed[2].c, 3);
-
-}());
-
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/query.js b/src/mongo/gotools/test/qa-tests/jstests/export/query.js
deleted file mode 100644
index 9c44913a366..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/query.js
+++ /dev/null
@@ -1,198 +0,0 @@
-(function() {
-
- if (typeof getToolTest === "undefined") {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests running mongoexport with --query specified.
-
- jsTest.log('Testing exporting with --query');
-
- var toolTest = getToolTest('query');
- var commonToolArgs = getCommonToolArguments();
-
- // the export target
- var exportTarget = 'query_export.json';
- removeFile(exportTarget);
-
- // the db and collections we'll use
- var testDB = toolTest.db.getSiblingDB('test');
- var sourceColl = testDB.source;
- var destColl = testDB.dest;
-
- // insert some data
- sourceColl.insert({a: 1, x: {b: '1'}});
- sourceColl.insert({a: 2, x: {b: '1', c: '2'}});
- sourceColl.insert({a: 1, c: '1'});
- sourceColl.insert({a: 2, c: '2'});
- // sanity check the insertion worked
- assert.eq(4, sourceColl.count());
-
- // export the data, with a query that will match nothing
- var ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'source',
- '--query', '{a:3}']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // import the data into the destination collection
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'dest']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the export was blank
- assert.eq(0, destColl.count());
-
- // remove the export
- removeFile(exportTarget);
-
- // export the data, with a query matching a single element
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'source',
- '--query', "{a:1, c:'1'}"]
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // import the data into the destination collection
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'dest']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the query was applied correctly
- assert.eq(1, destColl.count());
- assert.eq(1, destColl.count({a: 1, c: '1'}));
-
- // remove the export, clear the destination collection
- removeFile(exportTarget);
- destColl.remove({});
-
- // TOOLS-716 export the data, with a queryFile matching a single element
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'source',
- '--queryFile', "jstests/export/testdata/query.json"]
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // import the data into the destination collection
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'dest']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the query was applied correctly
- assert.eq(1, destColl.count());
- assert.eq(1, destColl.count({a: 1, c: '1'}));
-
- // remove the export, clear the destination collection
- removeFile(exportTarget);
- destColl.remove({});
-
-
- // export the data, with a query on an embedded document
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'source',
- '--query', "{a:2, 'x.c':'2'}"]
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // import the data into the destination collection
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'dest']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the query was applied correctly
- assert.eq(1, destColl.count());
- assert.eq(1, destColl.count({a: 2, "x.c": '2'}));
-
- // remove the export, clear the destination collection
- removeFile(exportTarget);
- destColl.remove({});
-
- // export the data, with a blank query (should match everything)
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'source',
- '--query', "{}"]
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // import the data into the destination collection
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'dest']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the query was applied correctly
- assert.eq(4, destColl.count());
-
- // TOOLS-469 test queries containing extended JSON field (like dates)
- sourceColl.drop();
- destColl.drop();
- sourceColl.insert({
- a: 1,
- x: ISODate("2014-12-11T13:52:39.498Z"),
- y: ISODate("2014-12-13T13:52:39.498Z")
- });
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'source',
- '--query', '{x:{$gt:Date(1418305949498), $lt:Date(1418305979498)}, y:{$gt:{$date:1418478749498}, $lt:{$date:1418478769498}}}']
- .concat(commonToolArgs));
- assert.eq(0, ret);
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'dest']
- .concat(commonToolArgs));
- assert.eq(0, ret); assert.eq(1, destColl.count());
-
- // TOOLS-530 add support for ISODate and string formatting for query flag
- sourceColl.drop();
- destColl.drop();
- sourceColl.insert({
- a: 1,
- x: ISODate("2014-12-11T13:52:39.498Z"),
- y: ISODate("2014-12-13T13:52:39.498Z")
- });
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'source',
- '--query', '{x:{$gt:ISODate("2014-12-11T13:52:39.3Z"), $lt:ISODate("2014-12-11T13:52:39.5Z")}}']
- .concat(commonToolArgs));
- assert.eq(0, ret);
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'dest']
- .concat(commonToolArgs));
- assert.eq(0, ret);
- assert.eq(1, destColl.count());
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/slave_ok.js b/src/mongo/gotools/test/qa-tests/jstests/export/slave_ok.js
deleted file mode 100644
index 08936eda33e..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/slave_ok.js
+++ /dev/null
@@ -1,63 +0,0 @@
-(function() {
- // Tests running mongoexport with --slaveOk.
-
- jsTest.log('Testing exporting with --slaveOk');
-
- // bring up a replica set with 3 nodes
- var replTest = new ReplSetTest({
- name: 'slave_ok',
- nodes: 3,
- oplogSize: 5,
- useHostName: true,
- });
- var nodes = replTest.startSet();
- replTest.initiate();
- replTest.awaitSecondaryNodes();
-
- // cache the primary
- var primary = replTest.getPrimary();
-
- // the export target
- var exportTarget = 'slave_ok_export.json';
- removeFile(exportTarget);
-
- // insert some data
- var testDB = primary.getDB('test');
- var data = [];
- for (var i = 0; i < 10; i++) {
- data.push({_id: i});
- }
- testDB.data.insertMany(data);
- replTest.awaitReplication();
-
- // sanity check the insertion worked
- assert.eq(10, testDB.data.count());
-
- // make sure that exporting from any of the nodes works with --slaveOk
- nodes.forEach(function(node) {
- // remove the export, clean the destination collection
- removeFile(exportTarget);
- testDB.dest.remove({});
- printjson(replTest.status());
-
- var ret = runMongoProgram('mongoexport',
- '--db', 'test',
- '--collection', 'data',
- '--host', node.host,
- '--slaveOk',
- '--out', exportTarget);
- assert.eq(0, ret);
-
- ret = runMongoProgram('mongoimport',
- '--db', 'test',
- '--collection', 'dest',
- '--host', primary.host,
- '--file', exportTarget);
- assert.eq(0, ret);
- assert.eq(10, testDB.dest.count());
- });
-
- // success
- replTest.stopSet();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/sort_and_skip.js b/src/mongo/gotools/test/qa-tests/jstests/export/sort_and_skip.js
deleted file mode 100644
index 790baa29fed..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/sort_and_skip.js
+++ /dev/null
@@ -1,69 +0,0 @@
-(function() {
-
- if (typeof getToolTest === "undefined") {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests running mongoexport with --sort and --skip specified.
-
- jsTest.log('Testing exporting with --sort and --skip');
-
- var toolTest = getToolTest('sort_and_skip');
- var commonToolArgs = getCommonToolArguments();
-
- // the export target
- var exportTarget = 'sort_and_skip_export.json';
- removeFile(exportTarget);
-
- // the db and collection we'll use
- var testDB = toolTest.db.getSiblingDB('test');
- var testColl = testDB.data;
-
- // insert some data, in a different order than we'll be sorting it
- var data = [];
- for (var i = 30; i > 20; i--) {
- data.push({a: i});
- }
- for (i = 31; i < 50; i++) {
- data.push({a: i});
- }
- for (i = 20; i >= 0; i--) {
- data.push({a: i});
- }
- testColl.insertMany(data, {ordered: true});
- // sanity check the insertion worked
- assert.eq(50, testColl.count());
-
- // export the data, using --skip
- var ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'data',
- '--sort', '{a:1}',
- '--skip', '20']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the database
- testDB.dropDatabase();
-
- // import the data back in
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', exportTarget,
- '--db', 'test',
- '--collection', 'data']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the skip was applied to the export, and that
- // the sort functioned so that the correct documents
- // were skipped
- assert.eq(30, testColl.count());
- for (i = 20; i < 50; i++) {
- assert.eq(1, testColl.count({a: i}));
- }
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/stdout.js b/src/mongo/gotools/test/qa-tests/jstests/export/stdout.js
deleted file mode 100644
index 3a1cbc97b89..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/stdout.js
+++ /dev/null
@@ -1,42 +0,0 @@
-// Tests running mongoexport writing to stdout.
-(function() {
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- jsTest.log('Testing exporting to stdout');
-
- var toolTest = new ToolTest('stdout');
- toolTest.startDB('foo');
-
- // the db and collection we'll use
- var testDB = toolTest.db.getSiblingDB('test');
- var testColl = testDB.data;
-
- // insert some data
- var data = [];
- for (var i = 0; i < 20; i++) {
- data.push({_id: i});
- }
- testColl.insertMany(data);
- // sanity check the insertion worked
- assert.eq(20, testColl.count());
-
- // export the data, writing to stdout
- var ret = toolTest.runTool('export', '--db', 'test', '--collection', 'data');
- assert.eq(0, ret);
-
- // wait for full output to appear
- assert.strContains.soon('exported 20 records', rawMongoProgramOutput,
- 'should show number of exported records');
-
- // grab the raw output
- var output = rawMongoProgramOutput();
-
- // make sure it contains the json output
- for (i = 0; i < 20; i++) {
- assert.neq(-1, output.indexOf('{"_id":'+i+'.0}'));
- }
-
- // success
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/testdata/query.json b/src/mongo/gotools/test/qa-tests/jstests/export/testdata/query.json
deleted file mode 100644
index 5e9b73d037e..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/testdata/query.json
+++ /dev/null
@@ -1 +0,0 @@
-{a:1, c:'1'}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/testdata/simple_field_file b/src/mongo/gotools/test/qa-tests/jstests/export/testdata/simple_field_file
deleted file mode 100644
index 422c2b7ab3b..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/testdata/simple_field_file
+++ /dev/null
@@ -1,2 +0,0 @@
-a
-b
diff --git a/src/mongo/gotools/test/qa-tests/jstests/export/type_case.js b/src/mongo/gotools/test/qa-tests/jstests/export/type_case.js
deleted file mode 100644
index ea2a11bddfe..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/export/type_case.js
+++ /dev/null
@@ -1,115 +0,0 @@
-(function() {
-
- if (typeof getToolTest === "undefined") {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Testing exporting with various type specifiers
-
- jsTest.log('Testing exporting with various type specifiers');
-
- var toolTest = getToolTest('export_types');
- var commonToolArgs = getCommonToolArguments();
-
- // the db and collections we'll use
- var testDB = toolTest.db.getSiblingDB('test');
- var sourceColl = testDB.source;
-
- // the export target
- var exportTarget = 'type_export';
-
- // insert some data
- sourceColl.insert({a: 1});
- sourceColl.insert({a: 1, b: 1});
- sourceColl.insert({a: 1, b: 2, c: 3});
- // sanity check the insertion worked
- assert.eq(3, sourceColl.count());
-
- // first validate that invalid types are rejected
- var ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget,
- '--db', 'test',
- '--collection', 'source',
- '--type="foobar"',
- '--fields', 'a']
- .concat(commonToolArgs));
- assert.eq(3, ret);
-
- // create a dump file using a lowercase csv type
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget + ".csv",
- '--db', 'test',
- '--collection', 'source',
- '--type="csv"',
- '--fields', 'a']
- .concat(commonToolArgs));
- assert.eq(0, ret);
- var csvmd5 = md5sumFile(exportTarget + ".csv");
-
- // create a dump file using a uppercase csv type
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget + ".CSV",
- '--db', 'test',
- '--collection', 'source',
- '--type="CSV"',
- '--fields', 'a']
- .concat(commonToolArgs));
- var CSVmd5 = md5sumFile(exportTarget + ".CSV");
- // the files for the uppercase and lowercase types should match
- assert.eq(csvmd5, CSVmd5);
-
- // create a dump file using a mixedcase csv type
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget + ".cSv",
- '--db', 'test',
- '--collection', 'source',
- '--type="cSv"',
- '--fields', 'a']
- .concat(commonToolArgs));
- var cSvmd5 = md5sumFile(exportTarget + ".cSv");
- // the files for the uppercase and lowercase types should match
- assert.eq(csvmd5, cSvmd5);
-
- // then some json type tests
-
- // create a dump file using a lowercase json type
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget + ".json",
- '--db', 'test',
- '--collection', 'source',
- '--type="json"',
- '--fields', 'a']
- .concat(commonToolArgs));
- assert.eq(0, ret);
- var jsonmd5 = md5sumFile(exportTarget + ".json");
-
- // create a dump file using a uppercase json type
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget + ".JSON",
- '--db', 'test',
- '--collection', 'source',
- '--type="JSON"',
- '--fields', 'a']
- .concat(commonToolArgs));
- assert.eq(0, ret);
- var JSONmd5 = md5sumFile(exportTarget + ".JSON");
-
- // create a dump file using a uppercase blank (json) type
- ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', exportTarget + ".blank",
- '--db', 'test',
- '--collection', 'source',
- '--fields', 'a']
- .concat(commonToolArgs));
- assert.eq(0, ret);
- var blankmd5 = md5sumFile(exportTarget + ".blank");
- assert.eq(JSONmd5, jsonmd5);
- assert.eq(blankmd5, jsonmd5);
-
- // sanity check
- assert.neq(csvmd5, jsonmd5);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_db.js b/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_db.js
deleted file mode 100644
index fd647d03bff..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_db.js
+++ /dev/null
@@ -1,61 +0,0 @@
-// mongofiles_db.js; ensure that running mongofiles using the db flag works as
-// expected
-var testName = 'mognofiles_db';
-load('jstests/files/util/mongofiles_common.js');
-(function() {
- jsTest.log('Testing mongofiles --host option');
-
- var runTests = function(topology, passthrough) {
- jsTest.log('Putting file with valid host name with ' + passthrough.name + ' passthrough');
- var t = topology.init(passthrough);
- var conn = t.connection();
- var db = conn.getDB('otherdb');
-
- // ensure tool runs without error
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--db', 'otherdb',
- '--port', conn.port,
- '--host', 'localhost',
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put 1 failed');
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--db', 'otherdb',
- '--port', conn.port,
- '--host', 'localhost',
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put 2 failed');
-
- // ensure the files were inserted into the right db
- assert.eq(2, db.getCollection('fs.files').count(), 'unexpected fs.files count 1');
-
- // test short form
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '-d', 'otherdb',
- '--port', conn.port,
- '--host', 'localhost',
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put 3 failed');
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '-d', 'otherdb',
- '--port', conn.port,
- '--host', 'localhost',
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put 4 failed');
-
- // ensure the file was inserted into the right db
- assert.eq(4, db.getCollection('fs.files').count(), 'unexpected fs.files count 2s');
-
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- runTests(shardedClusterTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_delete.js b/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_delete.js
deleted file mode 100644
index 89e72b3f366..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_delete.js
+++ /dev/null
@@ -1,47 +0,0 @@
-// mongofiles_delete.js; ensure that delete command works as expected
-var testName = 'mongofiles_delete';
-load('jstests/files/util/mongofiles_common.js');
-(function() {
- jsTest.log('Testing mongofiles delete command');
-
- var runTests = function(topology, passthrough) {
- jsTest.log('Putting file with ' + passthrough.name + ' passthrough');
-
- var t = topology.init(passthrough);
- var conn = t.connection();
- var db = conn.getDB('test');
-
- // ensure tool runs without error
- for (var i = 0; i < 10; i++) {
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put failed');
- }
-
- // ensure all the files were written
- assert.eq(10, db.fs.files.count(), 'unexpected fs.files count');
-
- jsTest.log('Deleting file');
-
- // ensure tool runs without error
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- 'delete', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'delete failed');
-
- // ensure all the files were deleted
- assert.eq(0, db.fs.files.count(), 'unexpected fs.files count');
- assert.eq(0, db.fs.chunks.count(), 'unexpected fs.chunks count');
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- runTests(shardedClusterTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_get.js b/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_get.js
deleted file mode 100644
index 04a8c93832a..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_get.js
+++ /dev/null
@@ -1,81 +0,0 @@
-// mongofiles_get.js; ensure that get command works as expected
-var testName = 'mongofiles_get';
-(function() {
- jsTest.log('Testing mongofiles get command');
- load('jstests/files/util/mongofiles_common.js');
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var runTests = function(topology, passthrough) {
- var t = topology.init(passthrough);
- var conn = t.connection();
- var db = conn.getDB('test');
- var getFile = testName + (Math.random() + 1).toString(36).substring(7);
-
- jsTest.log('Putting file with ' + passthrough.name + ' passthrough');
-
- // ensure tool runs without error
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put 1 failed');
-
- // ensure the file was inserted
- assert.eq(1, db.fs.files.count(), 'unexpected fs.files count 1');
- var fileId = db.fs.files.findOne()._id;
-
- jsTest.log('Getting file with ' + passthrough.name + ' passthrough');
-
- // ensure tool runs without error
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--local', getFile,
- 'get', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'get failed');
-
- // ensure the retrieved file is exactly the same as that inserted
- var actual = md5sumFile(filesToInsert[0]);
- var expected = md5sumFile(getFile);
-
- assert.eq(actual, expected, 'mismatched md5 sum - expected ' + expected + ' got ' + actual);
-
- // ensure tool runs get_id without error
- var idAsJSON = fileId.tojson();
- if (_isWindows()) {
- idAsJSON = '"' + idAsJSON.replace(/"/g, '\\"') + '"';
- }
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--local', getFile,
- 'get_id', idAsJSON]
- .concat(passthrough.args)),
- 0, 'get_id failed');
- expected = md5sumFile(getFile);
- assert.eq(actual, expected, 'mismatched md5 sum on _id - expected ' + expected + ' got ' + actual);
-
- // clear the output buffer
- clearRawMongoProgramOutput();
-
- // test getting to stdout
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--local', '-',
- 'get', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'get stdout failed');
- var expectedContent = "this is a text file";
- assert.strContains.soon(expectedContent, rawMongoProgramOutput,
- "stdout get didn't match expected file content");
-
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- runTests(shardedClusterTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_host.js b/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_host.js
deleted file mode 100644
index 8fac1af3c81..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_host.js
+++ /dev/null
@@ -1,59 +0,0 @@
-// mongofiles_host.js; ensure that running mongofiles using valid and invalid
-// host names or IP addresses succeeds/fails as expected
-var testName = 'mongofiles_host';
-load('jstests/files/util/mongofiles_common.js');
-(function() {
- jsTest.log('Testing mongofiles --host option');
-
- var runTests = function(topology, passthrough) {
- jsTest.log('Putting file with valid host name with ' + passthrough.name + ' passthrough');
- var t = topology.init(passthrough);
- var conn = t.connection();
- var db = conn.getDB('test');
-
- // ensure tool runs without error
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--host', 'localhost',
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put 1 failed');
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--host', '127.0.0.1',
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put 2 failed');
-
- // ensure the file was inserted
- assert.eq(2, db.getCollection('fs.files').count(), 'unexpected fs.files count 1');
-
- jsTest.log('Putting file with invalid host name with ' + passthrough.name + ' passthrough');
-
- // ensure tool exits with a non-zero exit code when supplied invalid hosts
- assert.neq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--host', 'does-not-exist',
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'expected mongofiles to fail but it succeeded 1');
- assert.neq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--host', '555.555.555.555',
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'expected mongofiles to fail but it succeeded 2');
-
- // ensure the file was not inserted
- assert.eq(2, db.getCollection('fs.files').count(), 'unexpected fs.files count 2');
-
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- runTests(shardedClusterTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_invalid.js b/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_invalid.js
deleted file mode 100644
index 0cd8bca00a9..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_invalid.js
+++ /dev/null
@@ -1,37 +0,0 @@
-// mongofiles_invalid.js; runs mongofiles with an invalid command and
-// option - ensures it fails in all cases
-var testName = 'mongofiles_invalid';
-load('jstests/files/util/mongofiles_common.js');
-(function() {
- jsTest.log('Testing mongofiles with invalid commands and options');
-
- var runTests = function(topology, passthrough) {
- var t = topology.init(passthrough);
- var conn = t.connection();
-
- jsTest.log('Running with file with invalid options onw passthrough ' + passthrough.name);
-
- // run with invalid option
- assert.neq(runMongoProgram.apply(this, ['mongofiles',
- '--invalid', conn.port,
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'invalid-option: mongofiles succeeded when it should have failed');
-
- // run with invalid command
- assert.neq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- 'invalid', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'invalid-command: mongofiles succeeded when it should have failed');
-
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- runTests(shardedClusterTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_list.js b/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_list.js
deleted file mode 100644
index d48994ebc54..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_list.js
+++ /dev/null
@@ -1,96 +0,0 @@
-// mongofiles_list.js; tests the mongofiles list option by doing the following:
-//
-// 1. Inserts the mongod/mongo binaries using mongofiles put
-// 2. Checks that the actual md5 of the file matches what's stored in the database
-// 3. Runs the mongofiles list command to view all files stored.
-// 4. Ensures that all the files inserted and returned.
-// 5. Ensures that the returned list matches thae actual filesToInsert[0] and size of
-// files inserted.
-var testName = 'mongofiles_list';
-(function() {
- jsTest.log('Testing mongofiles list command');
- load('jstests/libs/extended_assert.js');
- load('jstests/files/util/mongofiles_common.js');
- var assert = extendedAssert;
-
- var putFile = function(passthrough, conn, file) {
- // ensure tool runs without error
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- 'put', file]
- .concat(passthrough.args)),
- 0, 'put for ' + file + 'failed');
- var db = conn.getDB('test');
- var fileObj = db.fs.files.findOne({
- filename: file,
- });
- assert(fileObj, 'could not find put file ' + file);
- assert.eq(md5sumFile(file), fileObj.md5, file + ' md5 did not match - expected ' + md5sumFile(file) + ' got ' + fileObj.md5);
- return fileObj.length;
- };
-
- var runTests = function(topology, passthrough) {
- jsTest.log('Putting GridFS files with ' + passthrough.name + ' passthrough');
-
- var inputFileRegex = /^sh.*files.*/;
- var whitespaceSplitRegex = /,?\s+/;
- var fileSizes = [];
-
- var t = topology.init(passthrough);
- var conn = t.connection();
-
- filesToInsert.forEach(function(file) {
- var fileSize = putFile(passthrough, conn, file);
- fileSizes.push(fileSize);
- });
-
- jsTest.log('Running mongofiles list');
-
- // clear the output buffer
- clearRawMongoProgramOutput();
-
- // ensure tool runs without error
- var pid = startMongoProgramNoConnect.apply(this, ['mongofiles',
- '--port', conn.port,
- '--quiet', 'list']
- .concat(passthrough.args));
- assert.eq(waitProgram(pid), 0, 'list command failed but was expected to succeed');
-
- jsTest.log('Verifying list output');
-
- var files;
- assert.neq.soon(0, function() {
- files = rawMongoProgramOutput()
- .split('\n')
- .filter(function(line) {
- return line.indexOf('sh'+pid) !== -1 && line.match(inputFileRegex);
- });
- return files.length;
- }, 'should find some files');
-
- // ensure that the returned files and their sizes are as expected
- files.forEach(function(currentFile, index) {
- // should print mongod and then mongo
- var fileEntry = currentFile.split(whitespaceSplitRegex);
-
- // the list command should have 2 entries - the file name and its size
- // we check for 3 files because of the sh. prefix in our js test framework
- assert.eq(fileEntry.length, 3, 'unexpected list output on ' + currentFile + ' - expected 3 but got ' + fileEntry.length);
-
- // ensure the expected file name is what is printed
- assert.eq(fileEntry[1], filesToInsert[index], 'expected file ' + filesToInsert[1] + ' got ' + fileEntry[1]);
-
- // ensure the expected file size is what is printed
- assert.eq(fileEntry[2], fileSizes[index], 'expected size ' + fileSizes[2] + ' got ' + fileEntry[2]);
- });
-
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- runTests(shardedClusterTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_local.js b/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_local.js
deleted file mode 100644
index bc874499cba..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_local.js
+++ /dev/null
@@ -1,102 +0,0 @@
-// mongofiles_local.js; ensure that when --local is passed:
-// a. for puts, the supplied argument is read and stored using the gridfs filename
-// b. for gets, the supplied argument is used to store the retrieved file
-// c. for puts, if the supplied argument is the empty string, an error should occur
-// d. for gets, if the supplied argument is the empty string, the file name is used
-var testName = 'mongofiles_local';
-load('jstests/files/util/mongofiles_common.js');
-(function() {
- jsTest.log('Testing mongofiles --local option');
-
- var runTests = function(topology, passthrough) {
- var t = topology.init(passthrough);
- var conn = t.connection();
- var db = conn.getDB('test');
-
- // generate a random GridFS name for the file
- var putFSName = testName + (Math.random() + 1).toString(36).substring(7);
- var getFSName = testName + (Math.random() + 1).toString(36).substring(7);
-
- jsTest.log('Running put on file with --local');
-
- // ensure tool runs without error with a non-empty --local argument
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '-l', filesToInsert[0],
- 'put', putFSName]
- .concat(passthrough.args)),
- 0, 'put failed when it should have succeeded 1');
-
- // ensure the file exists
- assert(db.fs.files.findOne({
- filename: putFSName
- }), 'did not find expected GridFS file - ' + putFSName);
-
- // ensure tool returns an error if the --local argument does not exist
- assert.neq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--local', filesToInsert[0] + '?',
- 'put', putFSName]
- .concat(passthrough.args)),
- 0, 'put succeeded when it should have failed 2');
-
- // if the argument is empty, use the putFSName - which should cause an error since it doesn't exist
- assert.neq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--local', '',
- 'put', putFSName]
- .concat(passthrough.args)),
- 0, 'put succeeded when it should have failed 3');
-
- // if the argument is empty, and the GridFS file exists, it should run
- // without error on linux and fails on windows
- var comparison = 'eq';
- if (_isWindows()) {
- comparison = 'neq';
- }
- assert[comparison](runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--local', '',
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put failed when it should have succeeded 2');
-
- jsTest.log('Running get on file with --local');
-
- // ensure tool runs without error
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--local', getFSName,
- 'get', putFSName]
- .concat(passthrough.args)),
- 0, 'get failed when it should have succeeded 1');
-
- // ensure the right file name was written
- assert.eq(md5sumFile(filesToInsert[0]), md5sumFile(getFSName), 'files do not match!');
-
- // ensure tool uses the GridFS name if the --local argument is empty on linux
- // and fails on windows
- comparison = 'eq';
- if (_isWindows()) {
- comparison = 'neq';
- }
- assert[comparison](runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--local', '',
- 'get', putFSName]
- .concat(passthrough.args)),
- 0, 'get failed unexpectedly');
-
- if (!_isWindows()) {
- assert.eq(md5sumFile(filesToInsert[0]), md5sumFile(putFSName), 'md5sums do not match - expected ' + md5sumFile(filesToInsert[0]) + ' got ' + md5sumFile(putFSName));
- }
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- runTests(shardedClusterTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_port.js b/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_port.js
deleted file mode 100644
index 88d6d8f1417..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_port.js
+++ /dev/null
@@ -1,52 +0,0 @@
-// mongofiles_port.js; ensure that supplying valid/invalid port addresses
-// succeeds/fails as expected
-var testName = 'mongofiles_port';
-load('jstests/files/util/mongofiles_common.js');
-(function() {
- jsTest.log('Testing mongofiles --port option');
-
- var runTests = function(topology, passthrough) {
- var t = topology.init(passthrough);
- var conn = t.connection();
- var db = conn.getDB('test');
-
- jsTest.log('Putting file with valid port with ' + passthrough.name + ' passthrough');
-
- // ensure tool runs without error
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put 1 failed');
-
- // ensure the file was inserted
- assert.eq(1, db.fs.files.count(), 'unexpected fs.files count 1');
-
- jsTest.log('Putting file with invalid port with ' + passthrough.name + ' passthrough');
-
- // ensure tool exits with a non-zero exit code when supplied invalid ports
- assert.neq(runMongoProgram.apply(this, ['mongofiles',
- '--port', '12345',
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'expected mongofiles to fail but it succeeded 1');
- assert.neq(runMongoProgram.apply(this, ['mongofiles',
- '--port', 'random',
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'expected mongofiles to fail but it succeeded 2');
-
- // ensure the file was not inserted
- var count = db.fs.files.count();
- assert.eq(1, count, 'unexpected fs.files count - expected 2 but got ' + count);
-
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- runTests(shardedClusterTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_prefix.js b/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_prefix.js
deleted file mode 100644
index 3d19bd141ee..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_prefix.js
+++ /dev/null
@@ -1,49 +0,0 @@
-// mongofiles_prefix.js; ensure that passing --prefix works as expected - the
-// provided prefix is used as the collection name prefix
-var testName = 'mongofiles_prefix';
-load('jstests/files/util/mongofiles_common.js');
-(function() {
- jsTest.log('Testing mongofiles --prefix option');
-
- var runTests = function(topology, passthrough) {
- var t = topology.init(passthrough);
- var conn = t.connection();
- var db = conn.getDB('test');
-
- jsTest.log('Putting file without --prefix with ' + passthrough.name + ' passthrough');
-
- // ensure tool runs without error
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put 1 failed');
-
- // ensure the default collection name prefix was used
- assert.eq(1, db.fs.files.count(), 'unexpected fs.files count');
- assert.eq(0, db[testName + '.files'].count(), 'unexpected ' + testName + '.files count');
-
- jsTest.log('Putting file with --prefix with ' + passthrough.name + ' passthrough');
-
- // ensure tool runs without error
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--prefix', testName,
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put 2 failed');
-
- // ensure the supplied collection name prefix was used
- assert.eq(1, db.fs.files.count(), 'unexpected fs.files count');
- assert.eq(1, db[testName + '.files'].count(), 'unexpected ' + testName + '.files count');
-
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- runTests(shardedClusterTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_put.js b/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_put.js
deleted file mode 100644
index 87678df0bcb..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_put.js
+++ /dev/null
@@ -1,108 +0,0 @@
-// mongofiles_put.js; ensure that put works with very large files.
-// NOTE: this test uses mongodump to create a large file
-var testName = 'mongofiles_put';
-load('jstests/files/util/mongofiles_common.js');
-(function() {
- jsTest.log('Testing mongofiles put command');
-
- var runTests = function(topology, passthrough) {
- var t = topology.init(passthrough);
- var conn = t.connection();
- var db = conn.getDB('test');
-
- // create a large collection and dump it
- jsTest.log('Creating large collection with ' + passthrough.name + ' passthrough');
-
- var insertString = new Array(100).join("mongoDB");
- var inserted = 0;
- var num = 0;
- var dbName = 'test';
- var collection = 'foo';
- var bulk = db[collection].initializeUnorderedBulkOp();
-
- while (inserted < (40 * 1024 * 1024)) {
- bulk.insert({
- _id: num++,
- str: insertString
- });
- inserted += insertString.length;
- }
-
- assert.writeOK(bulk.execute({w: "majority"}));
-
- // dumping large collection to single large file
- jsTest.log('Dumping collection to filesystem with ' + passthrough.name + ' passthrough');
-
- var dumpDir = './dumpDir';
-
- assert.eq(runMongoProgram.apply(this, ['mongodump',
- '-d', dbName,
- '--port', conn.port,
- '-c', collection,
- '--out', dumpDir]
- .concat(passthrough.args)),
- 0, 'dump failed when it should have succeeded');
-
- jsTest.log('Putting directory');
-
- // putting a directory should fail
- assert.neq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- 'put', dumpDir]
- .concat(passthrough.args)),
- 0, 'put succeeded when it should have failed');
-
- jsTest.log('Putting file with ' + passthrough.name + ' passthrough');
-
- var putFile = dumpDir + '/' + dbName + '/' + collection + '.bson';
-
- // ensure putting of the large file succeeds
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--local', putFile,
- 'put', testName]
- .concat(passthrough.args)),
- 0, 'put failed when it should have succeeded');
-
- // verify file metadata
- var fileObj = db.fs.files.findOne({
- filename: testName
- });
- assert(fileObj, testName + ' was not found');
-
- var numDbChunks = db.fs.chunks.count();
-
- // the number of chunks should be equal to math.ceil[fileSize (KB) / 255 KB]
- // filesize for the dump should be s bytes
- var expectedNumChunks = Math.ceil(fileObj.length / (1024 * 255));
-
- assert.eq(expectedNumChunks, numDbChunks, 'expected ' + expectedNumChunks + ' chunks; got ' + numDbChunks);
-
- // now attempt to get the large file
- jsTest.log('Getting file with ' + passthrough.name + ' passthrough');
-
- // ensure tool runs without error
- var getFile = testName + (Math.random() + 1).toString(36).substring(7);
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--local', getFile,
- 'get', testName]
- .concat(passthrough.args)),
- 0, 'get failed');
-
- // ensure the retrieved file is exactly the same as that inserted
- var actual = md5sumFile(putFile);
- var expected = md5sumFile(getFile);
-
- assert.eq(actual, expected, 'mismatched md5 sum - expected ' + expected + ' got ' + actual);
-
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- runTests(shardedClusterTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_replace.js b/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_replace.js
deleted file mode 100644
index baef44033e0..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_replace.js
+++ /dev/null
@@ -1,79 +0,0 @@
-// mongofiles_replace.js; ensure that after putting a file once multiple times,
-// on using --replace, any and all occurences of the given file is replaced in
-// the GridFS collection - all other files are left as is
-var testName = 'mongofiles_replace';
-load('jstests/files/util/mongofiles_common.js');
-(function() {
- jsTest.log('Testing mongofiles --replace option');
-
- var runTests = function(topology, passthrough) {
- var t = topology.init(passthrough);
- var conn = t.connection();
- var db = conn.getDB('test');
-
- jsTest.log('Running put on file with --replace with ' + passthrough.name + ' passthrough');
-
- // insert the same file a couple of times
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put failed when it should have succeeded 1');
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put failed when it should have succeeded 2');
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put failed when it should have succeeded 3');
-
- // ensure that it is never overwritten
- db.fs.files.findOne({
- filename: filesToInsert[0]
- });
-
- assert.eq(db.fs.files.count(), 3, 'expected 3 files inserted but got ' + db.fs.files.count());
-
- // now run with --replace
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--replace',
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put failed when it should have succeeded 4');
-
- assert.eq(db.fs.files.count(), 1, 'expected 1 file inserted but got ' + db.fs.files.count());
-
- // insert other files but ensure only 1 is replaced
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- 'put', filesToInsert[1]]
- .concat(passthrough.args)),
- 0, 'put failed when it should have succeeded 5');
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- 'put', filesToInsert[2]]
- .concat(passthrough.args)),
- 0, 'put failed when it should have succeeded 6');
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--replace',
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put failed when it should have succeeded 7');
-
- assert.eq(db.fs.files.count(), 3, 'expected 3 files inserted but got ' + db.fs.files.count());
-
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- runTests(shardedClusterTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_search.js b/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_search.js
deleted file mode 100644
index 0a39326ff33..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_search.js
+++ /dev/null
@@ -1,110 +0,0 @@
-// mongofiles_search.js; ensures that the search command returns any and all
-// files that match the regex supplied
-var testName = 'mongofiles_search';
-(function() {
- load('jstests/files/util/mongofiles_common.js');
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var conn;
-
- // Given a list of search strings and an expected result - 0 for present or 1 for
- // hasMatch takes in raw mongofiles search output and a matchItem; it returns 0
- // if it finds the match item in any line of the output and 1 otherwise. If the
- // exactString argument is not empty, hasMatch further checks that the line
- // matches the argument
- var hasMatch = function(output, matchItem, exactString) {
- var lines = output.split('\n');
- var shellOutputRegex = /^sh.*/;
- for (var i = 0; i < lines.length; i++) {
- if (lines[i].match(shellOutputRegex) && lines[i].match(matchItem)) {
- if (exactString && !lines[i].match(exactString)) {
- continue;
- }
- return 0;
- }
- }
- // matchItem wasn't found
- return 1;
- };
-
- // note - assertHasFiles checks that the output of running mongofiles search with
- // each of the search strings meets the expected result supplied. If exactString
- // is not empty, it further checks that the output also matches exactString
- var assertHasFiles = function(passthrough, searchStrings, expectedResult, exactString) {
- // perform a couple of search commands against the GridFS collection
- for (var i = 0; i < searchStrings.length; i++) {
- clearRawMongoProgramOutput();
- var queryString = searchStrings[i];
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--quiet',
- '--port', conn.port,
- 'search', queryString]
- .concat(passthrough.args)),
- 0, 'search command failed on ' + queryString + ' - part of ' + searchStrings);
-
- // eslint-disable-next-line no-loop-func
- assert.eq.soon(expectedResult, function() {
- return hasMatch(rawMongoProgramOutput(), queryString, exactString);
- }, 'search failed: expected "' + queryString + '" to be ' + (expectedResult ? 'found' : 'missing'));
- }
- };
-
- var runTests = function(topology, passthrough) {
- jsTest.log('Testing mongofiles search command');
- var t = topology.init(passthrough);
- conn = t.connection();
-
- jsTest.log('Putting files into GridFS with ' + passthrough.name + ' passthrough');
-
- for (var i = 0; i < filesToInsert.length; i++) {
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- 'put', filesToInsert[i]]
- .concat(passthrough.args)),
- 0, 'put failed on ' + filesToInsert[i] + ' when it should have succeeded');
- }
-
- jsTest.log('Searching files in GridFS with ' + passthrough.name + ' passthrough');
-
- // these search strings should be matched
- var searchStrings = ['files', '.txt', 'ile', '.'];
-
- // add the verbatim file names put into GridFS
- for (i = 0; i < filesToInsert.length; i++) {
- searchStrings.push(filesToInsert[i]);
- }
-
- // all inserted files should be returned
- assertHasFiles(passthrough, searchStrings, 0);
-
- // these search strings should NOT be matched
- searchStrings = ['random', 'always', 'filer'];
- assertHasFiles(passthrough, searchStrings, 1);
-
- // test that only the requested file is returned
- for (i = 0; i < filesToInsert.length; i++) {
- var currentFile = filesToInsert[i];
- jsTest.log('Searching for file ' + currentFile + ' with ' + passthrough.name + ' passthrough');
-
- // ensure the requested file is returned
- assertHasFiles(passthrough, [currentFile], 0);
-
- // ensure no other files are returned
- assertHasFiles(passthrough,
- // eslint-disable-next-line no-loop-func
- filesToInsert.filter(function(file) {
- return file !== currentFile;
- }), 1, currentFile);
- }
-
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- runTests(shardedClusterTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_type.js b/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_type.js
deleted file mode 100644
index c5af7b3e70f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_type.js
+++ /dev/null
@@ -1,63 +0,0 @@
-// mongofiles_type.js; ensure that the given content type is stored when passed
-// as the --type argument. If no argument is passed, it should be omitted in the
-// database.
-var testName = 'mongofiles_type';
-load('jstests/files/util/mongofiles_common.js');
-(function() {
- jsTest.log('Testing mongofiles --type option');
-
- var runTests = function(topology, passthrough) {
- var t = topology.init(passthrough);
- var conn = t.connection();
- var db = conn.getDB('test');
- var contentType = 'txt';
-
- jsTest.log('Running put on file with --type with ' + passthrough.name + ' passthrough');
-
- // ensure tool runs without error with a non-empty --type argument
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '-t', contentType,
- 'put', filesToInsert[0]]
- .concat(passthrough.args)),
- 0, 'put failed when it should have succeeded 1');
-
- var fileObj = db.fs.files.findOne({
- filename: filesToInsert[0]
- });
-
- assert(fileObj, 'did not find expected GridFS file - ' + filesToInsert[0]);
-
- assert.eq(fileObj.contentType, contentType, 'unexpected content type - found ' + fileObj.contentType + ' but expected ' + contentType);
-
- // ensure tool runs without error with empty --type argument on linux
- // and fails on windows
- var comparison = 'eq';
- if (_isWindows()) {
- comparison = 'neq';
- }
- assert[comparison](runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--type', '',
- 'put', filesToInsert[1]]
- .concat(passthrough.args)),
- 0, 'put failed unexpectedly');
-
- if (!_isWindows()) {
- fileObj = db.fs.files.findOne({
- filename: filesToInsert[1]
- });
- assert.neq(fileObj, null, 'did not find expected GridFS file - ' + filesToInsert[1]);
- assert.eq(fileObj.contentType, undefined, 'unexpected content type - found ' + fileObj.contentType + ' but expected undefined');
- }
-
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- runTests(shardedClusterTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_version.js b/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_version.js
deleted file mode 100644
index 3c2c3a6d959..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_version.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// mongofiles_version.js; ensure that getting the version works without error
-var testName = 'mongofiles_version';
-load('jstests/files/util/mongofiles_common.js');
-(function() {
- jsTest.log('Testing mongofiles --version option');
-
- var runTests = function(topology, passthrough) {
- var t = topology.init(passthrough);
- var conn = t.connection();
-
- jsTest.log('Testing --version with ' + passthrough.name + ' passthrough');
-
- // ensure tool runs without error
- assert.eq(runMongoProgram.apply(this, ['mongofiles',
- '--port', conn.port,
- '--version']
- .concat(passthrough.args)),
- 0, '--version failed');
-
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- runTests(shardedClusterTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_write_concern.js b/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_write_concern.js
deleted file mode 100644
index 4e261536273..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_write_concern.js
+++ /dev/null
@@ -1,54 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- var toolTest = new ToolTest('write_concern', null);
- var commonToolArgs = getCommonToolArguments();
-
- var rs = new ReplSetTest({
- name: "rpls",
- nodes: 3,
- useHostName: true,
- settings: {chainingAllowed: false},
- });
-
- rs.startSet();
- rs.initiate();
- rs.awaitReplication();
- toolTest.port = rs.getPrimary().port;
- var dbOne = rs.nodes[0].getDB("dbOne");
-
- function writeConcernTestFunc(exitCode, writeConcern, name) {
- jsTest.log(name);
- ret = toolTest.runTool.apply(toolTest, ['files',
- '-vvvvv',
- '-d', 'dbOne']
- .concat(writeConcern)
- .concat(commonToolArgs)
- .concat(['put', 'jstests/files/testdata/files1.txt']));
- assert.eq(exitCode, ret, name);
- dbOne.dropDatabase();
- }
-
- function noConnectTest() {
- return startMongoProgramNoConnect.apply(null, ['mongofiles',
- '-d', 'dbOne',
- '--writeConcern={w:3}',
- '--host', rs.getPrimary().host]
- .concat(commonToolArgs)
- .concat(['put', 'jstests/files/testdata/files1.txt']));
- }
-
- // drop the database so it's empty
- dbOne.dropDatabase();
-
- // load and run the write concern suite
- load('jstests/libs/wc_framework.js');
- runWCTest("mongofiles", rs, toolTest, writeConcernTestFunc, noConnectTest);
-
- dbOne.dropDatabase();
- rs.stopSet();
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_write_concern_mongos.js b/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_write_concern_mongos.js
deleted file mode 100644
index 45386f2cc2f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/mongofiles_write_concern_mongos.js
+++ /dev/null
@@ -1,59 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- var toolTest = new ToolTest('write_concern', null);
- var commonToolArgs = getCommonToolArguments();
-
- var st = new ShardingTest({
- shards: {
- rs0: {
- nodes: 3,
- useHostName: true,
- settings: {chainingAllowed: false},
- },
- },
- mongos: 1,
- config: 1,
- configReplSetTestOptions: {
- settings: {chainingAllowed: false},
- },
- });
- var rs = st.rs0;
- rs.awaitReplication();
- toolTest.port = st.s.port;
- var dbOne = st.s.getDB('dbOne');
- function writeConcernTestFunc(exitCode, writeConcern, name) {
- jsTest.log(name);
- ret = toolTest.runTool.apply(toolTest, ['files',
- '-vvvvv',
- '-d', 'dbOne']
- .concat(writeConcern)
- .concat(commonToolArgs)
- .concat(['put', 'jstests/files/testdata/files1.txt']));
- assert.eq(exitCode, ret, name);
- dbOne.dropDatabase();
- }
-
- function noConnectTest() {
- return startMongoProgramNoConnect.apply(null, ['mongofiles',
- '-d', 'dbOne',
- '--writeConcern={w:3}',
- '--host', st.s.host]
- .concat(commonToolArgs)
- .concat(['put', 'jstests/files/testdata/files1.txt']));
- }
-
- // drop the database so it's empty
- dbOne.dropDatabase();
-
- // load and run the write concern suite
- load('jstests/libs/wc_framework.js');
- runWCTest("mongofiles", rs, toolTest, writeConcernTestFunc, noConnectTest);
-
- dbOne.dropDatabase();
- rs.stopSet();
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/testdata/files1.txt b/src/mongo/gotools/test/qa-tests/jstests/files/testdata/files1.txt
deleted file mode 100644
index e9ea42a12b9..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/testdata/files1.txt
+++ /dev/null
@@ -1 +0,0 @@
-this is a text file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/testdata/files2.txt b/src/mongo/gotools/test/qa-tests/jstests/files/testdata/files2.txt
deleted file mode 100644
index 6d65e626d46..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/testdata/files2.txt
+++ /dev/null
@@ -1 +0,0 @@
-this is another text file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/testdata/files3.txt b/src/mongo/gotools/test/qa-tests/jstests/files/testdata/files3.txt
deleted file mode 100644
index 181ba5fd828..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/testdata/files3.txt
+++ /dev/null
@@ -1 +0,0 @@
-this is yet another test file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/files/util/mongofiles_common.js b/src/mongo/gotools/test/qa-tests/jstests/files/util/mongofiles_common.js
deleted file mode 100644
index 7ff85d959b1..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/files/util/mongofiles_common.js
+++ /dev/null
@@ -1,10 +0,0 @@
-// mongofiles_common.js; contains variables used by mongofiles tests
-load('jstests/common/topology_helper.js');
-
-/* exported filesToInsert */
-// these must have unique names
-var filesToInsert = [
- 'jstests/files/testdata/files1.txt',
- 'jstests/files/testdata/files2.txt',
- 'jstests/files/testdata/files3.txt'
-];
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/all_primaries_down_error_code.js b/src/mongo/gotools/test/qa-tests/jstests/import/all_primaries_down_error_code.js
deleted file mode 100644
index c5f24f6e697..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/all_primaries_down_error_code.js
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * all_primaries_down_error_code.js
- *
- * This file tests TOOLS-690 where mongoimport returned exit code 0 when it should have returned
- * exit code 1 on error. The error stems from when mongos cannot find a primary.
- * This file tests that errors of type 'could not contact primary for replica set' return exit
- * code 1.
- */
-(function() {
- 'use strict';
- jsTest.log('Testing mongoimport when a sharded cluster has no primaries');
-
- var sh = new ShardingTest({
- name: 'all_primaries_down_error_code',
- shards: 1,
- verbose: 0,
- mongos: 1,
- other: {
- rs: true,
- numReplicas: 3,
- chunksize: 1,
- enableBalancer: 0,
- },
- });
-
- // Make sure there is no primary in any replica set.
- for (var rs of sh._rs) {
- var ranOutOfPrimaries = false;
- for (var i = 0; i < rs.nodes.length + 1; i++) {
- var primary;
- try {
- // If we can't find a primary in 20 seconds than assume there are no more.
- primary = rs.test.getPrimary(20000);
- } catch (e) {
- print('Error Finding Primary: ' + e);
- ranOutOfPrimaries = true;
- break;
- }
-
- jsTest.log('Stepping down ' + primary.host);
-
- try {
- primary.adminCommand({replSetStepDown: 300, force: true});
- } catch (e) {
- // Ignore any errors that occur when stepping down the primary.
- print('Error Stepping Down Primary: ' + e);
- }
- }
- // Assert that we left due to running out of primaries and not due to the loop ending.
- assert(ranOutOfPrimaries,
- 'Had to kill primary more times than number of nodes in the replset.');
- }
-
- // Check that we catch 'could not contact primary for replica set'
- jsTest.log('All primaries stepped down, trying to import.');
-
- var ret = runMongoProgram('mongoimport',
- '--file', 'jstests/import/testdata/basic.json',
- '--db', 'test',
- '--collection', 'noPrimaryErrorCode',
- '--host', sh.s0.host);
- assert.eq(ret, 1, 'mongoimport should fail with no primary');
-
- sh.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/boolean_type.js b/src/mongo/gotools/test/qa-tests/jstests/import/boolean_type.js
deleted file mode 100644
index b7832cc0981..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/boolean_type.js
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * boolean_type.js
- *
- * This file tests the Boolean() type in mongoimport. Importing a document with a field like
- * Boolean(1) should be treated identically to how the shell would insert a similar document.
- */
-
-(function() {
- 'use strict';
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- jsTest.log('Testing running import with various options in the Boolean() type');
-
- var toolTest = getToolTest('import');
- var db1 = toolTest.db;
- var commonToolArgs = getCommonToolArguments();
- var testDocs = [
- {key: 'a', bool: Boolean(1)},
- {key: 'b', bool: Boolean(0)},
- {key: 'c', bool: Boolean(140)},
- {key: 'd', bool: Boolean(-140.5)},
- {key: 'e', bool: Boolean(Boolean(1))},
- {key: 'f', bool: Boolean(Boolean(0))},
- {key: 'g', bool: Boolean('')},
- {key: 'h', bool: Boolean('f')},
- {key: 'i', bool: Boolean(null)},
- {key: 'j', bool: Boolean(undefined)},
- {key: 'k', bool: Boolean(true)},
- {key: 'l', bool: Boolean(false)},
- {key: 'm', bool: Boolean(true, false)},
- {key: 'n', bool: Boolean(false, true)},
- {key: 'o', bool: [Boolean(1), Boolean(0), Date(23)]},
- {key: 'p', bool: Boolean(Date(15))},
- {key: 'q', bool: Boolean(0x585)},
- {key: 'r', bool: Boolean(0x0)},
- {key: 's', bool: Boolean()},
- ];
-
- var ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', 'jstests/import/testdata/boolean.json',
- '--db', 'imported',
- '--collection', 'testcollbool']
- .concat(commonToolArgs));
- assert.eq(ret, 0);
-
- // Confirm that mongoimport imports the testDocs identically to how the shell interprets them.
- var coll = db1.getSiblingDB('imported').testcollbool;
- for (var i = 0; i < testDocs.length; i++) {
- var postImportDoc = coll.findOne({key: testDocs[i].key});
- assert.eq(testDocs[i].key, postImportDoc.key,
- 'imported doc ' + testDocs[i].key + 'does not match original');
- }
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/collections.js b/src/mongo/gotools/test/qa-tests/jstests/import/collections.js
deleted file mode 100644
index cf72bf581a8..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/collections.js
+++ /dev/null
@@ -1,77 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- jsTest.log('Testing running import with bad command line options');
-
- var toolTest = getToolTest('import');
- var db1 = toolTest.db;
- var commonToolArgs = getCommonToolArguments();
-
-
- // Make a dummy file to import by writing a test collection and exporting it
- assert.eq(0, db1.c.count(), "setup1");
- db1.c.save({a: 1, b: 2, c: 3});
- db1.c.save({a: 4, b: 5, c: 6});
- assert.eq(2, db1.c.count(), "setup2");
-
- toolTest.runTool.apply(toolTest, ["export",
- "--out", toolTest.extFile,
- "-d", toolTest.baseName,
- "-c", db1.c.getName()]
- .concat(commonToolArgs));
-
- db1.c.drop();
- assert.eq(0, db1.c.count(), "after drop", "-d", toolTest.baseName, "-c", "foo");
-
-
- // copy the file to a file that contains the collection name
- removeFile("foo.blah.json");
- copyFile(toolTest.extFile, "foo.blah.json");
-
- // copy the file to a file that contains the collection name plus an extra extension (.backup)
- removeFile("foo.blah.json.backup");
- copyFile(toolTest.extFile, "foo.blah.json.backup");
-
-
- toolTest.runTool.apply(toolTest, ["import",
- "--file", "foo.blah.json"]
- .concat(commonToolArgs));
- assert.eq(db1.c.getDB().getSiblingDB("test").foo.blah.count(), 2,
- "importing file named after collection should insert to correct namespace");
- db1.c.getDB().getSiblingDB("test").foo.blah.drop();
-
- toolTest.runTool.apply(toolTest, ["import",
- "--file", "foo.blah.json.backup"]
- .concat(commonToolArgs));
- assert.eq(db1.c.getDB().getSiblingDB("test").foo.blah.json.count(), 2,
- "importing file with extra extension should still assume correct namespace");
- db1.c.getDB().getSiblingDB("test").foo.blah.json.drop();
-
- toolTest.runTool.apply(toolTest, ["import",
- "--file", "foo.blah.json",
- "--collection", "testcoll1"]
- .concat(commonToolArgs));
- assert.eq(db1.c.getDB().getSiblingDB("test").testcoll1.count(), 2,
- "importing --file with --collection should use correct collection name");
- db1.c.getDB().getSiblingDB("test").testcoll1.drop();
-
- toolTest.runTool.apply(toolTest, ["import",
- "foo.blah.json"]
- .concat(commonToolArgs));
- assert.eq(db1.c.getDB().getSiblingDB("test").foo.blah.count(), 2,
- "should be allowed to specify file as positional arg");
- db1.c.getDB().getSiblingDB("test").foo.blah.drop();
-
- toolTest.runTool.apply(toolTest, ["import",
- "foo.blah.json",
- "--db", "testdb2"]
- .concat(commonToolArgs));
- assert.eq(db1.c.getDB().getSiblingDB("testdb2").foo.blah.count(), 2,
- "should use database specified by --db");
- db1.c.getDB().getSiblingDB("testdb2").foo.blah.drop();
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/decimal128.js b/src/mongo/gotools/test/qa-tests/jstests/import/decimal128.js
deleted file mode 100644
index 9024096ca18..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/decimal128.js
+++ /dev/null
@@ -1,44 +0,0 @@
-(function() {
- // skip this test where NumberDecimal is unsupported (3.2 and earlier)
- if (typeof NumberDecimal === 'undefined') {
- return;
- }
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- jsTest.log('Testing running import with various data types');
-
- var toolTest = getToolTest('import');
- var db1 = toolTest.db;
- var commonToolArgs = getCommonToolArguments();
-
- var testDoc = {
- _id: "foo",
- x: NumberDecimal("124124"),
- };
-
- // Make a dummy file to import by writing a test collection and exporting it
- assert.eq(0, db1.c.count(), "initial collection is not empty");
- db1.c.save(testDoc);
- toolTest.runTool.apply(toolTest, ["export",
- "--out", toolTest.extFile,
- "-d", toolTest.baseName,
- "-c", db1.c.getName()]
- .concat(commonToolArgs));
-
- toolTest.runTool.apply(toolTest, ["import",
- "--file", toolTest.extFile,
- "--db", "imported",
- "--collection", "dec128"]
- .concat(commonToolArgs));
- var importedDocs = db1.c.getDB().getSiblingDB("imported").dec128.find().toArray();
-
- assert.eq(importedDocs.length, 1, "incorrect # of docs imported");
-
- var importedDoc = importedDocs[0];
-
- assert.eq(importedDoc, testDoc, "imported doc and test doc do not match");
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/drop.js b/src/mongo/gotools/test/qa-tests/jstests/import/drop.js
deleted file mode 100644
index a5a115269e0..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/drop.js
+++ /dev/null
@@ -1,48 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- jsTest.log('Testing running import with bad command line options');
-
- var toolTest = getToolTest('import_writes');
- var db = toolTest.db.getSiblingDB("droptest");
- var commonToolArgs = getCommonToolArguments();
-
- // Verify that --drop works.
- // put a test doc in the collection, run import with --drop,
- // make sure that the inserted doc is gone and only the imported
- // docs are left.
- db.c.insert({x: 1});
- assert.eq(db.c.count(), 1, "collection count should be 1 at setup");
- var ret = toolTest.runTool.apply(toolTest, ["import",
- "--file", "jstests/import/testdata/csv_header.csv",
- "--type=csv",
- "--db", db.getName(),
- "--collection", db.c.getName(),
- "--headerline",
- "--drop"]
- .concat(commonToolArgs));
-
- // test csv file contains 3 docs and collection should have been dropped, so the doc we inserted
- // should be gone and only the docs from the test file should be in the collection.
- assert.eq(ret, 0);
- assert.eq(db.c.count(), 3);
- assert.eq(db.c.count({x: 1}), 0);
-
- // --drop on a non-existent collection should not cause error
- db.c.drop();
- ret = toolTest.runTool.apply(toolTest, ["import",
- "--file", "jstests/import/testdata/csv_header.csv",
- "--type=csv",
- "--db", db.getName(),
- "--collection", db.c.getName(),
- "--headerline",
- "--drop"]
- .concat(commonToolArgs));
- assert.eq(ret, 0);
- assert.eq(db.c.count(), 3);
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/fields.js b/src/mongo/gotools/test/qa-tests/jstests/import/fields.js
deleted file mode 100644
index 073ac01dc20..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/fields.js
+++ /dev/null
@@ -1,107 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- jsTest.log('Testing running import with headerline');
-
- formats = ["csv", "tsv"];
-
- var checkCollectionContents = function(coll) {
- var importedDoc = coll.findOne({"a": "foo"});
- delete importedDoc["_id"];
- assert.docEq(importedDoc, {a: "foo", b: "bar", c: {xyz: "blah"}, d: {hij: {lkm: "qwz"}}});
- assert.eq(coll.count(), 3);
- };
-
- var reset = function(coll) {
- coll.drop();
- assert.eq(coll.count(), 0);
- };
-
- var toolTest = getToolTest("import_fields");
- var db1 = toolTest.db;
- var commonToolArgs= getCommonToolArguments();
- for (var i=0; i<formats.length; i++) {
- var format=formats[i];
-
- var c = db1.c.getDB().getSiblingDB(format + "testdb")[format+"testcoll"];
- // check that headerline uses the correct headers
- var ret = toolTest.runTool.apply(toolTest, ["import", "--file",
- "jstests/import/testdata/" +format+"_header." + format,
- "--type=" + format,
- "--db", format + "testdb",
- "--collection", format + "testcoll",
- "--headerline"]
- .concat(commonToolArgs));
-
- checkCollectionContents(c);
- reset(c);
-
- // check that the fields can be specified with --fields
- ret = toolTest.runTool.apply(toolTest, ["import", "--file",
- "jstests/import/testdata/" +format+"_noheader." + format,
- "--type=" + format,
- "--db", format + "testdb",
- "--collection", format + "testcoll",
- "--fields", "a,b,c.xyz,d.hij.lkm"]
- .concat(commonToolArgs));
- checkCollectionContents(c);
- reset(c);
-
- // check that the fields can be specified with --fieldsFile
- ret = toolTest.runTool.apply(toolTest, ["import", "--file",
- "jstests/import/testdata/" +format+"_noheader." + format,
- "--type=" + format,
- "--db", format + "testdb",
- "--collection", format + "testcoll",
- "--fieldFile", "jstests/import/testdata/fieldfile"]
- .concat(commonToolArgs));
- checkCollectionContents(c);
- // check that without --ignoreBlanks, the empty field is just blank string
- assert.eq(c.findOne({a: "bob"}).b, "");
- reset(c);
-
- // check that --ignoreBlanks causes empty fields to be omitted
- ret = toolTest.runTool.apply(toolTest, ["import", "--file",
- "jstests/import/testdata/" + format + "_noheader." + format,
- "--type=" + format,
- "--db", format + "testdb",
- "--collection", format + "testcoll",
- "--fieldFile", "jstests/import/testdata/fieldfile",
- "--ignoreBlanks"]
- .concat(commonToolArgs));
- assert.eq(c.findOne({a: "bob"}).b, undefined);
- reset(c);
-
- // when --fieldFile, --fields, and --headerline are all omitted,
- // import should fail
- ret = toolTest.runTool.apply(toolTest, ["import", "--file",
- "jstests/import/testdata/" + format + "_noheader." + format,
- "--type=" + format,
- "--db", format + "testdb",
- "--collection", format + "testcoll"]
- .concat(commonToolArgs));
- assert.neq(ret, 0);
- reset(c);
-
- }
-
- var c2 = db1.c.getDB().getSiblingDB("testdb")["extrafields"];
- // check that extra fields are created as expected
- ret = toolTest.runTool.apply(toolTest, ["import", "--file",
- "jstests/import/testdata/extrafields.csv",
- "--type=csv",
- "--db", c2.getDB().toString(),
- "--collection", c2.getName(),
- "--fieldFile", "jstests/import/testdata/fieldfile"]
- .concat(commonToolArgs));
-
- var importedDoc = c2.findOne({"a": "one"});
- assert.eq(importedDoc.field4, "extra1");
- assert.eq(importedDoc.field5, "extra2");
- assert.eq(importedDoc.field6, "extra3");
- reset(c2);
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/import_document_validation.js b/src/mongo/gotools/test/qa-tests/jstests/import/import_document_validation.js
deleted file mode 100644
index 6fca242cb4a..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/import_document_validation.js
+++ /dev/null
@@ -1,110 +0,0 @@
-/**
- * import_document_validation.js
- *
- * This file test that mongoimport works with document validation. It both checks that when
- * validation is turned on invalid documents are not imported and that when a user indicates
- * they want to bypass validation, that all documents are imported.
- */
-(function() {
- 'use strict';
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- /**
- * Part 1: Test that import follows document validation rules.
- */
- jsTest.log('Testing that import reacts well to document validation');
-
- var toolTest = getToolTest('import_document_validation');
- var commonToolArgs = getCommonToolArguments();
-
- // the db we will use
- var testDB = toolTest.db.getSiblingDB('test');
-
- // create 1000 documents, half of which will pass the validation
- var data = [];
- for (var i = 0; i < 1000; i++) {
- if (i%2 === 0) {
- data.push({_id: i, num: i+1, s: '' + i});
- } else {
- data.push({_id: i, num: i+1, s: '' + i, baz: i});
- }
- }
- testDB.bar.insertMany(data);
- // sanity check the insertion worked
- assert.eq(1000, testDB.bar.count());
-
- // export the data
- var ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', toolTest.extFile,
- '-d', 'test',
- '-c', 'bar']
- .concat(commonToolArgs));
- assert.eq(0, ret, 'export should run successfully');
-
- testDB.dropDatabase();
- assert.eq(0, testDB.bar.count(),
- 'after dropping the database, no documents should be seen');
-
- // sanity check that we can import the data without validation
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', toolTest.extFile,
- '--db', 'test',
- '-c', 'bar']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- assert.eq(1000, testDB.bar.count(),
- 'after import, the documents should be seen again');
-
- testDB.dropDatabase();
- assert.eq(0, testDB.bar.count(),
- 'after dropping the database, no documents should be seen');
-
- // turn on validation
- var r = testDB.createCollection('bar', {validator: {baz: {$exists: true}}});
- assert.eq(r, {ok: 1}, 'create collection with validation works');
-
- // test that it's working
- r = testDB.bar.insert({num: 10000});
- assert.eq(r.nInserted, 0, "invalid documents shouldn't be inserted");
-
- // import the 1000 records of which only 500 are valid
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', toolTest.extFile,
- '--db', 'test',
- '-c', 'bar']
- .concat(commonToolArgs));
- assert.eq(0, ret,
- 'import against a collection with validation on still succeeds');
-
- assert.eq(500, testDB.bar.count(), 'only the valid documents are imported');
-
- /**
- * Part 2: Test that import can bypass document validation rules.
- */
- jsTest.log('Testing that bypass document validation works');
-
- testDB.dropDatabase();
-
- // turn on validation
- r = testDB.createCollection('bar', {validator: {baz: {$exists: true}}});
- assert.eq(r, {ok: 1}, 'create collection with validation should work');
-
- // test that we cannot insert an 'invalid' document
- r = testDB.bar.insert({num: 10000});
- assert.eq(r.nInserted, 0, 'invalid documents should not be inserted');
-
- // import the 1000 records again with bypassDocumentValidation turned on
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', toolTest.extFile,
- '--db', 'test',
- '-c', 'bar',
- '--bypassDocumentValidation']
- .concat(commonToolArgs));
- assert.eq(0, ret,
- 'importing documents should work with bypass document validation set');
- assert.eq(1000, testDB.bar.count(),
- 'all documents should be imported with bypass document validation set');
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/import_types.js b/src/mongo/gotools/test/qa-tests/jstests/import/import_types.js
deleted file mode 100644
index 3d0a26d745d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/import_types.js
+++ /dev/null
@@ -1,75 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- jsTest.log('Testing importing a json file and checking types');
-
- var toolTest = getToolTest('import_types');
-
- // the import file
- var importFile = 'jstests/import/testdata/types.json';
-
- // the db and collection we'll use
- var testDB = toolTest.db.getSiblingDB('imported');
- var testColl = testDB.types;
- testColl.drop();
- var commonToolArgs = getCommonToolArguments();
-
- var importTypes = {
- "double_type": 1,
- "double_exponent_type": 1,
- "double_negative_type": 1,
- "NaN": 1,
- "infinity": 1,
- "negative_infinity": 1,
- "string_type": 2,
- "object_type": 3,
- "binary_data": 5,
- "undefined_type": 6,
- "object_id_type": 7,
- "true_type": 8,
- "false_type": 8,
- "date_type": 9,
- "iso_date_type": 9,
- "null_type": 10,
- "int32_type": 16,
- "int32_negative_type": 16,
- "number_int_type": 16,
- "int32_hex": 16,
- "int64_type": 18,
- "int64_negative_type": 18,
- "number_long_type": 18,
- "minkey_type": -1,
- "maxkey_type": 127,
- "regex_type": 11,
- };
-
-
- // import the data in from types.json
- ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', importFile,
- '--db', 'imported',
- '--collection', 'types']
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- jsTest.log("Imported", importFile);
-
- var postImportDoc = testColl.findOne();
- printjson(postImportDoc);
-
- docKeys = Object.keys(importTypes);
-
- for (var i = 0; i < docKeys.length; i++) {
- jsTest.log("Checking type of", docKeys[i]);
- var typeNum = importTypes[docKeys[i]];
- var field = docKeys[i];
- var query = {};
- query[field] = {"$type": typeNum};
- printjson(query);
- assert.eq(testColl.find(query).count(), 1);
- }
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/import_write_concern.js b/src/mongo/gotools/test/qa-tests/jstests/import/import_write_concern.js
deleted file mode 100644
index 0f068aa2b4e..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/import_write_concern.js
+++ /dev/null
@@ -1,72 +0,0 @@
-(function() {
-
- load("jstests/configs/replset_28.config.js");
-
- var name = 'import_write_concern';
- var toolTest = new ToolTest(name, null);
- var dbName = "foo";
- var colName = "bar";
- var rs = new ReplSetTest({
- name: name,
- nodes: 3,
- useHostName: true,
- settings: {chainingAllowed: false},
- });
-
- var commonToolArgs = getCommonToolArguments();
- var fileTarget = "wc.csv";
- rs.startSet();
- rs.initiate();
- rs.awaitReplication();
- toolTest.port = rs.getPrimary().port;
-
- var db = rs.getPrimary().getDB(dbName);
-
- function writeConcernTestFunc(exitCode, writeConcern, name) {
- jsTest.log(name);
- var ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', fileTarget,
- '-d', dbName,
- '-c', colName]
- .concat(writeConcern)
- .concat(commonToolArgs));
- assert.eq(exitCode, ret, name);
- db.dropDatabase();
- }
-
- function noConnectTest() {
- return startMongoProgramNoConnect.apply(null, ['mongoimport',
- '--writeConcern={w:3}',
- '--host', rs.getPrimary().host,
- '--file', fileTarget]
- .concat(commonToolArgs));
- }
-
- // create a test collection
- var data = [];
- for (var i=0; i<=100; i++) {
- data.push({_id: i, x: i*i});
- }
- db.getCollection(colName).insertMany(data);
- rs.awaitReplication();
-
- // export the data that we'll use
- var ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', fileTarget,
- '-d', dbName,
- '-c', colName]
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the database so it's empty
- db.dropDatabase();
-
- // load and run the write concern suite
- load('jstests/libs/wc_framework.js');
- runWCTest("mongoimport", rs, toolTest, writeConcernTestFunc, noConnectTest);
-
- db.dropDatabase();
- rs.stopSet();
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/import_write_concern_mongos.js b/src/mongo/gotools/test/qa-tests/jstests/import/import_write_concern_mongos.js
deleted file mode 100644
index bd73b214098..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/import_write_concern_mongos.js
+++ /dev/null
@@ -1,77 +0,0 @@
-(function() {
-
- load("jstests/configs/replset_28.config.js");
-
- var name = 'import_write_concern';
- var toolTest = new ToolTest(name, null);
- var dbName = "foo";
- var colName = "bar";
- var fileTarget = "wc_mongos.csv";
- var st = new ShardingTest({
- shards: {
- rs0: {
- nodes: 3,
- useHostName: true,
- settings: {chainingAllowed: false},
- },
- },
- mongos: 1,
- config: 1,
- configReplSetTestOptions: {
- settings: {chainingAllowed: false},
- },
- });
- var rs = st.rs0;
- rs.awaitReplication();
- toolTest.port = st.s.port;
-
- var commonToolArgs = getCommonToolArguments();
- var db = st.s.getDB(dbName);
-
- function writeConcernTestFunc(exitCode, writeConcern, name) {
- jsTest.log(name);
- var ret = toolTest.runTool.apply(toolTest, ['import',
- '--file', fileTarget,
- '-d', dbName,
- '-c', colName]
- .concat(writeConcern)
- .concat(commonToolArgs));
- assert.eq(exitCode, ret, name);
- db.dropDatabase();
- }
-
- function startProgramNoConnect() {
- return startMongoProgramNoConnect.apply(null, ['mongoimport',
- '--writeConcern={w:3}',
- '--host', st.s.host,
- '--file', fileTarget]
- .concat(commonToolArgs));
- }
-
- // create a test collection
- var data = [];
- for (var i=0; i<=100; i++) {
- data.push({_id: i, x: i*i});
- }
- db.getCollection(colName).insertMany(data);
- rs.awaitReplication();
-
- // setup: export the data that we'll use
- var ret = toolTest.runTool.apply(toolTest, ['export',
- '--out', fileTarget,
- '-d', dbName,
- '-c', colName]
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the database so it's empty
- db.dropDatabase();
-
- // load and run the write concern suite
- load('jstests/libs/wc_framework.js');
- runWCTest("mongoimport", rs, toolTest, writeConcernTestFunc, startProgramNoConnect);
-
- db.dropDatabase();
- rs.stopSet();
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/mode.js b/src/mongo/gotools/test/qa-tests/jstests/import/mode.js
deleted file mode 100644
index 0e9a3205fc8..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/mode.js
+++ /dev/null
@@ -1,147 +0,0 @@
-(function() {
- jsTest.log('Testing running import with modes');
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- jsTest.log('Testing running import with bad command line cmdArgs');
-
- var toolTest = getToolTest('import');
- var db1 = toolTest.db;
-
- var db = db1.getSiblingDB("upserttest");
- db.dropDatabase();
-
- var commonToolArgs = [
- "--db", db.getName(),
- "--collection", db.c.getName(),
- ].concat(getCommonToolArguments());
-
- function testWithUpsertFields(expectMode, cmdArg) {
- // This works by applying update w/ query on the fields
- db.c.drop();
- var doc1_origin = {a: 1234, b: "000000", c: 222, x: "origin field"};
- var doc2_1_origin = {a: 4567, b: "111111", c: 333, x: "origin field"};
- db.c.insert(doc1_origin);
- db.c.insert(doc2_1_origin);
- assert.eq(db.c.count(), 2, "collection count should be 2 at setup");
-
- var argv = ["import",
- "--file", "jstests/import/testdata/upsert2.json",
- "--upsertFields", "a,c"];
- if (cmdArg) {
- argv.push(cmdArg);
- }
- argv = argv.concat(commonToolArgs);
- var ret = toolTest.runTool.apply(toolTest, argv);
- if (expectMode === "error") {
- return assert.neq(ret, 0);
- }
- assert.eq(ret, 0);
-
- var doc1 = db.c.findOne({a: 1234});
- var doc1_expect;
- delete doc1["_id"];
- switch (expectMode) {
- case "upsert":
- doc1_expect = {a: 1234, b: "blah", c: 222};
- break;
- case "merge":
- doc1_expect = {a: 1234, b: "blah", c: 222, x: "origin field"};
- break;
- default:
- throw new Error();
- }
- assert.docEq(doc1, doc1_expect);
-
- var doc2_1 = db.c.findOne({a: 4567, c: 333});
- var doc2_2 = db.c.findOne({a: 4567, c: 222});
- delete doc2_1["_id"];
- delete doc2_2["_id"];
- var doc2_1_expect, doc2_2_expect;
- switch (expectMode) {
- case "upsert":
- doc2_1_expect = {a: 4567, b: "yyy", c: 333};
- doc2_2_expect = {a: 4567, b: "asdf", c: 222};
- break;
- case "merge":
- doc2_1_expect = {a: 4567, b: "yyy", c: 333, x: "origin field"};
- doc2_2_expect = {a: 4567, b: "asdf", c: 222};
- break;
- default:
- throw new Error();
- }
- assert.docEq(doc2_1, doc2_1_expect);
- assert.docEq(doc2_2, doc2_2_expect);
- }
-
- function testWithoutUpsertFields(expectMode, cmdArg) {
- // This works by applying the update using _id
- db.c.drop();
- var docOrigin = [
- {_id: "one", a: "origin value", x: "origin field"},
- {_id: "two", a: "origin value 2", x: "origin field"},
- ];
- db.c.insert(docOrigin[0]);
- db.c.insert(docOrigin[1]);
- assert.eq(db.c.count(), 2, "collection count should be 2 at setup");
-
- var argv = ["import", "--file", "jstests/import/testdata/upsert1.json"];
- if (cmdArg) {
- argv.push(cmdArg);
- }
- argv = argv.concat(commonToolArgs);
- var ret = toolTest.runTool.apply(toolTest, argv);
- if (expectMode === "error") {
- return assert.neq(ret, 0);
- }
- assert.eq(ret, 0);
- assert.eq(db.c.count(), 2);
-
- var docs = [
- db.c.findOne({_id: "one"}),
- db.c.findOne({_id: "two"}),
- ];
- var docExpects = [];
- switch (expectMode) {
- case "insert":
- docExpects = docOrigin;
- break;
- case "upsert":
- docExpects = [
- {_id: "one", a: "unicorns", b: "zebras"},
- {_id: "two", a: "xxx", b: "yyy"},
- ];
- break;
- case "merge":
- docExpects = [
- {_id: "one", a: "unicorns", b: "zebras", x: "origin field"},
- {_id: "two", a: "xxx", b: "yyy", x: "origin field"},
- ];
- break;
- default:
- throw new Error();
- }
- assert.docEq(docs, docExpects);
- }
-
- // argument-1: expected behavior
- // argument-2: command argument for mongoimport
-
- testWithUpsertFields("error", "--mode=wrong");
- testWithUpsertFields("error", "--mode=insert");
- testWithUpsertFields("upsert", "");
- testWithUpsertFields("upsert", "--upsert"); // deprecated cmdArg
- testWithUpsertFields("upsert", "--mode=upsert");
- testWithUpsertFields("merge", "--mode=merge");
-
- testWithoutUpsertFields("error", "--mode=wrong");
- testWithoutUpsertFields("insert", "--mode=insert");
- testWithoutUpsertFields("insert", "");
- testWithoutUpsertFields("upsert", "--upsert"); // deprecated cmdArg
- testWithoutUpsertFields("upsert", "--mode=upsert");
- testWithoutUpsertFields("merge", "--mode=merge");
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/mode_upsert_id_subdoc.js b/src/mongo/gotools/test/qa-tests/jstests/import/mode_upsert_id_subdoc.js
deleted file mode 100644
index a253e3f3740..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/mode_upsert_id_subdoc.js
+++ /dev/null
@@ -1,86 +0,0 @@
-(function() {
- // This test creates a collection with a subdocument _id field. We export the collection,
- // replace the existing documents with a pre-made dataset and --mode=upsert, then overwrite
- // that with the original data, again with --mode=upsert. This verifies that import and
- // export do not change the order of _id fields.
- jsTest.log('Testing running import with --mode=upsert and _id subdocuments');
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- var toolTest = getToolTest('import');
- var db1 = toolTest.db;
- var commonToolArgs = getCommonToolArguments();
-
- var db = db1.getSiblingDB("upserttest");
- db.dropDatabase();
-
- // create a set of documents with a subdocument _id
- var h, i, j;
- for (h = 0; h < 2; h++) {
- var data = [];
- for (i = h * 50; i < (h+1) * 50; i++) {
- for (j = 0; j < 20; j++) {
- data.push({
- _id: {
- a: i,
- b: [0, 1, 2, {c: j, d: "foo"}],
- e: "bar",
- },
- x: "string",
- });
- }
- }
- db.c.insertMany(data);
- }
- assert.eq(db.c.count(), 2000);
-
- jsTest.log('Exporting documents with subdocument _ids.');
- var ret = toolTest.runTool.apply(toolTest, ["export",
- "-o", toolTest.extFile,
- "--db", db.getName(),
- "--collection", db.c.getName()]
- .concat(commonToolArgs));
- assert.eq(ret, 0, "export should succeed");
-
- jsTest.log('Upserting pre-made documents with subdocument _ids.');
- ret = toolTest.runTool.apply(toolTest, ["import",
- "--file", "jstests/import/testdata/upsert3.json",
- "--mode=upsert",
- "--db", db.getName(),
- "--collection", db.c.getName()]
- .concat(commonToolArgs));
- assert.eq(ret, 0, "import should succeed");
- assert.eq(db.c.count(), 2000,
- "count should be the same before and after import");
-
- // check each document
- for (i = 0; i < 100; i++) {
- for (j = 0; j < 20; j++) {
- assert.eq(db.c.findOne({_id: {a: i, b: [0, 1, 2, {c: j, d: "foo"}], e: "bar"}}).x, "str2",
- "all documents should be updated");
- }
- }
-
- jsTest.log('Upserting original exported documents with subdocument _ids.');
- ret = toolTest.runTool.apply(toolTest, ["import",
- "--file", toolTest.extFile,
- "--mode=upsert",
- "--db", db.getName(),
- "--collection", db.c.getName()]
- .concat(commonToolArgs));
- assert.eq(ret, 0, "import should succeed");
- assert.eq(db.c.count(), 2000,
- "count should be the same before and after import");
-
- // check each document to see that it is back at its original value
- for (i = 0; i < 100; i++) {
- for (j = 0; j < 20; j++) {
- assert.eq(db.c.findOne({_id: {a: i, b: [0, 1, 2, {c: j, d: "foo"}], e: "bar"}}).x, "string",
- "all documents should be updated");
- }
- }
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/no_primary_error_code.js b/src/mongo/gotools/test/qa-tests/jstests/import/no_primary_error_code.js
deleted file mode 100644
index 5fc0356e9e0..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/no_primary_error_code.js
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * no_primary_error_code.js
- *
- * This file tests TOOLS-690 where mongoimport returned exit code 0 when it should have returned
- * exit code 1 on error. The error stems from when mongos cannot find a primary. This file checks
- * that errors of type 'not master', 'unable to target', and 'Connection refused' yield error
- * code 1.
- */
-(function() {
- 'use strict';
- jsTest.log('Testing mongoimport when a sharded cluster has no primaries');
-
- var sh = new ShardingTest({
- name: 'no_primary_error_code',
- shards: 1,
- verbose: 0,
- mongos: 1,
- other: {
- rs: true,
- numReplicas: 1,
- chunksize: 1,
- enableBalancer: 0,
- },
- });
-
- // If we can't find a primary in 20 seconds than assume there are no more.
- var primary = sh.rs0.getPrimary(20000);
-
- jsTest.log('Stepping down ' + primary.host);
-
- try {
- primary.adminCommand({replSetStepDown: 300, force: true});
- } catch (e) {
- // Ignore any errors that occur when stepping down the primary.
- print('Error Stepping Down Primary: ' + e);
- }
-
- // Check that we catch 'not master'
- jsTest.log('All primaries stepped down, trying to import.');
-
-
- var ret = runMongoProgram('mongoimport',
- '--file', 'jstests/import/testdata/basic.json',
- '--db', 'test',
- '--collection', 'noPrimaryErrorCode',
- '--host', sh.s0.host);
- assert.eq(ret, 1, 'mongoimport should fail with no primary');
-
- sh.getDB('test').dropDatabase();
-
- // Kill the replica set.
- sh.rs0.stopSet(15);
-
- // Check that we catch 'Connection refused'
- jsTest.log('All primaries died, trying to import.');
-
- ret = runMongoProgram('mongoimport',
- '--file', 'jstests/import/testdata/basic.json',
- '--db', 'test',
- '--collection', 'noPrimaryErrorCode',
- '--host', sh.s0.host);
- assert.eq(ret, 1, 'mongoimport should fail with no primary');
-
- sh.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/options.js b/src/mongo/gotools/test/qa-tests/jstests/import/options.js
deleted file mode 100644
index 12be9fd3bd4..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/options.js
+++ /dev/null
@@ -1,123 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- jsTest.log('Testing running import with bad command line options');
-
- var toolTest = getToolTest('bad_options');
- var db1 = toolTest.db;
-
- // Make a dummy file to import by writing a test collection and exporting it
- assert.eq(0, db1.c.count(), "setup1");
- db1.c.save({a: 1, b: 2, c: 3});
- db1.c.save({a: 4, b: 5, c: 6});
- assert.eq(2, db1.c.count(), "setup2");
-
- toolTest.runTool("export",
- "--out", toolTest.extFile,
- "-d", toolTest.baseName,
- "-c", db1.c.getName());
-
- // also make a CSV version of it
- toolTest.runTool("export",
- "--out", toolTest.extFile + ".csv",
- "-d", toolTest.baseName,
- "-c", db1.c.getName(),
- "--fields", "a,b,c",
- "--csv");
- db1.c.drop();
- assert.eq(0, db1.c.count(), "after drop", "-d", toolTest.baseName, "-c", "foo");
-
- // verify that the normal sane case works
- var ret = toolTest.runTool("import",
- "--file", toolTest.extFile,
- "-d", "test",
- "-c", "test");
- assert.eq(ret, 0);
-
- var testDb = db1.c.getDB().getSiblingDB("test");
- assert.eq.soon(2, testDb.test.count.bind(testDb.test), "test.test should have 2 records");
- testDb.test.drop();
-
- var testScenarios = [
- {args: [],
- desc: "importing with no args should fail"},
-
- {args: [toolTest.extFile, toolTest.extFile],
- desc: "importing with multiple positional args should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", toolTest.extFile, toolTest.extFile],
- desc: "specifying both a --file and a positional argument should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", "non-existent-file.json"],
- desc: "specifying a --file with a nonexistent filename should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", "."],
- desc: "specifying a --file with a directory name should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", toolTest.extFile, "--type", "bogus"],
- desc: "importing with an invalid --type should fail"},
-
- {args: ["--db", "x.y.z", "-c", "test", "--file", toolTest.extFile],
- desc: "importing with an invalid database name (. in name) should fail"},
-
- {args: ["--db", "$x", "-c", "test", "--file", toolTest.extFile],
- desc: "importing with an invalid database name ($ in name) should fail"},
-
- {args: ["--db", "test", "-c", "blah$asfsaf", "--file", toolTest.extFile],
- desc: "importing with an invalid collection name should fail"},
-
- {args: ["--db", "test", "-c", "blah$asfsaf", "--file", toolTest.extFile],
- desc: "importing with an invalid collection name should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", toolTest.extFile + ".csv", "--type=csv", "--fields", "a,$xz,b"],
- desc: "--fields containing a field containing a $ should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", toolTest.extFile, "--type=json", "--fields", "a,b"],
- desc: "specifying --fields with --json should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", toolTest.extFile + ".csv", "--headerline", "--fields", "a,b", "--type=csv"],
- desc: "specifying both --fields and --headerline should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", toolTest.extFile + ".csv", "--type=csv", "--fields", "a,b", "--fieldFile", toolTest.extFile + ".csv"],
- desc: "specifying both --fields and --fieldFile should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", toolTest.extFile + ".csv", "--type=csv", "--headerline", "--fieldFile", toolTest.extFile + ".csv"],
- desc: "specifying both --headerline and --fieldFile should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", toolTest.extFile + ".csv", "--type=csv", "--fields", "a,b,b"],
- desc: "--fields with duplicate field names should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", toolTest.extFile + ".csv", "--type=csv", "--fields", "a,b,b.c"],
- desc: "--fields with field names of overlapping structures should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", toolTest.extFile, "--type=csv", "--fields", "a,b,b.c"],
- desc: "--fields with field names of overlapping structures should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", toolTest.extFile, "--upsertFields", "a,$b"],
- desc: "invalid characters in upsertFields should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", toolTest.extFile, "--jsonArray"],
- desc: "using --jsonArray with a non-array input file should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", toolTest.extFile + ".csv", "--type=json"],
- desc: "using --type=json with invalid json should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", toolTest.extFile, "--type=csv", "--fields=a,b,c"],
- desc: "using --type=csv with invalid csv should fail"},
-
- {args: ["--db", "test", "-c", "test", "--file", toolTest.extFile, "--type=json", "--headerline"],
- desc: "using --type=json with headerline should fail"},
- ];
-
- for (var i=0; i<testScenarios.length; i++) {
- jsTest.log('Testing: ' + testScenarios[i].desc);
- ret = toolTest.runTool.apply(toolTest, ["import"].concat(testScenarios[i].args));
- assert.neq(0, ret, i + ": " + testScenarios[i].desc);
- }
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/parse_grace.js b/src/mongo/gotools/test/qa-tests/jstests/import/parse_grace.js
deleted file mode 100644
index 5726ecaafd9..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/parse_grace.js
+++ /dev/null
@@ -1,113 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var expectedDocs = [{
- a: "foo",
- b: 12,
- c: {
- xyz: ISODate("1997-06-02T15:24:00Z"),
- noop: true,
- },
- d: {hij: {lkm: BinData(0, "e8MEnzZoFyMmD7WSHdNrFJyEk8M=")}},
- }, {
- a: "bar",
- b: 24,
- c: {
- xyz: "06/08/2016 09:26:00",
- noop: true,
- },
- d: {hij: {lkm: BinData(0, "dGVzdAo=")}},
- }, {
- a: "baz",
- b: 36,
- c: {
- xyz: ISODate("2016-06-08T09:26:00Z"),
- noop: false,
- },
- d: {hij: {lkm: BinData(0, "dGVzdAo=")}},
- }];
- jsTest.log('Testing parseGrace option');
-
- var checkCollectionContents = function(coll) {
- var importedDoc = coll.findOne({a: "foo"});
- delete importedDoc["_id"];
- assert.docEq(importedDoc, expectedDocs[0]);
- importedDoc = coll.findOne({a: "baz"});
- delete importedDoc["_id"];
- assert.docEq(importedDoc, expectedDocs[2]);
- };
-
- var toolTest = getToolTest("import_fields");
- var commonToolArgs = getCommonToolArguments();
- var d = toolTest.db;
- var c;
-
- // parseGrace=fail should cause a failure
- c = d.testcoll1;
- var ret = toolTest.runTool.apply(toolTest, ["import", "--file",
- "jstests/import/testdata/parse_grace.csv",
- "--type", "csv",
- "--db", d.getName(),
- "--collection", c.getName(),
- "--columnsHaveTypes",
- "--parseGrace", "stop",
- "--headerline"]
- .concat(commonToolArgs));
- assert.neq(ret, 0);
-
- // parseGrace=skipRow should not import the row
- // with an un-coercable field
- c = d.testcoll2;
- ret = toolTest.runTool.apply(toolTest, ["import", "--file",
- "jstests/import/testdata/parse_grace.csv",
- "--type", "csv",
- "--db", d.getName(),
- "--collection", c.getName(),
- "--columnsHaveTypes",
- "--parseGrace", "skipRow",
- "--headerline"]
- .concat(commonToolArgs));
- checkCollectionContents(c);
- assert.eq(c.count(), 2);
-
- // parseGrace=skipField should not import the
- // an un-coercable field, but still keep the rest
- // of the row
- c = d.testcoll3;
- ret = toolTest.runTool.apply(toolTest, ["import", "--file",
- "jstests/import/testdata/parse_grace.csv",
- "--type", "csv",
- "--db", d.getName(),
- "--collection", c.getName(),
- "--columnsHaveTypes",
- "--parseGrace", "skipField",
- "--headerline"]
- .concat(commonToolArgs));
- checkCollectionContents(c);
- assert.eq(c.count(), 3);
- assert.neq(c.findOne({a: "bar"}), null);
- assert.eq(c.findOne({a: "bar"}).c.xyz, undefined);
-
- // parseGrace=autoCast should import the un-coercable field
- c = d.testcoll4;
- ret = toolTest.runTool.apply(toolTest, ["import", "--file",
- "jstests/import/testdata/parse_grace.csv",
- "--type", "csv",
- "--db", d.getName(),
- "--collection", c.getName(),
- "--columnsHaveTypes",
- "--parseGrace", "autoCast",
- "--headerline"]
- .concat(commonToolArgs));
- checkCollectionContents(c);
- assert.eq(c.count(), 3);
- var importedDoc = c.findOne({a: "bar"});
- delete importedDoc["_id"];
- assert.docEq(importedDoc, expectedDocs[1]);
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/replset.js b/src/mongo/gotools/test/qa-tests/jstests/import/replset.js
deleted file mode 100644
index 3ff32650d07..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/replset.js
+++ /dev/null
@@ -1,48 +0,0 @@
-(function() {
- jsTest.log('Testing running import with upserts');
-
- var toolTest = new ToolTest('import_repl');
-
- var replset1 = new ReplSetTest({nodes: 3, name: 'importtest'});
- replset1.startSet();
- replset1.initiate();
-
- var primary = replset1.getPrimary();
- var secondary = replset1.getSecondary();
-
- var db = primary.getDB('import_repl_test');
-
- // trying to write to the secondary should fail
- assert.neq(runMongoProgram.apply(this, ['mongoimport',
- '--file', 'jstests/import/testdata/basic.json',
- '--db', db.getName(),
- '--collection', db.c.getName(),
- '--host', secondary.host]), 0,
- "writing to secondary should fail");
-
- assert.eq(db.c.count(), 0, 'database not empty');
-
- // now import using the primary
- assert.eq(runMongoProgram.apply(this, ['mongoimport',
- '--file', 'jstests/import/testdata/basic.json',
- '--db', db.getName(),
- '--collection', db.c.getName(),
- '--host', primary.host]), 0,
- "writing to primary should succeed");
-
- assert.neq(db.c.count(), 0, 'database unexpectedly empty on primary');
-
- db.dropDatabase();
-
- // import using the secondary but include replset name, should succeed
- assert.eq(runMongoProgram.apply(this, ['mongoimport',
- '--file', 'jstests/import/testdata/basic.json',
- '--db', db.getName(),
- '--collection', db.c.getName(),
- '--host', replset1.name + "/" + secondary.host]), 0,
- "writing to secondary with replset name should succeed");
-
- assert.neq(db.c.count(), 0, 'database unexpectedly empty on secondary');
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/stoponerror.js b/src/mongo/gotools/test/qa-tests/jstests/import/stoponerror.js
deleted file mode 100644
index 6c964681676..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/stoponerror.js
+++ /dev/null
@@ -1,40 +0,0 @@
-(function() {
- jsTest.log('Testing running import with upserts');
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- var toolTest = getToolTest('import_dupes');
- var db1 = toolTest.db;
- var commonToolArgs = getCommonToolArguments();
-
- var db = db1.getSiblingDB("dupetest");
- db.dropDatabase();
-
- // Verify that --mode=upsert with --upsertFields works by applying update w/ query on the fields
- db.c.insert({_id: 1234, b: "000000", c: 222});
- assert.eq(db.c.count(), 1, "collection count should be 1 at setup");
- var ret = toolTest.runTool.apply(toolTest, ["import", "--file",
- "jstests/import/testdata/dupes.json",
- "--db", db.getName(),
- "--collection", db.c.getName(),
- "--stopOnError"]
- .concat(commonToolArgs));
-
- assert.neq(ret, 0,
- "duplicate key with --stopOnError should return nonzero exit code");
-
- // drop it, try again without stop on error
- db.c.drop();
- db.c.insert({_id: 1234, b: "000000", c: 222});
- ret = toolTest.runTool.apply(toolTest, ["import", "--file",
- "jstests/import/testdata/dupes.json",
- "--db", db.getName(),
- "--collection", db.c.getName()]
- .concat(commonToolArgs));
- assert.eq(ret, 0,
- "duplicate key without --stopOnError should return zero exit code");
- assert.docEq(db.c.findOne({_id: 1234}), {_id: 1234, b: "000000", c: 222});
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/basic.json b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/basic.json
deleted file mode 100644
index 93b5efcd940..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/basic.json
+++ /dev/null
@@ -1,36 +0,0 @@
-{_id:1, x:"1"}
-{_id:2, x:"2"}
-{_id:3, x:"3"}
-{_id:4, x:"4"}
-{_id:5, x:"5"}
-{_id:6, x:"6"}
-{_id:7, x:"7"}
-{_id:8, x:"8"}
-{_id:9, x:"9"}
-{_id:10, x:"10"}
-{_id:11, x:"11"}
-{_id:12, x:"12"}
-{_id:13, x:"13"}
-{_id:14, x:"14"}
-{_id:15, x:"15"}
-{_id:16, x:"16"}
-{_id:17, x:"17"}
-{_id:18, x:"18"}
-{_id:19, x:"19"}
-{_id:20, x:"20"}
-{_id:21, x:"21"}
-{_id:22, x:"22"}
-{_id:23, x:"23"}
-{_id:24, x:"24"}
-{_id:25, x:"25"}
-{_id:26, x:"26"}
-{_id:27, x:"27"}
-{_id:28, x:"28"}
-{_id:29, x:"29"}
-{_id:30, x:"30"}
-{_id:31, x:"31"}
-{_id:32, x:"32"}
-{_id:33, x:"33"}
-{_id:34, x:"34"}
-{_id:35, x:"35"}
-
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/boolean.json b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/boolean.json
deleted file mode 100644
index 1ee4458d5fa..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/boolean.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{ key: 'a', bool: Boolean(1) }
-{ key: 'b', bool: Boolean(0) }
-{ key: 'c', bool: Boolean(140) }
-{ key: 'd', bool: Boolean(-140.5) }
-{ key: 'e', bool: Boolean(Boolean(1)) }
-{ key: 'f', bool: Boolean(Boolean(0)) }
-{ key: 'g', bool: Boolean('') }
-{ key: 'h', bool: Boolean('f') }
-{ key: 'i', bool: Boolean(null) }
-{ key: 'j', bool: Boolean(undefined) }
-{ key: 'k', bool: Boolean(true) }
-{ key: 'l', bool: Boolean(false) }
-{ key: 'm', bool: Boolean(true, false) }
-{ key: 'n', bool: Boolean(false, true) }
-{ key: 'o', bool: [ Boolean(1), Boolean(0), Date(23) ] }
-{ key: 'p', bool: Boolean(Date(15)) }
-{ key: 'q', bool: Boolean(0x585) }
-{ key: 'r', bool: Boolean(0x0) }
-{ key: 's', bool: Boolean() } \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/csv_header.csv b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/csv_header.csv
deleted file mode 100644
index 4c308f094b1..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/csv_header.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-a,b,c.xyz,d.hij.lkm
-foo,bar,blah,qwz
-bob,,steve,sue
-one,two,three,four
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/csv_noheader.csv b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/csv_noheader.csv
deleted file mode 100644
index 15427ed2b89..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/csv_noheader.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-foo,bar,blah,qwz
-bob,,steve,sue
-one,two,three,four
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/dupes.json b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/dupes.json
deleted file mode 100644
index 5d7ce696dca..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/dupes.json
+++ /dev/null
@@ -1,34 +0,0 @@
-{_id:2, x:"2"}
-{_id:3, x:"3"}
-{_id:4, x:"4"}
-{_id:5, x:"5"}
-{_id:6, x:"6"}
-{_id:7, x:"7"}
-{_id:8, x:"8"}
-{_id:9, x:"9"}
-{_id:10, x:"10"}
-{_id:11, x:"11"}
-{_id:12, x:"12"}
-{_id:13, x:"13"}
-{_id:14, x:"14"}
-{_id:15, x:"15"}
-{_id:16, x:"16"}
-{_id:17, x:"17"}
-{_id:18, x:"18"}
-{_id:19, x:"19"}
-{_id:20, x:"20"}
-{_id:1234, x:"21"}
-{_id:22, x:"22"}
-{_id:23, x:"23"}
-{_id:24, x:"24"}
-{_id:25, x:"25"}
-{_id:26, x:"26"}
-{_id:27, x:"27"}
-{_id:28, x:"28"}
-{_id:29, x:"29"}
-{_id:30, x:"30"}
-{_id:31, x:"31"}
-{_id:32, x:"32"}
-{_id:33, x:"33"}
-{_id:34, x:"34"}
-{_id:35, x:"35"}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/extrafields.csv b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/extrafields.csv
deleted file mode 100644
index 945dedb557a..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/extrafields.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-foo,bar,blah,qwz
-bob,,steve,sue
-one,two,three,four,extra1,extra2,extra3
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/fieldfile b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/fieldfile
deleted file mode 100644
index d08b7dd21f5..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/fieldfile
+++ /dev/null
@@ -1,4 +0,0 @@
-a
-b
-c.xyz
-d.hij.lkm
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/parse_grace.csv b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/parse_grace.csv
deleted file mode 100644
index 447c1bd647e..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/parse_grace.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-a.string(),b.int32(),"c.xyz.date_oracle(Month dd, yyyy HH24:mi:ss)",c.noop.boolean(),d.hij.lkm.binary(hex)
-foo,12,"June 02, 1997 15:24:00",true,7bc3049f36681723260fb5921dd36b149c8493c3
-bar,24,"06/08/2016 09:26:00",true,746573740a
-baz,36,"June 08, 2016 09:26:00",false,746573740a
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/tsv_header.tsv b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/tsv_header.tsv
deleted file mode 100644
index d10280f5e04..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/tsv_header.tsv
+++ /dev/null
@@ -1,4 +0,0 @@
-a b c.xyz d.hij.lkm
-foo bar blah qwz
-bob steve sue
-one two three four
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/tsv_noheader.tsv b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/tsv_noheader.tsv
deleted file mode 100644
index 3729293b3ac..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/tsv_noheader.tsv
+++ /dev/null
@@ -1,3 +0,0 @@
-foo bar blah qwz
-bob steve sue
-one two three four
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_extrafields.csv b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_extrafields.csv
deleted file mode 100644
index 8d398a3745f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_extrafields.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-foo,12,"June 02, 1997 15:24:00",true,7bc3049f36681723260fb5921dd36b149c8493c3
-bar,24,"June 08, 2016 09:26:00",false,746573740a
-one,2,"May 08, 2016 09:26:00",false,746573740a,extra1,extra2
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_extrafields.tsv b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_extrafields.tsv
deleted file mode 100644
index a4ca42f4589..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_extrafields.tsv
+++ /dev/null
@@ -1,3 +0,0 @@
-foo 12 June 02, 1997 15:24:00 true 7bc3049f36681723260fb5921dd36b149c8493c3
-bar 24 June 08, 2016 09:26:00 false 746573740a
-one 2 May 08, 2016 09:26:00 false 746573740a extra1 extra2
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_header.csv b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_header.csv
deleted file mode 100644
index 1140f31b20b..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_header.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-a.string(),b.int32(),"c.xyz.date_oracle(Month dd, yyyy HH24:mi:ss)",c.noop.boolean(),d.hij.lkm.binary(hex)
-foo,12,"June 02, 1997 15:24:00",true,7bc3049f36681723260fb5921dd36b149c8493c3
-bar,24,"June 08, 2016 09:26:00",false,746573740a
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_header.tsv b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_header.tsv
deleted file mode 100644
index a80b16848be..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_header.tsv
+++ /dev/null
@@ -1,3 +0,0 @@
-a.string() b.int32() c.xyz.date_oracle(Month dd, yyyy HH24:mi:ss) c.noop.boolean() d.hij.lkm.binary(hex)
-foo 12 June 02, 1997 15:24:00 true 7bc3049f36681723260fb5921dd36b149c8493c3
-bar 24 June 08, 2016 09:26:00 false 746573740a
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_noheader.csv b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_noheader.csv
deleted file mode 100644
index 50eeda2d83f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_noheader.csv
+++ /dev/null
@@ -1,2 +0,0 @@
-foo,12,"June 02, 1997 15:24:00",true,7bc3049f36681723260fb5921dd36b149c8493c3
-bar,24,"June 08, 2016 09:26:00",false,746573740a
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_noheader.tsv b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_noheader.tsv
deleted file mode 100644
index a4eb1896c1c..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typed_noheader.tsv
+++ /dev/null
@@ -1,2 +0,0 @@
-foo 12 June 02, 1997 15:24:00 true 7bc3049f36681723260fb5921dd36b149c8493c3
-bar 24 June 08, 2016 09:26:00 false 746573740a
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typedfieldfile b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typedfieldfile
deleted file mode 100644
index 0068166003f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/typedfieldfile
+++ /dev/null
@@ -1,5 +0,0 @@
-a.string()
-b.int32()
-c.xyz.date_oracle(Month dd, yyyy HH24:mi:ss)
-c.noop.boolean()
-d.hij.lkm.binary(hex)
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/types.json b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/types.json
deleted file mode 100644
index 692d7999709..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/types.json
+++ /dev/null
@@ -1,27 +0,0 @@
-{ "double_type" : 5.0,
- "double_exponent_type" : 5e+32,
- "double_negative_type" : -5.0,
- "NaN": NaN,
- "infinity" : Infinity,
- "negative_infinity" : -Infinity,
- "string_type" : "sample string",
- "object_type" : {"sample" : "object"},
- "binary_data" : BinData(3, "e8MEnzZoFyMmD7WSHdNrFJyEk8M="),
- "undefined_type" : undefined,
- "object_id_type" : ObjectId("54b03ef2a817f4f960f5b809"),
- "true_type" : true,
- "false_type" : false,
- "date_type" : Date(45),
- "iso_date_type" : ISODate("2015-02-25T16:42:11Z"),
- "null_type" : null,
- "int32_type" : 5,
- "int32_negative_type" : -5,
- "number_int_type" : NumberInt(5),
- "int32_hex" : 0x123,
- "int64_type" : 214748364765,
- "int64_negative_type" : -214748364765,
- "number_long_type" : NumberLong(5000),
- "minkey_type" : { "$minKey" : 1 },
- "maxkey_type" : { "$maxKey" : 1 },
- "regex_type" : { "$regex" : "\\.", "$options" : "" }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/upsert1.json b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/upsert1.json
deleted file mode 100644
index 1608356a73b..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/upsert1.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{_id:"one", a:1234,b:4567}
-{_id:"two", a:"xxx",b:"yyy"}
-{_id:"one", a:"foo",b:"blah"}
-{_id:"one", a:"test",b:"test"}
-{_id:"one", a:"unicorns",b:"zebras"}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/upsert2.json b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/upsert2.json
deleted file mode 100644
index cf35f2762ac..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/upsert2.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{a:1234,b:4567, c:222}
-{a:4567,b:"yyy", c:333}
-{a:1234,b:"blah", c:222}
-{a:"xxx",b:"test", c:-1}
-{a:4567,b:"asdf", c:222}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/upsert3.json b/src/mongo/gotools/test/qa-tests/jstests/import/testdata/upsert3.json
deleted file mode 100644
index c22767ceac0..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/testdata/upsert3.json
+++ /dev/null
@@ -1,2000 +0,0 @@
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":0.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":1.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":2.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":3.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":4.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":5.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":6.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":7.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":8.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":9.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":10.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":11.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":12.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":13.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":14.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":15.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":16.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":17.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":18.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":19.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":20.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":21.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":22.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":23.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":24.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":25.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":26.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":27.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":28.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":29.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":30.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":31.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":32.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":33.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":34.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":35.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":36.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":37.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":38.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":39.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":40.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":41.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":42.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":43.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":44.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":45.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":46.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":47.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":48.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":49.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":50.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":51.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":52.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":53.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":54.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":55.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":56.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":57.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":58.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":59.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":60.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":61.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":62.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":63.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":64.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":65.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":66.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":67.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":68.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":69.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":70.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":71.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":72.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":73.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":74.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":75.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":76.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":77.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":78.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":79.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":80.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":81.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":82.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":83.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":84.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":85.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":86.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":87.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":88.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":89.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":90.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":91.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":92.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":93.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":94.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":95.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":96.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":97.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":98.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":0.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":1.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":2.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":3.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":4.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":5.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":6.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":7.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":8.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":9.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":10.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":11.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":12.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":13.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":14.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":15.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":16.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":17.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":18.0,"d":"foo"}],"e":"bar"},"x":"str2"}
-{"_id":{"a":99.0,"b":[0.0,1.0,2.0,{"c":19.0,"d":"foo"}],"e":"bar"},"x":"str2"}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/type_case.js b/src/mongo/gotools/test/qa-tests/jstests/import/type_case.js
deleted file mode 100644
index 671ae707f31..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/type_case.js
+++ /dev/null
@@ -1,98 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- jsTest.log('Testing running import with bad command line options');
-
- var toolTest = getToolTest('bad_options');
- var db1 = toolTest.db;
-
- // Make a dummy file to import by writing a test collection and exporting it
- assert.eq(0, db1.c.count(), "setup1");
- db1.c.save({a: 1, b: 2, c: 3});
- db1.c.save({a: 4, b: 5, c: 6});
- assert.eq(2, db1.c.count(), "setup2");
-
- toolTest.runTool("export",
- "--out", toolTest.extFile,
- "-d", toolTest.baseName,
- "-c", db1.c.getName());
-
- // also make a CSV version of it
- toolTest.runTool("export",
- "--out", toolTest.extFile + ".csv",
- "-d", toolTest.baseName,
- "-c", db1.c.getName(),
- "--csv",
- "--fields", "a,b,c");
- db1.c.drop();
- assert.eq(0, db1.c.count(), "after drop", "-d", toolTest.baseName, "-c", "foo");
-
- // verify that the normal sane case works
- var ret = toolTest.runTool("import",
- "--file", toolTest.extFile,
- "-d", "test",
- "-c", "test");
- assert.eq(ret, 0);
-
- // verify that the a lower case json type works
- ret = toolTest.runTool("import",
- "--file", toolTest.extFile,
- "-d", "test",
- "-c", "test",
- "--type=json");
- assert.eq(ret, 0);
-
- // verify that the a upper case json type works
- ret = toolTest.runTool("import",
- "--file", toolTest.extFile,
- "-d", "test",
- "-c", "test",
- "--type=JSON");
- assert.eq(ret, 0);
-
- // verify that the a csv type specifier failes to load a json file
- ret = toolTest.runTool("import",
- "--file", toolTest.extFile,
- "-d", "test",
- "-c", "test",
- "--type=csv",
- "-f", "a,b,c");
- assert.eq(ret, 1);
-
- // verify that the a lower case csv type works
- ret = toolTest.runTool("import",
- "--file", toolTest.extFile+".csv",
- "-d", "test",
- "-c", "test",
- "--type=csv",
- "-f", "a,b,c");
- assert.eq(ret, 0);
-
- // verify that the a upper case csv type works
- ret = toolTest.runTool("import",
- "--file", toolTest.extFile+".csv",
- "-d", "test",
- "-c", "test",
- "--type=CSV",
- "-f", "a,b,c");
- assert.eq(ret, 0);
-
- // verify that the a mixed case csv type works
- ret = toolTest.runTool("import",
- "--file", toolTest.extFile+".csv",
- "-d", "test",
- "-c", "test",
- "--type=cSv",
- "-f", "a,b,c");
- assert.eq(ret, 0);
-
- var testDb = db1.c.getDB().getSiblingDB("test");
- assert.eq.soon(11, testDb.test.count.bind(testDb.test), "test.test should have 11 records");
- testDb.test.drop();
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/typed_fields.js b/src/mongo/gotools/test/qa-tests/jstests/import/typed_fields.js
deleted file mode 100644
index fa7e07dd0b1..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/typed_fields.js
+++ /dev/null
@@ -1,114 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- var formats = ["csv", "tsv"];
- var header = "a.string(),b.int32(),c.xyz.date_oracle(Month dd, yyyy HH24:mi:ss),c.noop.boolean(),d.hij.lkm.binary(hex)";
- var expectedDocs = [{
- a: "foo",
- b: 12,
- c: {
- xyz: ISODate("1997-06-02T15:24:00Z"),
- noop: true,
- },
- d: {hij: {lkm: BinData(0, "e8MEnzZoFyMmD7WSHdNrFJyEk8M=")}},
- }, {
- a: "bar",
- b: 24,
- c: {
- xyz: ISODate("2016-06-08T09:26:00Z"),
- noop: false,
- },
- d: {hij: {lkm: BinData(0, "dGVzdAo=")}},
- }];
- jsTest.log('Testing typed fields in CSV/TSV');
-
- var checkCollectionContents = function(coll) {
- var importedDoc = coll.findOne({a: "foo"});
- delete importedDoc["_id"];
- assert.docEq(importedDoc, expectedDocs[0]);
- importedDoc = coll.findOne({a: "bar"});
- delete importedDoc["_id"];
- assert.docEq(importedDoc, expectedDocs[1]);
- assert.eq(coll.count(), 2);
- };
-
- var reset = function(coll) {
- coll.drop();
- assert.eq(coll.count(), 0);
- };
-
- var toolTest = getToolTest("import_fields");
- var db1 = toolTest.db;
- var commonToolArgs= getCommonToolArguments();
- for (var i=0; i<formats.length; i++) {
- var format=formats[i];
-
- var c = db1.c.getDB().getSiblingDB(format + "testdb")[format+"testcoll"];
- // check that headerline uses the correct headers
- var ret = toolTest.runTool.apply(toolTest, ["import",
- "--file", "jstests/import/testdata/typed_header." + format,
- "--type=" + format,
- "--db", format + "testdb",
- "--collection", format + "testcoll",
- "--columnsHaveTypes",
- "--headerline"]
- .concat(commonToolArgs));
-
- checkCollectionContents(c);
- reset(c);
-
- // check that the fields can be specified with --fields
- ret = toolTest.runTool.apply(toolTest, ["import",
- "--file", "jstests/import/testdata/typed_noheader." + format,
- "--type=" + format,
- "--db", format + "testdb",
- "--collection", format + "testcoll",
- "--columnsHaveTypes",
- "--fields", header]
- .concat(commonToolArgs));
- checkCollectionContents(c);
- reset(c);
-
- // check that the fields can be specified with --fieldsFile
- ret = toolTest.runTool.apply(toolTest, ["import",
- "--file", "jstests/import/testdata/typed_noheader." + format,
- "--type=" + format,
- "--db", format + "testdb",
- "--collection", format + "testcoll",
- "--columnsHaveTypes",
- "--fieldFile", "jstests/import/testdata/typedfieldfile"]
- .concat(commonToolArgs));
- checkCollectionContents(c);
- reset(c);
-
- // when --fieldFile, --fields, and --headerline are all omitted,
- // import should fail
- ret = toolTest.runTool.apply(toolTest, ["import",
- "--file", "jstests/import/testdata/typed_noheader." + format,
- "--type=" + format,
- "--db", format + "testdb",
- "--collection", format + "testcoll",
- "--columnsHaveTypes"]
- .concat(commonToolArgs));
- assert.neq(ret, 0);
- reset(c);
-
- // check that extra fields are created as expected
- ret = toolTest.runTool.apply(toolTest, ["import",
- "--file", "jstests/import/testdata/typed_extrafields." + format,
- "--type=" + format,
- "--db", format + "testdb",
- "--collection", format + "testcoll",
- "--columnsHaveTypes",
- "--fieldFile", "jstests/import/testdata/typedfieldfile"]
- .concat(commonToolArgs));
-
- var importedDoc = c.findOne({"a": "one"});
- assert.eq(importedDoc.field5, "extra1");
- assert.eq(importedDoc.field6, "extra2");
- reset(c);
- }
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/import/types.js b/src/mongo/gotools/test/qa-tests/jstests/import/types.js
deleted file mode 100644
index c128c9c5dc9..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/import/types.js
+++ /dev/null
@@ -1,117 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- jsTest.log('Testing running import with various data types');
-
- var toolTest = getToolTest('import');
- var db1 = toolTest.db;
- var commonToolArgs = getCommonToolArguments();
-
- var testDoc = {
- _id: ObjectId(),
- a: BinData(0, "e8MEnzZoFyMmD7WSHdNrFJyEk8M="),
- b: Boolean(1),
- d: "this is a string",
- e: ["this is an ", 2, 23.5, "array with various types in it"],
- f: {"this is": "an embedded doc"},
- g: function () {
- print("hey sup");
- },
- h: null,
- i: true,
- j: false,
- k: NumberLong(10000),
- l: MinKey(),
- m: MaxKey(),
- n: ISODate("2015-02-25T16:42:11Z"),
- o: DBRef('namespace', 'identifier', 'database'),
- p: NumberInt(5),
- q: 5.0,
- };
-
- // Make a dummy file to import by writing a test collection and exporting it
- assert.eq(0, db1.c.count(), "setup1");
- db1.c.save(testDoc);
- toolTest.runTool.apply(toolTest, ["export",
- "--out", toolTest.extFile,
- "-d", toolTest.baseName,
- "-c", db1.c.getName()]
- .concat(commonToolArgs));
-
- toolTest.runTool.apply(toolTest, ["import",
- "--file", toolTest.extFile,
- "--db", "imported",
- "--collection", "testcoll2"]
- .concat(commonToolArgs));
- var postImportDoc = db1.c.getDB().getSiblingDB("imported").testcoll2.findOne();
-
- printjson(postImportDoc);
-
- for (var docKey in testDoc) {
- if (!testDoc.hasOwnProperty(docKey)) {
- continue;
- }
- jsTest.log("checking field " + docKey);
- if (typeof testDoc[docKey] === 'function') {
- // SERVER-23472: As of 3.3.5, JS functions are serialized when inserted,
- // so accept either the original function or its serialization
- try {
- assert.eq(testDoc[docKey], postImportDoc[docKey],
- "function does not directly match");
- } catch (e) {
- assert.eq({code: String(testDoc[docKey])}, postImportDoc[docKey],
- "serialized function does not match");
- }
- continue;
- }
- assert.eq(testDoc[docKey], postImportDoc[docKey],
- "imported field " + docKey + " does not match original");
- }
-
- // DBPointer should turn into a DBRef with a $ref field and hte $id field being an ObjectId. It will not convert back to a DBPointer.
-
- var oid = ObjectId();
- var irregularObjects = {
- _id: ObjectId(),
- a: DBPointer('namespace', oid),
- b: NumberInt("5"),
- c: NumberLong("5000"),
- d: 5,
- e: 9223372036854775,
- };
-
- db1.c.drop();
- db1.c.getDB().getSiblingDB("imported").testcoll3.drop();
- assert.eq(0, db1.c.count(), "setup1");
- db1.c.save(irregularObjects);
- toolTest.runTool.apply(toolTest, ["export",
- "--out", toolTest.extFile,
- "-d", toolTest.baseName,
- "-c", db1.c.getName()]
- .concat(commonToolArgs));
-
- toolTest.runTool.apply(toolTest, ["import",
- "--file", toolTest.extFile,
- "--db", "imported",
- "--collection", "testcoll3"]
- .concat(commonToolArgs));
- postImportDoc = db1.c.getDB().getSiblingDB("imported").testcoll3.findOne();
-
- printjson(postImportDoc);
-
- var dbRef = DBRef("namespace", oid);
- assert.eq(postImportDoc["a"], dbRef);
-
- assert.eq(postImportDoc["b"], 5);
- assert.eq(postImportDoc["d"], 5);
-
- var numLong = NumberLong(5000);
- assert.eq(postImportDoc["c"], numLong);
-
- numLong = NumberLong(9223372036854775);
- assert.eq(postImportDoc["e"], numLong);
-
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/.eslintrc.yml b/src/mongo/gotools/test/qa-tests/jstests/libs/.eslintrc.yml
deleted file mode 100644
index 1750fda88a5..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/.eslintrc.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-rules:
- no-unused-vars: 0
- no-empty-function: 0
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/analyze_plan.js b/src/mongo/gotools/test/qa-tests/jstests/libs/analyze_plan.js
deleted file mode 100644
index b930470fdb5..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/analyze_plan.js
+++ /dev/null
@@ -1,76 +0,0 @@
-// Contains helpers for checking, based on the explain output, properties of a
-// plan. For instance, there are helpers for checking whether a plan is a collection
-// scan or whether the plan is covered (index only).
-
-/**
- * Given the root stage of explain's BSON representation of a query plan ('root'),
- * returns true if the plan has a stage called 'stage'.
- */
-function planHasStage(root, stage) {
- if (root.stage === stage) {
- return true;
- } else if ("inputStage" in root) {
- return planHasStage(root.inputStage, stage);
- } else if ("inputStages" in root) {
- for (var i = 0; i < root.inputStages.length; i++) {
- if (planHasStage(root.inputStages[i], stage)) {
- return true;
- }
- }
- }
-
- return false;
-}
-
-/**
- * A query is covered iff it does *not* have a FETCH stage or a COLLSCAN.
- *
- * Given the root stage of explain's BSON representation of a query plan ('root'),
- * returns true if the plan is index only. Otherwise returns false.
- */
-function isIndexOnly(root) {
- return !planHasStage(root, "FETCH") && !planHasStage(root, "COLLSCAN");
-}
-
-/**
- * Returns true if the BSON representation of a plan rooted at 'root' is using
- * an index scan, and false otherwise.
- */
-function isIxscan(root) {
- return planHasStage(root, "IXSCAN");
-}
-
-/**
- * Returns true if the BSON representation of a plan rooted at 'root' is using
- * the idhack fast path, and false otherwise.
- */
-function isIdhack(root) {
- return planHasStage(root, "IDHACK");
-}
-
-/**
- * Returns true if the BSON representation of a plan rooted at 'root' is using
- * a collection scan, and false otherwise.
- */
-function isCollscan(root) {
- return planHasStage(root, "COLLSCAN");
-}
-
-/**
- * Get the number of chunk skips for the BSON exec stats tree rooted at 'root'.
- */
-function getChunkSkips(root) {
- if (root.stage === "SHARDING_FILTER") {
- return root.chunkSkips;
- } else if ("inputStage" in root) {
- return getChunkSkips(root.inputStage);
- } else if ("inputStages" in root) {
- var skips = 0;
- for (var i = 0; i < root.inputStages.length; i++) {
- skips += getChunkSkips(root.inputStages[0]);
- }
- return skips;
- }
-
- return 0;
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/authTestsKey b/src/mongo/gotools/test/qa-tests/jstests/libs/authTestsKey
deleted file mode 100644
index 573898a4f05..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/authTestsKey
+++ /dev/null
@@ -1 +0,0 @@
-This key is only for running the suite with authentication dont use it in any tests directly
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/badSAN.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/badSAN.pem
deleted file mode 100644
index d8e362731e0..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/badSAN.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDdTCCAl2gAwIBAgIDAYKXMA0GCSqGSIb3DQEBBQUAMHQxFzAVBgNVBAMTDktl
-cm5lbCBUZXN0IENBMQ8wDQYDVQQLEwZLZXJuZWwxEDAOBgNVBAoTB01vbmdvREIx
-FjAUBgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYD
-VQQGEwJVUzAeFw0xNDA5MjMxNTE3MjNaFw0zNDA5MjMxNTE3MjNaMG8xEjAQBgNV
-BAMTCTEyNy4wLjAuMTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RC
-MRYwFAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkG
-A1UEBhMCVVMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCDB/lxuzeU
-OHR5nnOTJM0fHz0WeicnuUfGG5wP89Mbkd3Y+BNS0ozbnkW+NAGhD+ehNBjogISZ
-jLCd+uaYu7TLWpkgki+1+gM99Ro0vv7dIc8vD7ToILKMbM8xQmLbSxDT2tCUoXlc
-m7ccgDZl9oW1scQYQ8gWHjmk3yK8sCoGa/uwr49u74aVM7673tLsK41m8oYPzt/q
-VGT+mXpBJQcGXkTNQtIPxBtD25jr+aPietS3u70zrVPY6ZDsGE7DofEeRl97kVoF
-NcpaQmVEwEo8KCWaT6OaPaUUUjAMwzqiZaHNZ6mL1pCr65bLXP6T9tiMtWLw5+SG
-3E09fhQuWod5AgMBAAGjFTATMBEGA1UdEQQKMAiCBmJhZFNBTjANBgkqhkiG9w0B
-AQUFAAOCAQEAQzlibJvlUpJG3vc5JppdrudpXoVAP3wtpzvnkrY0GTWIUE52mCIf
-MJ5sARvjzs/uMhV5GLnjqTcT+DFkihqKyFo1tKBD7LSuSjfDvjmggG9lq0/xDvVU
-uczAuNtI1T7N+6P7LyTG4HqniYouPMDWyCKBOmzzNsk+r1OJb6cxU7QQwmSWw1n1
-ztNcF6JzCQVcd9Isau9AEXZ9q0M0sjD9mL67Qo3Dh3Mvf4UkJKqm3KOQOupUHZLU
-vJwfsS2u+gfHY1Plywzq3AuT7ygbksR3Pqfs8LFPnuRAH+41sFTGUM52hiU7mNPj
-ebl8s1tjK7WQ+a8GTABJV0hDNeWd3Sr+Og==
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEAgwf5cbs3lDh0eZ5zkyTNHx89FnonJ7lHxhucD/PTG5Hd2PgT
-UtKM255FvjQBoQ/noTQY6ICEmYywnfrmmLu0y1qZIJIvtfoDPfUaNL7+3SHPLw+0
-6CCyjGzPMUJi20sQ09rQlKF5XJu3HIA2ZfaFtbHEGEPIFh45pN8ivLAqBmv7sK+P
-bu+GlTO+u97S7CuNZvKGD87f6lRk/pl6QSUHBl5EzULSD8QbQ9uY6/mj4nrUt7u9
-M61T2OmQ7BhOw6HxHkZfe5FaBTXKWkJlRMBKPCglmk+jmj2lFFIwDMM6omWhzWep
-i9aQq+uWy1z+k/bYjLVi8OfkhtxNPX4ULlqHeQIDAQABAoIBAC4Bx8jyJmKpq+Pk
-CcqZelg6HLXesA7XlGbv3M0RHIeqoM2E1SwYd5LJMM3G7ueBcR/97dz8+xH6/yyJ
-Ixxvk9xu9CMmkRABN9AyVkA867nzHA73Idr7WBXMQreWCqXa5o6sXt5BEB6/If0k
-23TTqUERqLuoWQHDHRRRsJ218RuNmbvBe8TGXcfunC0eeDVKDeqAXol6bD5lztdu
-B6jkdLt5UZSQ7X8OmClbeDlac90B8usNi+pUE9q1p7X462vAw8LohkxLY2nyIcmU
-feNdTNHP+lklv+E+p9w/Az7Hf6zxm525tw90QVI048fr9SL3ftLHOt4FhucSCn0Z
-CjylP4ECgYEA+nQrNVdVwmxcWCVn69LR1grNXUSz+fLHCo+QKma4IyC1kuuZ+BBo
-Iwdf9t/S1tgtTYru3uxzCpQg7J1iDeEFEsMHl0rc6U1MmIE+6OvACVG3yotqoOqE
-852pi1OWIe94yTk2ZmNXJ8gpUE/gtMprbcSWOb7IzzrXy2lDcaEMuGkCgYEAhe7L
-ZvYI4LEvu6GSPp97qBzDH9m5UrHaTZIJk/Nu7ie919Sdg62LTfphsaK+pSyA55XQ
-8L9P7wNUPC44NnE+7CIJZsIuKdYqR5QI6No9RdTyij0Hgljfc7KuH2b8lf8EjvuH
-qZAf5zL3pIOQs8E8/MYHlGIqmTkYK41eCAcS9JECgYEADnra6KmU9rmnGR2IhZTZ
-tuNG/kZzlVbY9R5ZumnX6YgBl23xp+ri6muJu88y9GLpM5t9tfu7pvfrc2KiAaVp
-0qzd6nxUi1SBwituxK6kmqVT1+z5jDYi26bY34pEms+qjw+0unSx3EXxRYhouGsf
-jOgZu1rxZzHCuirq0E38W0kCgYBzOK16RX37t9OFywlioJekWCIxu4BouSNCirl8
-s/eiIUR8cqiUCPAIRLhZNtZmiTPYiBW5mAyvZiDIqUao56InSVznL3TBf0LeU2ea
-023VLs79yGU2aTjLc1PDJjl03XDRhWj/okMgBsPvn1QUoNDT8ZXBvPZC3VCC31qe
-818GUQKBgQDBUP2BC/Th/0dErOQ5lWkY3YbmzrTp2pDsHGZJRD+OdQ5B8FUvCP8m
-JESk/0ATn7niUqawnOy/2KlKIkeBBV2XL1rjIGEhCkBUuhCiInNDqz1AGdXzIKaT
-myoZ4PhIsH1D643e6iLhyAZuUAA4yB31E2a3l7EMyhV3vKbdWWygGQ==
------END RSA PRIVATE KEY-----
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/ca.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/ca.pem
deleted file mode 100644
index d1a5689cf0f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/ca.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDczCCAlugAwIBAgIBATANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjB0MRcwFQYDVQQD
-Ew5LZXJuZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25n
-b0RCMRYwFAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazEL
-MAkGA1UEBhMCVVMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCBxSXj
-qA5y2EMQkcmvLDNikE88Og3+spJ3ex60HWVPk8EeXN68jyfbKLYsoCcBE2rBAE/N
-shVBJa8irh0o/UTh1XNW4iGCsfMvYamXiHnaOjmGVKjfBoj6pzQH0uK0X5olm3Sa
-zZPkLLCR81yxsK6woJZMFTvrlEjxj/SmDZ9tVXW692bC4i6nGvOCSpgv9kms85xO
-Ed2xbuCLXFDXKafXZd5AK+iegkDs3ah7VXMEE8sbqGnlqC1nsy5bpCnb7aC+3af7
-SV2XEFlSQT5kwTmk9CvTDzM9O78SO8nNhEOFBLQEdGDGd3BShE8dCdh2JTy3zKsb
-WeE+mxy0mEwxNfGfAgMBAAGjEDAOMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEF
-BQADggEBAANwbvhM5K/Jcl6yzUSqAawvyAypT5aWBob7rt9KFq/aemjMN0gY2nsS
-8WTGd9jiXlxGc/TzrK6MOsJ904UAFE1L9uR//G1gIBa9dNbYoiii2Fc8b1xDVJEP
-b23rl/+GAT6UTSY+YgEjeA4Jk6H9zotO07lSw06rbCQam5SdA5UiMvuLHWCo3BHY
-8WzqLiW/uHlb4K5prF9yuTUBEIgkRvvvyOKXlRvm1Ed5UopT2hmwA86mffAfgJc2
-vSbm9/8Q00fYwO7mluB6mbEcnbquaqRLoB83k+WbwUAZ2yjWHXuXVMPwyaysazcp
-nOjaLwQJQgKejY62PiNcw7xC/nIxBeI=
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEowIBAAKCAQEAgcUl46gOcthDEJHJrywzYpBPPDoN/rKSd3setB1lT5PBHlze
-vI8n2yi2LKAnARNqwQBPzbIVQSWvIq4dKP1E4dVzVuIhgrHzL2Gpl4h52jo5hlSo
-3waI+qc0B9LitF+aJZt0ms2T5CywkfNcsbCusKCWTBU765RI8Y/0pg2fbVV1uvdm
-wuIupxrzgkqYL/ZJrPOcThHdsW7gi1xQ1ymn12XeQCvonoJA7N2oe1VzBBPLG6hp
-5agtZ7MuW6Qp2+2gvt2n+0ldlxBZUkE+ZME5pPQr0w8zPTu/EjvJzYRDhQS0BHRg
-xndwUoRPHQnYdiU8t8yrG1nhPpsctJhMMTXxnwIDAQABAoIBAD5iGOnM800wO2Uu
-wGbOd9FNEFoiinHDRHfdnw/1BavwmqjO+mBo7T8E3jarsrRosiwfyz1V+7O6uuuQ
-CgKXZlKuOuksgfGDPCWt7EolWHaZAOhbsGaujJD6ah/MuCD/yGmFxtNYOl05QpSX
-Cht9lSzhtf7TQl/og/xkOLbO27JB540ck/OCSOczXg9Z/O8AmIUyDn7AKb6G1Zhk
-2IN//HQoAvDUMZLWrzy+L7YGbA8pBR3yiPsYBH0rX2Oc9INpiGA+B9Nf1HDDsxeZ
-/o+5xLbRDDfIDtlYO0cekJ053W0zUQLrMEIn9991EpG2O/fPgs10NlKJtaFH8CmT
-ExgVA9ECgYEA+6AjtUdxZ0BL3Wk773nmhesNH5/5unWFaGgWpMEaEM7Ou7i6QApL
-KAbzOYItV3NNCbkcrejq7jsDGEmiwUOdXeQx6XN7/Gb2Byc/wezy5ALi0kcUwaur
-6s9+Ah+T4vcU2AjfuCWXIpe46KLEbwORmCRQGwkCBCwRhHGt5sGGxTkCgYEAhAaw
-voHI6Cb+4z3PNAKRnf2rExBYRyCz1KF16ksuwJyQSLzFleXRyRWFUEwLuVRL0+EZ
-JXhMbtrILrc23dJGEsB8kOCFehSH/IuL5eB0QfKpDFA+e6pimsbVeggx/rZhcERB
-WkcV3jN4O82gSL3EnIgvAT1/nwhmbmjvDhFJhZcCgYBaW4E3IbaZaz9S/O0m69Fa
-GbQWvS3CRV1oxqgK9cTUcE9Qnd9UC949O3GwHw0FMERjz3N7B/8FGW/dEuQ9Hniu
-NLmvqWbGlnqWywNcMihutJKbDCdp/Km5olUPkiNbB3sWsOkViXoiU/V0pK6BZvir
-d67EZpGwydpogyH9kVVCEQKBgGHXc3Q7SmCBRbOyQrQQk0m6i+V8328W1S5m2bPg
-M62aWXMOMn976ZRT1pBDSwz1Y5yJ3NDf7gTZLjEwpgCNrFCJRcc4HLL0NDL8V5js
-VjvpUU5GyYdsJdb+M4ZUPHi/QEaqzqPQumwJSLlJEdfWirZWVj9dDA8XcpGwQjjy
-psHRAoGBAJUTgeJYhjK7k5sgfh+PRqiRJP0msIH8FK7SenBGRUkelWrW6td2Riey
-EcOCMFkRWBeDgnZN5xDyWLBgrzpw9iHQQIUyyBaFknQcRUYKHkCx+k+fr0KHHCUb
-X2Kvf0rbeMucb4y/h7950HkBBq83AYKMAoI8Ql3cx7pKmyOLXRov
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/client.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/client.pem
deleted file mode 100644
index 50a64e41728..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/client.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDXTCCAkWgAwIBAgIBAzANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjBwMQ8wDQYDVQQD
-EwZjbGllbnQxEzARBgNVBAsTCktlcm5lbFVzZXIxEDAOBgNVBAoTB01vbmdvREIx
-FjAUBgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYD
-VQQGEwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJIFboAk9Fdi
-DY5Xld2iw36vB3IpHEfgWIimd+l1HX4jyp35i6xoqkZZHJUL/NMbUFJ6+44EfFJ5
-biB1y1Twr6GqpYp/3R30jKQU4PowO7DSal38MR34yiRFYPG4ZPPXXfwPSuwKrSNo
-bjqa0/DRJRVQlnGwzJkPsWxIgCjc8KNO/dSHv/CGymc9TjiFAI0VVOhMok1CBNvc
-ifwWjGBg5V1s3ItMw9x5qk+b9ff5hiOAGxPiCrr8R0C7RoeXg7ZG8K/TqXbsOZEG
-AOQPRGcrmqG3t4RNBJpZugarPWW6lr11zMpiPLFTrbq3ZNYB9akdsps4R43TKI4J
-AOtGMJmK430CAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAA+nPgVT4addi13yB6mjW
-+UhdUkFwtb1Wcg0sLtnNucopHZLlCj5FfDdp1RQxe3CyMonxyHTKkrWtQmVtUyvf
-C/fjpIKt9A9kAmveMHBiu9FTNTc0sbiXcrEBeHF5cD7N+Uwfoc/4rJm0WjEGNkAd
-pYLCCLVZXPVr3bnc3ZLY1dFZPsJrdH3nJGMjLgUmoNsKnaGozcjiKiXqm6doFzkg
-0Le5yD4C/QTaie2ycFa1X5bJfrgoMP7NqKko05h4l0B0+DnjpoTJN+zRreNTMKvE
-ETGvpUu0IYGxe8ZVAFnlEO/lUeMrPFvH+nDmJYsxO1Sjpds2hi1M1JoeyrTQPwXj
-2Q==
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEAkgVugCT0V2INjleV3aLDfq8HcikcR+BYiKZ36XUdfiPKnfmL
-rGiqRlkclQv80xtQUnr7jgR8UnluIHXLVPCvoaqlin/dHfSMpBTg+jA7sNJqXfwx
-HfjKJEVg8bhk89dd/A9K7AqtI2huOprT8NElFVCWcbDMmQ+xbEiAKNzwo0791Ie/
-8IbKZz1OOIUAjRVU6EyiTUIE29yJ/BaMYGDlXWzci0zD3HmqT5v19/mGI4AbE+IK
-uvxHQLtGh5eDtkbwr9Opduw5kQYA5A9EZyuaobe3hE0Emlm6Bqs9ZbqWvXXMymI8
-sVOturdk1gH1qR2ymzhHjdMojgkA60YwmYrjfQIDAQABAoIBAB249VEoNIRE9TVw
-JpVCuEBlKELYk2UeCWdnWykuKZ6vcmLNlNy3QVGoeeTs172w5ZykY+f4icXP6da5
-o3XauCVUMvYKKNwcFzSe+1xxzPSlH/mZh/Xt2left6f8PLBVuk/AXSPG2I9Ihodv
-VIzERaQdD0J9FmhhhV/hMhUfQ+w5rTCaDpq1KVGU61ks+JAtlQ46g+cvPF9c80cI
-TEC875n2LqWKmLRN43JUnctV3uGTmolIqCRMHPAs/egl+lG2RXJjqXSQ2uFLOvC/
-PXtBb597yadSs2BWPnTu/r7LbLGBAExzlQK1uFsTvuKsBPb3qrvUux0L68qwPuiv
-W24N8BECgYEAydtAvVB7OymQEX3mck2j7ixDN01wc1ZaCLBDvYPYS/Pvzq4MBiAD
-lHRtbIa6HPGA5jskbccPqQn8WGnJWCaYvCQryvgaA+BBgo1UTLfQJUo/7N5517vv
-KvbUa6NF0nj3VwfDV1vvy+amoWi9NOVn6qOh0K84PF4gwagb1EVy9MsCgYEAuTAt
-KCWdZ/aNcKgJc4NCUqBpLPF7EQypX14teixrbF/IRNS1YC9S20hpkG25HMBXjpBe
-tVg/MJe8R8CKzYjCt3z5Ff1bUQ2bzivbAtgjcaO0Groo8WWjnamQlrIQcvWM7vBf
-dnIflQ0slxbHfCi3XEe8tj2T69R7wJZ8L7PxR9cCgYEACgwNtt6Qo6s37obzt3DB
-3hL57YC/Ph5oMNKFLKOpWm5z2zeyhYOGahc5cxNppBMpNUxwTb6AuwsyMjxhty+E
-nqi2PU4IDXVWDWd3cLIdfB2r/OA99Ez4ZI0QmaLw0L8QoJZUVL7QurdqR9JsyHs6
-puUqIrb195s/yiPR7sjeJe0CgYEAuJviKEd3JxCN52RcJ58OGrh2oKsJ9/EbV0rX
-Ixfs7th9GMDDHuOOQbNqKOR4yMSlhCU/hKA4PgTFWPIEbOiM08XtuZIb2i0qyNjH
-N4qnqr166bny3tJnzOAgl1ljNHa8y+UsBTO3cCr17Jh0vL0KLSAGa9XvBAWKaG6b
-1iIXwXkCgYAVz+DA1yy0qfXdS1pgPiCJGlGZXpbBcFnqvbpGSclKWyUG4obYCbrb
-p5VKVfoK7uU0ly60w9+PNIRsX/VN/6SVcoOzKx40qQBMuYfJ72DQrsPjPYvNg/Nb
-4SK94Qhp9TlAyXbqKJ02DjtuDim44sGZ8g7b+k3FfoK4OtzNsqdVdQ==
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/client_revoked.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/client_revoked.pem
deleted file mode 100644
index 03db67deb50..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/client_revoked.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDZTCCAk2gAwIBAgIBAjANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjB4MRcwFQYDVQQD
-Ew5jbGllbnRfcmV2b2tlZDETMBEGA1UECxMKS2VybmVsVXNlcjEQMA4GA1UEChMH
-TW9uZ29EQjEWMBQGA1UEBxMNTmV3IFlvcmsgQ2l0eTERMA8GA1UECBMITmV3IFlv
-cmsxCzAJBgNVBAYTAlVTMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
-lJog+grPpvDFKFn9mxxToLgkx1uM+LmuRf1XG707TIccGfSKyZMGCcqlYVQmqT/J
-Fnq2rvtXGG9yxPhHoBwKw4x9rfQEy8Z93BAMNRoIlbt505HaWv7b+M3qksRHDLpw
-/Ix0Yay+fjT9DGmcrahC9h8c8SVtyHoODvxdmR7P+p7e6F6/6vcUkg/aU/50MRUN
-qGUSMkm/kpcjFNmDqRSLQoDPE8G2UOIolG/m95uaCOkGCT4lQjrCpR36fkr5a+vI
-UbDJP8M26Kp2fFnvObKzoSFSEVOMGeBM9p4qa88I4hwfthNFvGDOCNBKZK5ZxLZ3
-gGFcR6kL6u11y4zoLrZ6xwIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQB8WQMn/cjh
-9qFtr7JL4VPIz/+96QaBmkHxMqiYL/iMg5Vko3GllLc1mgfWQfaWOvyRJClKj395
-595L2u8wBKon3DXUPAkinc6+VOwDWsxFLNtWl+jhigat5UDzGm8ZKFhl0WwNhqzZ
-dlNPrh2LJZzPFfimfGyVkhPHYYdELvn+bnEMT8ae1jw2yQEeVFzHe7ZdlV5nMOE7
-Gx6ZZhYlS+jgpIxez5aiKqit/0azq5GGkpCv2H8/EXxkR4gLZGYnIqGuZP3r34NY
-Lkh5J3Qnpyhdopa/34yOCa8mY1wW7vEro0fb/Dh21bpyEOz6tBk3C1QRaGD+XQOM
-cedxtUjYmWqn
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEAlJog+grPpvDFKFn9mxxToLgkx1uM+LmuRf1XG707TIccGfSK
-yZMGCcqlYVQmqT/JFnq2rvtXGG9yxPhHoBwKw4x9rfQEy8Z93BAMNRoIlbt505Ha
-Wv7b+M3qksRHDLpw/Ix0Yay+fjT9DGmcrahC9h8c8SVtyHoODvxdmR7P+p7e6F6/
-6vcUkg/aU/50MRUNqGUSMkm/kpcjFNmDqRSLQoDPE8G2UOIolG/m95uaCOkGCT4l
-QjrCpR36fkr5a+vIUbDJP8M26Kp2fFnvObKzoSFSEVOMGeBM9p4qa88I4hwfthNF
-vGDOCNBKZK5ZxLZ3gGFcR6kL6u11y4zoLrZ6xwIDAQABAoIBAFlu0T3q18Iu0VlR
-n5GEYMgvSuM4NAVVKo8wwwYMwu1xuvXb+NMLyuyFqzaCQKpHmywOOnfhCC/KkxX8
-Ho87kTbTDKhuXZyOHx0cA1zKCDSlGdK8yt9M1vJMa0pdGi2M34b+uOQ35IVsOocH
-4KWayIH7g52V2xZ2bpOSSnpm0uCPZSBTgClCgTUYepOT2wbLn/8V0NtVpZhDsBqg
-fORuEHkiurrbLa8yjQsvbR+hsR/XbGhre8sTQapj4EITXvkEuOL/vwbRebhOFHgh
-8sipsXZ9CMaJkBpVoLZTxTKQID/9006cczJK2MGKFhn6mvP6AeFuJAM3xqLGZTc4
-xxpfJyECgYEA0+iKxy5r1WUpBHR8jTh7WjLc6r5MFJQlGgLPjdQW6gCIe/PZc+b9
-x5vDp27EQ1cAEePEu0glQ/yk19yfxbxrqHsRjRrgwoiYTXjGI5zZSjXKArHyEgBj
-XOyo5leO5XMFnk2AShPlh+/RhAW3NhxcWkBEAsCD6QyC3BPvP6aaAXkCgYEAs4WH
-dTuweTdnyquHQm59ijatvBeP8h4tBozSupflQjB9WxJeW5uEa8lNQ3lSz1F4TV3M
-xvGdDSqwftLRS2mWGho/1jaCeAzjsiUQ2WUHChxprt0+QU7XkJbaBY9eF+6THZFw
-sDG688TiolxqoD8OYi8EtxmIvbQhXHmXnrk3jj8CgYBSi74rkrisuqg8tQejl0Ht
-w+xsgM5wIblGJZwmOlzmsGh6KGYnkO6Ap/uSKELJnIVJcrk63wKtNigccjPGufwR
-+EbA+ZxeCwmQ/B/q1XmLP+K+JAUQ4BfUpdexSqA+XwzsOnJj6NY7mr65t+RDbs7G
-1Uvo6oc37Ai5pAZJfCN3uQKBgQAJr5qvaJkM8UBYXwjdPLjpTCnzjBHoLlifkdmM
-18U23QbmcwdESg/LAQF6MoGVTf//rJ/v2/ltTHBZZ2aDex7uKZxoImjHsWpXokhW
-cmz+zqmlFarWOzrGQl1hD2s0P1sQrVg3KXe8z1KrD/Fw0/Yitga7GlWWZrGmG6li
-lvu4YQKBgQANODQYEaz739IoPNnMfTpTqAoQIOR4PNdMfCXSQrCB8i0Hh4z48E4F
-DEAd1xIYyxI8pu7r52dQlBk7yrILOTG0gmgLJd5xKdtCTrasYAICI3hsRLtP8dVA
-8WeykXY4Wf1bYQ+VzKVImkwL/SBm2ik5woyxCzT8JSjyoAwRrQp9Vw==
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/cluster_cert.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/cluster_cert.pem
deleted file mode 100644
index a8623ab67ef..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/cluster_cert.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDXjCCAkagAwIBAgIBBDANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjBxMRQwEgYDVQQD
-EwtjbHVzdGVydGVzdDEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RC
-MRYwFAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkG
-A1UEBhMCVVMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCX42ZTwADG
-sEkS7ijfADlDQaJpbdgrnQKa5ssMQK3oRGSqXfTp0ThsJiVBbYZ8ZZRpPMgJdowa
-pFCGHQJh6VOdKelR0f/uNVpBGVz1yD4E4AtkA6UYcIJq6ywcj+W7Pli1Ed8VUN3Q
-tBU+HvHiEdMj74kLJb4ID1cP3gehvRv/0szkN8/ODFKCgYb1619BdFb9gRn8eily
-Wcg1m1gXz2xSfqRZkFEcEYet3BeOEGZBhaufJFzinvQjocH+kWFKlZf0+2DEFFbH
-NRqmabMmqMBUke629EUn8a7PBWBYNLld9afoNHwNY68wpONf5IqR2mNar5bVz8/d
-4g7BuVNvEFdJAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAA3U2O+cE/ZS8SDBw/sr
-BVFf0uaoME7+XX2jdTi4RUpWPfQ6uTkhKnXKzTzGrQtKwA96slGp4c3mxGBaAbC5
-IuTS97mLCju9NFvJVtazIajO4eNlG6dJSk0pQzjc0RAeLYksX/9NRNKZ+lQ5QVS2
-NVLce70QZBIvujjVJZ5hqDdjPV0JGOOUzNGyyUhzgY7s9MQagNnBSu5HO4CK1onc
-goOkizulq/5WF+JtqW8VKKx+/CH6SnTkS4b3qbjgKRmHZcOshH/d4KqhoLya7sfH
-pedmm7WgO9p8umXXqNj+04ehuPKTnD8tLMhj+GbJ9eIChPCBf1XnIzOXYep+fq9j
-n/g=
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEogIBAAKCAQEAl+NmU8AAxrBJEu4o3wA5Q0GiaW3YK50CmubLDECt6ERkql30
-6dE4bCYlQW2GfGWUaTzICXaMGqRQhh0CYelTnSnpUdH/7jVaQRlc9cg+BOALZAOl
-GHCCaussHI/luz5YtRHfFVDd0LQVPh7x4hHTI++JCyW+CA9XD94Hob0b/9LM5DfP
-zgxSgoGG9etfQXRW/YEZ/HopclnINZtYF89sUn6kWZBRHBGHrdwXjhBmQYWrnyRc
-4p70I6HB/pFhSpWX9PtgxBRWxzUapmmzJqjAVJHutvRFJ/GuzwVgWDS5XfWn6DR8
-DWOvMKTjX+SKkdpjWq+W1c/P3eIOwblTbxBXSQIDAQABAoIBAHhjNFMDZ1oUlgbc
-ICcI/VoyprFb8DA5ZbwzXBMdHPpxYzyp9hpxy3/lCDiAwEzPEsAK/h6KCOiA/pYe
-XioPD0gN1TIV+f3r6dqZaNYi3g1tK3odbXkejDCEsFT/NT6hXxw9yw0RKI9ofUHc
-synVqP3duUjNpH6s8fvQp0nqI0wzoNm1kklpTWVjZmbtSZF9m/xfv7NGwQEYUL2V
-f5YvX6aHPVDtUXAqyPBgv6SGuogSSjwRTsNTef3aY6Se5MlP3YIfRqdad8+ORkKu
-WSrO+GjQccV4sztD8Sn3LR7qe6Lmid4yopHSS4EFq0Sc8LznTeflWcRAsBLezRp5
-xZB/blECgYEA8yrEzFA247AOXbhL1CdqMyPs523oy5+dmByyovjYjEhjUCRlAa9D
-ApvID4TfAkA4n0rUdICCtwbZlFrBZbn6rXNvJ362ufZjvaFIucQm90YkG1J6Ldek
-8ohJfLyyLLWzVHJIS7WxFqqsGmDhYUTErFbJZjI8tNSglrc81jUWT7UCgYEAn+dw
-ICyc09f6+xm3nFZIOq2Gtpw8lrOJlwZugn1AqY2D5Ko2gq1Fx2oZWpVaBivjH3gU
-ONlnPuealE0RJHvCm/+axy7Rcj65IwTrN5V+j6rg1tuEdi70PvNKmN6XQqRvEjOX
-HOh3gQYP6EFAoVINZZqUkwJzqpv4tnOSpEHXncUCgYB3+Z8Vq3IZjtDXvslzCGtm
-hhAp81mLtdocpfQhYqP9Ou39KafIV/+49sGTnpwlUShet53xSUK1KSULBGgtV8Bt
-+ela1DM1t3Joqn3mYfhTwoCoFl5/5cjVfRa8+6DxXEj5nlU7PY79PwIhFbG9ux9K
-ZJuD17+J/Oqq0gerLJAwjQKBgAS4AbkRV/dwcjmiwqZcbXk90bHl3mvcFH1edTho
-ldXrFS9UTpOApYSC/wiLS8LO3L76/i3HTKKwlwE1XQIknNOZsWmbWhby/uenp4FW
-agu3UTdF9xy9uft5loP4XaJb0+NHnnf97DjkgueptUyNbVPIQgYsllk8jRRlSLiM
-MN65AoGAUPLlh8ok/iNirO5YKqc5/3FKA1o1V1KSTHYVUK+Y+vuVJxQZeO3LMybe
-7AJ1cLHEWc8V4B27e6g33rfGGAW+/+RJ7/uHxuYCuKhstbq/x+rf9i4nl93emlMV
-PC3yuZsCmpk9Uypzi2+PT10yVgXkXRYtLpuUpoABWRzVXGnEsXo=
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/command_line/test_parsed_options.js b/src/mongo/gotools/test/qa-tests/jstests/libs/command_line/test_parsed_options.js
deleted file mode 100644
index 55f93a01d1c..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/command_line/test_parsed_options.js
+++ /dev/null
@@ -1,213 +0,0 @@
-// Merge the two options objects. Used as a helper when we are trying to actually compare options
-// despite the fact that our test framework adds extra stuff to it. Anything set in the second
-// options object overrides the first options object. The two objects must have the same structure.
-function mergeOptions(obj1, obj2) {
- var obj3 = {};
- var attrname;
- for (attrname in obj1) {
- if (typeof obj1[attrname] === "object" &&
- typeof obj2[attrname] !== "undefined") {
- if (typeof obj2[attrname] !== "object") {
- throw Error("Objects being merged must have the same structure");
- }
- obj3[attrname] = mergeOptions(obj1[attrname], obj2[attrname]);
- } else {
- obj3[attrname] = obj1[attrname];
- }
- }
- for (attrname in obj2) {
- if (typeof obj2[attrname] === "object" &&
- typeof obj1[attrname] !== "undefined") {
- if (typeof obj1[attrname] !== "object") {
- throw Error("Objects being merged must have the same structure");
- }
- // Already handled above
- } else {
- obj3[attrname] = obj2[attrname];
- }
- }
- return obj3;
-}
-
-// Test that the parsed result of setting certain command line options has the correct format in
-// mongod. See SERVER-13379.
-//
-// Arguments:
-// mongoRunnerConfig - Configuration object to pass to the mongo runner
-// expectedResult - Object formatted the same way as the result of running the "getCmdLineOpts"
-// command, but with only the fields that should be set by the options implied by the first
-// argument set.
-//
-// Example:
-//
-// testGetCmdLineOptsMongod({ port : 10000 }, { "parsed" : { "net" : { "port" : 10000 } } });
-//
-var getCmdLineOptsBaseMongod;
-function testGetCmdLineOptsMongod(mongoRunnerConfig, expectedResult) {
-
- // Get the options object returned by "getCmdLineOpts" when we spawn a mongod using our test
- // framework without passing any additional options. We need this because the framework adds
- // options of its own, and we only want to compare against the options we care about.
- function getBaseOptsObject() {
-
- // Start mongod with no options
- var baseMongod = MongoRunner.runMongod();
-
- // Get base command line opts. Needed because the framework adds its own options
- var getCmdLineOptsBaseMongod = baseMongod.adminCommand("getCmdLineOpts");
-
- // Stop the mongod we used to get the options
- MongoRunner.stopMongod(baseMongod.port);
-
- return getCmdLineOptsBaseMongod;
- }
-
- if (typeof getCmdLineOptsBaseMongod === "undefined") {
- getCmdLineOptsBaseMongod = getBaseOptsObject();
- }
-
- // Get base command line opts. Needed because the framework adds its own options
- var getCmdLineOptsExpected = getCmdLineOptsBaseMongod;
-
- // Delete port and dbPath if we are not explicitly setting them, since they will change on
- // multiple runs of the test framework and cause false failures.
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.net === "undefined" ||
- typeof expectedResult.parsed.net.port === "undefined") {
- delete getCmdLineOptsExpected.parsed.net.port;
- }
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.storage === "undefined" ||
- typeof expectedResult.parsed.storage.dbPath === "undefined") {
- delete getCmdLineOptsExpected.parsed.storage.dbPath;
- }
-
- // Merge with the result that we expect
- expectedResult = mergeOptions(getCmdLineOptsExpected, expectedResult);
-
- // Start mongod with options
- var mongod = MongoRunner.runMongod(mongoRunnerConfig);
-
- // Create and authenticate high-privilege user in case mongod is running with authorization.
- // Try/catch is necessary in case this is being run on an uninitiated replset, by a test
- // such as repl_options.js for example.
- var ex;
- try {
- mongod.getDB("admin").createUser({user: "root", pwd: "pass", roles: ["root"]});
- mongod.getDB("admin").auth("root", "pass");
- } catch (err) {
- ex = err;
- }
-
- // Get the parsed options
- var getCmdLineOptsResult = mongod.adminCommand("getCmdLineOpts");
-
- // Delete port and dbPath if we are not explicitly setting them, since they will change on
- // multiple runs of the test framework and cause false failures.
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.net === "undefined" ||
- typeof expectedResult.parsed.net.port === "undefined") {
- delete getCmdLineOptsResult.parsed.net.port;
- }
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.storage === "undefined" ||
- typeof expectedResult.parsed.storage.dbPath === "undefined") {
- delete getCmdLineOptsResult.parsed.storage.dbPath;
- }
-
- // Make sure the options are equal to what we expect
- assert.docEq(getCmdLineOptsResult.parsed, expectedResult.parsed);
-
- // Cleanup
- mongod.getDB("admin").logout();
- MongoRunner.stopMongod(mongod.port);
-}
-
-// Test that the parsed result of setting certain command line options has the correct format in
-// mongos. See SERVER-13379.
-//
-// Arguments:
-// mongoRunnerConfig - Configuration object to pass to the mongo runner
-// expectedResult - Object formatted the same way as the result of running the "getCmdLineOpts"
-// command, but with only the fields that should be set by the options implied by the first
-// argument set.
-//
-// Example:
-//
-// testGetCmdLineOptsMongos({ port : 10000 }, { "parsed" : { "net" : { "port" : 10000 } } });
-//
-var getCmdLineOptsBaseMongos;
-function testGetCmdLineOptsMongos(mongoRunnerConfig, expectedResult) {
-
- // Get the options object returned by "getCmdLineOpts" when we spawn a mongos using our test
- // framework without passing any additional options. We need this because the framework adds
- // options of its own, and we only want to compare against the options we care about.
- function getBaseOptsObject() {
-
- // Start mongod with no options
- var baseMongod = MongoRunner.runMongod();
-
- // Start mongos with only the configdb option
- var baseMongos = MongoRunner.runMongos({configdb: baseMongod.host});
-
- // Get base command line opts. Needed because the framework adds its own options
- var getCmdLineOptsBaseMongos = baseMongos.adminCommand("getCmdLineOpts");
-
- // Remove the configdb option
- delete getCmdLineOptsBaseMongos.parsed.sharding.configDB;
-
- // Stop the mongod and mongos we used to get the options
- MongoRunner.stopMongos(baseMongos.port);
- MongoRunner.stopMongod(baseMongod.port);
-
- return getCmdLineOptsBaseMongos;
- }
-
- if (typeof getCmdLineOptsBaseMongos === "undefined") {
- getCmdLineOptsBaseMongos = getBaseOptsObject();
- }
-
- // Get base command line opts. Needed because the framework adds its own options
- var getCmdLineOptsExpected = getCmdLineOptsBaseMongos;
-
- // Delete port if we are not explicitly setting it, since it will change on multiple runs of the
- // test framework and cause false failures.
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.net === "undefined" ||
- typeof expectedResult.parsed.net.port === "undefined") {
- delete getCmdLineOptsExpected.parsed.net.port;
- }
-
- // Merge with the result that we expect
- expectedResult = mergeOptions(getCmdLineOptsExpected, expectedResult);
-
- // Start mongod with no options
- var mongod = MongoRunner.runMongod();
-
- // Add configdb option
- mongoRunnerConfig['configdb'] = mongod.host;
-
- // Start mongos connected to mongod
- var mongos = MongoRunner.runMongos(mongoRunnerConfig);
-
- // Get the parsed options
- var getCmdLineOptsResult = mongos.adminCommand("getCmdLineOpts");
-
- // Delete port if we are not explicitly setting it, since it will change on multiple runs of the
- // test framework and cause false failures.
- if (typeof expectedResult.parsed === "undefined" ||
- typeof expectedResult.parsed.net === "undefined" ||
- typeof expectedResult.parsed.net.port === "undefined") {
- delete getCmdLineOptsResult.parsed.net.port;
- }
-
- // Remove the configdb option
- delete getCmdLineOptsResult.parsed.sharding.configDB;
-
- // Make sure the options are equal to what we expect
- assert.docEq(getCmdLineOptsResult.parsed, expectedResult.parsed);
-
- // Cleanup
- MongoRunner.stopMongos(mongos.port);
- MongoRunner.stopMongod(mongod.port);
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_auth.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_auth.ini
deleted file mode 100644
index c1193be1b03..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_auth.ini
+++ /dev/null
@@ -1 +0,0 @@
-auth=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_dur.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_dur.ini
deleted file mode 100644
index 8f83f3ae5a7..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_dur.ini
+++ /dev/null
@@ -1 +0,0 @@
-dur=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_httpinterface.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_httpinterface.ini
deleted file mode 100644
index fc839a98a76..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_httpinterface.ini
+++ /dev/null
@@ -1 +0,0 @@
-httpinterface=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_ipv6.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_ipv6.ini
deleted file mode 100644
index a091421022d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_ipv6.ini
+++ /dev/null
@@ -1 +0,0 @@
-ipv6=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_journal.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_journal.ini
deleted file mode 100644
index d0010a86906..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_journal.ini
+++ /dev/null
@@ -1 +0,0 @@
-journal=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_jsonp.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_jsonp.ini
deleted file mode 100644
index 82847f50b2b..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_jsonp.ini
+++ /dev/null
@@ -1 +0,0 @@
-jsonp=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_jsonp.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_jsonp.json
deleted file mode 100644
index 4d5477a8547..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_jsonp.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "net" : {
- "http" : {
- "JSONPEnabled" : false
- }
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_moveparanoia.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_moveparanoia.ini
deleted file mode 100644
index f21b50f9513..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_moveparanoia.ini
+++ /dev/null
@@ -1 +0,0 @@
-moveParanoia=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noauth.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noauth.ini
deleted file mode 100644
index a65f909baf3..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noauth.ini
+++ /dev/null
@@ -1 +0,0 @@
-noauth=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noautosplit.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noautosplit.ini
deleted file mode 100644
index b490f9038dd..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noautosplit.ini
+++ /dev/null
@@ -1 +0,0 @@
-noAutoSplit=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nodur.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nodur.ini
deleted file mode 100644
index b0c73a48b30..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nodur.ini
+++ /dev/null
@@ -1 +0,0 @@
-nodur=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nohttpinterface.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nohttpinterface.ini
deleted file mode 100644
index 52c4958da6e..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nohttpinterface.ini
+++ /dev/null
@@ -1 +0,0 @@
-nohttpinterface=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noindexbuildretry.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noindexbuildretry.ini
deleted file mode 100644
index 79e428c492f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noindexbuildretry.ini
+++ /dev/null
@@ -1 +0,0 @@
-noIndexBuildRetry=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nojournal.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nojournal.ini
deleted file mode 100644
index 17172363d25..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nojournal.ini
+++ /dev/null
@@ -1 +0,0 @@
-nojournal=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nomoveparanoia.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nomoveparanoia.ini
deleted file mode 100644
index 4696304134f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nomoveparanoia.ini
+++ /dev/null
@@ -1 +0,0 @@
-noMoveParanoia=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noobjcheck.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noobjcheck.ini
deleted file mode 100644
index 471e83c3172..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noobjcheck.ini
+++ /dev/null
@@ -1 +0,0 @@
-noobjcheck=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noprealloc.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noprealloc.ini
deleted file mode 100644
index 08c78be3507..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noprealloc.ini
+++ /dev/null
@@ -1 +0,0 @@
-noprealloc=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noscripting.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noscripting.ini
deleted file mode 100644
index 4cfaf3395f6..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_noscripting.ini
+++ /dev/null
@@ -1 +0,0 @@
-noscripting=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nounixsocket.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nounixsocket.ini
deleted file mode 100644
index 66da9f08391..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_nounixsocket.ini
+++ /dev/null
@@ -1 +0,0 @@
-nounixsocket=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_objcheck.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_objcheck.ini
deleted file mode 100644
index bd19d026bbf..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_objcheck.ini
+++ /dev/null
@@ -1 +0,0 @@
-objcheck=false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_rest_interface.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_rest_interface.json
deleted file mode 100644
index f9ad93a4f5d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/disable_rest_interface.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "net" : {
- "http" : {
- "RESTInterfaceEnabled" : false
- }
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_auth.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_auth.json
deleted file mode 100644
index 9f9cc84d107..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_auth.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "security" : {
- "authorization" : "enabled"
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_autosplit.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_autosplit.json
deleted file mode 100644
index a0d4f8af1be..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_autosplit.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "sharding" : {
- "autoSplit" : true
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_httpinterface.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_httpinterface.json
deleted file mode 100644
index c87dabe125d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_httpinterface.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "net" : {
- "http" : {
- "enabled" : true
- }
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_indexbuildretry.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_indexbuildretry.json
deleted file mode 100644
index 362db08edd3..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_indexbuildretry.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "storage" : {
- "indexBuildRetry" : true
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_journal.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_journal.json
deleted file mode 100644
index d75b94ccbc7..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_journal.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "storage" : {
- "journal" : {
- "enabled" : false
- }
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_objcheck.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_objcheck.json
deleted file mode 100644
index b52be7382ed..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_objcheck.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "net" : {
- "wireObjectCheck" : true
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_paranoia.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_paranoia.json
deleted file mode 100644
index 218646b1662..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_paranoia.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "sharding" : {
- "archiveMovedChunks" : true
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_prealloc.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_prealloc.json
deleted file mode 100644
index 15ecefbb546..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_prealloc.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "storage" : {
- "preallocDataFiles" : true
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_scripting.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_scripting.json
deleted file mode 100644
index e8f32f2c23c..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_scripting.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "security" : {
- "javascriptEnabled" : true
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_unixsocket.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_unixsocket.json
deleted file mode 100644
index 660d21eb17f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/enable_unixsocket.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "net" : {
- "unixDomainSocket" : {
- "enabled" : true
- }
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/implicitly_enable_dur.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/implicitly_enable_dur.ini
deleted file mode 100644
index 43495fbd0bd..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/implicitly_enable_dur.ini
+++ /dev/null
@@ -1 +0,0 @@
-dur=
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/implicitly_enable_journal.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/implicitly_enable_journal.ini
deleted file mode 100644
index f750ac2e185..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/implicitly_enable_journal.ini
+++ /dev/null
@@ -1 +0,0 @@
-journal=
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/implicitly_enable_nodur.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/implicitly_enable_nodur.ini
deleted file mode 100644
index f1046df16a9..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/implicitly_enable_nodur.ini
+++ /dev/null
@@ -1 +0,0 @@
-nodur=
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/implicitly_enable_nojournal.ini b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/implicitly_enable_nojournal.ini
deleted file mode 100644
index 737e5c28029..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/implicitly_enable_nojournal.ini
+++ /dev/null
@@ -1 +0,0 @@
-nojournal=
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_component_verbosity.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_component_verbosity.json
deleted file mode 100644
index 69c200834a1..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_component_verbosity.json
+++ /dev/null
@@ -1,16 +0,0 @@
-{
- "systemLog" : {
- "verbosity" : 2,
- "component" : {
- "accessControl" : {
- "verbosity" : 0
- },
- "storage" : {
- "verbosity" : 3,
- "journaling" : {
- "verbosity" : 5
- }
- }
- }
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_profiling.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_profiling.json
deleted file mode 100644
index 944f0de1575..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_profiling.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "operationProfiling" : {
- "mode" : "all"
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_replsetname.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_replsetname.json
deleted file mode 100644
index 522ca2b766f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_replsetname.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "replication" : {
- "replSetName" : "myconfigname"
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_shardingrole.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_shardingrole.json
deleted file mode 100644
index 71f92f122db..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_shardingrole.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "sharding" : {
- "clusterRole" : "configsvr"
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_verbosity.json b/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_verbosity.json
deleted file mode 100644
index 47a1cce1b03..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/config_files/set_verbosity.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "systemLog" : {
- "verbosity" : 5
- }
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/crl.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/crl.pem
deleted file mode 100644
index 275c9e2d91c..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/crl.pem
+++ /dev/null
@@ -1,38 +0,0 @@
-Certificate Revocation List (CRL):
- Version 2 (0x1)
- Signature Algorithm: sha256WithRSAEncryption
- Issuer: /CN=Kernel Test CA/OU=Kernel/O=MongoDB/L=New York City/ST=New York/C=US
- Last Update: Aug 21 13:56:28 2014 GMT
- Next Update: Aug 18 13:56:28 2024 GMT
- CRL extensions:
- X509v3 CRL Number:
- 4096
-No Revoked Certificates.
- Signature Algorithm: sha256WithRSAEncryption
- 48:1b:0b:b1:89:f5:6f:af:3c:dd:2a:a0:e5:55:04:80:16:b4:
- 23:98:39:bb:9f:16:c9:25:73:72:c6:a6:73:21:1d:1a:b6:99:
- fc:47:5e:bc:af:64:29:02:9c:a5:db:15:8a:65:48:3c:4f:a6:
- cd:35:47:aa:c6:c0:39:f5:a6:88:8f:1b:6c:26:61:4e:10:d7:
- e2:b0:20:3a:64:92:c1:d3:2a:11:3e:03:e2:50:fd:4e:3c:de:
- e2:e5:78:dc:8e:07:a5:69:55:13:2b:8f:ae:21:00:42:85:ff:
- b6:b1:2b:69:08:40:5a:25:8c:fe:57:7f:b1:06:b0:72:ff:61:
- de:21:59:05:a8:1b:9e:c7:8a:08:ab:f5:bc:51:b3:36:68:0f:
- 54:65:3c:8d:b7:80:d0:27:01:3e:43:97:89:19:89:0e:c5:01:
- 2c:55:9f:b6:e4:c8:0b:35:f8:52:45:d3:b4:09:ce:df:73:98:
- f5:4c:e4:5a:06:ac:63:4c:f8:4d:9c:af:88:fc:19:f7:77:ea:
- ee:56:18:49:16:ce:62:66:d1:1b:8d:66:33:b5:dc:b1:25:b3:
- 6c:81:e9:d0:8a:1d:83:61:49:0e:d9:94:6a:46:80:41:d6:b6:
- 59:a9:30:55:3d:5b:d3:5b:f1:37:ec:2b:76:d0:3a:ac:b2:c8:
- 7c:77:04:78
------BEGIN X509 CRL-----
-MIIBzjCBtwIBATANBgkqhkiG9w0BAQsFADB0MRcwFQYDVQQDEw5LZXJuZWwgVGVz
-dCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYwFAYDVQQH
-Ew1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMX
-DTE0MDgyMTEzNTYyOFoXDTI0MDgxODEzNTYyOFqgDzANMAsGA1UdFAQEAgIQADAN
-BgkqhkiG9w0BAQsFAAOCAQEASBsLsYn1b6883Sqg5VUEgBa0I5g5u58WySVzcsam
-cyEdGraZ/EdevK9kKQKcpdsVimVIPE+mzTVHqsbAOfWmiI8bbCZhThDX4rAgOmSS
-wdMqET4D4lD9Tjze4uV43I4HpWlVEyuPriEAQoX/trEraQhAWiWM/ld/sQawcv9h
-3iFZBagbnseKCKv1vFGzNmgPVGU8jbeA0CcBPkOXiRmJDsUBLFWftuTICzX4UkXT
-tAnO33OY9UzkWgasY0z4TZyviPwZ93fq7lYYSRbOYmbRG41mM7XcsSWzbIHp0Iod
-g2FJDtmUakaAQda2WakwVT1b01vxN+wrdtA6rLLIfHcEeA==
------END X509 CRL-----
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/crl_client_revoked.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/crl_client_revoked.pem
deleted file mode 100644
index 0b99d56936e..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/crl_client_revoked.pem
+++ /dev/null
@@ -1,41 +0,0 @@
-Certificate Revocation List (CRL):
- Version 2 (0x1)
- Signature Algorithm: sha256WithRSAEncryption
- Issuer: /CN=Kernel Test CA/OU=Kernel/O=MongoDB/L=New York City/ST=New York/C=US
- Last Update: Aug 21 13:43:27 2014 GMT
- Next Update: Aug 18 13:43:27 2024 GMT
- CRL extensions:
- X509v3 CRL Number:
- 4096
-Revoked Certificates:
- Serial Number: 02
- Revocation Date: Aug 21 13:43:27 2014 GMT
- Signature Algorithm: sha256WithRSAEncryption
- 24:86:73:8d:7f:55:15:d0:d6:8a:47:53:cf:97:f7:e5:3d:0b:
- 4a:ea:fb:02:6a:2e:79:c6:b1:38:b2:ac:f0:c0:64:47:b0:3e:
- ad:4e:2e:94:e6:64:ed:79:34:bd:74:c0:d4:3d:b9:a1:bb:38:
- 89:5c:02:6a:ad:6b:dc:3b:64:34:6a:2d:4c:90:36:82:95:0c:
- 19:88:e2:a3:bf:8e:1b:56:98:37:32:87:ed:f0:bd:dd:e2:0d:
- f9:80:dc:f2:a5:b4:ee:d9:bb:83:fe:b8:3a:13:e0:da:fc:04:
- 77:fb:ce:f9:c5:2a:54:a7:f0:34:09:2a:b2:3d:46:1b:48:e6:
- e8:16:c7:a1:3c:88:8c:72:cd:cc:53:dc:f8:54:63:1f:b9:8b:
- ea:2c:e5:26:c5:b4:a4:9f:8b:e1:6c:85:9b:c6:63:6f:2f:ae:
- 18:c5:6a:23:f0:58:27:85:5c:0f:01:04:da:d2:8b:de:9e:ab:
- 46:00:22:07:28:e1:ef:46:91:90:06:58:95:05:68:67:58:6e:
- 67:a8:0b:06:1a:73:d9:04:18:c9:a3:e4:e3:d6:94:a3:e1:5c:
- e5:08:1b:b3:9d:ab:3e:ea:20:b1:04:e5:90:e1:42:54:b2:58:
- bb:51:1a:48:87:60:b0:95:4a:2e:ce:a0:4f:8c:17:6d:6b:4c:
- 37:aa:4d:d7
------BEGIN X509 CRL-----
-MIIB5DCBzQIBATANBgkqhkiG9w0BAQsFADB0MRcwFQYDVQQDEw5LZXJuZWwgVGVz
-dCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYwFAYDVQQH
-Ew1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMX
-DTE0MDgyMTEzNDMyN1oXDTI0MDgxODEzNDMyN1owFDASAgECFw0xNDA4MjExMzQz
-MjdaoA8wDTALBgNVHRQEBAICEAAwDQYJKoZIhvcNAQELBQADggEBACSGc41/VRXQ
-1opHU8+X9+U9C0rq+wJqLnnGsTiyrPDAZEewPq1OLpTmZO15NL10wNQ9uaG7OIlc
-Amqta9w7ZDRqLUyQNoKVDBmI4qO/jhtWmDcyh+3wvd3iDfmA3PKltO7Zu4P+uDoT
-4Nr8BHf7zvnFKlSn8DQJKrI9RhtI5ugWx6E8iIxyzcxT3PhUYx+5i+os5SbFtKSf
-i+FshZvGY28vrhjFaiPwWCeFXA8BBNrSi96eq0YAIgco4e9GkZAGWJUFaGdYbmeo
-CwYac9kEGMmj5OPWlKPhXOUIG7Odqz7qILEE5ZDhQlSyWLtRGkiHYLCVSi7OoE+M
-F21rTDeqTdc=
------END X509 CRL-----
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/crl_expired.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/crl_expired.pem
deleted file mode 100644
index c9b3abb05a7..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/crl_expired.pem
+++ /dev/null
@@ -1,38 +0,0 @@
-Certificate Revocation List (CRL):
- Version 2 (0x1)
- Signature Algorithm: sha256WithRSAEncryption
- Issuer: /CN=Kernel Test CA/OU=Kernel/O=MongoDB/L=New York City/ST=New York/C=US
- Last Update: Jul 21 19:45:56 2014 GMT
- Next Update: Jul 21 20:45:56 2014 GMT
- CRL extensions:
- X509v3 CRL Number:
- 4096
-No Revoked Certificates.
- Signature Algorithm: sha256WithRSAEncryption
- 14:e8:6d:51:fc:0e:66:08:22:b2:4d:fb:da:7a:5f:4d:d1:a0:
- 80:f0:18:f3:c5:ca:c7:05:6c:70:59:fa:d5:96:68:fa:c7:1d:
- 7e:fb:53:3b:4a:8f:ed:bb:51:04:e8:fb:db:d7:b8:96:d9:e2:
- 8d:bb:54:cc:11:60:c8:20:ea:81:28:5f:e1:eb:d6:8c:94:bf:
- 42:e0:7f:a3:13:0c:76:05:f2:f0:34:98:a3:e8:64:74:4c:cb:
- bf:39:bb:fa:d5:2d:72:02:d1:fa:56:15:59:12:b7:ff:a3:cc:
- c9:d6:14:ca:4a:1e:0b:b4:47:cf:58:b0:e5:24:d2:21:71:0d:
- 2d:09:77:5c:2f:ef:40:f8:74:90:03:cc:37:2e:ea:6a:25:59:
- c0:bf:48:90:00:55:9c:db:bf:1f:f0:7b:b6:5a:90:94:b6:8d:
- 7c:7d:bb:2d:11:5f:0c:f5:4a:9b:c5:ed:ab:e3:fd:35:c8:76:
- 3b:2e:41:cb:df:76:b5:f4:e9:05:72:f6:56:7a:fc:34:07:d6:
- a2:55:eb:7c:58:33:5b:9d:3e:b2:03:89:01:c6:d1:54:75:1a:
- 5c:73:3f:5e:2e:fd:3b:38:ed:d4:e1:fa:ec:ff:84:f0:55:ee:
- 83:e0:f0:13:97:e7:f0:55:8c:00:a3:1a:31:e4:31:9e:68:d0:
- 6d:3e:81:b0
------BEGIN X509 CRL-----
-MIIBzjCBtwIBATANBgkqhkiG9w0BAQsFADB0MRcwFQYDVQQDEw5LZXJuZWwgVGVz
-dCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYwFAYDVQQH
-Ew1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMX
-DTE0MDcyMTE5NDU1NloXDTE0MDcyMTIwNDU1NlqgDzANMAsGA1UdFAQEAgIQADAN
-BgkqhkiG9w0BAQsFAAOCAQEAFOhtUfwOZggisk372npfTdGggPAY88XKxwVscFn6
-1ZZo+scdfvtTO0qP7btRBOj729e4ltnijbtUzBFgyCDqgShf4evWjJS/QuB/oxMM
-dgXy8DSYo+hkdEzLvzm7+tUtcgLR+lYVWRK3/6PMydYUykoeC7RHz1iw5STSIXEN
-LQl3XC/vQPh0kAPMNy7qaiVZwL9IkABVnNu/H/B7tlqQlLaNfH27LRFfDPVKm8Xt
-q+P9Nch2Oy5By992tfTpBXL2Vnr8NAfWolXrfFgzW50+sgOJAcbRVHUaXHM/Xi79
-Ozjt1OH67P+E8FXug+DwE5fn8FWMAKMaMeQxnmjQbT6BsA==
------END X509 CRL-----
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/dur_checksum_bad_first.journal b/src/mongo/gotools/test/qa-tests/jstests/libs/dur_checksum_bad_first.journal
deleted file mode 100644
index 687317844a7..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/dur_checksum_bad_first.journal
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/dur_checksum_bad_last.journal b/src/mongo/gotools/test/qa-tests/jstests/libs/dur_checksum_bad_last.journal
deleted file mode 100644
index 7dd98e2c97b..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/dur_checksum_bad_last.journal
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/dur_checksum_good.journal b/src/mongo/gotools/test/qa-tests/jstests/libs/dur_checksum_good.journal
deleted file mode 100644
index d76790d2451..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/dur_checksum_good.journal
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/expired.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/expired.pem
deleted file mode 100644
index e1d2ceb8de8..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/expired.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDfzCCAmegAwIBAgIBEDANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMTQwNzEwMTYwMDAwWhcNMTQwNzE2MTYwMDAwWjBtMRAwDgYDVQQD
-EwdleHBpcmVkMQ8wDQYDVQQLEwZLZXJuZWwxEDAOBgNVBAoTB01vbmdvREIxFjAU
-BgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYDVQQG
-EwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPFSQZlHvJpi3dmA
-1X5U1qaUN/O/EQy5IZ5Rw+cfFHWOZ84EsLZxehWyqDZRH49Rg06xSYdO2WZOopP8
-OnUVCLGL819K83ikZ5sCbvB/gKCSCenwveEN992gJfs70HaZfiJNC7/cFigSb5Jg
-5G77E1/Uml4hIThfYG2NbCsTuP/P4JLwuzCkfgEUWRbCioMPEpIpxQw2LCx5DCy6
-Llhct0Hp14N9dZ4nA1h1621wOckgGJHw9DXdt9rGzulY1UgOOPczyqT08CdpaVxK
-VzrJCcUxfUjhO4ukHz+LBFQY+ZEm+tVboDbinbiHxY24urP46/u+BwRvBvjOovJi
-NVUh5GsCAwEAAaMjMCEwHwYDVR0RBBgwFoIJbG9jYWxob3N0ggkxMjcuMC4wLjEw
-DQYJKoZIhvcNAQEFBQADggEBAG3rRSFCSG3hilGK9SMtTpFnrquJNlL+yG0TP8VG
-1qVt1JGaDJ8YUc5HXXtKBeLnRYii7EUx1wZIKn78RHRdITo5OJvlmcwwh0bt+/eK
-u9XFgR3z35w5UPr/YktgoX39SOzAZUoorgNw500pfxfneqCZtcRufVvjtk8TUdlN
-lcd2HfIxtUHWJeTcVM18g0JdHMYdMBXDKuXOW9VWLIBC2G6nAL/8SZJtUaDllPb4
-NisuIGjfjGgNxMpEXn+sQjFTupAoJru21OtAgERWFJhKQ0hbO0kucEPKEfxHDBVG
-dKSRIl6b0XSDLfxEXPv5ZhdrK4KEw1dYYXySvIVXtn0Ys38=
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEA8VJBmUe8mmLd2YDVflTWppQ3878RDLkhnlHD5x8UdY5nzgSw
-tnF6FbKoNlEfj1GDTrFJh07ZZk6ik/w6dRUIsYvzX0rzeKRnmwJu8H+AoJIJ6fC9
-4Q333aAl+zvQdpl+Ik0Lv9wWKBJvkmDkbvsTX9SaXiEhOF9gbY1sKxO4/8/gkvC7
-MKR+ARRZFsKKgw8SkinFDDYsLHkMLLouWFy3QenXg311nicDWHXrbXA5ySAYkfD0
-Nd232sbO6VjVSA449zPKpPTwJ2lpXEpXOskJxTF9SOE7i6QfP4sEVBj5kSb61Vug
-NuKduIfFjbi6s/jr+74HBG8G+M6i8mI1VSHkawIDAQABAoIBAGAO1QvVkU6HAjX8
-4X6a+KJwJ2F/8aJ14trpQyixp2wv1kQce9bzjpwqdGjCm+RplvHxAgq5KTJfJLnx
-UbefOsmpoqOQ6x9fmdoK+uwCZMoFt6qGaJ63960hfVzm71D2Qk4XCxFA4xTqWb0T
-knpWuNyRfSzw1Q9ib7jL7X2sKRyx9ZP+1a41ia/Ko6iYPUUnRb1Ewo10alYVWVIE
-upeIlWqv+1DGfda9f34pGVh3ldIDh1LHqaAZhdn6sKtcgIUGcWatZRmQiA5kSflP
-VBpOI2c2tkQv0j5cPGwD7GGaJ2aKayHG0EwnoNmxCeR0Ay3MO0vBAsxn7Wy6yqrS
-EfkYhFkCgYEA/OA2AHFIH7mE0nrMwegXrEy7BZUgLRCRFWTjxwnCKFQj2Uo2dtYD
-2QQKuQWeiP+LD2nHj4n1KXuSJiB1GtmEF3JkYV4Wd7mPWEVNDHa0G8ZndquPK40s
-YSjh9u0KesUegncBFfIiwzxsk9724iaXq3aXOexc0btQB2xltRzj6/0CgYEA9E2A
-QU6pnCOzGDyOV7+TFr0ha7TXaMOb5aIVz6tJ7r5Nb7oZP9T9UCdUnw2Tls5Ce5tI
-J23O7JqwT4CudnWnk5ZtVtGBYA23mUryrgf/Utfg08hU2uRyq9LOxVaVqfV/AipN
-62GmfuxkK4PatOcAOhKqmS/zGfZqIg7V6rtX2ocCgYEAlY1ogpR8ij6mvfBgPmGr
-9nues+uBDwXYOCXlzCYKTN2OIgkQ8vEZb3RDfy9CllVDgccWfd6iPnlVcvUJLOrt
-gwxlL2x8ryvwCc1ahv+A/1g0gmtuDdy9HW0XTnjcFMWViKUm4DrGsl5+/GkF67PV
-SVOmllwifOthpjJGaHmAlmUCgYB6EFMZzlzud+PfIzqX20952Avfzd6nKL03EjJF
-rbbmA82bGmfNPfVHXC9qvRTWD76mFeMKWFJAY9XeE1SYOZb+JfYBn/I9dP0cKZdx
-nutSkCx0hK7pI6Wr9kt7zBRBdDj+cva1ufe/iQtPtrTLGHRDj9oPaibT/Qvwcmst
-umdd9wKBgQDM7j6Rh7v8AeLy2bw73Qtk0ORaHqRBHSQw87srOLwtfQzE92zSGMj+
-FVt/BdPgzyaddegKvJ9AFCPAxbA8Glnmc89FO7pcXn9Wcy+ZoZIF6YwgUPhPCp/4
-r9bKuXuQiutFbKyes/5PTXqbJ/7xKRZIpQCvxg2syrW3hxx8LIx/kQ==
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/extended_assert.js b/src/mongo/gotools/test/qa-tests/jstests/libs/extended_assert.js
deleted file mode 100644
index 8680fb08da0..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/extended_assert.js
+++ /dev/null
@@ -1,61 +0,0 @@
-// Exports 'extendedAssert' which includes all built in assertions and:
-// - New extendedAssert.strContains(needle, haystack, msg)
-// - a .soon variant of eq, neq, contains, gt, lt, gte, lte, and strContains
-// e.g. .eq.soon(expected, getActualFunc, msg[, timeout, interval])
-// This produces more descriptive assertion error messages than the built
-// in assert.soon provides.
-
-var extendedAssert;
-(function() {
- if (typeof extendedAssert !== 'undefined') {
- return;
- }
-
- // Make a copy of the assert object
- extendedAssert = assert.bind(this);
- for (var key in assert) {
- if (assert.hasOwnProperty(key)) {
- extendedAssert[key] = assert[key];
- }
- }
-
- extendedAssert.strContains = function(needle, haystack, msg) {
- if (haystack.indexOf(needle) === -1) {
- doassert('"' + haystack + '" does not contain "' + needle + '" : ' + msg);
- }
- };
-
- var EX_ASSERT_DONT_PRINT = '**extended_assert.js--do not print this error message**';
- var builtin_doassert = doassert;
- var muteable_doassert = function(msg, obj) {
- if (msg.indexOf(EX_ASSERT_DONT_PRINT) !== -1) {
- throw Error(msg);
- }
- builtin_doassert(msg, obj);
- };
-
- ['eq', 'neq', 'contains', 'gt', 'lt', 'gte', 'lte', 'strContains']
- .forEach(function (name) {
- var assertFunc = extendedAssert[name];
- var newAssertFunc = assertFunc.bind(this);
- newAssertFunc.soon = function(expected, actualFunc, msg, timeout, interval) {
- try {
- doassert = muteable_doassert;
- extendedAssert.soon(function() {
- try {
- assertFunc(expected, actualFunc(), EX_ASSERT_DONT_PRINT);
- return true;
- } catch (e) {
- return false;
- }
- }, EX_ASSERT_DONT_PRINT, timeout, interval);
- doassert = builtin_doassert;
- } catch (e) {
- doassert = builtin_doassert;
- // Make it fail
- assertFunc(expected, actualFunc(), msg);
- }
- };
- extendedAssert[name] = newAssertFunc;
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/fts.js b/src/mongo/gotools/test/qa-tests/jstests/libs/fts.js
deleted file mode 100644
index 0da80d5d3ae..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/fts.js
+++ /dev/null
@@ -1,22 +0,0 @@
-function getIDS(commandResult) {
- if (!(commandResult && commandResult.results)) {
- return [];
- }
-
- return commandResult.results.map(function(z) {
- return z.obj._id;
- });
-}
-
-function queryIDS(coll, search, filter, extra) {
- var cmd = {search: search};
- if (filter) {
- cmd.filter = filter;
- }
- if (extra) {
- Object.extend(cmd, extra);
- }
- lastCommadResult = coll.runCommand("text", cmd);
-
- return getIDS(lastCommadResult);
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/geo_near_random.js b/src/mongo/gotools/test/qa-tests/jstests/libs/geo_near_random.js
deleted file mode 100644
index 7809aa77adc..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/geo_near_random.js
+++ /dev/null
@@ -1,100 +0,0 @@
-GeoNearRandomTest = function(name) {
- this.name = name;
- this.t = db[name];
- this.nPts = 0;
-
- // reset state
- this.t.drop();
- Random.srand(1234);
-
- print("starting test: " + name);
-};
-
-
-GeoNearRandomTest.prototype.mkPt = function mkPt(scale, indexBounds) {
- if (!indexBounds) {
- scale = scale || 1; // scale is good for staying away from edges
- return [((Random.rand() * 359.8) - 179.9) * scale, ((Random.rand() * 180) - 90) * scale];
- }
- var range = indexBounds.max - indexBounds.min;
- var eps = Math.pow(2, -40);
- // Go very close to the borders but not quite there.
- return [(Random.rand() * (range - eps) + eps) + indexBounds.min, (Random.rand() * (range - eps) + eps) + indexBounds.min];
-
-};
-
-GeoNearRandomTest.prototype.insertPts = function(nPts, indexBounds, scale) {
- assert.eq(this.nPts, 0, "insertPoints already called");
- this.nPts = nPts;
-
- var bulk = this.t.initializeUnorderedBulkOp();
- for (var i=0; i<nPts; i++) {
- bulk.insert({_id: i, loc: this.mkPt(scale, indexBounds)});
- }
- assert.writeOK(bulk.execute());
-
- if (!indexBounds) {
- this.t.ensureIndex({loc: '2d'});
- } else {
- this.t.ensureIndex({loc: '2d'}, indexBounds);
- }
-};
-
-GeoNearRandomTest.prototype.assertIsPrefix = function(short, long) {
- for (var i=0; i < short.length; i++) {
-
- var xS = short[i].obj ? short[i].obj.loc[0] : short[i].loc[0];
- var yS = short[i].obj ? short[i].obj.loc[1] : short[i].loc[1];
- var dS = short[i].obj ? short[i].dis : 1;
-
- var xL = long[i].obj ? long[i].obj.loc[0] : long[i].loc[0];
- var yL = long[i].obj ? long[i].obj.loc[1] : long[i].loc[1];
- var dL = long[i].obj ? long[i].dis : 1;
-
- assert.eq([xS, yS, dS], [xL, yL, dL]);
- }
-};
-
-GeoNearRandomTest.prototype.testPt = function(pt, opts) {
- assert.neq(this.nPts, 0, "insertPoints not yet called");
-
- opts = opts || {};
- opts['sphere'] = opts['sphere'] || 0;
- opts['nToTest'] = opts['nToTest'] || this.nPts; // be careful, test is O( N^2 )
-
- print("testing point: " + tojson(pt) + " opts: " + tojson(opts));
-
-
- var cmd = {geoNear: this.t.getName(), near: pt, num: 1, spherical: opts.sphere};
-
- var last = db.runCommand(cmd).results;
- for (var i=2; i <= opts.nToTest; i++) {
- // print(i); // uncomment to watch status
- cmd.num = i;
- var ret = db.runCommand(cmd).results;
-
- try {
- this.assertIsPrefix(last, ret);
- } catch (e) {
- print("*** failed while compairing " + (i-1) + " and " + i);
- printjson(cmd);
- throw e; // rethrow
- }
-
- last = ret;
- }
-
-
- if (!opts.sharded) {
- last = last.map(function(x) {
- return x.obj;
- });
-
- var query = {loc: {}};
- query.loc[opts.sphere ? '$nearSphere' : '$near'] = pt;
- var near = this.t.find(query).limit(opts.nToTest).toArray();
-
- this.assertIsPrefix(last, near);
- assert.eq(last, near);
- }
-};
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/host_ipaddr.js b/src/mongo/gotools/test/qa-tests/jstests/libs/host_ipaddr.js
deleted file mode 100644
index f5412da1563..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/host_ipaddr.js
+++ /dev/null
@@ -1,38 +0,0 @@
-// Returns non-localhost ipaddr of host running the mongo shell process
-function get_ipaddr() {
- // set temp path, if it exists
- var path = "";
- try {
- path = TestData.tmpPath;
- if (typeof path === "undefined") {
- path = "";
- } else if (path.slice(-1) !== "/") {
- // Terminate path with / if defined
- path += "/";
- }
- } catch (err) {
- // no testdata
- }
-
- var ipFile = path+"ipaddr.log";
- var windowsCmd = "ipconfig > "+ipFile;
- var unixCmd = "/sbin/ifconfig | grep inet | grep -v '127.0.0.1' > "+ipFile;
- var ipAddr = null;
- var hostType = null;
-
- try {
- hostType = getBuildInfo().sysInfo.split(' ')[0];
-
- // os-specific methods
- if (hostType === "windows") {
- runProgram('cmd.exe', '/c', windowsCmd);
- ipAddr = cat(ipFile).match(/IPv4.*: (.*)/)[1];
- } else {
- runProgram('bash', '-c', unixCmd);
- ipAddr = cat(ipFile).replace(/addr:/g, "").match(/inet (.[^ ]*) /)[1];
- }
- } finally {
- removeFile(ipFile);
- }
- return ipAddr;
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/key1 b/src/mongo/gotools/test/qa-tests/jstests/libs/key1
deleted file mode 100644
index b5c19e4092f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/key1
+++ /dev/null
@@ -1 +0,0 @@
-foop de doop
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/key2 b/src/mongo/gotools/test/qa-tests/jstests/libs/key2
deleted file mode 100644
index cbde8212841..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/key2
+++ /dev/null
@@ -1 +0,0 @@
-other key
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/localhostnameCN.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/localhostnameCN.pem
deleted file mode 100644
index e6aca6a217d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/localhostnameCN.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDgTCCAmmgAwIBAgIBBTANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjBvMRIwEAYDVQQD
-EwkxMjcuMC4wLjExDzANBgNVBAsTBktlcm5lbDEQMA4GA1UEChMHTW9uZ29EQjEW
-MBQGA1UEBxMNTmV3IFlvcmsgQ2l0eTERMA8GA1UECBMITmV3IFlvcmsxCzAJBgNV
-BAYTAlVTMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAiqQNGgQggL8S
-LlxRgpM3qoktt3J9Pd3WXkknz7OjQr4dyj9wd0VNuD6+Ycg1mBbopO+M/K/ZWv8c
-7WDMM13DDZtpWjm+Q6uFc+vlI9Q9bLHgsZF9kVGRb2bNAfV2bC5/UnO1ulQdHoIB
-p3e/Jtko2WmruzVQFGVMBGCY7RlMRyxe3R8rwNNFvooMRg8yq8tq0qBkVhh85kct
-HHPggXD4/uM2Yc/Q94t5MhDFabewdzsFOLqvI/Y5eIeZLhdIzAv37kolr8AuyqIR
-qcJKztXIrFbLgEZBjoCNkOYZOQE+l8iwwiZxoZN9Jv7x5LyiA+ijtQ+5aI/kMPqG
-nox+/bNFCQIDAQABoyMwITAfBgNVHREEGDAWgglsb2NhbGhvc3SCCTEyNy4wLjAu
-MTANBgkqhkiG9w0BAQUFAAOCAQEAVJJNuUVzMRaft17NH6AzMSTiJxMFWoafmYgx
-jZnzA42XDPoPROuN7Bst6WVYDNpPb1AhPDco9qDylSZl0d341nHAuZNc84fD0omN
-Mbqieu8WseRQ300cbnS8p11c9aYpO/fNQ5iaYhGsRT7pnLs9MIgR468KVjY2xt49
-V0rshG6RxZj83KKuJd0T4X+5UeYz4B677y+SR0aoK2I2Sh+cffrMX2LotHc2I+JI
-Y9SDLvQT7chD9GzaWz634kmy3EEY0LreMm6AxhMOsr0lbZx5O8wLTScSjKARJ6OH
-nPxM1gYT07mkNmfyEnl1ChAN0MPgcLHQqEfe7x7ZQSbAv2gWfA==
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpQIBAAKCAQEAiqQNGgQggL8SLlxRgpM3qoktt3J9Pd3WXkknz7OjQr4dyj9w
-d0VNuD6+Ycg1mBbopO+M/K/ZWv8c7WDMM13DDZtpWjm+Q6uFc+vlI9Q9bLHgsZF9
-kVGRb2bNAfV2bC5/UnO1ulQdHoIBp3e/Jtko2WmruzVQFGVMBGCY7RlMRyxe3R8r
-wNNFvooMRg8yq8tq0qBkVhh85kctHHPggXD4/uM2Yc/Q94t5MhDFabewdzsFOLqv
-I/Y5eIeZLhdIzAv37kolr8AuyqIRqcJKztXIrFbLgEZBjoCNkOYZOQE+l8iwwiZx
-oZN9Jv7x5LyiA+ijtQ+5aI/kMPqGnox+/bNFCQIDAQABAoIBAQAMiUT+Az2FJsHY
-G1Trf7Ba5UiS+/FDNNn7cJX++/lZQaOj9BSRVFzwuguw/8+Izxl+QIL5HlWDGupc
-tJICWwoWIuVl2S7RI6NPlhcEJF7hgzwUElnOWBfUgPEsqitpINM2e2wFSzHO3maT
-5AoO0zgUYK+8n9d74KT9CFcLqWvyS3iksK/FXfCZt0T1EoJ4LsDjeCTfVKqrku2U
-+fCnZZYNkrgUI7Hku94EJfOh462V4KQAUGsvllwb1lfmR5NR86G6VX6oyMGctL5e
-1M6XQv+JQGEmAe6uULtCUGh32fzwJ9Un3j2GXOHT0LWrVc5iLuXwwzQvCGaMYtKm
-FAIDpPxhAoGBAMtwzpRyhf2op/REzZn+0aV5FWKjeq69Yxd62RaOf2EetcPwvUOs
-yQXcP0KZv15VWU/XhZUmTkPf52f0YHV/b1Sm6wUOiMNQ4XpnRj2THf0N7RS4idMm
-VwtMf1pxqttxQVKPpOvPEiTyIh2Nx/juyfD4CWkOVNTvOCd1w+av6ukNAoGBAK51
-gIXDuwJ2e5h3IJyewN/HOZqlgPKyMjnACaeXQ5wPJSrz4+UkJkuXT2dYKhv6u7K/
-GtucTdvBIJeq61+LjjkYk7OVDzoqP/uWU7p1y7gU9LZq+7tgq7r8cgeaC3IBQe7X
-jdFPEy1+zAEBh6MfFjnLZ2Kop9qbH3cNih/g9pTtAoGBAJ8dmdUtRXNByCsa7Rv2
-243qiDlf14J4CdrBcK1dwm75j/yye7VEnO2Cd8/lZHGpm3MBBC/FiA06QElkL1V2
-2GKDMun/liP9TH1p7NwYBqp3i+ha9SE6qXXi3PCmWpXLnOWwB7OPf4d6AgjPbYpb
-aYKY3PNYDC2G9IqYZyI0kSy5AoGBAJ5Fe5PfPom9c+OeL7fnTpO16kyiWZnUkDxU
-PG4OjQfHtbCCEv6PDS8G1sKq+Yjor+A5/+O8qeX0D92I8oB720txQI5rbKUYL3PP
-raY7t9YJLPlRlY8o5KN+4vSCjF+hRG+qnr6FPqDHp8xB1wvl6AQGxIR8/csVcDZR
-0j2ZmhsBAoGAO1Cpk/hWXOLAhSj8P8Q/+3439HEctTZheVBd8q/TtdwXocaZMLi8
-MXURuVTw0GtS9TmdqOFXzloFeaMhJx6TQzZ2aPcxu95b7RjEDtVHus3ed2cSJ2El
-AuRvFT2RCVvTu1mM0Ti7id+d8QBcpbIpPjNjK2Wxir/19gtEawlqlkA=
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/localhostnameSAN.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/localhostnameSAN.pem
deleted file mode 100644
index 480300f29e1..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/localhostnameSAN.pem
+++ /dev/null
@@ -1,49 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDpDCCAoygAwIBAgIBBjANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjB5MRwwGgYDVQQD
-ExNzYW50ZXN0aG9zdG5hbWUuY29tMQ8wDQYDVQQLEwZLZXJuZWwxEDAOBgNVBAoT
-B01vbmdvREIxFjAUBgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5ldyBZ
-b3JrMQswCQYDVQQGEwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
-AJKOLTNEPv08IVmhfkv6Xq1dT6pki76ggpJ7UpwdUSsTsWDKO2o1c7wnzEjfhYQ+
-CtlEvbYyL3O7f8AaO15WJdi53SMuWS+QfCKs6b0symYbinSXlZGb4oZYFSrodSxH
-+G8u+TUxyeaXgTHowMWArmTRi2LgtIwXwwHJawfhFDxji3cSmLAr5YQMAaXUynq3
-g0DEAGMaeOlyn1PkJ2ZfJsX2di+sceKb+KK1xT+2vUSsvnIumBCYqMhU6y3WjBWK
-6WrmOcsldWo4IcgyzwVRlZiuuYoe6ZsxZ4nMyTdYebALPqgkt8QVXqkgcjWK8F18
-nuqWIAn1ISTjj73H4cnzYv0CAwEAAaM8MDowOAYDVR0RBDEwL4INKi5leGFtcGxl
-LmNvbYIJMTI3LjAuMC4xgghtb3JlZnVuIYIJbG9jYWxob3N0MA0GCSqGSIb3DQEB
-BQUAA4IBAQA5M3U4wvQYI3jz/+Eh4POrJAs9eSRGkUhz1lP7D6Fcyp+BbbXB1fa9
-5qpD4bp1ZoDP2R2zca2uwwfd3DTWPbmwFMNqs2D7d0hgX71Vg9DCAwExFjoeRo44
-cCE9kakZtE3kT/tiH6SpYpnBa3dizxTmiY48z212Pw813SSXSPMN1myx5sMJof5I
-whJNQhSQOw6WHw5swZJZT4FkzxjQMrTWdF6r0d5EU9K2WWk5DTwq4QaysplB5l0H
-8qm+fnC6xI+2qgqMO9xqc6qMtHHICXtdUOup6wj/bdeo7bAQdVDyKlFKiYivDXvO
-RJNp2cwsBgxU+qdrtOLp7/j/0R3tUqWb
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEAko4tM0Q+/TwhWaF+S/perV1PqmSLvqCCkntSnB1RKxOxYMo7
-ajVzvCfMSN+FhD4K2US9tjIvc7t/wBo7XlYl2LndIy5ZL5B8IqzpvSzKZhuKdJeV
-kZvihlgVKuh1LEf4by75NTHJ5peBMejAxYCuZNGLYuC0jBfDAclrB+EUPGOLdxKY
-sCvlhAwBpdTKereDQMQAYxp46XKfU+QnZl8mxfZ2L6xx4pv4orXFP7a9RKy+ci6Y
-EJioyFTrLdaMFYrpauY5yyV1ajghyDLPBVGVmK65ih7pmzFniczJN1h5sAs+qCS3
-xBVeqSByNYrwXXye6pYgCfUhJOOPvcfhyfNi/QIDAQABAoIBADqGMkClwS2pJHxB
-hEjc+4/pklWt/ywpttq+CpgzEOXN4GiRebaJD+WUUvzK3euYRwbKb6PhWJezyWky
-UID0j/qDBm71JEJdRWUnfdPAnja2Ss0Sd3UFNimF5TYUTC5ZszjbHkOC1WiTGdGP
-a+Oy5nF2SF4883x6RLJi963W0Rjn3jIW9LoLeTgm9bjWXg3iqonCo3AjREdkR/SG
-BZaCvulGEWl/A3a7NmW5EGGNUMvzZOxrqQz4EX+VnYdb7SPrH3pmQJyJpAqUlvD5
-y7pO01fI0wg9kOWiIR0vd3Gbm9NaFmlH9Gr2oyan3CWt1h1gPzkH/V17rZzVYb5L
-RnjLdyECgYEA6X16A5Gpb5rOVR/SK/JZGd+3z52+hRR8je4WhXkZqRZmbn2deKha
-LKZi1eVl11t8zitLg/OSN1uZ/873iESKtp/R6vcGcriUCd87cDh7KTyW/7ZW5jdj
-o6Y3Liai3Xrf6dL+V2xYw964Map9oK9qatYw/L+Ke6b9wbGi+hduf1kCgYEAoK8n
-pzctajS3Ntmk147n4ZVtcv78nWItBNH2B8UaofdkBlSRyUURsEY9nA34zLNWI0f3
-k59+cR13iofkQ0rKqJw1HbTTncrSsFqptyEDt23iWSmmaU3/9Us8lcNGqRm7a35V
-Km0XBFLnE0mGFGFoTpNt8oiR4WGASJPi482xkEUCgYEAwPmQn2SDCheDEr2zAdlR
-pN3O2EwCi5DMBK3TdUsKV0KJNCajwHY72Q1HQItQ6XXWp7sGta7YmOIfXFodIUWs
-85URdMXnUWeWCrayNGSp/gHytrNoDOuYcUfN8VnDX5PPfjyBM5X7ox7vUzUakXSJ
-WnVelXZlKR9yOOTs0xAMpjkCgYAbF61N6mXD5IOHwgajObsrM/CyVP/u4WDJ0UT0
-Zm1pJbc9wgCauQSUfiNhLpHmoc5CQJ4jy96b3+YJ+4OnPPMSntPt4FFV557CkWbQ
-M8bWpLZnZjhixP4FM9xRPA2r8WTCaRifAKnC1t+TRvBOe2YE6aK+I/zEzZW9pwG4
-ezQXKQKBgQAIBSJLa6xWbfbzqyPsvmRNgiEjIamF7wcb1sRjgqWM6sCzYwYv8f5v
-9C4YhNXEn+c5V2KevgYeg6iPSQuzEAfJx64QV7JD8kEBf5GNETnuW45Yg7KwKPD6
-ZCealfpy/o9iiNqbWqDNND91pj2/g5oZnac3misJg5tGCJbJsBFXag==
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/mockkrb5.conf b/src/mongo/gotools/test/qa-tests/jstests/libs/mockkrb5.conf
deleted file mode 100644
index 0f004f2de8a..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/mockkrb5.conf
+++ /dev/null
@@ -1,13 +0,0 @@
-[libdefaults]
- default_realm = 10GEN.ME
-
-[realms]
- 10GEN.ME = {
- kdc = kdc.10gen.me
- admin_server = kdc.10gen.me
- default_domain = 10gen.me
- }
-
-[domain_realm]
- .10gen.me = 10GEN.ME
- 10gen.me = 10GEN.ME
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/mockservice.keytab b/src/mongo/gotools/test/qa-tests/jstests/libs/mockservice.keytab
deleted file mode 100644
index 3529d5fcbc6..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/mockservice.keytab
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/mockuser.keytab b/src/mongo/gotools/test/qa-tests/jstests/libs/mockuser.keytab
deleted file mode 100644
index 35fd2ff06e7..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/mockuser.keytab
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/mongostat.js b/src/mongo/gotools/test/qa-tests/jstests/libs/mongostat.js
deleted file mode 100644
index d93ff4b8bee..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/mongostat.js
+++ /dev/null
@@ -1,114 +0,0 @@
-var exitCodeSuccess = 0;
-var exitCodeErr = 1;
-// Go reserves exit code 2 for its own use.
-var exitCodeBadOptions = 3;
-var exitCodeStopped = 4;
-
-// NOTE: On Windows, stopMongoProgramByPid doesn't terminiate a process in a
-// way that it can control its exit code.
-if (_isWindows()) {
- exitCodeStopped = exitCodeErr;
-}
-
-var rowRegex = /^sh\d+\|\s/;
-// portRegex finds the port on a line which has enough whitespace-delimited
-// values to be considered a stat line and not an error message
-var portRegex = /^sh\d+\|\s+\S+:(\d+)(\s+\S+){16}/;
-
-function statRows() {
- return rawMongoProgramOutput()
- .split("\n")
- .filter(function(r) {
- return r.match(rowRegex);
- })
- .map(function(r) {
- return r.replace(/^sh\d+\| /, "");
- });
-}
-
-function statFields(row) {
- return row.split(/\s/).filter(function(s) {
- return s !== "";
- });
-}
-
-function getLatestChunk() {
- var output = rawMongoProgramOutput();
- // mongostat outputs a blank line between each set of stats when there are
- // multiple hosts; we want just one chunk of stat lines
- var lineChunks = output.split("| \n");
- if (lineChunks.length === 1) {
- return lineChunks[0];
- }
- return lineChunks[lineChunks.length - 2];
-}
-
-function latestPortCounts() {
- var portCounts = {};
- getLatestChunk().split("\n").forEach(function(r) {
- var matches = r.match(portRegex);
- if (matches === null) {
- return;
- }
- var port = matches[1];
- if (!portCounts[port]) {
- portCounts[port] = 0;
- }
- portCounts[port]++;
- });
- return portCounts;
-}
-
-function hasPort(port) {
- port = String(port);
- return function() {
- return latestPortCounts()[port] >= 1;
- };
-}
-
-function lacksPort(port) {
- port = String(port);
- return function() {
- return latestPortCounts()[port] === undefined;
- };
-}
-
-function hasOnlyPorts(expectedPorts) {
- expectedPorts = expectedPorts.map(String);
- return function() {
- var portCounts = latestPortCounts();
- for (var port in portCounts) {
- if (expectedPorts.indexOf(port) === -1) {
- return false;
- }
- }
- for (var i in expectedPorts) {
- if (portCounts[expectedPorts[i]] !== 1) {
- return false;
- }
- }
- return true;
- };
-}
-
-function statCheck(args, checker) {
- clearRawMongoProgramOutput();
- pid = startMongoProgramNoConnect.apply(null, args);
- try {
- assert.soon(checker, "discoverTest wait timed out");
- return true;
- } catch (e) {
- return false;
- } finally {
- stopMongoProgramByPid(pid);
- }
-}
-
-function discoverTest(ports, connectHost) {
- return statCheck(["mongostat",
- "--host", connectHost,
- "--noheaders",
- "--discover"],
- hasOnlyPorts(ports));
-}
-
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/not_yet_valid.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/not_yet_valid.pem
deleted file mode 100644
index 7c021c0becd..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/not_yet_valid.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDhTCCAm2gAwIBAgIBETANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMjAwNzE3MTYwMDAwWhcNMjUwNzE3MTYwMDAwWjBzMRYwFAYDVQQD
-Ew1ub3RfeWV0X3ZhbGlkMQ8wDQYDVQQLEwZLZXJuZWwxEDAOBgNVBAoTB01vbmdv
-REIxFjAUBgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5ldyBZb3JrMQsw
-CQYDVQQGEwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM2gF+Fo
-CeBKVlPyDAaEA7cjK75CxnzQy+oqw1j/vcfe/CfKL9MvDDXauR/9v1RRlww5zlxQ
-XJJtcMJtxN1EpP21cHrHCpJ/fRsCdMfJdD9MO6gcnclEI0Odwy5YI/57rAgxEuDC
-7z4d+M6z7PLq8DIwvRuhAZVTszeyTsCCkwfTJ/pisD2Ace75pS37t/ttQp+kQ+Vl
-QrfccHYxrScQ9i0JqBfrTULDl6ST76aINOaFKWqrLLkRUvE6pEkL/iP6xXUSKOsm
-uyc0yb0PK5Y/IVdrzwWUkabWEM27RAMH+CAx2iobk6REj0fsGySBzT2CaETZPjck
-vn/LYKqr+CvYjc8CAwEAAaMjMCEwHwYDVR0RBBgwFoIJbG9jYWxob3N0ggkxMjcu
-MC4wLjEwDQYJKoZIhvcNAQEFBQADggEBADw37jpmhj/fgCZdF1NrDKLmWxb4hovQ
-Y9PRe6GsBOc1wH8Gbe4UkYAE41WUuT3xW9YpfCHLXxC7da6dhaBISWryX7n72abM
-xbfAghV3je5JAmC0E/OzQz8tTgENxJN/c4oqCQ9nVOOLjwWiim5kF0/NY8HCc/Sg
-OG9IdseRX72CavDaPxcqR9/5KKY/pxARMeyy3/D0FIB1Fwu5h9vjHEi5fGOqcizf
-S1KHfzAmTxVtjw6HWRGKmkPX0W0/lURWVkKRxvC8KkJIeKx3fl9U1PqCw0AVi5d/
-whYn4qHNFFp4OiVzXq3b5YoBy0dlHUePCIPT2GkGlV4NQKosZMJUkKo=
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEowIBAAKCAQEAzaAX4WgJ4EpWU/IMBoQDtyMrvkLGfNDL6irDWP+9x978J8ov
-0y8MNdq5H/2/VFGXDDnOXFBckm1wwm3E3USk/bVwescKkn99GwJ0x8l0P0w7qByd
-yUQjQ53DLlgj/nusCDES4MLvPh34zrPs8urwMjC9G6EBlVOzN7JOwIKTB9Mn+mKw
-PYBx7vmlLfu3+21Cn6RD5WVCt9xwdjGtJxD2LQmoF+tNQsOXpJPvpog05oUpaqss
-uRFS8TqkSQv+I/rFdRIo6ya7JzTJvQ8rlj8hV2vPBZSRptYQzbtEAwf4IDHaKhuT
-pESPR+wbJIHNPYJoRNk+NyS+f8tgqqv4K9iNzwIDAQABAoIBAFWTmjyyOuIArhrz
-snOHv7AZUBw32DmcADGtqG1Cyi4DrHe22t6ORwumwsMArP8fkbiB2lNrEovSRkp0
-uqjH5867E1vVuJ2tt1hlVkrLmbi6Nl3JwxU/aVm7r7566kgAGmGyYsPt/PmiKamF
-Ekkq49pPlHSKNol6My0r5UCTVzO6uwW7dAa4GOQRI7bM7PVlxRVVeNzPH3yOsTzk
-smrkRgf8HbjtY7m/EHG281gu14ZQRCqzLshO2BtWbkx9dMXnNU5dRRaZ8Pe8XN0Z
-umsStcX6So6VFAqlwknZTi1/sqyIuQLfE+S9DocVQkvKFUgKpFddK8Nmqc8xPCKt
-UwR9hEECgYEA9kZ5KmUbzxQrF8Kn9G18AbZ/Cf6rE9fhs/J8OGcuuJ9QTjPO7pxV
-T7lGrIOX3dVu3+iHrYXZUZv+UTOePWx+ghqJ8ML7RdVsxAWMqh+1J0eBJKIdc9mt
-0hGkLEyyBbAlfNmvw8JugTUeZH2gA+VK9HoMTAjD+LvH164rrktauKECgYEA1b6z
-lZypAbAqnuCndcetcgatdd/bYNH5WWTgdZHqInt3k94EsUEHFNMQUbO+FNkOJ4qJ
-Jp7xrqkOUX+MPrzV5XYVapamlht9gvUtyxGq7DYndlq4mIsN5kReH++lqONBnWoG
-ZlbxvadkvPo+bK003hsl+E4F8X7xUssGGLvygG8CgYEAm/yLJkUgVgsqOER86R6n
-mtYipQv/A/SK6tU9xOPl/d46mS3LderjRjnN/9rhyAo1zfCUb14GBeDONlSBd9pO
-Ts3MbQiy6sqBt67kJ6UpspVhwPhFu2k25YVy/PQfFec591hSMaXnJEOm2nOPdKg4
-z5y2STqMFfGqZHvXAvCLp8ECgYA8oVGTmNKf9fbBBny5/iAG/jnp+8vg1O7kGqdI
-8lD14wvyV8IA/a8iixRP+Kpsg31uXe+1ktR/dNjo6UNA8JPD+RDuITmzzqx1n1KU
-DbjsNBhRjD5cluUkcjQ43uOg2oXcPxz9nqAH6hm7OUjHzwH2FsFYg9lPvXB6ybg6
-/+Uz5QKBgBxvTtLsZ3Cvvb3qezn4DdpLjlsrT6HWaTGqwEx8NYVBTFX/lT8P04tv
-NqFuQsDJ4gw0AZF7HqF49qdpnHEJ8tdHgBc/xDLFUMuKjON4IZtr0/j407K6V530
-m4q3ziHOu/lORDcZTz/YUjEzT8r7Qiv7QusWncvIWEiLSCC2dvvb
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/parallelTester.js b/src/mongo/gotools/test/qa-tests/jstests/libs/parallelTester.js
deleted file mode 100644
index a384d4eb59c..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/parallelTester.js
+++ /dev/null
@@ -1,268 +0,0 @@
-/**
- * The ParallelTester class is used to test more than one test concurrently
- */
-
-
-if (typeof _threadInject !== "undefined") {
- // print( "fork() available!" );
-
- Thread = function() {
- this.init.apply(this, arguments);
- };
- _threadInject(Thread.prototype);
-
- ScopedThread = function() {
- this.init.apply(this, arguments);
- };
- ScopedThread.prototype = new Thread(function() {});
- _scopedThreadInject(ScopedThread.prototype);
-
- fork = function() {
- var t = new Thread(function() {});
- Thread.apply(t, arguments);
- return t;
- };
-
- // Helper class to generate a list of events which may be executed by a ParallelTester
- EventGenerator = function(me, collectionName, mean, host) {
- this.mean = mean;
- if (host === undefined) {
- host = db.getMongo().host;
- }
- this.events = [me, collectionName, host];
- };
-
- EventGenerator.prototype._add = function(action) {
- this.events.push([Random.genExp(this.mean), action]);
- };
-
- EventGenerator.prototype.addInsert = function(obj) {
- this._add("t.insert( " + tojson(obj) + " )");
- };
-
- EventGenerator.prototype.addRemove = function(obj) {
- this._add("t.remove( " + tojson(obj) + " )");
- };
-
- EventGenerator.prototype.addUpdate = function(objOld, objNew) {
- this._add("t.update( " + tojson(objOld) + ", " + tojson(objNew) + " )");
- };
-
- EventGenerator.prototype.addCheckCount = function(count, query, shouldPrint, checkQuery) {
- query = query || {};
- shouldPrint = shouldPrint || false;
- checkQuery = checkQuery || false;
- var action = "assert.eq( " + count + ", t.count( " + tojson(query) + " ) );";
- if (checkQuery) {
- action += " assert.eq( " + count + ", t.find( " + tojson(query) + " ).toArray().length );";
- }
- if (shouldPrint) {
- action += " print( me + ' ' + " + count + " );";
- }
- this._add(action);
- };
-
- EventGenerator.prototype.getEvents = function() {
- return this.events;
- };
-
- EventGenerator.dispatch = function() {
- var args = argumentsToArray(arguments);
- var me = args.shift();
- var collectionName = args.shift();
- var host = args.shift();
- var m = new Mongo(host);
- var t = m.getDB("test")[collectionName];
- args.forEach(function(v) {
- sleep(v[0]);
- eval(v[1]); // eslint-disable-line no-eval
- });
- };
-
- // Helper class for running tests in parallel. It assembles a set of tests
- // and then calls assert.parallelests to run them.
- ParallelTester = function() {
- assert.neq(db.getMongo().writeMode(), "legacy", "wrong shell write mode");
- this.params = [];
- };
-
- ParallelTester.prototype.add = function(fun, args) {
- args = args || [];
- args.unshift(fun);
- this.params.push(args);
- };
-
- ParallelTester.prototype.run = function(msg, newScopes) {
- newScopes = newScopes || false;
- assert.parallelTests(this.params, msg, newScopes);
- };
-
- // creates lists of tests from jstests dir in a format suitable for use by
- // ParallelTester.fileTester. The lists will be in random order.
- // n: number of lists to split these tests into
- ParallelTester.createJstestsLists = function(n) {
- var params = [];
- for (var i = 0; i < n; ++i) {
- params.push([]);
- }
-
- var makeKeys = function(a) {
- var ret = {};
- a.forEach(function(v) {
- ret[v] = 1;
- });
- return ret;
- };
-
- // some tests can't run in parallel with most others
- var skipTests = makeKeys([
- "dbadmin.js",
- "repair.js",
- "cursor8.js",
- "recstore.js",
- "extent.js",
- "indexb.js",
-
- // tests turn on profiling
- "profile1.js",
- "profile3.js",
- "profile4.js",
- "profile5.js",
-
- "mr_drop.js",
- "mr3.js",
- "indexh.js",
- "apitest_db.js",
- "evalb.js",
- "evald.js",
- "evalf.js",
- "killop.js",
- "run_program1.js",
- "notablescan.js",
- "drop2.js",
- "dropdb_race.js",
- "fsync2.js", // May be placed in serialTestsArr once SERVER-4243 is fixed.
- "bench_test1.js",
- "padding.js",
- "queryoptimizera.js",
- "loglong.js", // log might overflow before
- // this has a chance to see the message
- "connections_opened.js", // counts connections, globally
- "opcounters_write_cmd.js",
- "currentop.js", // SERVER-8673, plus rwlock yielding issues
- "set_param1.js", // changes global state
- "geo_update_btree2.js", // SERVER-11132 test disables table scans
- "update_setOnInsert.js", // SERVER-9982
- ]);
-
- var parallelFilesDir = "jstests/core";
-
- // some tests can't be run in parallel with each other
- var serialTestsArr = [
- parallelFilesDir + "/fsync.js",
- parallelFilesDir + "/auth1.js",
-
- // These tests expect the profiler to be on or off at specific points
- // during the test run.
- parallelFilesDir + "/cursor6.js",
- parallelFilesDir + "/profile2.js",
- parallelFilesDir + "/updatee.js"
- ];
- var serialTests = makeKeys(serialTestsArr);
-
- // prefix the first thread with the serialTests
- // (which we will exclude from the rest of the threads below)
- params[0] = serialTestsArr;
- var files = listFiles(parallelFilesDir);
- files = Array.shuffle(files);
-
- i = 0;
- files.forEach(function(x) {
- if ((/[\/\\]_/.test(x.name)) ||
- (!/\.js$/.test(x.name)) ||
- (x.name.match(parallelFilesDir + "/(.*\\.js)")[1] in skipTests) ||
- (x.name in serialTests)) {
- print(" >>>>>>>>>>>>>>> skipping " + x.name);
- return;
- }
- // add the test to run in one of the threads.
- params[i % n].push(x.name);
- ++i;
- });
-
- // randomize ordering of the serialTests
- params[0] = Array.shuffle(params[0]);
-
- params.forEach(function(param) {
- param.unshift(i);
- });
-
- return params;
- };
-
- // runs a set of test files
- // first argument is an identifier for this tester, remaining arguments are file names
- ParallelTester.fileTester = function() {
- var args = argumentsToArray(arguments);
- var suite = args.shift();
- args.forEach(function(x) {
- print(" S" + suite + " Test : " + x + " ...");
- var time = Date.timeFunc(function() {
- load(x);
- }, 1);
- print(" S" + suite + " Test : " + x + " " + time + "ms");
- });
- };
-
- // params: array of arrays, each element of which consists of a function followed
- // by zero or more arguments to that function. Each function and its arguments will
- // be called in a separate thread.
- // msg: failure message
- // newScopes: if true, each thread starts in a fresh scope
- assert.parallelTests = function(params, msg, newScopes) {
- newScopes = newScopes || false;
- var wrapper = function(fun, argv) {
- // TODO: this doesn't need to use eval
- eval( // eslint-disable-line no-eval
- "var z = function() {" +
- "var __parallelTests__fun = " + fun.toString() + ";" +
- "var __parallelTests__argv = " + tojson(argv) + ";" +
- "var __parallelTests__passed = false;" +
- "try {" +
- "__parallelTests__fun.apply( 0, __parallelTests__argv );" +
- "__parallelTests__passed = true;" +
- "} catch ( e ) {" +
- "print('');" +
- "print( '********** Parallel Test FAILED: ' + tojson(e) );" +
- "print('');" +
- "}" +
- "return __parallelTests__passed;" +
- "}"
- );
- return z;
- };
- var runners = [];
- params.forEach(function(param) {
- var test = param.shift();
- var t;
- if (newScopes) {
- t = new ScopedThread(wrapper(test, param));
- } else {
- t = new Thread(wrapper(test, param));
- }
- runners.push(t);
- });
-
- runners.forEach(function(x) {
- x.start();
- });
- var nFailed = 0;
- // v8 doesn't like it if we exit before all threads are joined (SERVER-529)
- runners.forEach(function(x) {
- if (!x.returnData()) {
- ++nFailed;
- }
- });
- assert.eq(0, nFailed, msg);
- };
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/password_protected.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/password_protected.pem
deleted file mode 100644
index 25e47bc2402..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/password_protected.pem
+++ /dev/null
@@ -1,51 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDezCCAmOgAwIBAgIBCTANBgkqhkiG9w0BAQUFADB4MRswGQYDVQQDExJwYXNz
-d29yZF9wcm90ZWN0ZWQxDzANBgNVBAsTBktlcm5lbDEQMA4GA1UEChMHTW9uZ29E
-QjEWMBQGA1UEBxMNTmV3IFlvcmsgQ2l0eTERMA8GA1UECBMITmV3IFlvcmsxCzAJ
-BgNVBAYTAlVTMB4XDTE0MDcxNzE2MDAwMFoXDTIwMDcxNzE2MDAwMFoweDEbMBkG
-A1UEAxMScGFzc3dvcmRfcHJvdGVjdGVkMQ8wDQYDVQQLEwZLZXJuZWwxEDAOBgNV
-BAoTB01vbmdvREIxFjAUBgNVBAcTDU5ldyBZb3JrIENpdHkxETAPBgNVBAgTCE5l
-dyBZb3JrMQswCQYDVQQGEwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBALT4r3Hcou2auIOHeihBSjk4bKQTVqI6r/stnkul359SRfKuzVA9gMQaRRDi
-MJoxczHJzS2FX+wElzBt2EUhfu3qpUJ4gJw7H4WjLx+mNnj/+6b4HUO4eRzH5hTE
-A+qgDH40qYjFDEjiARvybWo3IlDLeI/uFwlyUj5PZBUBc1LBBzNtCBfJ2MmHLhIx
-jzTFhkJZll673LL6BPHtJclXCazqKUZDLqObW4Ei6X4hdBOdC8v8Q6GMgC4BxLe0
-wsOpKYYeM3il4BtfiqDQB5ZPG0lgo1Y7OOyFHFXBA7oNkK8lykhdyH4iLt5L9mWo
-VKyZ79VqSODFuCqWo8n8kUTgA/0CAwEAAaMQMA4wDAYDVR0TBAUwAwEB/zANBgkq
-hkiG9w0BAQUFAAOCAQEAntxk8a0HcuPG8Fdjckp6WL+HKQQnUKdvSk06rPF0SHpN
-Ma4eZcaumROdtAYnPKvtbpq3DRCQlj59dlWPksEcYiXqf56TgcyAz5K5g5z9RbFi
-ArvAXJNRcDz1080NWGBUTPYyiKWR3PhtlYhJZ4r7fQIWLv4mifXHViw2roXXhsAY
-ubk9HOtrqE7x6NJXgR24aybxqI6TfAKfM+LJNtMwMFrPC+GHnhqMOs/jHJS38NIB
-TrKA63TdpYUroVu23/tGLQaJz352qgF4Di91RkUfnI528goj57pX78H8KRsSNVvs
-KHVNrxtZIez+pxxjBPnyfCH81swkiAPG9fdX+Hcu5A==
------END CERTIFICATE-----
------BEGIN ENCRYPTED PRIVATE KEY-----
-MIIFHzBJBgkqhkiG9w0BBQ0wPDAbBgkqhkiG9w0BBQwwDgQI6y3b7IxcANECAggA
-MB0GCWCGSAFlAwQBAgQQrTslOKC0GZZwq48v7niXYQSCBNBNKsTN7fyw60/EEDH0
-JUgxL83Wfb7pNP97/lV5qiclY1mwcKz44kXQaesFTzhiwzAMOpbI/ijEtsNU25wV
-wtTgjAC3Em/+5/ygrmAu7hgacIRssspovmsgw029E9iOkyBd1VIrDVMi7HLHf0iU
-2Zq18QF20az2pXNMDipmVJkpc9NvjSdqka5+375pJuisspEWCDBd11K10jzCWqB5
-q3Rm1IIeq+mql6KT1rJcUoeE0facDc9GDYBiF/MfIKQ3FrZy/psqheCfL1UDUMyc
-mnm9GJO5+bCuHkg8ni0Zo5XXsf2VEFt0yt6lSucoOP43flucQaHnFKcn+5DHjDXv
-S6Eb5wEG9qWtzwWy/9DfRbkj6FxUgT3SFgizo/uLmdqFCJCnYkHUD1OuYCDmoIXP
-VTinwgK4lO/vrGfoPQrgJmdlnwHRWYjlB8edMCbmItaj2Esh3FBS12y976+UT0Sk
-8n5HsZAEYScDyNArVhrLUZRgF+r+bgZ28TDFO0MISPCAbZjhvq6lygS3dEmdTUW3
-cFDe1deNknWxZcv4UpJW4Nq6ckxwXBfTB1VFzjp7/vXrK/Sd9t8zi6vKTO8OTqc4
-KrlLXBgz0ouP/cxhYDykUrKXE2Eb0TjeAN1txZWo3fIFzXUvDZCphQEZNUqsFUxH
-86V2lwqVzKrFq6UpTgKrfTw/2ePQn9dQgd7iFWDTWjRkbzA5aAgTSVP8xQRoIOeQ
-epXtP9202kEz3h28SZYK7QBOTTX9xNmV/dzDTsi9nXZ6KtsP/aGFE5hh95jvESx/
-wlOBAPW4HR33rSYalvQPE7RjjLZHOKuYIllUBGlTOfgdA+WUXR3KxiLNPdslPBPV
-+O6aDyerhWoQwE7TFwhP/FpxL/46hOu4iq4fgqfjddBTq8z5jG3c3zzogDjoDzBF
-LEQDcbenUCGbEQ7zxXsXtr3QinJ+aAejDO38hp1h9ROb5LF53/9H2j/16nby/jPX
-7kp2weRSKGJ0B6AVuS9pTsQz4+E3icsIgBWSU6qtcUz2GO2QxnFuvT9LEVnyMNN2
-IKMIEKi2FsUMddHGXLULTANlzUMocdHrd5j81eqcFPhMOFOiHpgwiwxqZyBYOLRl
-Fe7x5dLVWoLgjJagZj8uYnJbExDsfFLjEx8p4Z+rejJIC5CqZLbz9sDgCtIL+92k
-+x4mlT1Rfmz9pU+RQqik83nFFRBGWxeW9iWWEgocWtmezvnK6E241v78zkqxNkvF
-JJo7BsBw7DiEHEfLhBZYuqV2q6+kwqgYrzyGIwAJkBGrkYfalVzgR+3/uN04h005
-M3jQRpSkDVGYr3JKEAlh3Sc+JD9VPbu6/RXNwy5mY67UCgWGaFwRqJE3DC9aKfNC
-OET8m8+8oQgFzhw3pNpENsgwR+Sx3K4q0GI3YwxT02pieBFNQaw53O3B3TtoCjkk
-UsuyIWqcLonwo4I3z0kjU3gEFN+0m4E4/A1DNt0J3rsKN+toCk1FqbxQg9xTZzXu
-hYmA3HMMwugzXmCanqBhmMsniPg+dRxCIfiHZhLuEpjKxZWcMWcW4M6l/wbM+LbE
-oDcTuI9ezfPTZ3xA8hNIHBT3MhuI7EJQnvKKvJDJeyX5sAtmSsSFqhEr8QZD8RgV
-5H9eOyUdfcWxLlstcq982V0oGg==
------END ENCRYPTED PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/server.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/server.pem
deleted file mode 100644
index df2b49163d6..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/server.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDfjCCAmagAwIBAgIBBzANBgkqhkiG9w0BAQUFADB0MRcwFQYDVQQDEw5LZXJu
-ZWwgVGVzdCBDQTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYw
-FAYDVQQHEw1OZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UE
-BhMCVVMwHhcNMTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjBsMQ8wDQYDVQQD
-EwZzZXJ2ZXIxDzANBgNVBAsTBktlcm5lbDEQMA4GA1UEChMHTW9uZ29EQjEWMBQG
-A1UEBxMNTmV3IFlvcmsgQ2l0eTERMA8GA1UECBMITmV3IFlvcmsxCzAJBgNVBAYT
-AlVTMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp76KJeDczBqjSPJj
-5f8DHdtrWpQDK9AWNDlslWpi6+pL8hMqwbX0D7hC2r3kAgccMyFoNIudPqIXfXVd
-1LOh6vyY+jveRvqjKW/UZVzZeiL4Gy4bhke6R8JRC3O5aMKIAbaiQUAI1Nd8LxIt
-LGvH+ia/DFza1whgB8ym/uzVQB6igOifJ1qHWJbTtIhDKaW8gvjOhv5R3jzjfLEb
-R9r5Q0ZyE0lrO27kTkqgBnHKPmu54GSzU/r0HM3B+Sc/6UN+xNhNbuR+LZ+EvJHm
-r4de8jhW8wivmjTIvte33jlLibQ5nYIHrlpDLEwlzvDGaIio+OfWcgs2WuPk98MU
-tht0IQIDAQABoyMwITAfBgNVHREEGDAWgglsb2NhbGhvc3SCCTEyNy4wLjAuMTAN
-BgkqhkiG9w0BAQUFAAOCAQEANoYxvVFsIol09BQA0fwryAye/Z4dYItvKhmwB9VS
-t99DsmJcyx0P5meB3Ed8SnwkD0NGCm5TkUY/YLacPP9uJ4SkbPkNZ1fRISyShCCn
-SGgQUJWHbCbcIEj+vssFb91c5RFJbvnenDkQokRvD2VJWspwioeLzuwtARUoMH3Y
-qg0k0Mn7Bx1bW1Y6xQJHeVlnZtzxfeueoFO55ZRkZ0ceAD/q7q1ohTXi0vMydYgu
-1CB6VkDuibGlv56NdjbttPJm2iQoPaez8tZGpBo76N/Z1ydan0ow2pVjDXVOR84Y
-2HSZgbHOGBiycNw2W3vfw7uK0OmiPRTFpJCmewDjYwZ/6w==
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEAp76KJeDczBqjSPJj5f8DHdtrWpQDK9AWNDlslWpi6+pL8hMq
-wbX0D7hC2r3kAgccMyFoNIudPqIXfXVd1LOh6vyY+jveRvqjKW/UZVzZeiL4Gy4b
-hke6R8JRC3O5aMKIAbaiQUAI1Nd8LxItLGvH+ia/DFza1whgB8ym/uzVQB6igOif
-J1qHWJbTtIhDKaW8gvjOhv5R3jzjfLEbR9r5Q0ZyE0lrO27kTkqgBnHKPmu54GSz
-U/r0HM3B+Sc/6UN+xNhNbuR+LZ+EvJHmr4de8jhW8wivmjTIvte33jlLibQ5nYIH
-rlpDLEwlzvDGaIio+OfWcgs2WuPk98MUtht0IQIDAQABAoIBACgi1ilECXCouwMc
-RDzm7Jb7Rk+Q9MVJ79YlG08Q+oRaNjvAzE03PSN5wj1WjDTUALJXPvi7oy82V4qE
-R6Q6Kvbv46aUJpYzKFEk2dw7ACpSLa1LNfjGNtMusnecA/QF/8bxLReRu8s5mBQn
-NDnZvCqllLbfjNlAvsF+/UIn5sqFZpAZPMtPwkTAeh5ge8H9JvrG8y8aXsiFGAhV
-Z7tMZyn8wPCUrRi14NLvVB4hxM66G/tuTp8r9AmeTU+PV+qbCnKXd+v0IS52hvX9
-z75OPfAc66nm4bbPCapb6Yx7WaewPXXU0HDxeaT0BeQ/YfoNa5OT+ZOX1KndSfHa
-VhtmEsECgYEA3m86yYMsNOo+dkhqctNVRw2N+8gTO28GmWxNV9AC+fy1epW9+FNR
-yTQXpBkRrR7qrd5mF7WBc7vAIiSfVs021RMofzn5B1x7jzkH34VZtlviNdE3TZhx
-lPinqo0Yy3UEksgsCBJFIofuCmeTLk4ZtqoiZnXr35RYibaZoQdUT4kCgYEAwQ6Y
-xsKFYFks1+HYl29kR0qUkXFlVbKOhQIlj/dPm0JjZ0xYkUxmzoXD68HrOWgz7hc2
-hZaQTgWf+8cRaZNfh7oL+Iglczc2UXuwuUYguYssD/G6/ZPY15PhItgCghaU5Ewy
-hMwIJ81NENY2EQTgk/Z1KZitXdVJfHl/IPMQgdkCgYASdqkqkPjaa5dDuj8byO8L
-NtTSUYlHJbAmjBbfcyTMG230/vkF4+SmDuznci1FcYuJYyyWSzqzoKISM3gGfIJQ
-rYZvCSDiu4qGGPXOWANaX8YnMXalukGzW/CO96dXPB9lD7iX8uxKMX5Q3sgYz+LS
-hszUNHWf2XB//ehCtZkKAQKBgQCxL2luepeZHx82H9T+38BkYgHLHw0HQzLkxlyd
-LjlE4QCEjSB4cmukvkZbuYXfEVEgAvQKVW6p/SWhGkpT4Gt8EXftKV9dyF21GVXQ
-JZnhUOcm1xBsrWYGLXYi2agrpvgONBTlprERfq5tdnz2z8giZL+RZswu45Nnh8bz
-AcKzuQKBgQCGOQvKvNL5XKKmws/KRkfJbXgsyRT2ubO6pVL9jGQG5wntkeIRaEpT
-oxFtWMdPx3b3cxtgSP2ojllEiISk87SFIN1zEhHZy/JpTF0GlU1qg3VIaA78M1p2
-ZdpUsuqJzYmc3dDbQMepIaqdW4xMoTtZFyenUJyoezz6eWy/NlZ/XQ==
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/servers.js b/src/mongo/gotools/test/qa-tests/jstests/libs/servers.js
deleted file mode 100644
index 2715b369e46..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/servers.js
+++ /dev/null
@@ -1,1092 +0,0 @@
-// Wrap whole file in a function to avoid polluting the global namespace
-(function() {
- jsTestOptions = function () {
- if (TestData) {
- return Object.merge(_jsTestOptions, {
- setParameters: TestData.setParameters,
- setParametersMongos: TestData.setParametersMongos,
- storageEngine: TestData.storageEngine,
- wiredTigerEngineConfigString: TestData.wiredTigerEngineConfigString,
- wiredTigerCollectionConfigString: TestData.wiredTigerCollectionConfigString,
- wiredTigerIndexConfigString: TestData.wiredTigerIndexConfigString,
- noJournal: TestData.noJournal,
- noJournalPrealloc: TestData.noJournalPrealloc,
- auth: TestData.auth,
- keyFile: TestData.keyFile,
- authUser: "__system",
- authPassword: TestData.keyFileData,
- authMechanism: TestData.authMechanism,
- adminUser: TestData.adminUser || "admin",
- adminPassword: TestData.adminPassword || "password",
- useLegacyConfigServers: TestData.useLegacyConfigServers || false,
- useLegacyReplicationProtocol: TestData.useLegacyReplicationProtocol || false,
- enableEncryption: TestData.enableEncryption,
- encryptionKeyFile: TestData.encryptionKeyFile,
- auditDestination: TestData.auditDestination,
- useSSL: TestData.useSSL,
- minPort: TestData.minPort,
- maxPort: TestData.maxPort,
- });
- }
- return _jsTestOptions;
- };
-
- // Shim to allow compatibility with newer shells.
- if (typeof stopMongod === 'undefined') {
- stopMongod = _stopMongoProgram;
- }
- if (typeof startMongod === 'undefined') {
- startMongod = function() {
- argArray = arguments;
- if (jsTestOptions().useSSL) {
- if (argArray.indexOf('--sslMode') < 0) {
- argArray.push(
- '--sslMode', 'requireSSL',
- '--sslPEMKeyFile', 'jstests/libs/server.pem',
- '--sslCAFile', 'jstests/libs/ca.pem',
- '--sslWeakCertificateValidation');
- }
- }
- _startMongod.apply(null, argArray);
- };
- }
-
- _parsePath = function() {
- var dbpath = "";
- for (var i = 0; i < arguments.length; ++i) {
- if (arguments[i] === "--dbpath") {
- dbpath = arguments[i + 1];
- }
- }
-
- if (dbpath === "") {
- throw Error("No dbpath specified");
- }
-
- return dbpath;
- };
-
- _parsePort = function() {
- var port = "";
- for (var i = 0; i < arguments.length; ++i) {
- if (arguments[i] === "--port") {
- port = arguments[i + 1];
- }
- }
-
- if (port === "") {
- throw Error("No port specified");
- }
- return port;
- };
-
- connectionURLTheSame = function(a, b) {
-
- if (a === b) {
- return true;
- }
-
- if (!a || !b) {
- return false;
- }
-
- if (a.host) {
- return connectionURLTheSame(a.host, b);
- }
- if (b.host) {
- return connectionURLTheSame(a, b.host);
- }
-
- if (a.name) {
- return connectionURLTheSame(a.name, b);
- }
- if (b.name) {
- return connectionURLTheSame(a, b.name);
- }
-
- if (a.indexOf("/") < 0 && b.indexOf("/") < 0) {
- a = a.split(":");
- b = b.split(":");
-
- if (a.length !== b.length) {
- return false;
- }
-
- if (a.length === 2 && a[1] !== b[1]) {
- return false;
- }
-
- if (a[0] === "localhost" || a[0] === "127.0.0.1") {
- a[0] = getHostName();
- }
- if (b[0] === "localhost" || b[0] === "127.0.0.1") {
- b[0] = getHostName();
- }
-
- return a[0] === b[0];
- }
- var a0 = a.split("/")[0];
- var b0 = b.split("/")[0];
- return a0 === b0;
- };
-
- assert(connectionURLTheSame("foo", "foo"));
- assert(!connectionURLTheSame("foo", "bar"));
-
- assert(connectionURLTheSame("foo/a,b", "foo/b,a"));
- assert(!connectionURLTheSame("foo/a,b", "bar/a,b"));
-
- createMongoArgs = function(binaryName, args) {
- var fullArgs = [binaryName];
-
- if (args.length === 1 && isObject(args[0])) {
- var o = args[0];
- for (var k in o) {
- if (o.hasOwnProperty(k)) {
- if (k === "v" && isNumber(o[k])) {
- var n = o[k];
- if (n > 0) {
- if (n > 10) {
- n = 10;
- }
- var temp = "-";
- while (n-- > 0) {
- temp += "v";
- }
- fullArgs.push(temp);
- }
- } else {
- fullArgs.push("--" + k);
- if (o[k] !== "") {
- fullArgs.push(String(o[k]));
- }
- }
- }
- }
- } else {
- for (var i=0; i<args.length; i++) {
- fullArgs.push(args[i]);
- }
- }
-
- return fullArgs;
- };
-
-
- MongoRunner = function() {};
-
- MongoRunner.dataDir = "/data/db";
- MongoRunner.dataPath = "/data/db/";
- MongoRunner.usedPortMap = {};
-
- MongoRunner.VersionSub = function(regex, version) {
- this.regex = regex;
- this.version = version;
- };
-
- // These patterns allow substituting the binary versions used for each
- // version string to support the dev/stable MongoDB release cycle.
- MongoRunner.binVersionSubs = [
- new MongoRunner.VersionSub(/^latest$/, ""),
- new MongoRunner.VersionSub(/^oldest-supported$/, "1.8"),
- // To-be-updated when 3.0 becomes available
- new MongoRunner.VersionSub(/^last-stable$/, "2.6"),
- // Latest unstable and next stable are effectively the same release
- new MongoRunner.VersionSub(/^2\.7(\..*){0,1}/, ""),
- new MongoRunner.VersionSub(/^2\.8(\..*){0,1}/, ""),
- new MongoRunner.VersionSub(/^3\.0(\..*){0,1}/, ""),
- new MongoRunner.VersionSub(/^3\.1(\..*){0,1}/, ""),
- ];
-
- MongoRunner.getBinVersionFor = function(version) {
-
- // If this is a version iterator, iterate the version via toString()
- if (version instanceof MongoRunner.versionIterator.iterator) {
- version = version.toString();
- }
-
- // No version set means we use no suffix, this is *different* from "latest"
- // since latest may be mapped to a different version.
- version = version || "";
- version = version.trim();
- if (version === "") {
- return "";
- }
-
- // See if this version is affected by version substitutions
- for (var i = 0; i < MongoRunner.binVersionSubs.length; i++) {
- var sub = MongoRunner.binVersionSubs[i];
- if (sub.regex.test(version)) {
- version = sub.version;
- }
- }
-
- return version;
- };
-
- MongoRunner.areBinVersionsTheSame = function(versionA, versionB) {
- versionA = MongoRunner.getBinVersionFor(versionA);
- versionB = MongoRunner.getBinVersionFor(versionB);
-
- if (versionA === "" || versionB === "") {
- return versionA === versionB;
- }
-
- return versionA.startsWith(versionB) ||
- versionB.startsWith(versionA);
- };
-
- MongoRunner.logicalOptions = {
- runId: true,
- pathOpts: true,
- remember: true,
- noRemember: true,
- appendOptions: true,
- restart: true,
- noCleanData: true,
- cleanData: true,
- startClean: true,
- forceLock: true,
- useLogFiles: true,
- logFile: true,
- useHostName: true,
- useHostname: true,
- noReplSet: true,
- forgetPort: true,
- arbiter: true,
- noJournalPrealloc: true,
- noJournal: true,
- binVersion: true,
- waitForConnect: true,
- };
-
- MongoRunner.toRealPath = function(path, pathOpts) {
- // Replace all $pathOptions with actual values
- pathOpts = pathOpts || {};
- path = path.replace(/\$dataPath/g, MongoRunner.dataPath);
- path = path.replace(/\$dataDir/g, MongoRunner.dataDir);
- for (key in pathOpts) { // eslint-disable-line guard-for-in
- path = path.replace(RegExp("\\$" + RegExp.escape(key), "g"), pathOpts[key]);
- }
-
- // Relative path
- // Detect Unix and Windows absolute paths
- // as well as Windows drive letters
- // Also captures Windows UNC paths
- if (!path.match(/^(\/|\\|[A-Za-z]:)/)) {
- if (path !== "" && !path.endsWith("/")) {
- path += "/";
- }
-
- path = MongoRunner.dataPath + path;
- }
-
- return path;
- };
-
- MongoRunner.toRealDir = function(path, pathOpts) {
- path = MongoRunner.toRealPath(path, pathOpts);
- if (path.endsWith("/")) {
- path = path.substring(0, path.length - 1);
- }
- return path;
- };
-
- MongoRunner.toRealFile = MongoRunner.toRealDir;
-
- MongoRunner.nextOpenPort = function() {
- if (typeof allocatePort === "function") {
- return allocatePort();
- }
-
- var i = 0;
- while (MongoRunner.usedPortMap[String(27000 + i)]) {
- i++;
- }
- MongoRunner.usedPortMap[String(27000 + i)] = true;
-
- return 27000 + i;
- };
-
- /**
- * Returns an iterator object which yields successive versions on toString(), starting from a
- * random initial position, from an array of versions.
- *
- * If passed a single version string or an already-existing version iterator, just returns the
- * object itself, since it will yield correctly on toString()
- *
- * @param {Array.<String>}|{String}|{versionIterator}
- */
- MongoRunner.versionIterator = function(arr, isRandom) {
-
- // If this isn't an array of versions, or is already an iterator, just use it
- if (typeof arr === "string") {
- return arr;
- }
- if (arr.isVersionIterator) {
- return arr;
- }
-
- isRandom = isRandom || false;
-
- // Starting pos
- var i = isRandom ? parseInt(Random.rand() * arr.length) : 0;
-
- return new MongoRunner.versionIterator.iterator(i, arr);
- };
-
- MongoRunner.versionIterator.iterator = function(i, arr) {
-
- this.toString = function() {
- i = (i + 1) % arr.length;
- print("Returning next version : " + i +
- " (" + arr[i] + ") from " + tojson(arr) + "...");
- return arr[i];
- };
-
- this.isVersionIterator = true;
-
- };
-
- /**
- * Converts the args object by pairing all keys with their value and appending
- * dash-dash (--) to the keys. The only exception to this rule are keys that
- * are defined in MongoRunner.logicalOptions, of which they will be ignored.
- *
- * @param {string} binaryName
- * @param {Object} args
- *
- * @return {Array.<String>} an array of parameter strings that can be passed
- * to the binary.
- */
- MongoRunner.arrOptions = function(binaryName, args) {
- var fullArgs = [""];
-
- // isObject returns true even if "args" is an array, so the else branch of this statement is
- // dead code. See SERVER-14220.
- if (isObject(args) || (args.length === 1 && isObject(args[0]))) {
- var o = isObject(args) ? args : args[0];
-
- // If we've specified a particular binary version, use that
- if (o.binVersion && o.binVersion !== "") {
- binaryName += "-" + o.binVersion;
- }
-
- // Manage legacy options
- var isValidOptionForBinary = function(option, value) {
- if (!o.binVersion) {
- return true;
- }
-
- // Version 1.x options
- if (o.binVersion.startsWith("1.")) {
- return ["nopreallocj"].indexOf(option) < 0;
- }
-
- return true;
- };
-
- var addOptionsToFullArgs = function(k, v) {
- if (v === undefined || v === null) {
- return;
- }
-
- fullArgs.push("--" + k);
-
- if (v !== "") {
- fullArgs.push("" + v);
- }
- };
-
- for (var k in o) {
- // Make sure our logical option should be added to the array of options
- if (!o.hasOwnProperty(k) ||
- k in MongoRunner.logicalOptions ||
- !isValidOptionForBinary(k, o[k])) {
- continue;
- }
-
- if ((k === "v" || k === "verbose") && isNumber(o[k])) {
- var n = o[k];
- if (n > 0) {
- if (n > 10) {
- n = 10;
- }
- var temp = "-";
- while (n-- > 0) {
- temp += "v";
- }
- fullArgs.push(temp);
- }
- } else if (k === "setParameter" && isObject(o[k])) {
- // If the value associated with the setParameter option is an object, we want
- // to add all key-value pairs in that object as separate --setParameters.
- for (var l = 0; l < Object.keys(o[k]).length; l++) {
- var paramKey = Object.keys(o[k])[l];
- addOptionsToFullArgs(k, "" + paramKey + "=" + o[k][paramKey]);
- }
- } else {
- addOptionsToFullArgs(k, o[k]);
- }
- }
- } else {
- for (var i=0; i<args.length; i++) {
- fullArgs.push(args[i]);
- }
- }
-
- fullArgs[0] = binaryName;
- return fullArgs;
- };
-
- MongoRunner.arrToOpts = function(arr) {
- var opts = {};
- for (var i = 1; i < arr.length; i++) {
- if (arr[i].startsWith("-")) {
- var opt = arr[i].replace(/^-/, "").replace(/^-/, "");
- if (arr.length > i + 1 && !arr[i + 1].startsWith("-")) {
- opts[opt] = arr[i + 1];
- i++;
- } else {
- opts[opt] = "";
- }
-
- if (opt.replace(/v/g, "") === "") {
- opts["verbose"] = opt.length;
- }
- }
- }
-
- return opts;
- };
-
- MongoRunner.savedOptions = {};
-
- MongoRunner.mongoOptions = function(opts) {
- // Don't remember waitForConnect
- var waitForConnect = opts.waitForConnect;
- delete opts.waitForConnect;
-
- // If we're a mongo object
- if (opts.getDB) {
- opts = {restart: opts.runId};
- }
-
- // Initialize and create a copy of the opts
- opts = Object.merge(opts || {}, {});
-
- opts.restart = opts.restart || false;
-
- // RunId can come from a number of places
- // If restart is passed as an old connection
- if (opts.restart && opts.restart.getDB) {
- opts.runId = opts.restart.runId;
- opts.restart = true;
- } else if (isObject(opts.restart)) {
- // If it's the runId itself
- opts.runId = opts.restart;
- opts.restart = true;
- }
-
- if (isObject(opts.remember)) {
- opts.runId = opts.remember;
- opts.remember = true;
- } else if (opts.remember === undefined) {
- // Remember by default if we're restarting
- opts.remember = opts.restart;
- }
-
- // If we passed in restart : <conn> or runId : <conn>
- if (isObject(opts.runId) && opts.runId.runId) {
- opts.runId = opts.runId.runId;
- }
-
- if (opts.restart && opts.remember) {
- opts = Object.merge(MongoRunner.savedOptions[opts.runId], opts);
- }
-
- // Create a new runId
- opts.runId = opts.runId || ObjectId();
-
- // Save the port if required
- if (!opts.forgetPort) {
- opts.port = opts.port || MongoRunner.nextOpenPort();
- }
-
- var shouldRemember = (!opts.restart && !opts.noRemember) || (opts.restart && opts.appendOptions);
-
- // Normalize and get the binary version to use
- opts.binVersion = MongoRunner.getBinVersionFor(opts.binVersion);
-
- if (shouldRemember) {
- MongoRunner.savedOptions[opts.runId] = Object.merge(opts, {});
- }
-
- // Default for waitForConnect is true
- opts.waitForConnect = (waitForConnect === undefined || waitForConnect === null) ?
- true : waitForConnect;
-
- if (jsTestOptions().useSSL) {
- opts.sslMode = opts.sslMode || "requireSSL";
- opts.sslPEMKeyFile = opts.sslPEMKeyFile || "jstests/libs/server.pem";
- opts.sslCAFile = opts.sslCAFile || "jstests/libs/ca.pem";
-
- // Needed for jstest/ssl/upgrade_to_ssl.js
- opts.sslWeakCertificateValidation = "";
-
- // Needed for jstest/ssl/ssl_hostname_validation.js
- opts.sslAllowInvalidHostnames = "";
- }
-
- if (jsTestOptions().useX509 && !opts.clusterAuthMode) {
- opts.clusterAuthMode = "x509";
- }
-
- opts.port = opts.port || MongoRunner.nextOpenPort();
- MongoRunner.usedPortMap[String(parseInt(opts.port))] = true;
-
- opts.pathOpts = Object.merge(opts.pathOpts || {}, {
- port: String(opts.port),
- runId: String(opts.runId),
- });
-
- return opts;
- };
-
- /**
- * @option {object} opts
- *
- * {
- * dbpath {string}
- * useLogFiles {boolean}: use with logFile option.
- * logFile {string}: path to the log file. If not specified and useLogFiles
- * is true, automatically creates a log file inside dbpath.
- * noJournalPrealloc {boolean}
- * noJournal {boolean}
- * keyFile
- * replSet
- * oplogSize
- * }
- */
- MongoRunner.mongodOptions = function(opts) {
- opts = MongoRunner.mongoOptions(opts);
- opts.dbpath = MongoRunner.toRealDir(opts.dbpath || "$dataDir/mongod-$port",
- opts.pathOpts);
- opts.pathOpts = Object.merge(opts.pathOpts, {dbpath: opts.dbpath});
-
- if (!opts.logFile && opts.useLogFiles) {
- opts.logFile = opts.dbpath + "/mongod.log";
- } else if (opts.logFile) {
- opts.logFile = MongoRunner.toRealFile(opts.logFile, opts.pathOpts);
- }
-
- if (opts.logFile !== undefined) {
- opts.logpath = opts.logFile;
- }
-
- if (jsTestOptions().noJournalPrealloc || opts.noJournalPrealloc) {
- opts.nopreallocj = "";
- }
-
- if (jsTestOptions().noJournal || opts.noJournal) {
- opts.nojournal = "";
- }
-
- if (jsTestOptions().keyFile && !opts.keyFile) {
- opts.keyFile = jsTestOptions().keyFile;
- }
-
- if (jsTestOptions().useSSL) {
- opts.sslMode = opts.sslMode || "requireSSL";
- opts.sslPEMKeyFile = opts.sslPEMKeyFile || "jstests/libs/server.pem";
- opts.sslCAFile = opts.sslCAFile || "jstests/libs/ca.pem";
-
- // Needed for jstest/ssl/upgrade_to_ssl.js
- opts.sslWeakCertificateValidation = "";
-
- // Needed for jstest/ssl/ssl_hostname_validation.js
- opts.sslAllowInvalidHostnames = "";
- }
-
- if (jsTestOptions().useX509 && !opts.clusterAuthMode) {
- opts.clusterAuthMode = "x509";
- }
-
- if (opts.noReplSet) {
- opts.replSet = null;
- }
- if (opts.arbiter) {
- opts.oplogSize = 1;
- }
-
- return opts;
- };
-
- MongoRunner.mongosOptions = function(opts) {
- opts = MongoRunner.mongoOptions(opts);
-
- // Normalize configdb option to be host string if currently a host
- if (opts.configdb && opts.configdb.getDB) {
- opts.configdb = opts.configdb.host;
- }
-
- opts.pathOpts = Object.merge(opts.pathOpts, {
- configdb: opts.configdb.replace(/:|,/g, "-")
- });
-
- if (!opts.logFile && opts.useLogFiles) {
- opts.logFile = MongoRunner.toRealFile("$dataDir/mongos-$configdb-$port.log",
- opts.pathOpts);
- } else if (opts.logFile) {
- opts.logFile = MongoRunner.toRealFile(opts.logFile, opts.pathOpts);
- }
-
- if (opts.logFile !== undefined) {
- opts.logpath = opts.logFile;
- }
-
- if (jsTestOptions().keyFile && !opts.keyFile) {
- opts.keyFile = jsTestOptions().keyFile;
- }
-
- return opts;
- };
-
- /**
- * Starts a mongod instance.
- *
- * @param {Object} opts
- *
- * {
- * useHostName {boolean}: Uses hostname of machine if true
- * forceLock {boolean}: Deletes the lock file if set to true
- * dbpath {string}: location of db files
- * cleanData {boolean}: Removes all files in dbpath if true
- * startClean {boolean}: same as cleanData
- * noCleanData {boolean}: Do not clean files (cleanData takes priority)
- *
- * @see MongoRunner.mongodOptions for other options
- * }
- *
- * @return {Mongo} connection object to the started mongod instance.
- *
- * @see MongoRunner.arrOptions
- */
- MongoRunner.runMongod = function(opts) {
- opts = opts || {};
- var useHostName = false;
- var runId = null;
- var waitForConnect = true;
- var fullOptions = opts;
-
- if (isObject(opts)) {
- opts = MongoRunner.mongodOptions(opts);
- fullOptions = opts;
-
- useHostName = opts.useHostName || opts.useHostname;
- runId = opts.runId;
- waitForConnect = opts.waitForConnect;
-
- if (opts.forceLock) {
- removeFile(opts.dbpath + "/mongod.lock");
- }
- if ((opts.cleanData || opts.startClean) || (!opts.restart && !opts.noCleanData)) {
- print("Resetting db path '" + opts.dbpath + "'");
- resetDbpath(opts.dbpath);
- }
-
- opts = MongoRunner.arrOptions("mongod", opts);
- }
-
- var mongod = MongoRunner.startWithArgs(opts, waitForConnect);
- if (!waitForConnect) {
- mongos = {};
- }
- if (!mongod) {
- return null;
- }
-
- mongod.commandLine = MongoRunner.arrToOpts(opts);
- mongod.name = (useHostName ? getHostName() : "localhost") + ":" + mongod.commandLine.port;
- mongod.host = mongod.name;
- mongod.port = parseInt(mongod.commandLine.port);
- mongod.runId = runId || ObjectId();
- mongod.dbpath = fullOptions.dbpath;
- mongod.savedOptions = MongoRunner.savedOptions[mongod.runId];
- mongod.fullOptions = fullOptions;
-
- return mongod;
- };
-
- MongoRunner.runMongos = function(opts) {
- opts = opts || {};
- var useHostName = false;
- var runId = null;
- var waitForConnect = true;
- var fullOptions = opts;
-
- if (isObject(opts)) {
- opts = MongoRunner.mongosOptions(opts);
- fullOptions = opts;
-
- useHostName = opts.useHostName || opts.useHostname;
- runId = opts.runId;
- waitForConnect = opts.waitForConnect;
-
- opts = MongoRunner.arrOptions("mongos", opts);
- }
-
- var mongos = MongoRunner.startWithArgs(opts, waitForConnect);
- if (!waitForConnect) {
- mongos = {};
- }
- if (!mongos) {
- return null;
- }
-
- mongos.commandLine = MongoRunner.arrToOpts(opts);
- mongos.name = (useHostName ? getHostName() : "localhost") + ":" + mongos.commandLine.port;
- mongos.host = mongos.name;
- mongos.port = parseInt(mongos.commandLine.port);
- mongos.runId = runId || ObjectId();
- mongos.savedOptions = MongoRunner.savedOptions[mongos.runId];
- mongos.fullOptions = fullOptions;
-
- return mongos;
- };
-
- /**
- * Kills a mongod process.
- *
- * @param {number} port the port of the process to kill
- * @param {number} signal The signal number to use for killing
- * @param {Object} opts Additional options. Format:
- * {
- * auth: {
- * user {string}: admin user name
- * pwd {string}: admin password
- * }
- * }
- *
- * Note: The auth option is required in a authenticated mongod running in Windows since
- * it uses the shutdown command, which requires admin credentials.
- */
- MongoRunner.stopMongod = function(port, signal, opts) {
- if (!port) {
- print("Cannot stop mongo process " + port);
- return;
- }
-
- signal = signal || 15;
-
- if (port.port) {
- port = parseInt(port.port);
- }
-
- if (port instanceof ObjectId) {
- opts = MongoRunner.savedOptions(port);
- if (opts) {
- port = parseInt(opts.port);
- }
- }
-
- var exitCode = stopMongod(parseInt(port), parseInt(signal), opts);
-
- delete MongoRunner.usedPortMap[String(parseInt(port))];
-
- return exitCode;
- };
-
- MongoRunner.stopMongos = MongoRunner.stopMongod;
-
- MongoRunner.isStopped = function(port) {
- if (!port) {
- print("Cannot detect if process " + port + " is stopped.");
- return;
- }
-
- if (port.port) {
- port = parseInt(port.port);
- }
-
- if (port instanceof ObjectId) {
- opts = MongoRunner.savedOptions(port);
- if (opts) {
- port = parseInt(opts.port);
- }
- }
-
- return !MongoRunner.usedPortMap[String(parseInt(port))];
- };
-
- /**
- * Starts an instance of the specified mongo tool
- *
- * @param {String} binaryName The name of the tool to run
- * @param {Object} opts options to pass to the tool
- * {
- * binVersion {string}: version of tool to run
- * }
- *
- * @see MongoRunner.arrOptions
- */
- MongoRunner.runMongoTool = function(binaryName, opts) {
- opts = opts || {};
- // Normalize and get the binary version to use
- opts.binVersion = MongoRunner.getBinVersionFor(opts.binVersion);
-
- var argsArray = MongoRunner.arrOptions(binaryName, opts);
-
- return runMongoProgram.apply(null, argsArray);
- };
-
- // Given a test name figures out a directory for that test to use for dump files and makes sure
- // that directory exists and is empty.
- MongoRunner.getAndPrepareDumpDirectory = function(testName) {
- var dir = MongoRunner.dataPath + testName + "_external/";
- resetDbpath(dir);
- return dir;
- };
-
- // Start a mongod instance and return a 'Mongo' object connected to it.
- // This function's arguments are passed as command line arguments to mongod.
- // The specified 'dbpath' is cleared if it exists, created if not.
- // var conn = startMongodEmpty("--port", 30000, "--dbpath", "asdf");
- startMongodEmpty = function () {
- var args = createMongoArgs("mongod", arguments);
-
- var dbpath = _parsePath.apply(null, args);
- resetDbpath(dbpath);
-
- return startMongoProgram.apply(null, args);
- };
- startMongod = function () {
- print("startMongod WARNING DELETES DATA DIRECTORY THIS IS FOR TESTING ONLY");
- return startMongodEmpty.apply(null, arguments);
- };
- startMongodNoReset = function() {
- var args = createMongoArgs("mongod", arguments);
- return startMongoProgram.apply(null, args);
- };
-
- startMongos = function(args) {
- return MongoRunner.runMongos(args);
- };
-
- /**
- * Returns a new argArray with any test-specific arguments added.
- */
- function appendSetParameterArgs(argArray) {
- var programName = argArray[0];
- if (programName.endsWith('mongod') || programName.endsWith('mongos')) {
- if (jsTest.options().enableTestCommands) {
- argArray.push('--setParameter', "enableTestCommands=1");
- }
- if (jsTest.options().authMechanism && jsTest.options().authMechanism !== "SCRAM-SHA-1") {
- var hasAuthMechs = false;
- for (i in argArray) {
- if (typeof argArray[i] === 'string' &&
- argArray[i].indexOf('authenticationMechanisms') !== -1) {
- hasAuthMechs = true;
- break;
- }
- }
- if (!hasAuthMechs) {
- argArray.push('--setParameter', "authenticationMechanisms=" + jsTest.options().authMechanism);
- }
- }
- if (jsTest.options().auth) {
- argArray.push('--setParameter', "enableLocalhostAuthBypass=false");
- }
- if (jsTestOptions().useSSL) {
- if (argArray.indexOf('--sslMode') < 0) {
- argArray.push(
- '--sslMode', 'requireSSL',
- '--sslPEMKeyFile', 'jstests/libs/server.pem',
- '--sslCAFile', 'jstests/libs/ca.pem',
- '--sslWeakCertificateValidation');
- }
- }
-
- if (programName.endsWith('mongos')) {
- // mongos only options
- // apply setParameters for mongos
- if (jsTest.options().setParametersMongos) {
- var params = jsTest.options().setParametersMongos.split(",");
- if (params && params.length > 0) {
- params.forEach(function(p) {
- if (p) {
- argArray.push('--setParameter', p);
- }
- });
- }
- }
- } else if (programName.endsWith('mongod')) {
- // mongod only options
- // set storageEngine for mongod
- if (jsTest.options().storageEngine) {
- if (argArray.indexOf("--storageEngine") < 0) {
- argArray.push('--storageEngine', jsTest.options().storageEngine);
- }
- }
- if (jsTest.options().wiredTigerEngineConfigString) {
- argArray.push('--wiredTigerEngineConfigString', jsTest.options().wiredTigerEngineConfigString);
- }
- if (jsTest.options().wiredTigerCollectionConfigString) {
- argArray.push('--wiredTigerCollectionConfigString', jsTest.options().wiredTigerCollectionConfigString);
- }
- if (jsTest.options().wiredTigerIndexConfigString) {
- argArray.push('--wiredTigerIndexConfigString', jsTest.options().wiredTigerIndexConfigString);
- }
- // apply setParameters for mongod
- if (jsTest.options().setParameters) {
- params = jsTest.options().setParameters.split(",");
- if (params && params.length > 0) {
- params.forEach(function(p) {
- if (p) {
- argArray.push('--setParameter', p);
- }
- });
- }
- }
- }
- }
- return argArray;
- }
-
- /**
- * Start a mongo process with a particular argument array. If we aren't waiting for connect,
- * return null.
- */
- MongoRunner.startWithArgs = function(argArray, waitForConnect) {
- // TODO: Make there only be one codepath for starting mongo processes
- argArray = appendSetParameterArgs(argArray);
- var port = _parsePort.apply(null, argArray);
- var pid = _startMongoProgram.apply(null, argArray);
-
- var conn = null;
- if (waitForConnect) {
- assert.soon(function() {
- try {
- conn = new Mongo("127.0.0.1:" + port);
- return true;
- } catch (e) {
- if (!checkProgram(pid)) {
- print("Could not start mongo program at " + port + ", process ended");
- return true;
- }
- }
- return false;
- }, "unable to connect to mongo program on port " + port, 600 * 1000);
- }
-
- return conn;
- };
-
- /**
- * DEPRECATED
- *
- * Start mongod or mongos and return a Mongo() object connected to there.
- * This function's first argument is "mongod" or "mongos" program name, \
- * and subsequent arguments to this function are passed as
- * command line arguments to the program.
- */
- startMongoProgram = function() {
- var port = _parsePort.apply(null, arguments);
-
- // Enable test commands.
- // TODO: Make this work better with multi-version testing so that we can support
- // enabling this on 2.4 when testing 2.6
- var args = Array.from(arguments);
- args = appendSetParameterArgs(args);
- var pid = _startMongoProgram.apply(null, args);
-
- var m;
- assert.soon(function() {
- try {
- m = new Mongo("127.0.0.1:" + port);
- return true;
- } catch (e) {
- if (!checkProgram(pid)) {
-
- print("Could not start mongo program at " + port + ", process ended");
-
- // Break out
- m = null;
- return true;
- }
- }
- return false;
- }, "unable to connect to mongo program on port " + port, 600 * 1000);
-
- return m;
- };
-
- runMongoProgram = function() {
- var args = Array.from(arguments);
- var progName = args[0];
-
- if (jsTestOptions().auth) {
- args = args.slice(1);
- args.unshift(progName,
- '-u', jsTestOptions().authUser,
- '-p', jsTestOptions().authPassword,
- '--authenticationDatabase=admin'
- );
- }
-
- if (jsTestOptions().useSSL) {
- args.push("--ssl",
- "--sslPEMKeyFile", "jstests/libs/client.pem",
- "--sslCAFile", "jstests/libs/ca.pem",
- "--sslAllowInvalidHostnames");
- }
-
- if (progName === 'mongo' && !_useWriteCommandsDefault()) {
- progName = args[0];
- args = args.slice(1);
- args.unshift(progName, '--useLegacyWriteOps');
- }
-
- return _runMongoProgram.apply(null, args);
- };
-
- // Start a mongo program instance. This function's first argument is the
- // program name, and subsequent arguments to this function are passed as
- // command line arguments to the program. Returns pid of the spawned program.
- startMongoProgramNoConnect = function() {
- var args = Array.from(arguments);
- var progName = args[0];
-
- if (jsTestOptions().auth) {
- args = args.slice(1);
- args.unshift(progName,
- '-u', jsTestOptions().authUser,
- '-p', jsTestOptions().authPassword,
- '--authenticationDatabase=admin');
- }
-
- if (jsTestOptions().useSSL) {
- args.push("--ssl",
- "--sslPEMKeyFile", "jstests/libs/client.pem",
- "--sslCAFile", "jstests/libs/ca.pem",
- "--sslAllowInvalidHostnames");
- }
-
- if (progName === 'mongo' && !_useWriteCommandsDefault()) {
- args = args.slice(1);
- args.unshift(progName, '--useLegacyWriteOps');
- }
-
- return _startMongoProgram.apply(null, args);
- };
-
- myPort = function() {
- var m = db.getMongo();
- if (m.host.match(/:/)) {
- return m.host.match(/:(.*)/)[1];
- }
- return 27017;
- };
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/servers_misc.js b/src/mongo/gotools/test/qa-tests/jstests/libs/servers_misc.js
deleted file mode 100644
index f0f5cfe7ceb..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/servers_misc.js
+++ /dev/null
@@ -1,379 +0,0 @@
-
-/**
- * Run a mongod process.
- *
- * After initializing a MongodRunner, you must call start() on it.
- * @param {int} port port to run db on, use allocatePorts(num) to requision
- * @param {string} dbpath path to use
- * @param {boolean} peer pass in false (DEPRECATED, was used for replica pair host)
- * @param {boolean} arbiter pass in false (DEPRECATED, was used for replica pair host)
- * @param {array} extraArgs other arguments for the command line
- * @param {object} options other options include no_bind to not bind_ip to 127.0.0.1
- * (necessary for replica set testing)
- */
-MongodRunner = function(port, dbpath, peer, arbiter, extraArgs, options) {
- this.port_ = port;
- this.dbpath_ = dbpath;
- this.peer_ = peer;
- this.arbiter_ = arbiter;
- this.extraArgs_ = extraArgs;
- this.options_ = options ? options : {};
-};
-
-/**
- * Start this mongod process.
- *
- * @param {boolean} reuseData If the data directory should be left intact (default is to wipe it)
- */
-MongodRunner.prototype.start = function(reuseData) {
- var args = [];
- if (reuseData) {
- args.push("mongod");
- }
- args.push(
- "--port", this.port_,
- "--dbpath", this.dbpath_,
- "--nohttpinterface",
- "--noprealloc",
- "--smallfiles");
- if (!this.options_.no_bind) {
- args.push("--bind_ip", "127.0.0.1");
- }
- if (this.extraArgs_) {
- args = args.concat(this.extraArgs_);
- }
- removeFile(this.dbpath_ + "/mongod.lock");
- if (reuseData) {
- return startMongoProgram.apply(null, args);
- }
- return startMongod.apply(null, args);
-};
-
-MongodRunner.prototype.port = function() {
- return this.port_;
-};
-
-MongodRunner.prototype.toString = function() {
- return [this.port_, this.dbpath_, this.peer_, this.arbiter_].toString();
-};
-
-ToolTest = function(name, extraOptions) {
- this.useSSL = jsTestOptions().useSSL;
- this.name = name;
- this.options = extraOptions;
- this.port = allocatePorts(1)[0];
- this.baseName = "jstests_tool_" + name;
- this.root = MongoRunner.dataPath + this.baseName;
- this.dbpath = this.root + "/";
- this.ext = this.root + "_external/";
- this.extFile = this.root + "_external/a";
- resetDbpath(this.dbpath);
- resetDbpath(this.ext);
-};
-
-ToolTest.prototype.startDB = function(coll) {
- assert(!this.m, "db already running");
-
- var options = {
- port: this.port,
- dbpath: this.dbpath,
- nohttpinterface: "",
- noprealloc: "",
- smallfiles: "",
- bind_ip: "127.0.0.1",
- };
-
- Object.extend(options, this.options);
-
- if (this.useSSL) {
- Object.extend(options, {
- sslMode: "requireSSL",
- sslPEMKeyFile: "jstests/libs/server.pem",
- sslCAFile: "jstests/libs/ca.pem",
- sslWeakCertificateValidation: "",
- });
- }
-
- this.m = startMongoProgram.apply(null, MongoRunner.arrOptions("mongod", options));
- this.db = this.m.getDB(this.baseName);
- if (coll) {
- return this.db.getCollection(coll);
- }
- return this.db;
-};
-
-ToolTest.prototype.stop = function() {
- if (!this.m) {
- return;
- }
- MongoRunner.stopMongod(this.port);
- this.m = null;
- this.db = null;
-
- print('*** ' + this.name + " completed successfully ***");
-};
-
-ToolTest.prototype.runTool = function() {
- var a = ["mongo" + arguments[0]];
-
- var hasdbpath = false;
-
- for (var i=1; i<arguments.length; i++) {
- a.push(arguments[i]);
- if (arguments[i] === "--dbpath") {
- hasdbpath = true;
- }
- }
-
- if (this.useSSL) {
- a = a.concat(["--ssl",
- "--sslPEMKeyFile", "jstests/libs/server.pem",
- "--sslCAFile", "jstests/libs/ca.pem",
- "--sslAllowInvalidHostnames"]);
- }
-
- if (!hasdbpath) {
- a.push("--host");
- a.push("127.0.0.1:" + this.port);
- }
-
- return runMongoProgram.apply(null, a);
-};
-
-
-ReplTest = function(name, ports) {
- this.name = name;
- this.ports = ports || allocatePorts(2);
-};
-
-ReplTest.prototype.getPort = function(master) {
- if (master) {
- return this.ports[0];
- }
- return this.ports[1];
-};
-
-ReplTest.prototype.getPath = function(master) {
- var p = MongoRunner.dataPath + this.name + "-";
- if (master) {
- p += "master";
- } else {
- p += "slave";
- }
- return p;
-};
-
-ReplTest.prototype.getOptions = function(master, extra, putBinaryFirst, norepl) {
- if (!extra) {
- extra = {};
- }
-
- if (!extra.oplogSize) {
- extra.oplogSize = "40";
- }
-
- var a = [];
- if (putBinaryFirst) {
- a.push("mongod");
- }
- a.push("--nohttpinterface",
- "--noprealloc",
- "--bind_ip", "127.0.0.1",
- "--smallfiles",
- "--port", this.getPort(master),
- "--dbpath", this.getPath(master));
-
- if (jsTestOptions().noJournal) {
- a.push("--nojournal");
- }
- if (jsTestOptions().noJournalPrealloc) {
- a.push("--nopreallocj");
- }
- if (jsTestOptions().keyFile) {
- a.push("--keyFile", jsTestOptions().keyFile);
- }
-
- if (jsTestOptions().useSSL) {
- if (!Array.contains(a, "--sslMode")) {
- a.push("--sslMode", "requireSSL");
- }
- if (!Array.contains(a, "--sslPEMKeyFile")) {
- a.push("--sslPEMKeyFile", "jstests/libs/server.pem");
- }
- if (!Array.contains(a, "--sslCAFile")) {
- a.push("--sslCAFile", "jstests/libs/ca.pem");
- }
- a.push("--sslWeakCertificateValidation");
- }
- if (jsTestOptions().useX509 && !Array.contains(a, "--clusterAuthMode")) {
- a.push("--clusterAuthMode", "x509");
- }
-
- if (!norepl) {
- if (master) {
- a.push("--master");
- } else {
- a.push("--slave", "--source", "127.0.0.1:" + this.ports[0]);
- }
- }
-
- for (var k in extra) {
- if (!extra.hasOwnProperty(k)) {
- continue;
- }
- var v = extra[k];
- if (k in MongoRunner.logicalOptions) {
- continue;
- }
- a.push("--" + k);
- if (v !== undefined && v !== null && v !== "") {
- a.push(v);
- }
- }
-
- return a;
-};
-
-ReplTest.prototype.start = function(master, options, restart, norepl) {
- var lockFile = this.getPath(master) + "/mongod.lock";
- removeFile(lockFile);
- var o = this.getOptions(master, options, restart, norepl);
-
- if (restart) {
- var conn = startMongoProgram.apply(null, o);
- if (!master) {
- conn.setSlaveOk();
- }
- return conn;
- }
- conn = startMongod.apply(null, o);
- if (jsTestOptions().keyFile || jsTestOptions().auth || jsTestOptions().useX509) {
- jsTest.authenticate(conn);
- }
- if (!master) {
- conn.setSlaveOk();
- }
- return conn;
-};
-
-ReplTest.prototype.stop = function(master, signal) {
- if (arguments.length === 0) {
- this.stop(true);
- this.stop(false);
- return;
- }
-
- print('*** ' + this.name + " completed successfully ***");
- return MongoRunner.stopMongod(this.getPort(master), signal || 15);
-};
-
-if (typeof allocatePort === 'function') {
- allocatePorts = function (numPorts) {
- var ports = [];
- for (var i = 0; i < numPorts; i++) {
- ports.push(allocatePort());
- }
- return ports;
- };
-} else {
- allocatePorts = function(n, startPort) {
- var ret = [];
- var start = startPort || 31000;
- for (var i = start; i < start + n; ++i) {
- ret.push(i);
- }
- return ret;
- };
-}
-
-
-SyncCCTest = function(testName, extraMongodOptions) {
- this._testName = testName;
- this._connections = [];
-
- for (var i=0; i<3; i++) {
- this._connections.push(startMongodTest(30000 + i, testName + i, false, extraMongodOptions));
- }
-
- this.url = this._connections.map(function(z) {
- return z.name;
- }).join(",");
- this.conn = new Mongo(this.url);
-};
-
-SyncCCTest.prototype.stop = function() {
- for (var i=0; i<this._connections.length; i++) {
- MongoRunner.stopMongod(30000 + i);
- }
-
- print('*** ' + this._testName + " completed successfully ***");
-};
-
-SyncCCTest.prototype.checkHashes = function(dbname, msg) {
- var hashes = this._connections.map(function(z) {
- return z.getDB(dbname).runCommand("dbhash");
- });
-
- for (var i=1; i<hashes.length; i++) {
- assert.eq(hashes[0].md5, hashes[i].md5, "checkHash on " + dbname + " " + msg + "\n" + tojson(hashes));
- }
-};
-
-SyncCCTest.prototype.tempKill = function(num) {
- num = num || 0;
- MongoRunner.stopMongod(30000 + num);
-};
-
-SyncCCTest.prototype.tempStart = function(num) {
- num = num || 0;
- this._connections[num] = startMongodTest(30000 + num, this._testName + num, true);
-};
-
-
-function startParallelShell(jsCode, port, noConnect) {
- var x;
-
- var args = ["mongo"];
-
- // Convert function into call-string
- if (typeof (jsCode) === "function") {
- var id = Math.floor(Math.random() * 100000);
- jsCode = "var f" + id + " = " + jsCode.toString() + ";f" + id + "();";
- } else if (typeof (jsCode) === "string") {
- // do nothing
- } else {
- throw Error("bad first argument to startParallelShell");
- }
-
- if (noConnect) {
- args.push("--nodb");
- } else if (typeof (db) === "object") {
- jsCode = "db = db.getSiblingDB('" + db.getName() + "');" + jsCode;
- }
-
- if (TestData) {
- jsCode = "TestData = " + tojson(TestData) + ";" + jsCode;
- }
-
- args.push("--eval", jsCode);
-
- if (typeof db === "object") {
- var hostAndPort = db.getMongo().host.split(':');
- var host = hostAndPort[0];
- args.push("--host", host);
- if (!port && hostAndPort.length >= 2) {
- port = hostAndPort[1];
- }
- }
- if (port) {
- args.push("--port", port);
- }
-
-
- x = startMongoProgramNoConnect.apply(null, args);
- return function() {
- return waitProgram(x);
- };
-}
-
-var testingReplication = false;
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/slow_weekly_util.js b/src/mongo/gotools/test/qa-tests/jstests/libs/slow_weekly_util.js
deleted file mode 100644
index e6b31eede9d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/slow_weekly_util.js
+++ /dev/null
@@ -1,25 +0,0 @@
-
-SlowWeeklyMongod = function(name) {
- this.name = name;
- this.port = 30201;
-
- this.start = new Date();
-
- this.conn = startMongodEmpty(
- "--port", this.port,
- "--dbpath", MongoRunner.dataPath + this.name,
- "--smallfiles",
- "--nojournal");
-};
-
-SlowWeeklyMongod.prototype.getDB = function(name) {
- return this.conn.getDB(name);
-};
-
-SlowWeeklyMongod.prototype.stop = function() {
- stopMongod(this.port);
- var end = new Date();
- print("slowWeekly test: " + this.name + " completed successfully in "
- + ((end.getTime() - this.start.getTime()) / 1000) + " seconds");
-};
-
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/smoke.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/smoke.pem
deleted file mode 100644
index 7dddf222386..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/smoke.pem
+++ /dev/null
@@ -1,48 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDYTCCAkmgAwIBAgIBCDANBgkqhkiG9w0BAQUFADBrMQ4wDAYDVQQDEwVzbW9r
-ZTEPMA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYwFAYDVQQHEw1O
-ZXcgWW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMwHhcN
-MTQwNzE3MTYwMDAwWhcNMjAwNzE3MTYwMDAwWjBrMQ4wDAYDVQQDEwVzbW9rZTEP
-MA0GA1UECxMGS2VybmVsMRAwDgYDVQQKEwdNb25nb0RCMRYwFAYDVQQHEw1OZXcg
-WW9yayBDaXR5MREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMwggEiMA0G
-CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDb4fOWDomCPyYesh42pQ/bEHdK7r73
-06x1hdku9i+nytCSxhhuITGC1FA4ZIbYdQC/jgfzC0D+SDFKCCyNZA/2Pxam9y3F
-QHrueNtD9bw/OB98D6hC2fCow5OxUqWDkee2hQRTwLKDzec+H72AkwURh8oTfJsl
-LL/1YITZs9kfs59r8HG2YAT7QBbg3xBmK0wZvL4V/FY/OeeR92pIgjUU/6xm/1LU
-bhNHl5JTrXQxPpmvDb1ysiI0mMLeUz7UI+Pe/9mn91dHwgkprWyFi6VnV3/aW7DC
-nW/DklOPD8vMWu2A6iYU0fZbcj4vGM607vst5QLDMoD5Y2ilrKLiTRa5AgMBAAGj
-EDAOMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAJc64d76+eyNGX6C
-5r4IdFF3zJjkLs/NcSMReUTEv4zAdJCn7c1FNRkQBS3Ky2CeSGmiyYOhWZ7usv7x
-EvprmHouWsrQXV+o5EIW366e5wzg0c5KWO3oBIRjx4hDkRSQSjJjy5NFrc8fAW9x
-eeaHFWdqk3CHvqBhd32QYEs4+7v8hBYM3PBkj8qghXta4ZZS89cTMSjhu5s4Opje
-qUzGzoHat2VBdYzIpVOorYMFXObwCeQkCAXO5epuGZ0QhML66hc7FuOsW75kI9aW
-QXVoM/z2Gb1wbBYnwHOXtClK783S3RdV0uJun/pVj+VeHb6fyIQRmC5d0eJ0C8mY
-X+acnvA=
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpQIBAAKCAQEA2+Hzlg6Jgj8mHrIeNqUP2xB3Su6+99OsdYXZLvYvp8rQksYY
-biExgtRQOGSG2HUAv44H8wtA/kgxSggsjWQP9j8WpvctxUB67njbQ/W8PzgffA+o
-QtnwqMOTsVKlg5HntoUEU8Cyg83nPh+9gJMFEYfKE3ybJSy/9WCE2bPZH7Ofa/Bx
-tmAE+0AW4N8QZitMGby+FfxWPznnkfdqSII1FP+sZv9S1G4TR5eSU610MT6Zrw29
-crIiNJjC3lM+1CPj3v/Zp/dXR8IJKa1shYulZ1d/2luwwp1vw5JTjw/LzFrtgOom
-FNH2W3I+LxjOtO77LeUCwzKA+WNopayi4k0WuQIDAQABAoIBAQDRFgAaDcLGfqQS
-Bk/iqHz2U6cMMxCW+sqAioGmPWW9iYdiOkra1meNP7T0mur7A+9tN3LpsybfZeiw
-vCsZXDAteXph1KPKcPE0uOnPqumRuB2ATCc1Qqas5CUaNju7a8/J6Jzfw1o9KVud
-4HLDw4nLTLNkalXhOLdkbp6FoZZypAgc8OnSdw7z9Kri6VndkddX3fWv4t203XwT
-AvBxvy4Qfblz6VKYRnjj2CPvo/kD+ncFEg+S6u8/LkghTX7CYeMHdTC0P9jOcEK2
-PMm3kS3sX7VkypsAirYK5QtBWxur+mINxfOBDtRlA2RaJQnikRiGb14bMkLx8Liy
-JNjEHSLdAoGBAP9+KpjniozZIbrcS79wdRrW+ARyDp1Plzyd4nQxfWmQ//nsnK5T
-EYCFXWTR/ldkAoHpD+bGGU02p1+1u4vmWqw/x+Qy56Gh/eylhe0RvYEjkVLyreuc
-bXu0BFlKVgRlBq1ZyXnr2lz3bAIZxvZs13lZn6qVPMt7w2/JTCal9jw7AoGBANxR
-sGik9mq/678nzLiNlf/LcwIz7siuyISoWDOaVEVva0uorqctVqL95w0f+3FXqBO/
-5BiJRFo5D8SfzRjkNkJ7V+rm+7/CjtsjEw2Ue+ZJYPlm+Wr545GYmhU9QH9NLZIN
-JBwTVWjLgdsyQyi0Gc+xMraBwEwoyS8cO17uHO2bAoGBANRmO91/6BPt0ve4epR9
-Vi1o9yki9PlcmHtBOmikWAFyFQvd4+eckVlKBflyBkL6locPjTOqDpC9VengeDj2
-2PyHzZLtqtkZhbK9bJhIfkWknwTZUTMliXMkldTxUo82uZVVpoRgSdmtq7IXYeut
-UnjExFMY3EDB9BizvUYIBKvPAoGAViQ6bS/SiPpxGlRdXus88r6BQSM9AYoVLIkF
-s2dr+5oMwZA6eXLopOHRLPiMP0yekto8PLuu1ffpil9QuaLA9E11moqlc9yGLngQ
-QwcDSo72M41nh8Qcjhi0ZgmE5kEuyCQLMk783fRz2VhVmdyRGvuVcHZa0WxA/QJ0
-1DEVbnECgYEA3i2PGHUvU2TIFNvubw3qdH5y7FXafF+O0ulQ8e6r/CbVAG14Z6xP
-RHLc7/JIYK9CG1PWCbkjiHZ4MsKFuRWFrUMrwSj8M3euCaEIxa/Co60qQ/CnZiZ6
-geleTtUcTZ2T0pqGLnrHwlzhLpCkPJPyjcfQjjEZRwd0bVFX6b3C/rw=
------END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/test_background_ops.js b/src/mongo/gotools/test/qa-tests/jstests/libs/test_background_ops.js
deleted file mode 100644
index 08c12cb90aa..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/test_background_ops.js
+++ /dev/null
@@ -1,334 +0,0 @@
-//
-// Utilities related to background operations while other operations are working
-//
-
-/**
- * Allows synchronization between background ops and the test operations
- */
-var waitForLock = function(mongo, name) {
- var ts = new ObjectId();
- var lockColl = mongo.getCollection("config.testLocks");
-
- lockColl.update({_id: name, state: 0}, {$set: {state: 0}}, true);
-
- //
- // Wait until we can set the state to 1 with our id
- //
-
- var startTime = new Date().getTime();
-
- assert.soon(function() {
- lockColl.update({_id: name, state: 0}, {$set: {ts: ts, state: 1}});
- var gleObj = lockColl.getDB().getLastErrorObj();
-
- if (new Date().getTime() - startTime > 20 * 1000) {
- print("Waiting for...");
- printjson(gleObj);
- printjson(lockColl.findOne());
- printjson(ts);
- }
-
- return gleObj.n === 1 || gleObj.updatedExisting;
- }, "could not acquire lock", 30 * 1000, 100);
-
- print("Acquired lock " + tojson({_id: name, ts: ts}) + " curr : " +
- tojson(lockColl.findOne({_id: name})));
-
- // Set the state back to 0
- var unlock = function() {
- print("Releasing lock " + tojson({_id: name, ts: ts}) + " curr : " +
- tojson(lockColl.findOne({_id: name})));
- lockColl.update({_id: name, ts: ts}, {$set: {state: 0}});
- };
-
- // Return an object we can invoke unlock on
- return {unlock: unlock};
-};
-
-/**
- * Allows a test or background op to say it's finished
- */
-var setFinished = function(mongo, name, finished) {
- if (finished || finished === undefined || finished === null) {
- mongo.getCollection("config.testFinished").update({_id: name}, {_id: name}, true);
- } else {
- mongo.getCollection("config.testFinished").remove({_id: name});
- }
-};
-
-/**
- * Checks whether a test or background op is finished
- */
-var isFinished = function(mongo, name) {
- return mongo.getCollection("config.testFinished").findOne({_id: name}) !== null;
-};
-
-/**
- * Sets the result of a background op
- */
-var setResult = function(mongo, name, result, err) {
- mongo.getCollection("config.testResult").update({_id: name}, {_id: name, result: result, err: err}, true);
-};
-
-/**
- * Gets the result for a background op
- */
-var getResult = function(mongo, name) {
- return mongo.getCollection("config.testResult").findOne({_id: name});
-};
-
-/**
- * Overrides the parallel shell code in mongo
- */
-function startParallelShell(jsCode, port) {
- var x;
- if (port) {
- x = startMongoProgramNoConnect("mongo", "--port", port, "--eval", jsCode);
- } else {
- x = startMongoProgramNoConnect("mongo", "--eval", jsCode, db ? db.getMongo().host : null);
- }
-
- return function() {
- jsTestLog("Waiting for shell " + x + "...");
- waitProgram(x);
- jsTestLog("Shell " + x + " finished.");
- };
-}
-
-var RandomFunctionContext = function(context) {
- Random.srand(context.seed);
- Random.randBool = function() {
- return Random.rand() > 0.5;
- };
-
- Random.randInt = function(min, max) {
- if (max === undefined) {
- max = min;
- min = 0;
- }
- return min + Math.floor(Random.rand() * max);
- };
-
- Random.randShardKey = function() {
- var numFields = 2; // Random.randInt(1, 3)
- var key = {};
- for (var i = 0; i < numFields; i++) {
- var field = String.fromCharCode("a".charCodeAt() + i);
- key[field] = 1;
- }
- return key;
- };
-
- Random.randShardKeyValue = function(shardKey) {
- var keyValue = {};
- for (field in shardKey) {
- if (!shardKey.hasOwnProperty(field)) {
- continue;
- }
- keyValue[field] = Random.randInt(1, 100);
- }
- return keyValue;
- };
-
- Random.randCluster = function() {
- var numShards = 2; // Random.randInt( 1, 10 )
- var rs = false; // Random.randBool()
- var st = new ShardingTest({
- shards: numShards,
- mongos: 4,
- other: {separateConfig: true, rs: rs}
- });
- return st;
- };
-};
-
-
-startParallelOps = function(mongo, proc, args, context) {
- var procName = proc.name + "-" + new ObjectId();
- var seed = new ObjectId(new ObjectId().valueOf().split("").reverse().join(""))
- .getTimestamp().getTime();
-
- // Make sure we aren't finished before we start
- setFinished(mongo, procName, false);
- setResult(mongo, procName, undefined, undefined);
-
- // TODO: Make this a context of its own
- var procContext = {
- procName: procName,
- seed: seed,
- waitForLock: waitForLock,
- setFinished: setFinished,
- isFinished: isFinished,
- setResult: setResult,
- setup: function(context, stored) {
- waitForLock = function() {
- return context.waitForLock(db.getMongo(), context.procName);
- };
- setFinished = function(finished) {
- return context.setFinished(db.getMongo(), context.procName, finished);
- };
- isFinished = function() {
- return context.isFinished(db.getMongo(), context.procName);
- };
- setResult = function(result, err) {
- return context.setResult(db.getMongo(), context.procName, result, err);
- };
- },
- };
-
- var bootstrapper = function(stored) {
- var procContext = stored.procContext;
- procContext.setup(procContext, stored);
-
- var contexts = stored.contexts;
- eval("contexts = " + contexts); // eslint-disable-line no-eval
-
- for (var i = 0; i < contexts.length; i++) {
- if (typeof (contexts[i]) !== "undefined") {
- // Evaluate all contexts
- contexts[i](procContext);
- }
- }
-
- var operation = stored.operation;
- eval("operation = " + operation); // eslint-disable-line no-eval
-
- var args = stored.args;
- eval("args = " + args); // eslint-disable-line no-eval
-
- result = undefined;
- err = undefined;
-
- try {
- result = operation.apply(null, args);
- } catch (e) {
- err = e;
- }
-
- setResult(result, err);
- };
-
- var contexts = [RandomFunctionContext, context];
-
- var testDataColl = mongo.getCollection("config.parallelTest");
-
- testDataColl.insert({
- _id: procName,
- bootstrapper: tojson(bootstrapper),
- operation: tojson(proc),
- args: tojson(args),
- procContext: procContext,
- contexts: tojson(contexts),
- });
-
- assert.eq(null, testDataColl.getDB().getLastError());
-
- var bootstrapStartup =
- "{ var procName = '" + procName + "'; " +
- "var stored = db.getMongo().getCollection( '" + testDataColl + "' )" +
- ".findOne({ _id : procName }); " +
- "var bootstrapper = stored.bootstrapper; " +
- "eval( 'bootstrapper = ' + bootstrapper ); " +
- "bootstrapper( stored ); " +
- "}";
-
-
- var oldDB = db;
- db = mongo.getDB("test"); // eslint-disable-line no-native-reassign
-
- jsTest.log("Starting " + proc.name + " operations...");
-
- var rawJoin = startParallelShell(bootstrapStartup);
-
- db = oldDB; // eslint-disable-line no-native-reassign
-
-
- var join = function() {
- setFinished(mongo, procName, true);
-
- rawJoin();
- result = getResult(mongo, procName);
-
- assert.neq(result, null);
-
- if (result.err) {
- throw Error("Error in parallel ops " + procName + " : "
- + tojson(result.err));
- }
- return result.result;
- };
-
- join.isFinished = function() {
- return isFinished(mongo, procName);
- };
-
- join.setFinished = function(finished) {
- return setFinished(mongo, procName, finished);
- };
-
- join.waitForLock = function(name) {
- return waitForLock(mongo, name);
- };
-
- return join;
-};
-
-
-//
-// Some utility operations
-//
-
-function moveOps(collName, options) {
- options = options || {};
-
- var admin = db.getMongo().getDB("admin");
- var config = db.getMongo().getDB("config");
- var shards = config.shards.find().toArray();
- var shardKey = config.collections.findOne({_id: collName}).key;
-
- while (!isFinished()) {
- var findKey = Random.randShardKeyValue(shardKey);
- var toShard = shards[Random.randInt(shards.length)]._id;
-
- try {
- printjson(admin.runCommand({
- moveChunk: collName,
- find: findKey,
- to: toShard,
- }));
- } catch (e) {
- printjson(e);
- }
-
- sleep(1000);
- }
-
- jsTest.log("Stopping moveOps...");
-}
-
-function splitOps(collName, options) {
- options = options || {};
-
- var admin = db.getMongo().getDB("admin");
- var config = db.getMongo().getDB("config");
- var shards = config.shards.find().toArray();
- var shardKey = config.collections.findOne({_id: collName}).key;
-
- while (!isFinished()) {
- var middleKey = Random.randShardKeyValue(shardKey);
-
- try {
- printjson(admin.runCommand({
- split: collName,
- middle: middleKey,
- }));
- } catch (e) {
- printjson(e);
- }
-
- sleep(1000);
- }
-
- jsTest.log("Stopping splitOps...");
-}
-
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/testconfig b/src/mongo/gotools/test/qa-tests/jstests/libs/testconfig
deleted file mode 100644
index 4b09f37ad13..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/testconfig
+++ /dev/null
@@ -1,6 +0,0 @@
-fastsync = true
-#comment line
-#commentedflagwithan = false
-version = false
-help = false
-sysinfo = false
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/testconfig.json b/src/mongo/gotools/test/qa-tests/jstests/libs/testconfig.json
deleted file mode 100644
index 5af32aad7d3..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/testconfig.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "fastsync" : true,
- "version" : false
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/trace_missing_docs.js b/src/mongo/gotools/test/qa-tests/jstests/libs/trace_missing_docs.js
deleted file mode 100644
index ebee080dcc7..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/trace_missing_docs.js
+++ /dev/null
@@ -1,99 +0,0 @@
-
-//
-// On error inserting documents, traces back and shows where the document was dropped
-//
-
-function traceMissingDoc(coll, doc, mongos) {
- if (mongos) {
- coll = mongos.getCollection(String(coll));
- } else {
- mongos = coll.getMongo();
- }
-
- var config = mongos.getDB("config");
- var shards = config.shards.find().toArray();
- for (var i = 0; i < shards.length; i++) {
- shards[i].conn = new Mongo(shards[i].host);
- }
-
- var shardKeyPatt = config.collections.findOne({_id: String(coll)}).key;
-
- // Project out the shard key
- var shardKey = {};
- for (var k in shardKeyPatt) {
- if (doc[k] === undefined || docs[k] === null) {
- jsTest.log("Shard key " + tojson(shardKey)
- + " not found in doc " + tojson(doc)
- + ", falling back to _id search...");
- shardKeyPatt = {_id: 1};
- shardKey = {_id: doc['_id']};
- break;
- }
- shardKey[k] = doc[k];
- }
-
- if (doc['_id'] === undefined) {
- jsTest.log("Id not found in doc " + tojson(doc) + " cannot trace oplog entries.");
- return;
- }
-
- jsTest.log("Using shard key : " + tojson(shardKey));
-
- var allOps = [];
- for (i = 0; i < shards.length; i++) {
- var oplog = shards[i].conn.getCollection("local.oplog.rs");
- if (!oplog.findOne()) {
- oplog = shards[i].conn.getCollection("local.oplog.$main");
- }
-
- if (!oplog.findOne()) {
- jsTest.log("No oplog was found on shard " + shards[i]._id);
- continue;
- }
-
- var addKeyQuery = function(query, prefix) {
- for (var k in shardKey) { // eslint-disable-line guard-for-in
- query[prefix + '.' + k] = shardKey[k];
- }
- return query;
- };
-
- var addToOps = function(cursor) { // eslint-disable-line no-loop-func
- cursor.forEach(function(doc) {
- doc.shard = shards[i]._id;
- doc.realTime = new Date(doc.ts.getTime() * 1000);
- allOps.push(doc);
- });
- };
-
- // Find ops
- addToOps(oplog.find(addKeyQuery({op: 'i'}, 'o')));
- var updateQuery = {
- $or: [
- addKeyQuery({op: 'u'}, 'o2'),
- {op: 'u', 'o2._id': doc['_id']},
- ],
- };
- addToOps(oplog.find(updateQuery));
- addToOps(oplog.find({op: 'd', 'o._id': doc['_id']}));
- }
-
- var compareOps = function(opA, opB) {
- if (opA.ts < opB.ts) {
- return -1;
- }
- if (opB.ts < opA.ts) {
- return 1;
- }
- return 0;
- };
-
- allOps.sort(compareOps);
-
- print("Ops found for doc " + tojson(doc) + " on each shard:\n");
- for (i = 0; i < allOps.length; i++) {
- printjson(allOps[i]);
- }
-
- return allOps;
-}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/trusted-ca.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/trusted-ca.pem
deleted file mode 100644
index 2a0e139e184..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/trusted-ca.pem
+++ /dev/null
@@ -1,22 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDpjCCAo6gAwIBAgIDAghHMA0GCSqGSIb3DQEBBQUAMHwxHzAdBgNVBAMTFlRy
-dXN0ZWQgS2VybmVsIFRlc3QgQ0ExDzANBgNVBAsTBktlcm5lbDEQMA4GA1UEChMH
-TW9uZ29EQjEWMBQGA1UEBxMNTmV3IFlvcmsgQ2l0eTERMA8GA1UECBMITmV3IFlv
-cmsxCzAJBgNVBAYTAlVTMB4XDTE2MDMzMTE0NTY1NVoXDTM2MDMzMTE0NTY1NVow
-fDEfMB0GA1UEAxMWVHJ1c3RlZCBLZXJuZWwgVGVzdCBDQTEPMA0GA1UECxMGS2Vy
-bmVsMRAwDgYDVQQKEwdNb25nb0RCMRYwFAYDVQQHEw1OZXcgWW9yayBDaXR5MREw
-DwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMwggEiMA0GCSqGSIb3DQEBAQUA
-A4IBDwAwggEKAoIBAQCePFHZTydC96SlSHSyu73vw//ddaE33kPllBB9DP2L7yRF
-6D/blFmno9fSM+Dfg64VfGV+0pCXPIZbpH29nzJu0DkvHzKiWK7P1zUj8rAHaX++
-d6k0yeTLFM9v+7YE9rHoANVn22aOyDvTgAyMmA0CLn+SmUy6WObwMIf9cZn97Znd
-lww7IeFNyK8sWtfsVN4yRBnjr7kKN2Qo0QmWeFa7jxVQptMJQrY8k1PcyVUOgOjQ
-ocJLbWLlm9k0/OMEQSwQHJ+d9weUbKjlZ9ExOrm4QuuA2tJhb38baTdAYw3Jui4f
-yD6iBAGD0Jkpc+3YaWv6CBmK8NEFkYJD/gn+lJ75AgMBAAGjMTAvMAwGA1UdEwQF
-MAMBAf8wHwYDVR0RBBgwFoIJbG9jYWxob3N0ggkxMjcuMC4wLjEwDQYJKoZIhvcN
-AQEFBQADggEBADYikjB6iwAUs6sglwkE4rOkeMkJdRCNwK/5LpFJTWrDjBvBQCdA
-Y5hlAVq8PfIYeh+wEuSvsEHXmx7W29X2+p4VuJ95/xBA6NLapwtzuiijRj2RBAOG
-1EGuyFQUPTL27DR3+tfayNykDclsVDNN8+l7nt56j8HojP74P5OMHtn+6HX5+mtF
-FfZMTy0mWguCsMOkZvjAskm6s4U5gEC8pYEoC0ZRbfUdyYsxZe/nrXIFguVlVPCB
-XnfB/0iG9t+VH5cUVj1LP9skXTW4kXfhQmljUuo+EVBNR6n2nfTnpoC65WeAgHV4
-V+s9mJsUv2x72KtKYypqEVT0gaJ1WIN9N1s=
------END CERTIFICATE-----
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/trusted-client.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/trusted-client.pem
deleted file mode 100644
index dec32375c1b..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/trusted-client.pem
+++ /dev/null
@@ -1,49 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDnTCCAoWgAwIBAgIDA1clMA0GCSqGSIb3DQEBBQUAMHwxHzAdBgNVBAMTFlRy
-dXN0ZWQgS2VybmVsIFRlc3QgQ0ExDzANBgNVBAsTBktlcm5lbDEQMA4GA1UEChMH
-TW9uZ29EQjEWMBQGA1UEBxMNTmV3IFlvcmsgQ2l0eTERMA8GA1UECBMITmV3IFlv
-cmsxCzAJBgNVBAYTAlVTMB4XDTE2MDMzMTE2MDY0OVoXDTM2MDMzMTE2MDY0OVow
-gYAxIzAhBgNVBAMTGlRydXN0ZWQgS2VybmVsIFRlc3QgQ2xpZW50MQ8wDQYDVQQL
-EwZLZXJuZWwxEDAOBgNVBAoTB01vbmdvREIxFjAUBgNVBAcTDU5ldyBZb3JrIENp
-dHkxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYDVQQGEwJVUzCCASIwDQYJKoZIhvcN
-AQEBBQADggEPADCCAQoCggEBAKI9cGBnH5wcthvFT1FdfQTw1EvOgtfBHVEMRFZH
-bupMnAqP69id0bf7SdBWzx4A1f1ws1RkeL5ot2u5T9NwsFzGvRBQ5onFtDnC3eKB
-OwapCk2B82mlx4xZBjewg+NbxoRJBUWGqB0LykaVUHxM6BGgwExNAyXQ9syPSyNZ
-NIr+zDrLdTfjKklmDkv9jSCB/T3t80kQPY+04u98buUe7wGM0WQFbVNoYrSkZ6Ja
-O+G8bpXP4hXIXsxOHucjBeJc1KR+lxEMw3wInZ2KjjMv7HsFIIOQg5pkMDXibSU6
-cNUZTA2MrzZ+t7TeAQyOTzfGlaatfvJYxU7v4u0W5jxeV60CAwEAAaMjMCEwHwYD
-VR0RBBgwFoIJbG9jYWxob3N0ggkxMjcuMC4wLjEwDQYJKoZIhvcNAQEFBQADggEB
-AHI6Rfq/UqAoPxiz5bqby2FnGOrskotgXhn1JkZLGCfllx5WgMuLnu1bvjoym567
-HySqAXQOqEWm6XRU7SVOA+69e4OLWX+HSKjFRuG5Ip67UpihZMgyLKuGRBwfnbXj
-14o+xbWjXCgVZEI7vzT7q/7/W1mXj680fHs93Zog561Id4Tf3DYkOoMawSfeF4iu
-8hcYjlJYjFb3ZvM1wokicmEwtY0+YbBGVo8xh5jYdfCLzYLxc3CpP5eXJtMvGE/x
-RnyiY3f7hkUZMibnREPS6kpQVEh36DT21C0OB8s7TcMU7yMKgVdqL1udmEkiKXTj
-H7v/s+7d54O0tr5+IysCAoA=
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEogIBAAKCAQEAoj1wYGcfnBy2G8VPUV19BPDUS86C18EdUQxEVkdu6kycCo/r
-2J3Rt/tJ0FbPHgDV/XCzVGR4vmi3a7lP03CwXMa9EFDmicW0OcLd4oE7BqkKTYHz
-aaXHjFkGN7CD41vGhEkFRYaoHQvKRpVQfEzoEaDATE0DJdD2zI9LI1k0iv7MOst1
-N+MqSWYOS/2NIIH9Pe3zSRA9j7Ti73xu5R7vAYzRZAVtU2hitKRnolo74bxulc/i
-FchezE4e5yMF4lzUpH6XEQzDfAidnYqOMy/sewUgg5CDmmQwNeJtJTpw1RlMDYyv
-Nn63tN4BDI5PN8aVpq1+8ljFTu/i7RbmPF5XrQIDAQABAoIBAGg9iYKnP4wSdn+J
-WtkwdC9EfWLnoPH3RlrYwt+crgskhe3TYvmfDSxk7JxL6m+god1hGBfVJi9RIOi5
-/Cwib25s0vU0xasnuBCUv/PUjJRO8Cu0nyz2Myxd1rzZUSQ3x2kfcZ+mUUW4WZLY
-RQpYb5ND8coUgT0+8hOkzeY8XqIe5c4VrX16mA+uoIMsr4QHxe0pl59oY57V3Der
-+gsaGuWZ5hDvfuoCOx03Cuc1pTx0T8ZHdliu/xe+np3ETFdQ/1cyJMAJW3w15qKt
-L6AfkeRaMAgqxs2zU1rgPdJddRS7MaSJnpDtMjeyJpeNCMDJ/h3ihgSM1SM1QCtY
-tcnWdIECgYEA1/SVGV9McAvtVtQA1D7kPEo0ifCG8frUfGy2yK9aasFHhLS3JXXY
-4R0Fy/pOwdKNtnN3ZKd0Y0wcmlwPepg2HKlUFJdjqEKZctMzOscy3n08F4AV2rLc
-48q2XLLIQNN/JuqcaeRgQByvP6YL0YuqqsAPiRhTeYgJxp4c+JgbmDUCgYEAwFL7
-jzYwmud3HEgrfpXxDoWpemlKqCqe0cUix0OtR8XfCukZ5lbnC7Mu/FtqBitVdQYd
-2r1nRK66fTJejblNd1E4TG0sIwucI5B24I7XeG78r2kECnFT+vBE8BA6c/y8nTjz
-grWVMeR3n7WFxaTL/VW/kapW2YddWPq1Jh4q4JkCgYB2QBk8kXBbkkxd1Sy3C9ES
-KlfmiGx8KCseDrFv5oUOG9O7mPEIdCVT7v5zmRZzD4ZFt6hS11bl4JFw/KQFLz/C
-Jf5CYDtTsVQxCfDtaJI0PkMfYyWUYYiOuztsOwFobeccOi931HPX510W7yddkKrd
-YNmg6k8bJyCjP4UBotjJWQKBgElP2KDQ0VpbHWZkhF/unEMi5GXLOTA9fukLsqQu
-wiD35nvsO3k4az5kgWalGhdb8Wl4eWzmgjUGPgR3bN+tYUA4b7OCci6xwEU2Tnpv
-OOeptxzOdUHdzVt8t2qjZQTNtMBh80FCIqswIgF5WpLqrO/W/f1y50Roe0bt2pu7
-KDERAoGAbhEL6OCfX6Pf5hCggb4ymm9zmAndRKHAAJ2WQ2p6v+r1vm838n6y8r7Q
-Fqc3B7NIDDYzX4oyQZepOnHNF/UPEyVyGvJ8LBDruyiuAdGakVEvSHZ+ml4LnS06
-msP5hsHh9s4ptVcaF3/mNllBys+FwEWvLewgfVPJrDBNINFvYZ8=
------END RSA PRIVATE KEY-----
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/trusted-server.pem b/src/mongo/gotools/test/qa-tests/jstests/libs/trusted-server.pem
deleted file mode 100644
index caaee422a44..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/trusted-server.pem
+++ /dev/null
@@ -1,49 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDnTCCAoWgAwIBAgIDCWhIMA0GCSqGSIb3DQEBBQUAMHwxHzAdBgNVBAMTFlRy
-dXN0ZWQgS2VybmVsIFRlc3QgQ0ExDzANBgNVBAsTBktlcm5lbDEQMA4GA1UEChMH
-TW9uZ29EQjEWMBQGA1UEBxMNTmV3IFlvcmsgQ2l0eTERMA8GA1UECBMITmV3IFlv
-cmsxCzAJBgNVBAYTAlVTMB4XDTE2MDMzMTE2MDUyM1oXDTM2MDMzMTE2MDUyM1ow
-gYAxIzAhBgNVBAMTGlRydXN0ZWQgS2VybmVsIFRlc3QgU2VydmVyMQ8wDQYDVQQL
-EwZLZXJuZWwxEDAOBgNVBAoTB01vbmdvREIxFjAUBgNVBAcTDU5ldyBZb3JrIENp
-dHkxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYDVQQGEwJVUzCCASIwDQYJKoZIhvcN
-AQEBBQADggEPADCCAQoCggEBAJwOsMO+MhWmHcSOdjFrZYjPMOt8uIqJ1pR/JI/E
-hTdUq7fXPHZNhUTzwX1JZB+QkXhnJiAf2ZQOnl7R49vudoPdOZo6bJQP8/Hy5F79
-B8Nw9xrcPuzGBRK3IpY7j9gnAOC5jvN2zh+nHoqNhPmarpKgbDeLosABiDFSHiCE
-degHziJ0Tj0AJ6GRbeHeTvv5K4lLwMzyYnpkG0cMpLvLIUwJa22Vp8PujMcmjX9W
-ASmSXJmcszYKjaRc7HB6ronIEZWy//PSXlvuk8xYaM40HkGy2gN6wV+2Z45QdDds
-NxUuu56TzJ7z7as/vYXXsIc/TSmvM02S01JWUjWeVGc1sb8CAwEAAaMjMCEwHwYD
-VR0RBBgwFoIJbG9jYWxob3N0ggkxMjcuMC4wLjEwDQYJKoZIhvcNAQEFBQADggEB
-AAKLZiQxz3NYvc04GDMRIUDfR50RMw4SuXXgGYTpPUP/akOes11+u5iKhgyKQ+ca
-TttX8mwwvNjQFN8hjBodsrWK9avMUenJBk+Y2ztzLSpKAmC7NUUM6sFB1D3yocsG
-aH5EuyH/dcAdb9z5vYurriRfd1ldmyGAqvA6lKGp1zxTAi0WWbYIZia0LyVoH98p
-x0s+amrSMvkVqIK+qV+CVqW2dNLe+kREjGxzGidCSfHZrHncuTX8/10xHUbAQW0z
-EWF6epmm+jniwgh2Zs/xe7+eY1Nzfq0ly06MVKCs1/lZ0vhAHGZ7V6yBX5zig02x
-VAHb45KqzmYGwKErO7ZFY2I=
------END CERTIFICATE-----
------BEGIN RSA PRIVATE KEY-----
-MIIEpQIBAAKCAQEAnA6ww74yFaYdxI52MWtliM8w63y4ionWlH8kj8SFN1Srt9c8
-dk2FRPPBfUlkH5CReGcmIB/ZlA6eXtHj2+52g905mjpslA/z8fLkXv0Hw3D3Gtw+
-7MYFErciljuP2CcA4LmO83bOH6ceio2E+ZqukqBsN4uiwAGIMVIeIIR16AfOInRO
-PQAnoZFt4d5O+/kriUvAzPJiemQbRwyku8shTAlrbZWnw+6MxyaNf1YBKZJcmZyz
-NgqNpFzscHquicgRlbL/89JeW+6TzFhozjQeQbLaA3rBX7ZnjlB0N2w3FS67npPM
-nvPtqz+9hdewhz9NKa8zTZLTUlZSNZ5UZzWxvwIDAQABAoIBAQCQFHQY1NHy8OKM
-5aaz697bV8dns0fCCI7HnTdJUPxZYGAGJL8azmmbhp1+qbK5/cSA8GLfx+ge7PxE
-uO3x0RE0n5weC5DRhoUIPeOg22Y+iF5sOyoReqWWaOSS5bzhqOkDke4sU+TsjmQB
-MbWyqaBBmcEv60jAkumF97X++azOIm1EqTXfSu1K7gqtiL9H9T8vIYOOuTAduOsD
-el/v5QQbWb3e/NLhcmzHL6rPcR/9jCn1rJ9HAhAqm6eKZS2cAgTGLLtCUhumVliO
-bEIm2fcQ5h+BDZc5EF/SURKvUaFx/xTIQ5s1oEKN8iN+kIYzgbZ/Ds/GOo7nWVmy
-1KZswK05AoGBANBvT/vSpI7vokmph+GifjToHeinceg3pssf8mHw8xv3H0mZxBkt
-CJq6rFwKwMH8K9tQfBqp4hfVgfdAWZyKqqo1Mtecohzb9D0GLYZ6Of18pAZK/aEt
-L8ADuGYbLAFAS10z4djBSqlud82d194zSgfLP3FYRsj5Ni8w9bPuMOKVAoGBAL+r
-gd1/B+kkbO/NAprjXAFT91Wjf+YMQgM8vOMXlq7LlGeGQSb0zVZpdDtZ3ohqdu1i
-38y0G/CvBLddm8VkC3/fhfO8xW8PjdRBbF87j1k4HerAxcLOO91z+MHFMbUryOJc
-U0aAzJB3B4E491xaXTL8jZLYxmgJtc4jBcLKzmIDAoGBAKXf39w9Hx5tUE6k7vE+
-uodqLdsn3nt6Rm+iRedxtFcODEUrbKbIcu+IHYDGQe5eu5w2af1iMv7auCpHeMke
-hYEdAxAZo92poa4qy3IYtSuo1HP5m+x3pGd/znDbsOJyA0fx8QrpkHxT4F2u/srj
-MEgRlLSkFvj7cwaNRQvjQ94dAoGAA2+4wVbgtm5fwaDkVhCTeradrZxj06UOne49
-2Lh4jCO8QmrmyiMDd3QmkFXZJor6HOFz78Ce657Hr93uyAg2KJHCXg9ZXtdhjJer
-sL1poYjfCHFyWj7GVf8ZS6gUbxIc5OoQ2CfBAyoPKWLzFGXOW/apNyPJ0t2xs8Nu
-/AIU1y8CgYEAyUhyJe8XaDOjoR9D1freHY6Vt9NkofDpYfWL4+y/kGD45yCDBXj0
-LYAD89/Qog1MbPN8FGrgMu2b3dI0i+iZlWduvQRn71QepT6wiB84ivxjECSpZGoH
-2F0SM1MVAK/f4Dm9H9Kaukq2BpsN8Uhvzg2EUFg1mLJ+OBArgT524Ys=
------END RSA PRIVATE KEY-----
diff --git a/src/mongo/gotools/test/qa-tests/jstests/libs/wc_framework.js b/src/mongo/gotools/test/qa-tests/jstests/libs/wc_framework.js
deleted file mode 100644
index 51670f520eb..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/libs/wc_framework.js
+++ /dev/null
@@ -1,72 +0,0 @@
-// runWCTest executes a tool against a number of configurations. A given replica set will have nodes prevented
-// from replicating and the tool should either pass or fail based on the supplied write concern. As a final test,
-// the tools is run with w:3, and waits for all three nodes to come back online, simulating a slowly-replicated write.
-var runWCTest = function runWCTest(progName, rs, toolTest, testWriteConcern, testProgramNoConnect) {
- jsTest.log("testing that "+progName+" deals with write concern");
-
- function windowsEscape(json) {
- if (_isWindows()) {
- json = '"' + json.replace(/"/g, '\\"') + '"';
- }
- return json;
- }
-
- // grab the two secondary nodes
- var masterPort = rs.getPrimary().port;
- var members = [];
- var ports = [];
- for (var i = 0; i < rs.nodes.length; i++) {
- if (rs.nodes[i].port !== masterPort) {
- members.push(rs.nodes[i].getDB("admin"));
- ports.push(rs.nodes[i].port);
- }
- }
- var member1 = members[0];
- var member2 = members[1];
-
- testWriteConcern(0, [], progName+" without write concern to a fully functioning repl-set should succeed");
-
- testWriteConcern(0, ['--writeConcern=majority'], progName+" with majority to a fully functioning repl-set should succeed");
-
- testWriteConcern(0, ['--writeConcern={w:1,wtimeout:10000}'], progName+" with w:1,timeout:10000 to a fully functioning repl-set should succeed");
-
- testWriteConcern(0, ['--writeConcern={w:2,wtimeout:10000}'], progName+" with w:2,timeout:10000 to a fully functioning repl-set should succeed");
-
- jsTest.log("stopping node on port " + ports[0] + " from doing any further syncing");
- member1.runCommand({configureFailPoint: 'rsSyncApplyStop', mode: 'alwaysOn'});
- sleep(2000);
-
- testWriteConcern(0, ['--writeConcern={w:1,wtimeout:10000}'], progName+" with w:1,timeout:10000 repl-set with 2 working nodes should succeed");
-
- testWriteConcern(0, ['--writeConcern={w:2,wtimeout:10000}'], progName+" with w:2,timeout:10000 repl-set with 2 working nodes should succeed");
-
- testWriteConcern(0, ['--writeConcern=majority'], progName+" with majority with two working nodes should succeed");
-
- testWriteConcern(1, ['--writeConcern={w:3,wtimeout:2000}'], progName+" with w:3,timeout:2000 repl-set with two working nodes should fail");
-
- jsTest.log("stopping second node on port " + ports[1] + " from doing any further syncing");
- member2.runCommand({configureFailPoint: 'rsSyncApplyStop', mode: 'alwaysOn'});
- sleep(2000);
-
- testWriteConcern(1, [windowsEscape('--writeConcern={w:"majority",wtimeout:2000}')], progName+" with majority with one working node should fail");
-
- testWriteConcern(1, ['--writeConcern={w:2,wtimeout:10000}'], progName+" with w:2,timeout:10000 with one working node should fail");
-
- testWriteConcern(0, ['--writeConcern={w:1,wtimeout:10000}'], progName+" with w:1,timeout:10000 repl-set with one working nodes should succeed");
-
- jsTest.log(progName+" with w:3 concern and no working member and no timeout waits until member are available");
- pid = testProgramNoConnect();
-
- sleep(2000);
-
- assert(checkProgram(pid), progName+" with w:3 and no working members should not have finished");
-
- jsTest.log("starting stopped members");
-
- member1.runCommand({configureFailPoint: 'rsSyncApplyStop', mode: 'off'});
- member2.runCommand({configureFailPoint: 'rsSyncApplyStop', mode: 'off'});
-
- jsTest.log("waiting for "+progName+" to finish");
- ret = waitProgram(pid);
- assert.eq(0, ret, progName+" with w:3 should succeed once enough members start working");
-};
diff --git a/src/mongo/gotools/test/qa-tests/jstests/oplog/all_ops_tests.js b/src/mongo/gotools/test/qa-tests/jstests/oplog/all_ops_tests.js
deleted file mode 100644
index dd3b97c2923..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/oplog/all_ops_tests.js
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * This test creates a fake oplog and uses it to test correct behavior of
- * all possible op codes.
- */
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var OPLOG_INSERT_CODE = 'i';
- var OPLOG_COMMAND_CODE = 'c';
- var OPLOG_UPDATE_CODE = 'u';
- var OPLOG_REMOVE_CODE = 'd';
- var OPLOG_NOOP_CODE = 'n';
- var CURRENT_OPLOG_VERSION = 2;
-
- var toolTest = getToolTest('applyAllOpsTest');
- var commonToolArgs = getCommonToolArguments();
-
- // Get the db that we'll insert the fake oplog into
- var db = toolTest.db.getSiblingDB('foo');
- db.dropDatabase();
- db.getSiblingDB('rs').dropDatabase();
-
- // Create capped collection
- db.getSiblingDB('rs').createCollection('rs_test', {capped: true, size: 4});
-
- // Add a bunch of operations to the fake oplog
-
- // Create a collection to drop
- db.getSiblingDB('rs').rs_test.insert({
- ts: new Timestamp(),
- v: CURRENT_OPLOG_VERSION,
- op: OPLOG_COMMAND_CODE,
- ns: "foo.$cmd",
- o: {create: "baz"}
- });
-
- // Insert a doc
- db.getSiblingDB('rs').rs_test.insert({
- ts: new Timestamp(),
- h: 0,
- v: CURRENT_OPLOG_VERSION,
- op: OPLOG_INSERT_CODE,
- o: {
- _id: 0
- },
- ns: 'foo.baz'
- });
-
- // Drop the doc's database
- db.getSiblingDB('rs').rs_test.insert({
- ts: new Timestamp(),
- h: 1,
- v: CURRENT_OPLOG_VERSION,
- op: OPLOG_COMMAND_CODE,
- o: {
- dropDatabase: 1
- },
- ns: 'foo.$cmd'
- });
-
- // Create the collection
- db.getSiblingDB('rs').rs_test.insert({
- ts: new Timestamp(),
- v: CURRENT_OPLOG_VERSION,
- op: OPLOG_COMMAND_CODE,
- ns: "foo.$cmd",
- o: {create: "bar"}
- });
-
- // Insert 2 docs
- db.getSiblingDB('rs').rs_test.insert({
- ts: new Timestamp(),
- h: 2,
- v: CURRENT_OPLOG_VERSION,
- op: OPLOG_INSERT_CODE,
- o: {
- _id: 1
- },
- ns: 'foo.bar'
- });
-
- db.getSiblingDB('rs').rs_test.insert({
- ts: new Timestamp(),
- h: 3,
- v: CURRENT_OPLOG_VERSION,
- op: OPLOG_INSERT_CODE,
- o: {
- _id: 2
- },
- ns: 'foo.bar'
- });
-
- // Remove first doc
- db.getSiblingDB('rs').rs_test.insert({
- ts: new Timestamp(),
- h: 4,
- b: true,
- v: CURRENT_OPLOG_VERSION,
- op: OPLOG_REMOVE_CODE,
- o: {
- _id: 1
- },
- ns: 'foo.bar'
- });
-
- // Update the second doc
- db.getSiblingDB('rs').rs_test.insert({
- ts: new Timestamp(),
- h: 5,
- b: true,
- v: CURRENT_OPLOG_VERSION,
- op: OPLOG_UPDATE_CODE,
- o2: {
- _id: 2
- },
- o: {
- _id: 2,
- x: 1
- },
- ns: 'foo.bar'
- });
-
- // Noop
- db.getSiblingDB('rs').rs_test.insert({
- ts: new Timestamp(),
- h: 6,
- op: OPLOG_NOOP_CODE,
- ns: 'foo.bar',
- o: {x: 'noop'}
- });
-
- var args = ['oplog', '--oplogns', 'rs.rs_test',
- '--from', '127.0.0.1:' + toolTest.port].concat(commonToolArgs);
-
- if (toolTest.isSharded) {
- // When applying ops to a sharded cluster,
- assert(toolTest.runTool.apply(toolTest, args) !== 0,
- 'mongooplog should fail when running applyOps on a sharded cluster');
-
- var expectedError =
- 'error applying ops: applyOps not allowed through mongos';
- assert.strContains.soon(expectedError, rawMongoProgramOutput,
- 'mongooplog crash should output the correct error message');
-
- assert.eq(0, db.bar.count({}),
- 'mongooplog should not have applied any operations');
- } else {
- // Running with default --seconds should apply all operations
- assert.eq(toolTest.runTool.apply(toolTest, args), 0,
- 'mongooplog should succeed');
-
- assert.eq(1, db.bar.count({}),
- 'mongooplog should apply all operations');
- assert.eq(0, db.baz.count({}), 'mongooplog should have dropped db');
- assert.eq(1, db.bar.count({_id: 2}),
- 'mongooplog should have applied correct ops');
- }
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/oplog/apply_ops_tests.js b/src/mongo/gotools/test/qa-tests/jstests/oplog/apply_ops_tests.js
deleted file mode 100644
index d79aebbc680..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/oplog/apply_ops_tests.js
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * This test creates a fake oplog and uses it to test correct behavior of
- * --oplogns and --seconds
- */
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var OPLOG_INSERT_CODE = 'i';
- var OPLOG_UPDATE_CODE = 'u';
- // unused: OPLOG_COMMAND_CODE = 'c';
- var CURRENT_OPLOG_VERSION = 2;
-
- // Oplog TS is in seconds since unix epoch
- var TEST_START = Math.floor(new Date().getTime() / 1000);
- var toolTest = getToolTest('oplogSuccessTest');
- var commonToolArgs = getCommonToolArguments();
-
- // Get the db that we'll insert the fake oplog into
- var db = toolTest.db.getSiblingDB('gnr');
- db.dropDatabase();
-
- // Create capped collection
- db.createCollection('rs_test', {capped: true, max: 4});
- // Create test collection
- db.createCollection('greatest_hits');
-
- // Add a bunch of operations to the fakeoplog
- var tracks = ['Welcome to the Jungle', 'Sweet Child O\' Mine', 'Patience',
- 'Paradise City', 'Knockin\' on Heaven\'s Door', 'Civil War'];
-
- tracks.forEach(function(track, index) {
- db.rs_test.insert({
- ts: new Timestamp(TEST_START - index * 10000 - 1, 1),
- h: index,
- v: CURRENT_OPLOG_VERSION,
- op: OPLOG_INSERT_CODE,
- o: {
- _id: track
- },
- ns: 'gnr.greatest_hits'
- });
- });
-
- tracks.forEach(function(track, index) {
- db.rs_test.insert({
- ts: new Timestamp(TEST_START - index * 10000 - 1, 2),
- h: index,
- v: CURRENT_OPLOG_VERSION,
- op: OPLOG_UPDATE_CODE,
- o2: {
- _id: track
- },
- o: {
- _id: track,
- index: index
- },
- ns: 'gnr.greatest_hits'
- });
- });
-
- var args = ['oplog', '--oplogns', 'gnr.rs_test',
- '--from', '127.0.0.1:' + toolTest.port].concat(commonToolArgs);
-
- assert.eq(0, db.getSiblingDB('gnr').greatest_hits.count({}),
- 'target collection should be empty before mongooplog runs');
-
- if (toolTest.isSharded) {
- // When applying ops to a sharded cluster,
- assert(toolTest.runTool.apply(toolTest, args) !== 0,
- 'mongooplog should fail when running applyOps on a sharded cluster');
-
- var expectedError =
- 'error applying ops: applyOps not allowed through mongos';
- assert.strContains.soon(expectedError, rawMongoProgramOutput,
- 'mongooplog crash should output the correct error message');
-
- assert.eq(0, db.greatest_hits.count({}),
- 'mongooplog should not have applied any operations');
- } else {
- // Running with default --seconds should apply all operations
- assert.eq(toolTest.runTool.apply(toolTest, args), 0,
- 'mongooplog should succeed');
-
- assert.eq(6, db.greatest_hits.count({}),
- 'mongooplog should apply all operations');
- tracks.forEach(function(track, index) {
- assert.eq(1, db.greatest_hits.count({_id: track, index: index}),
- 'mongooplog should have inserted a doc with _id="' + track + '" and ' +
- 'updated it to have index=' + index);
- });
-
- // Running a second time should have no effect
- assert.eq(toolTest.runTool.apply(toolTest, args), 0,
- 'mongooplog should succeed');
- assert.eq(6, db.greatest_hits.count({}),
- 'mongooplog should apply all operations');
- tracks.forEach(function(track, index) {
- assert.eq(1, db.greatest_hits.count({_id: track, index: index}),
- 'mongooplog should have inserted a doc with _id="' + track + '" and ' +
- 'updated it to have index=' + index);
- });
-
- db.greatest_hits.drop();
- db.createCollection('greatest_hits');
-
- // Running with `--seconds 25000` should apply last 3 operations, which
- // have timestamps T - 1, T - 10001, and T - 20001 (roughly)
- var last3Seconds = args.concat(['--seconds', 25000]);
- assert.eq(toolTest.runTool.apply(toolTest, last3Seconds), 0,
- 'mongooplog should succeed');
-
- assert.eq(3, db.greatest_hits.count({}),
- '`mongooplog --seconds 25000` should apply 3 operations');
- tracks.slice(0, 3).forEach(function(track, index) {
- assert.eq(1, db.greatest_hits.count({_id: track, index: index}),
- 'mongooplog should have inserted a doc with _id="' + track + '" and ' +
- 'updated it to have index=' + index);
- });
-
- db.greatest_hits.drop();
- db.createCollection('greatest_hits');
-
- // Running with `--seconds 0` should apply no operations
- var noOpsArgs = args.concat(['--seconds', 0]);
- assert.eq(toolTest.runTool.apply(toolTest, noOpsArgs), 0,
- 'mongooplog should succeed');
-
- assert.eq(0, db.greatest_hits.count({}),
- '`mongooplog --seconds 0` should apply 0 operations');
- }
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/oplog/asymmetric_ssl_test.js b/src/mongo/gotools/test/qa-tests/jstests/oplog/asymmetric_ssl_test.js
deleted file mode 100644
index 0e897349c9d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/oplog/asymmetric_ssl_test.js
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * If SSL is enabled in the config, this test starts mongod with SSL off and
- * tests that we get a sensible failure. Otherwise, it runs with --ssl and
- * asserts that we get a sensible failure.
- *
- * Note: this requires an SSL-enabled tool suite
- */
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- var toolTest = getToolTest('oplogAsymmetricSSLTest');
- var commonToolArgs = getCommonToolArguments();
- var sslOpts = [
- '--ssl',
- '--sslPEMKeyFile', 'jstests/libs/client.pem'
- ];
-
- if (toolTest.useSSL) {
- var port = allocatePort();
-
- // this mongod is actually started with SSL flags because of `useSSL`
- startMongod('--auth', '--port', port,
- '--dbpath', MongoRunner.dataPath + 'oplogAsymmetricSSLTest2');
-
- var args = ['mongooplog'].concat(commonToolArgs).concat(
- '--from', '127.0.0.1:' + toolTest.port, '--host', '127.0.0.1', '--port', port);
-
- // mongooplog run without SSL against a destination server started with SSL should fail
- jsTest.log("Running mongooplog without SSL against mongod with SSL");
- assert.neq(runProgram.apply(this, args), 0,
- 'mongooplog should fail when run without SSL flags against destination host (--host) ' +
- 'started with SSL');
- } else {
- // toolTest.runTool will add the underlying --host argument for the mongod started without SSL
- args = ['oplog'].concat(commonToolArgs).concat(sslOpts).concat(
- '--from', '127.0.0.1:' + toolTest.port);
-
- // mongooplog run with SSL against a destination server not started with SSL should fail
- jsTest.log("Running mongooplog with SSL against mongod without SSL");
- assert.neq(toolTest.runTool.apply(toolTest, args), 0,
- 'mongooplog should fail when run with SSL flags against destination host (--host) ' +
- 'not started with SSL');
- }
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/oplog/deprecated_flags_tests.js b/src/mongo/gotools/test/qa-tests/jstests/oplog/deprecated_flags_tests.js
deleted file mode 100644
index 53dac128274..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/oplog/deprecated_flags_tests.js
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Tests that we provide helpful output when user tries to use flags that were
- * deprecated in 2.7.x
- */
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var toolTest = getToolTest('oplogDeprecatedFlagTest');
- var commonToolArgs = getCommonToolArguments();
- var expectedError = 'error parsing command line options: --dbpath and related ' +
- 'flags are not supported in 3.0 tools.';
-
- var verifyFlagFails = function(flag) {
- var args = ['oplog'].concat(commonToolArgs).concat(flag);
- assert(toolTest.runTool.apply(toolTest, args) !== 0,
- 'mongooplog should fail when --dbpath specified');
-
- assert.strContains.soon(expectedError, rawMongoProgramOutput,
- 'mongooplog should output the correct error message');
- };
-
- verifyFlagFails('--dbpath');
- verifyFlagFails('--directoryperdb');
- verifyFlagFails('--journal');
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/oplog/drop_database_test.js b/src/mongo/gotools/test/qa-tests/jstests/oplog/drop_database_test.js
deleted file mode 100644
index e4c51ed8323..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/oplog/drop_database_test.js
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Tests behavior when oplog contains an operation to drop itself
- */
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var OPLOG_INSERT_CODE = 'i';
- var OPLOG_COMMAND_CODE = 'c';
- var CURRENT_OPLOG_VERSION = 2;
-
- var toolTest = getToolTest('oplogDropDbTest');
- var commonToolArgs = getCommonToolArguments();
-
- // Get the db that we'll insert the fake oplog into
- var db = toolTest.db.getSiblingDB('foo');
- db.dropDatabase();
-
- // Create capped collection on foo
- db.createCollection('rs_test', {capped: true, size: 4});
-
- // Create test collection
- db.createCollection("baz");
-
- // Insert a doc
- db.rs_test.insert({
- ts: new Timestamp(),
- h: 0,
- v: CURRENT_OPLOG_VERSION,
- op: OPLOG_INSERT_CODE,
- o: {
- _id: 0
- },
- ns: 'foo.baz'
- });
-
- // Drop foo, which also includes the rs_test collection that the oplog is in
- db.rs_test.insert({
- ts: new Timestamp(),
- h: 1,
- v: CURRENT_OPLOG_VERSION,
- op: OPLOG_COMMAND_CODE,
- o: {
- dropDatabase: 1
- },
- ns: 'foo.$cmd'
- });
-
- // Recreate collection
- db.rs_test.insert({
- ts: new Timestamp(),
- v: CURRENT_OPLOG_VERSION,
- op: OPLOG_COMMAND_CODE,
- ns: "foo.$cmd",
- o: {create: "baz"},
- });
-
- // Insert another doc
- db.rs_test.insert({
- ts: new Timestamp(),
- h: 2,
- v: CURRENT_OPLOG_VERSION,
- op: OPLOG_INSERT_CODE,
- o: {
- _id: 1
- },
- ns: 'foo.baz'
- });
-
- var args = ['oplog', '--oplogns', 'foo.rs_test',
- '--from', '127.0.0.1:' + toolTest.port].concat(commonToolArgs);
-
- if (toolTest.isSharded) {
- // When applying ops to a sharded cluster,
- assert(toolTest.runTool.apply(toolTest, args) !== 0,
- 'mongooplog should fail when running applyOps on a sharded cluster');
-
- var expectedError =
- 'error applying ops: applyOps not allowed through mongos';
- assert.strContains.soon(expectedError, rawMongoProgramOutput,
- 'mongooplog crash should output the correct error message');
-
- assert.eq(0, db.baz.count({}),
- 'mongooplog should not have applied any operations');
- } else {
- // Running with default --seconds should apply all operations
- assert.eq(toolTest.runTool.apply(toolTest, args), 0,
- 'mongooplog should succeed');
-
- assert.eq(1, db.baz.count({_id: 1}), 'should have restored the document');
- }
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/oplog/informational_flags_test.js b/src/mongo/gotools/test/qa-tests/jstests/oplog/informational_flags_test.js
deleted file mode 100644
index a6536ecba4a..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/oplog/informational_flags_test.js
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Tests that the informational flags --version and --help give reasonable
- * output.
- */
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var toolTest = getToolTest('oplogInformationalFlagTest');
- var commonToolArgs = getCommonToolArguments();
-
- var verifyFlagOutput = function(flag, expected) {
- var args = ['oplog'].concat(commonToolArgs).concat(flag);
- assert.eq(toolTest.runTool.apply(toolTest, args), 0,
- 'mongooplog should succeed with ' + flag);
-
- assert.strContains.soon(expected, rawMongoProgramOutput,
- 'mongooplog ' + flag + " should produce output that contains '" +
- expected + "'");
- };
-
- verifyFlagOutput('--help', 'Usage:');
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/oplog/oplog_server_ko_test.js b/src/mongo/gotools/test/qa-tests/jstests/oplog/oplog_server_ko_test.js
deleted file mode 100644
index 007897db30e..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/oplog/oplog_server_ko_test.js
+++ /dev/null
@@ -1,52 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var toolTest = getToolTest('OplogServerKOTest');
- var commonToolArgs = getCommonToolArguments();
-
- // Overwrite global db object for startParallelShell()
- db = toolTest.db.getSiblingDB('foo'); // eslint-disable-line no-native-reassign
- db.dropDatabase();
-
- var port = allocatePort();
- startMongod('--auth', '--port', port,
- '--dbpath', MongoRunner.dataPath + 'oplogServerKOTest2');
-
- var start = Date.now();
-
- // Insert into a fake oplog as fast as possible for 20 seconds
- while (Date.now() - start < 20000) {
- db.test.insert({breakfast: 'bacon'}, {w: 0});
- }
-
- // Run parallel shell that waits for mongooplog to start and kills the
- // server
- if (!toolTest.isReplicaSet || !toolTest.authCommand) {
- // shutdownServer() is flakey on replica sets because of localhost
- // exception, so do a stepdown instead
- print('Nothing to do: can only run server KO test with replica set + auth');
- return;
- }
- // Start a parallel shell to kill the server
- startParallelShell(
- 'sleep(1000); ' +
- (toolTest.authCommand || '') +
- 'print(\'Killing server!\');' +
- 'db.getSiblingDB(\'admin\').shutdownServer({ force: true });');
-
- var args = ['oplog',
- '--from', '127.0.0.1:' + toolTest.port].concat(commonToolArgs);
-
- assert(toolTest.runTool.apply(toolTest, args) !== 0,
- 'mongooplog should crash gracefully when remote server dies');
-
- var expected = 'error communicating with server';
- assert.strContains.soon(expected, rawMongoProgramOutput,
- 'Should output sensible error message when host server dies');
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/oplog/real_oplog_tests.js b/src/mongo/gotools/test/qa-tests/jstests/oplog/real_oplog_tests.js
deleted file mode 100644
index bf16b6da742..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/oplog/real_oplog_tests.js
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Tests correct behavior when operating against a live oplog
- */
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var toolTest = getToolTest('oplogRealOplogTest');
- var commonToolArgs = getCommonToolArguments();
-
- // Get the db that we'll insert operations into
- var db = toolTest.db.getSiblingDB('gnr');
- db.dropDatabase();
-
- // Sleep for a long time so we can safely use --seconds to get the
- // right operations to verify that the `dropDatabase` and subsequent
- // inserts and updates get applied
- db.test.insert({x: 1});
-
- var LONG_SLEEP_TIME = 5000;
- sleep(LONG_SLEEP_TIME);
-
- db.dropDatabase();
-
- // Do 6 inserts and 6 updates
- var tracks = ['Welcome to the Jungle', 'Sweet Child O\' Mine', 'Patience',
- 'Paradise City', 'Knockin\' on Heaven\'s Door', 'Civil War'];
-
- tracks.forEach(function(track) {
- db.greatest_hits.insert({
- _id: track
- });
- });
-
- tracks.forEach(function(track, index) {
- db.greatest_hits.update({_id: track}, {$set: {index: index}});
- });
-
- var args = ['oplog', '--seconds', '1',
- '--from', '127.0.0.1:' + toolTest.port].concat(commonToolArgs);
-
- if (toolTest.isSharded) {
- // When applying ops to a sharded cluster,
- assert(toolTest.runTool.apply(toolTest, args) !== 0,
- 'mongooplog should fail when running applyOps on a sharded cluster');
-
- var expectedError =
- 'error applying ops: applyOps not allowed through mongos';
- assert.strContains.soon(expectedError, rawMongoProgramOutput,
- 'mongooplog crash should output the correct error message');
-
- assert.eq(0, db.greatest_hits.count({}),
- 'mongooplog should not have applied any operations');
- } else {
- // Running should apply the drop followed by 6 updates and 6 inserts,
- // but not the { x: 1 } insert.
- assert.eq(toolTest.runTool.apply(toolTest, args), 0,
- 'mongooplog should succeed');
-
- assert.eq(6, db.greatest_hits.count({}),
- 'mongooplog should apply all operations');
- assert.eq(0, db.test.count(), 'mongooplog should not have restored an ' +
- 'insert that happened before the --seconds cutoff');
- tracks.forEach(function(track, index) {
- assert.eq(1, db.greatest_hits.count({_id: track, index: index}),
- 'mongooplog should have inserted a doc with _id="' + track + '" and ' +
- 'updated it to have index=' + index);
- });
- }
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/oplog/unreachable_host_tests.js b/src/mongo/gotools/test/qa-tests/jstests/oplog/unreachable_host_tests.js
deleted file mode 100644
index 47a0a48c489..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/oplog/unreachable_host_tests.js
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Tests behavior when the host provided in --host or in --from is unreachable
- */
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- // unused: var CURRENT_MONGOD_RELEASE = '3.0';
-
- var toolTest = getToolTest('oplogUnreachableHostsTest');
- var commonToolArgs = getCommonToolArguments();
-
- var fromUnreachableError = 'error connecting to source db';
- var args = ['oplog'].concat(commonToolArgs).concat('--from',
- 'doesnte.xist:27999');
- assert(toolTest.runTool.apply(toolTest, args) !== 0,
- 'mongooplog should fail when --from is not reachable');
-
- assert.strContains.soon(fromUnreachableError, rawMongoProgramOutput,
- 'mongooplog should output correct error when "from" is not reachable');
-
- // Clear output
- clearRawMongoProgramOutput();
-
- /** Overwrite so toolTest.runTool doesn't append --host */
- toolTest.runTool = function() {
- arguments[0] = 'mongo' + arguments[0];
- return runMongoProgram.apply(null, arguments);
- };
-
- args = ['oplog'].concat(commonToolArgs).concat('--host', 'doesnte.xist',
- '--from', '127.0.0.1:' + toolTest.port);
- assert(toolTest.runTool.apply(toolTest, args) !== 0,
- 'mongooplog should fail when --host is not reachable');
-
- output = rawMongoProgramOutput();
- var hostUnreachableError = 'error connecting to destination db';
-
- assert(output.indexOf(hostUnreachableError) !== -1,
- 'mongooplog should output correct error when "host" is not reachable');
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/15k_collections.js b/src/mongo/gotools/test/qa-tests/jstests/restore/15k_collections.js
deleted file mode 100644
index 7bdbaceab60..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/15k_collections.js
+++ /dev/null
@@ -1,38 +0,0 @@
-// this tests that we can restore a large number of collections, resolving
-// an issue raised by TOOLS-1088
-// @tags: [requires_many_files, requires_large_ram]
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- var toolTest = getToolTest('15k_collections');
- var commonToolArgs = getCommonToolArguments();
-
- var dbOne = toolTest.db.getSiblingDB('dbOne');
-
- for (var i=0; i<=15000; i++) {
- collName = "Coll" + i;
- dbOne.createCollection(collName);
- }
-
- // dump it
- var dumpTarget = '15k_collections_dump';
- resetDbpath(dumpTarget);
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the database so it's empty
- dbOne.dropDatabase();
-
- // restore it
- ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret, "restore to empty DB should have returned successfully");
-
- // success
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/24_to_28.js b/src/mongo/gotools/test/qa-tests/jstests/restore/24_to_28.js
deleted file mode 100644
index d350429a3fd..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/24_to_28.js
+++ /dev/null
@@ -1,72 +0,0 @@
-// This test requires mongo 2.4.x, and mongo 3.0.0 releases
-// @tags: [requires_mongo_24, requires_mongo_30]
-(function() {
-
- load("jstests/configs/standard_dump_targets.config.js");
-
- // skip tests requiring wiredTiger storage engine on pre 3.0 mongod
- if (TestData && TestData.storageEngine === 'wiredTiger') {
- return;
- }
-
- // Skip this test if running with SSL turned on, because the common tool args are not
- // compatible with 2.4 servers.
- if (TestData && TestData.useSSL) {
- return;
- }
- // Tests using mongorestore to restore a dump from a 2.4 mongod to a 3.0 mongod.
-
- jsTest.log('Testing running mongorestore restoring data from a 2.4 mongod to'+
- ' a 3.0 mongod');
-
- var toolTest = new ToolTest('24_to_28', {binVersion: '2.4'});
- toolTest.startDB('foo');
-
- // where we'll put the dump
- var dumpTarget = '24_to_28_dump';
- resetDbpath(dumpTarget);
-
- // the db and collection we'll be using
- var testDB = toolTest.db.getSiblingDB('test');
- var testColl = testDB.coll;
-
- // insert some documents
- var data = [];
- for (var i = 0; i < 50; i++) {
- data.push({_id: i});
- }
- testColl.insertMany(data);
- // sanity check the insert worked
- assert.eq(50, testColl.count());
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump'].concat(getDumpTarget(dumpTarget)));
- assert.eq(0, ret);
-
- // drop the database
- testDB.dropDatabase();
-
- // restart the mongod as a 3.0
- stopMongod(toolTest.port);
- toolTest.m = null;
- toolTest.db = null;
- delete toolTest.options.binVersion;
- toolTest.startDB('foo');
-
- // refresh the db and coll reference
- testDB = toolTest.db.getSiblingDB('test');
- testColl = testDB.coll;
-
- // restore the data
- ret = toolTest.runTool.apply(toolTest, ['restore'].concat(getRestoreTarget(dumpTarget)));
- assert.eq(0, ret);
-
- // make sure the data was restored
- assert.eq(50, testColl.count());
- for (i = 0; i < 50; i++) {
- assert.eq(1, testColl.count({_id: i}));
- }
-
- // success
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/26_to_28.js b/src/mongo/gotools/test/qa-tests/jstests/restore/26_to_28.js
deleted file mode 100644
index d38808887ae..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/26_to_28.js
+++ /dev/null
@@ -1,67 +0,0 @@
-// This test requires mongo 2.6.x, and mongo 3.0.0 releases
-// @tags: [requires_mongo_26, requires_mongo_30]
-(function() {
-
- load("jstests/configs/standard_dump_targets.config.js");
-
- // skip tests requiring wiredTiger storage engine on pre 3.0 mongod
- if (TestData && TestData.storageEngine === 'wiredTiger') {
- return;
- }
-
- // Tests using mongorestore to restore a dump from a 2.6 mongod to a 3.0 mongod.
- jsTest.log('Testing running mongorestore restoring data from a 2.6 mongod to'+
- ' a 3.0 mongod');
-
- var toolTest = new ToolTest('26_to_28', {binVersion: '2.6'});
- toolTest.startDB('foo');
-
- // where we'll put the dump
- var dumpTarget = '26_to_28_dump';
- resetDbpath(dumpTarget);
-
- // the db and collection we'll be using
- var testDB = toolTest.db.getSiblingDB('test');
- var testColl = testDB.coll;
-
- // insert some documents
- var data = [];
- for (var i = 0; i < 50; i++) {
- data.push({_id: i});
- }
- testColl.insertMany(data);
- // sanity check the insert worked
- assert.eq(50, testColl.count());
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump'].concat(getDumpTarget(dumpTarget)));
- assert.eq(0, ret);
-
- // drop the database
- testDB.dropDatabase();
-
- // restart the mongod as a 3.0
- stopMongod(toolTest.port);
- toolTest.m = null;
- toolTest.db = null;
- delete toolTest.options.binVersion;
- toolTest.startDB('foo');
-
- // refresh the db and coll reference
- testDB = toolTest.db.getSiblingDB('test');
- testColl = testDB.coll;
-
- // restore the data
- ret = toolTest.runTool.apply(toolTest, ['restore'].concat(getRestoreTarget(dumpTarget)));
- assert.eq(0, ret);
-
- // make sure the data was restored
- assert.eq(50, testColl.count());
- for (i = 0; i < 50; i++) {
- assert.eq(1, testColl.count({_id: i}));
- }
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/28_to_26.js b/src/mongo/gotools/test/qa-tests/jstests/restore/28_to_26.js
deleted file mode 100644
index decc904465f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/28_to_26.js
+++ /dev/null
@@ -1,68 +0,0 @@
-// This test requires mongo 2.6.x, and mongo 3.0.0 releases
-// @tags: [requires_mongo_26, requires_mongo_30]
-(function() {
- load("jstests/configs/standard_dump_targets.config.js");
-
- // skip tests requiring wiredTiger storage engine on pre 2.8 mongod
- if (TestData && TestData.storageEngine === 'wiredTiger') {
- return;
- }
-
- // Tests using mongorestore to restore a dump from a 2.8 mongod to a 2.6 mongod.
-
- jsTest.log('Testing running mongorestore restoring data from a 2.8 mongod to'+
- ' a 2.6 mongod');
-
- var toolTest = new ToolTest('28_to_26');
- toolTest.startDB('foo');
-
- // where we'll put the dump
- var dumpTarget = '28_to_26_dump';
- resetDbpath(dumpTarget);
-
- // the db and collection we'll be using
- var testDB = toolTest.db.getSiblingDB('test');
- var testColl = testDB.coll;
-
- // insert some documents
- var data = [];
- for (var i = 0; i < 50; i++) {
- data.push({_id: i});
- }
- testColl.insertMany(data);
- // sanity check the insert worked
- assert.eq(50, testColl.count());
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump'].concat(getDumpTarget(dumpTarget)));
- assert.eq(0, ret);
-
- // drop the database
- testDB.dropDatabase();
-
- // restart the mongod as a 2.6
- stopMongod(toolTest.port);
- toolTest.m = null;
- toolTest.db = null;
- toolTest.options = toolTest.options || {};
- toolTest.options.binVersion = '2.6';
- resetDbpath(toolTest.dbpath);
- toolTest.startDB('foo');
-
- // refresh the db and coll reference
- testDB = toolTest.db.getSiblingDB('test');
- testColl = testDB.coll;
-
- // restore the data
- ret = toolTest.runTool.apply(toolTest, ['restore'].concat(getRestoreTarget(dumpTarget)));
- assert.eq(0, ret);
-
- // make sure the data was restored
- assert.eq(50, testColl.count());
- for (i = 0; i < 50; i++) {
- assert.eq(1, testColl.count({_id: i}));
- }
-
- // success
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/archive_stdout.js b/src/mongo/gotools/test/qa-tests/jstests/restore/archive_stdout.js
deleted file mode 100644
index 825d95916f0..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/archive_stdout.js
+++ /dev/null
@@ -1,54 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- var toolTest = getToolTest('archive_stdout');
- var baseArgs = getCommonToolArguments();
- baseArgs = baseArgs.concat('--port', toolTest.port);
-
- if (toolTest.useSSL) {
- baseArgs = baseArgs.concat([
- '--ssl',
- '--sslPEMKeyFile', 'jstests/libs/server.pem',
- '--sslCAFile', 'jstests/libs/ca.pem',
- '--sslAllowInvalidHostnames']);
- }
- if (dump_targets === 'gzip') {
- baseArgs = baseArgs.concat('--gzip');
- }
- var dumpArgs = ['mongodump', '--archive'].concat(baseArgs);
- var restoreArgs = ['mongorestore', '--archive', '--drop'].concat(baseArgs);
-
- dumpArgs[0] = 'PATH=.:$PATH ' + dumpArgs[0];
- restoreArgs[0] = 'PATH=.:$PATH ' + restoreArgs[0];
- if (_isWindows()) {
- dumpArgs[0] += '.exe';
- restoreArgs[0] += '.exe';
- }
-
- var testDb = toolTest.db;
- testDb.dropDatabase();
- var fooData = [];
- var barData = [];
- for (var i = 0; i < 500; i++) {
- fooData.push({i: i});
- barData.push({i: i*5});
- }
- testDb.foo.insertMany(fooData);
- testDb.bar.insertMany(barData);
- assert.eq(500, testDb.foo.count(), 'foo should have our test documents');
- assert.eq(500, testDb.bar.count(), 'bar should have our test documents');
-
- var ret = runProgram('bash', '-c', dumpArgs.concat('|', restoreArgs).join(' '));
- assert.eq(0, ret, "bash execution should succeed");
-
- for (i = 0; i < 500; i++) {
- assert.eq(1, testDb.foo.find({i: i}).count(), 'document #'+i+' not in foo');
- assert.eq(1, testDb.bar.find({i: i*5}).count(), 'document #'+i+' not in bar');
- }
- assert.eq(500, testDb.foo.count(), 'foo should have our test documents');
- assert.eq(500, testDb.bar.count(), 'bar should have our test documents');
-
- testDb.dropDatabase();
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/bad_options.js b/src/mongo/gotools/test/qa-tests/jstests/restore/bad_options.js
deleted file mode 100644
index 1639dfa645f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/bad_options.js
+++ /dev/null
@@ -1,54 +0,0 @@
-(function() {
-
- load("jstests/configs/standard_dump_targets.config.js");
-
- // Tests running mongorestore with bad command line options.
-
- jsTest.log('Testing running mongorestore with bad'+
- ' command line options');
-
- var toolTest = new ToolTest('incompatible_flags');
- toolTest.startDB('foo');
-
- // run restore with both --objcheck and --noobjcheck specified
- var ret = toolTest.runTool.apply(toolTest, ['restore',
- '--objcheck', '--noobjcheck']
- .concat(getRestoreTarget('restore/testdata/dump_empty')));
- assert.neq(0, ret);
-
- // run restore with --oplogLimit with a bad timestamp
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--oplogReplay', '--oplogLimit',
- 'xxx']
- .concat(getRestoreTarget('restore/testdata/dump_with_oplog')));
- assert.neq(0, ret);
-
- // run restore with a negative --w value
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--w', '-1']
- .concat(getRestoreTarget('jstests/restore/testdata/dump_empty')));
- assert.neq(0, ret);
-
- // run restore with an invalid db name
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--db', 'billy.crystal']
- .concat(getRestoreTarget('jstests/restore/testdata/blankdb')));
- assert.neq(0, ret);
-
- // run restore with an invalid collection name
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--db', 'test',
- '--collection', '$money']
- .concat(getRestoreTarget('jstests/restore/testdata/blankcoll/blank.bson')));
- assert.neq(0, ret);
-
- // run restore with an invalid verbosity value
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '-v', 'torvalds']
- .concat(getRestoreTarget('restore/testdata/dump_empty')));
- assert.neq(0, ret);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/blank_collection_bson.js b/src/mongo/gotools/test/qa-tests/jstests/restore/blank_collection_bson.js
deleted file mode 100644
index e3d2f62f037..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/blank_collection_bson.js
+++ /dev/null
@@ -1,43 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- if (dump_targets !== "standard") {
- print('skipping test incompatable with archiving or compression');
- return assert(true);
- }
-
- // Tests using mongorestore to restore data from a blank collection
- // file, with both a missing and blank metadata file.
-
- jsTest.log('Testing restoration from a blank collection file');
-
- var toolTest = getToolTest('blank_collection_bson');
- var commonToolArgs = getCommonToolArguments();
-
- // run the restore with the blank collection file and no
- // metadata file. it should succeed, but insert nothing.
- var ret = toolTest.runTool.apply(toolTest, ['restore',
- '--db', 'test',
- '--collection', 'blank']
- .concat(getRestoreTarget('jstests/restore/testdata/blankcoll/blank.bson'))
- .concat(commonToolArgs));
- assert.eq(0, ret);
- assert.eq(0, toolTest.db.getSiblingDB('test').blank.count());
-
- // run the restore with the blank collection file and a blank
- // metadata file. it should succeed, but insert nothing.
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--db', 'test',
- '--collection', 'blank']
- .concat(getRestoreTarget('jstests/restore/testdata/blankcoll/blank_metadata.bson'))
- .concat(commonToolArgs));
- assert.eq(0, ret);
- assert.eq(0, toolTest.db.getSiblingDB('test').blank.count());
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/blank_db.js b/src/mongo/gotools/test/qa-tests/jstests/restore/blank_db.js
deleted file mode 100644
index 1d3c85e3e0b..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/blank_db.js
+++ /dev/null
@@ -1,29 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- if (dump_targets === "archive") {
- print('skipping test incompatable with archiving');
- return assert(true);
- }
-
- // Tests using mongorestore to restore data from a blank db directory.
-
- jsTest.log('Testing restoration from a blank db directory');
-
- var toolTest = getToolTest('blank_db');
- var commonToolArgs = getCommonToolArguments();
-
- // run the restore with the blank db directory. it should succeed, but
- // insert nothing.
- var ret = toolTest.runTool.apply(toolTest, ['restore', '--db', 'test']
- .concat(getRestoreTarget('jstests/restore/testdata/blankdb'))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/conflicting_auth_schema_version.js b/src/mongo/gotools/test/qa-tests/jstests/restore/conflicting_auth_schema_version.js
deleted file mode 100644
index 88c33a69002..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/conflicting_auth_schema_version.js
+++ /dev/null
@@ -1,138 +0,0 @@
-// This test requires mongo 2.6.x releases
-// @tags: [requires_mongo_26]
-(function() {
-
- load("jstests/configs/standard_dump_targets.config.js");
-
- // Tests using mongorestore to restore a dump containing users. If there is
- // conflicting authSchemaVersion in the admin.system.version document, it
- // should be ignored, and the restore should complete successfully.
-
- jsTest.log('Testing restoring a dump with a potentially conflicting'+
- ' authSchemaVersion in the database');
-
- if (dump_targets !== "standard") {
- print('skipping test incompatable with archiving or compression');
- return assert(true);
- }
-
- var runTest = function(sourceDBVersion, dumpVersion, restoreVersion, destDBVersion, shouldSucceed) {
-
- jsTest.log('Running with sourceDBVersion=' + (sourceDBVersion || 'latest') +
- ', dumpVersion=' + (dumpVersion || 'latest') + ', restoreVersion=' +
- (restoreVersion || 'latest') + ', and destDBVersion=' +
- (destDBVersion || 'latest') + ', expected to pass=' + shouldSucceed);
-
- var toolTest = new ToolTest('conflicting_auth_schema_version',
- {binVersion: sourceDBVersion, auth: ''});
- toolTest.startDB('foo');
-
- // where we'll put the dump
- var dumpTarget = 'conflicting_auth_schema_version_dump';
- resetDbpath(dumpTarget);
-
- // the admin db, and the non-admin db we'll be using
- var adminDB = toolTest.db.getSiblingDB('admin');
- var testDB = toolTest.db.getSiblingDB('test');
-
- // create a user admin
- adminDB.createUser({
- user: 'admin',
- pwd: 'password',
- roles: [
- {role: 'userAdminAnyDatabase', db: 'admin'},
- {role: 'readWriteAnyDatabase', db: 'admin'},
- {role: 'backup', db: 'admin'},
- ],
- });
- var authInfo = {user: 'admin', pwd: 'password'};
- if (sourceDBVersion === "2.6") {
- authInfo.mechanism = "MONGODB-CR";
- }
- assert.eq(1, adminDB.auth(authInfo));
-
- // add some data
- var data = [];
- for (var i = 0; i < 10; i++) {
- data.push({_id: i});
- }
- testDB.data.insertMany(data);
-
- // sanity check the data was inserted
- assert.eq(10, testDB.data.count());
-
- // dump all the data
- args = ['mongodump' + (dumpVersion ? ('-'+dumpVersion) : ''),
- '--username', 'admin',
- '--password', 'password', '--port', toolTest.port]
- .concat(getDumpTarget(dumpTarget));
- if (sourceDBVersion === "2.6") {
- args.push("--authenticationMechanism=MONGODB-CR");
- }
- var ret = runMongoProgram.apply(this, args);
- assert.eq(0, ret);
-
- // restart the mongod, with a clean db path
- stopMongod(toolTest.port);
- resetDbpath(toolTest.dbpath);
- toolTest.m = null;
- toolTest.db = null;
- toolTest.options.binVersion = destDBVersion;
- toolTest.startDB('foo');
-
- // refresh the db references
- adminDB = toolTest.db.getSiblingDB('admin');
- testDB = toolTest.db.getSiblingDB('test');
-
- // create a new user admin
- adminDB.createUser({
- user: 'admin28',
- pwd: 'password',
- roles: [
- {role: 'userAdminAnyDatabase', db: 'admin'},
- {role: 'readWriteAnyDatabase', db: 'admin'},
- {role: 'restore', db: 'admin'},
- ],
- });
-
- var authInfoDest = {user: 'admin28', pwd: 'password'};
- if (destDBVersion === "2.6") {
- authInfoDest.mechanism = "MONGODB-CR";
- }
- assert.eq(1, adminDB.auth(authInfoDest));
-
- // do a full restore
- args = ['mongorestore' + (restoreVersion ? ('-'+restoreVersion) : ''),
- '--username', 'admin28',
- '--password', 'password',
- '--port', toolTest.port,
- '--stopOnError']
- .concat(getRestoreTarget(dumpTarget));
-
- ret = runMongoProgram.apply(this, args);
-
- if (shouldSucceed) {
- assert.eq(0, ret);
- // make sure the data and users are all there
- assert.eq(10, testDB.data.count());
- for (i = 0; i < 10; i++) {
- assert.eq(1, testDB.data.count({_id: i}));
- }
- var users = adminDB.getUsers();
- assert.eq(2, users.length);
- assert(users[0].user === 'admin' || users[1].user === 'admin');
- assert(users[0].user === 'admin28' || users[1].user === 'admin28');
- } else {
- assert.neq(0, ret);
- }
- // success
- toolTest.stop();
- };
-
- // 'undefined' triggers latest
- runTest('2.6', '2.6', undefined, '2.6', true);
- runTest('2.6', '2.6', undefined, undefined, true);
- runTest('2.6', undefined, undefined, undefined, true);
- runTest(undefined, undefined, undefined, '2.6', false);
- runTest(undefined, undefined, undefined, undefined, true);
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/different_collection.js b/src/mongo/gotools/test/qa-tests/jstests/restore/different_collection.js
deleted file mode 100644
index 77c073d1f7f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/different_collection.js
+++ /dev/null
@@ -1,91 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests using mongorestore to restore data to a different collection
- // then it was dumped from.
-
- jsTest.log('Testing restoration to a different collection');
-
- if (dump_targets === 'archive') {
- jsTest.log('Skipping test unsupported against archive targets');
- return assert(true);
- }
-
- var toolTest = getToolTest('different_collection');
- var commonToolArgs = getCommonToolArguments();
-
- // where we'll put the dump
- var dumpTarget = 'different_collection_dump';
- resetDbpath(dumpTarget);
-
- // the db we will dump from
- var sourceDB = toolTest.db.getSiblingDB('source');
- // the collection we will dump from
- var sourceCollName = 'sourceColl';
-
- // insert a bunch of data
- var data = [];
- for (var i = 0; i < 500; i++) {
- data.push({_id: i});
- }
- sourceDB[sourceCollName].insertMany(data);
- // sanity check the insertion worked
- assert.eq(500, sourceDB[sourceCollName].count());
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump'].concat(getDumpTarget(dumpTarget)));
- assert.eq(0, ret);
-
- // restore just the collection into a different collection
- // in the same database
- var destCollName = 'destColl';
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--db', 'source',
- '--collection', destCollName]
- .concat(getRestoreTarget(dumpTarget+'/source/sourceColl.bson'))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the data was restored correctly
- assert.eq(500, sourceDB[destCollName].count());
- for (i = 0; i < 500; i++) {
- assert.eq(1, sourceDB[destCollName].count({_id: i}));
- }
-
- // restore just the collection into a similarly-named collection
- // in a different database
- var destDB = toolTest.db.getSiblingDB('dest');
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--db', 'dest',
- '--collection', sourceCollName]
- .concat(getRestoreTarget(dumpTarget+'/source/sourceColl.bson'))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the data was restored correctly
- assert.eq(500, destDB[sourceCollName].count());
- for (i = 0; i < 500; i++) {
- assert.eq(1, destDB[sourceCollName].count({_id: i}));
- }
-
- // restore just the collection into a different collection
- // in a different database
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--db', 'dest',
- '--collection', destCollName]
- .concat(getRestoreTarget(dumpTarget+'/source/sourceColl.bson'))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the data was restored correctly
- assert.eq(500, destDB[destCollName].count());
- for (i = 0; i < 500; i++) {
- assert.eq(1, destDB[destCollName].count({_id: i}));
- }
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/different_db.js b/src/mongo/gotools/test/qa-tests/jstests/restore/different_db.js
deleted file mode 100644
index b92d9cda9c3..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/different_db.js
+++ /dev/null
@@ -1,86 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests using mongorestore to restore data to a different db than
- // it was dumped from.
-
- jsTest.log('Testing restoration to a different db');
-
- if (dump_targets === 'archive') {
- jsTest.log('Skipping test unsupported against archive targets');
- return assert(true);
- }
-
- var toolTest = getToolTest('different_db');
- var commonToolArgs = getCommonToolArguments();
-
- // where we'll put the dump
- var dumpTarget = 'different_db_dump';
- resetDbpath(dumpTarget);
-
- // the db we will dump from
- var sourceDB = toolTest.db.getSiblingDB('source');
- // the db we will restore to
- var destDB = toolTest.db.getSiblingDB('dest');
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // we'll use two collections
- var collNames = ['coll1', 'coll2'];
-
- // insert a bunch of data
- collNames.forEach(function(collName) {
- var data = [];
- for (var i = 0; i < 500; i++) {
- data.push({_id: i+'_'+collName});
- }
- sourceDB[collName].insertMany(data);
- // sanity check the insertion worked
- assert.eq(500, sourceDB[collName].count());
- });
-
- // dump the data
- ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // restore the data to a different db
- ret = toolTest.runTool.apply(toolTest, ['restore', '--db', 'dest']
- .concat(getRestoreTarget(dumpTarget+'/source'))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the data was restored
- collNames.forEach(function(collName) {
- assert.eq(500, destDB[collName].count());
- for (var i = 0; i < 500; i++) {
- assert.eq(1, destDB[collName].count({_id: i+'_'+collName}));
- }
- });
-
- // restore the data to another different db
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--nsFrom', '$db$.$collection$',
- '--nsTo', 'otherdest.$db$_$collection$']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
- destDB = toolTest.db.getSiblingDB('otherdest');
- collNames.forEach(function(collName) {
- assert.eq(500, destDB['source_'+collName].count());
- for (var i = 0; i < 500; i++) {
- assert.eq(1, destDB['source_'+collName].count({_id: i+'_'+collName}));
- }
- });
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/drop_authenticated_user.js b/src/mongo/gotools/test/qa-tests/jstests/restore/drop_authenticated_user.js
deleted file mode 100644
index c832c4785d9..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/drop_authenticated_user.js
+++ /dev/null
@@ -1,111 +0,0 @@
-(function() {
-
- load("jstests/configs/standard_dump_targets.config.js");
-
- // Tests running mongorestore with --drop and --restoreDbUsersAndRoles,
- // in addition to --auth, and makes sure the authenticated user does not
- // get dropped before it can complete the restore job.
-
- jsTest.log('Testing dropping the authenticated user with mongorestore');
-
- var toolTest = new ToolTest('drop_authenticated_user', {auth: ''});
- toolTest.startDB('foo');
-
- // where we'll put the dump
- var dumpTarget = 'drop_authenticated_user_dump';
- resetDbpath(dumpTarget);
-
- // we'll use the admin db so that the user we are restoring as
- // is part of the db we are restoring
- var adminDB = toolTest.db.getSiblingDB('admin');
-
- // create the users we'll need for the dump
- adminDB.createUser({
- user: 'admin',
- pwd: 'password',
- roles: [
- {role: 'userAdmin', db: 'admin'},
- {role: 'readWrite', db: 'admin'},
- ],
- });
- adminDB.auth('admin', 'password');
-
- adminDB.createUser({
- user: 'backup',
- pwd: 'password',
- roles: [{role: 'backup', db: 'admin'}],
- });
-
- // create a role
- adminDB.createRole({
- role: 'extraRole',
- privileges: [{
- resource: {db: 'admin', collection: ''},
- actions: ['find'],
- }],
- roles: [],
- });
-
- // insert some data
- var data = [];
- for (var i = 0; i < 10; i++) {
- data.push({_id: i});
- }
- adminDB.data.insertMany(data);
- // sanity check the insertion worked
- assert.eq(10, adminDB.data.count());
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump',
- '--username', 'backup',
- '--password', 'password']
- .concat(getDumpTarget(dumpTarget)));
- assert.eq(0, ret);
-
- // drop all the data, but not the users or roles
- adminDB.data.remove({});
- // sanity check the removal worked
- assert.eq(0, adminDB.data.count());
-
- // now create the restore user, so that we can use it for the restore but it is
- // not part of the dump
- adminDB.createUser({
- user: 'restore',
- pwd: 'password',
- roles: [{role: 'restore', db: 'admin'}],
- });
-
- // insert some data to be removed when --drop is run
- data = [];
- for (i = 10; i < 20; i++) {
- data.push({_id: i});
- }
- adminDB.data.insertMany(data);
- // sanity check the insertion worked
- assert.eq(10, adminDB.data.count());
-
- // restore the data, specifying --drop
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--drop',
- '--username', 'restore',
- '--password', 'password']
- .concat(getRestoreTarget(dumpTarget)));
- assert.eq(0, ret);
-
- // make sure the existing data was removed, and replaced with the dumped data
- assert.eq(10, adminDB.data.count());
- for (i = 0; i < 10; i++) {
- assert.eq(1, adminDB.data.count({_id: i}));
- }
-
- // make sure the correct roles and users exist - that the restore user was dropped
- var users = adminDB.getUsers();
- assert.eq(2, users.length);
- assert(users[0].user === 'backup' || users[1].user === 'backup');
- assert(users[0].user === 'admin' || users[1].user === 'admin');
- assert.eq(1, adminDB.getRoles().length);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/drop_nonexistent_db.js b/src/mongo/gotools/test/qa-tests/jstests/restore/drop_nonexistent_db.js
deleted file mode 100644
index 36cb99338f1..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/drop_nonexistent_db.js
+++ /dev/null
@@ -1,58 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests that running mongorestore with --drop on a database with
- // nothing to drop does not error out, and completes the
- // restore successfully.
-
- jsTest.log('Testing restoration with --drop on a nonexistent db');
-
- var toolTest = getToolTest('drop_nonexistent_db');
- var commonToolArgs = getCommonToolArguments();
-
- // where we'll put the dump
- var dumpTarget = 'drop_nonexistent_db_dump';
- resetDbpath(dumpTarget);
-
- // the db we will use
- var testDB = toolTest.db.getSiblingDB('test');
-
- // insert a bunch of data
- var data = [];
- for (var i = 0; i < 500; i++) {
- data.push({_id: i});
- }
- testDB.coll.insertMany(data);
- // sanity check the insertion worked
- assert.eq(500, testDB.coll.count());
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the database we are using
- testDB.dropDatabase();
- // sanity check the drop worked
- assert.eq(0, testDB.coll.count());
-
- // restore the data with --drop
- ret = toolTest.runTool.apply(toolTest, ['restore', '--drop']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the data was restored
- assert.eq(500, testDB.coll.count());
- for (i = 0; i < 500; i++) {
- assert.eq(1, testDB.coll.count({_id: i}));
- }
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/drop_one_collection.js b/src/mongo/gotools/test/qa-tests/jstests/restore/drop_one_collection.js
deleted file mode 100644
index 53a35ff4cfa..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/drop_one_collection.js
+++ /dev/null
@@ -1,92 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests that running mongorestore with --drop and --collection leaves data
- // in other collections untouched (that --drop only applies to the
- // specified collection).
-
- jsTest.log('Testing restoration with --drop and --collection, with data in'+
- ' other collections');
-
- var toolTest = getToolTest('drop_one_collection');
- var commonToolArgs = getCommonToolArguments();
-
- // where we'll put the dump
- var dumpTarget = 'drop_one_collection_dump';
- resetDbpath(dumpTarget);
-
- // the db we will take the dump from
- var sourceDB = toolTest.db.getSiblingDB('source');
-
- // dump from two different collections, even though we'll
- // only be restoring one.
- var collNames = ['coll1', 'coll2'];
- collNames.forEach(function(collName) {
- var data = [];
- for (var i = 0; i < 500; i++) {
- data.push({_id: i+'_'+collName});
- }
- sourceDB[collName].insertMany(data);
- // sanity check the insertion worked
- assert.eq(500, sourceDB[collName].count());
- });
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop and replace the data
- collNames.forEach(function(collName) {
- sourceDB[collName].drop();
- // sanity check the drop worked
- assert.eq(0, sourceDB[collName].count());
-
- // insert a disjoint set of data from the dump
- var data = [];
- for (var i = 500; i < 600; i++) {
- data.push({_id: i+'_'+collName});
- }
- sourceDB[collName].insertMany(data);
- // sanity check the insertion worked
- assert.eq(100, sourceDB[collName].count());
- });
-
- // insert data into the same collections in a different db
- var otherDB = toolTest.db.getSiblingDB('other');
- collNames.forEach(function(collName) {
- var data = [];
- for (var i = 500; i < 600; i++) {
- data.push({_id: i+'_'+collName});
- }
- otherDB[collName].insertMany(data);
- // sanity check the insertion worked
- assert.eq(100, otherDB[collName].count());
- });
-
- // restore with --drop and --collection
- ret = toolTest.runTool.apply(toolTest, ['restore', '--drop',
- '--db', 'source',
- '--collection', 'coll1']
- .concat(getRestoreTarget(dumpTarget+'/source/coll1.bson'))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure that the dumped data replaced the old data in only
- // the specified collection, and all other data was left untouched
- assert.eq(500, sourceDB.coll1.count());
- for (var i = 0; i < 500; i++) {
- assert.eq(1, sourceDB.coll1.count({_id: i+'_coll1'}));
- }
- assert.eq(100, sourceDB.coll2.count());
- assert.eq(100, otherDB.coll1.count());
- assert.eq(100, otherDB.coll2.count());
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/drop_with_data.js b/src/mongo/gotools/test/qa-tests/jstests/restore/drop_with_data.js
deleted file mode 100644
index 56fc1a93473..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/drop_with_data.js
+++ /dev/null
@@ -1,77 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests that running mongorestore with --drop drops existing data
- // before restoring.
-
- jsTest.log('Testing restoration with --drop on existing data');
-
- var toolTest = getToolTest('drop_with_data');
- var commonToolArgs = getCommonToolArguments();
-
- // where we'll put the dump
- var dumpTarget = 'drop_with_data_dump';
- resetDbpath(dumpTarget);
-
- // the db we will use
- var testDB = toolTest.db.getSiblingDB('test');
-
- // we'll use two collections, to make sure they both
- // get dropped appropriately
- var collNames = ['coll1', 'coll2'];
-
- // insert a bunch of data to be dumped
- collNames.forEach(function(collName) {
- var data = [];
- for (var i = 0; i < 500; i++) {
- data.push({_id: i+'_'+collName});
- }
- testDB[collName].insertMany(data);
- // sanity check the insertion worked
- assert.eq(500, testDB[collName].count());
- });
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop all the data, and replace it with different data
- collNames.forEach(function(collName) {
- testDB[collName].drop();
- // sanity check the drop worked
- assert.eq(0, testDB[collName].count());
-
- var data = [];
- for (var i = 500; i < 600; i++) {
- data.push({_id: i+'_'+collName});
- }
- testDB[collName].insertMany(data);
- // sanity check the insertion worked
- assert.eq(100, testDB[collName].count());
- });
-
- // restore with --drop. the current data in all collections should
- // be removed and replaced with the dumped data
- ret = toolTest.runTool.apply(toolTest, ['restore', '--drop']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the dumped data was restored, and the old data
- // was dropped
- collNames.forEach(function(collName) {
- assert.eq(500, testDB[collName].count());
- for (var i = 0; i < 500; i++) {
- assert.eq(1, testDB[collName].count({_id: i+'_'+collName}));
- }
- });
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/duplicate_keys.js b/src/mongo/gotools/test/qa-tests/jstests/restore/duplicate_keys.js
deleted file mode 100644
index acc261dba11..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/duplicate_keys.js
+++ /dev/null
@@ -1,75 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests using mongorestore to restore a mix of existing and
- // non-existing documents to a collection, so we can make sure
- // all new documents are actually added.
-
- jsTest.log('Testing restoration of a dump on top of existing documents');
-
- var toolTest = getToolTest('dupe_restore');
- var commonToolArgs = getCommonToolArguments();
-
- // where we'll put the dump
- var dumpTarget = 'dupe_restore_dump';
- resetDbpath(dumpTarget);
-
- // we'll insert data into three collections spread across two dbs
- var dbOne = toolTest.db.getSiblingDB('dbOne');
- var testColl = dbOne.duplicates;
-
- // insert a bunch of data
- var data = [];
- for (var i = 0; i < 50; i++) {
- data.push({_id: i});
- }
- testColl.insertMany(data);
- // sanity check the insertion worked
- assert.eq(50, testColl.count());
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // remove a few random documents
- var removeDocs = function() {
- testColl.remove({_id: 0});
- testColl.remove({_id: 5});
- testColl.remove({_id: 6});
- testColl.remove({_id: 9});
- testColl.remove({_id: 12});
- testColl.remove({_id: 27});
- testColl.remove({_id: 40});
- testColl.remove({_id: 46});
- testColl.remove({_id: 47});
- testColl.remove({_id: 49});
- assert.eq(40, testColl.count());
- };
- removeDocs();
-
- // restore the db with default settings
- ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the restore worked, and all of the removed keys were restored
- assert.eq(50, testColl.count(), "some documents were not restored with default settings");
-
- // now check an array of batch sizes
- for (i = 1; i < 100; i++) {
- removeDocs();
- ret = toolTest.runTool.apply(toolTest, ['restore', "--batchSize", String(i)]
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
- assert.eq(50, testColl.count(), "some documents were not restored for batchSize="+i);
- }
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/empty_users_and_roles.js b/src/mongo/gotools/test/qa-tests/jstests/restore/empty_users_and_roles.js
deleted file mode 100644
index 24a3032aab5..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/empty_users_and_roles.js
+++ /dev/null
@@ -1,33 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- if (dump_targets === "archive") {
- print('skipping test incompatable with archiving');
- return assert(true);
- }
-
- // Tests running mongorestore with --restoreDbUsersAndRoles, with
- // no users or roles in the dump.
-
- jsTest.log('Testing running mongorestore with --restoreDbUsersAndRoles with'+
- ' no users or roles in the dump');
-
- var toolTest = getToolTest('empty_users_and_roles');
- var commonToolArgs = getCommonToolArguments();
-
- // run the restore with no users or roles. it should succeed, but create no
- // users or roles
- var ret = toolTest.runTool.apply(toolTest, ['restore',
- '--db', 'test',
- '--restoreDbUsersAndRoles']
- .concat(getRestoreTarget('jstests/restore/testdata/blankdb'))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/extended_json_metadata.js b/src/mongo/gotools/test/qa-tests/jstests/restore/extended_json_metadata.js
deleted file mode 100644
index 59d9997262e..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/extended_json_metadata.js
+++ /dev/null
@@ -1,42 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- if (dump_targets !== "standard") {
- print('skipping test incompatable with archiving or compression');
- return assert(true);
- }
-
- // Tests that using mongorestore on a collection with extended json types
- // in the metadata (both indexes and options) is handled gracefully.
-
- jsTest.log('Testing that restoration of extended JSON collection options works.');
-
- var toolTest = getToolTest('extended_json_metadata_restore');
- var commonToolArgs = getCommonToolArguments();
- var testDB = toolTest.db.getSiblingDB('test');
- assert.eq(testDB.changelog.exists(), null, "collection already exists in db");
-
- // run a restore against the mongos
- var ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget('jstests/restore/testdata/dump_extended_json_options'))
- .concat(commonToolArgs));
- assert.eq(0, ret, "the restore does not crash");
-
- var collectionOptionsFromDB = testDB.changelog.exists();
- printjson(collectionOptionsFromDB);
- assert.eq(collectionOptionsFromDB.options.capped, true, "capped option should be restored");
- // Mongodb might fudge the collection max values for different storage engines,
- // so we need some wiggle room.
- var delta = 1000;
- var size = 10 * 1000 * 1000;
- assert.lte(collectionOptionsFromDB.options.size, size+delta, "size should be ~10000000");
- assert.gte(collectionOptionsFromDB.options.size, size-delta, "size should be ~10000000");
-
- var indexes = testDB.changelog.getIndexes();
- printjson(indexes);
- assert.eq(indexes[0].key._id, 1, "index is read properly");
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/indexes.js b/src/mongo/gotools/test/qa-tests/jstests/restore/indexes.js
deleted file mode 100644
index 0f5cdc201db..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/indexes.js
+++ /dev/null
@@ -1,98 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests that mongorestore handles restoring different types of
- // indexes correctly.
-
- jsTest.log('Testing restoration of different types of indexes');
-
- var toolTest = getToolTest('indexes');
- var commonToolArgs = getCommonToolArguments();
-
- // where we'll put the dump
- var dumpTarget = 'indexes_dump';
- resetDbpath(dumpTarget);
-
- // the db and collection we will use
- var testDB = toolTest.db.getSiblingDB('test');
- var testColl = testDB.coll;
-
- // create a bunch of indexes of different types
- testColl.ensureIndex({a: 1});
- testColl.ensureIndex({b: 1}, {sparse: true, unique: true});
- testColl.ensureIndex({a: 1, b: -1});
- testColl.ensureIndex({b: NumberLong("1"), a: NumberLong("1")});
- testColl.ensureIndex({listField: 1});
- testColl.ensureIndex({textField: 'text'}, {language: 'spanish'});
- testColl.ensureIndex({geoField: '2dsphere'});
-
- // store the getIndexes() output, to compare with the output
- // after dumping and restoring
- var indexesPre = testColl.getIndexes();
-
- // insert some data
- var data = [];
- for (var i = 0; i < 5; i++) {
- data.push({a: i, b: i+1, listField: [i, i+1]});
- data.push({textField: 'hola '+i});
- data.push({geoField: {type: 'Point', coordinates: [i, i+1]}});
- }
- testColl.insertMany(data);
- // sanity check the data was inserted
- assert.eq(15, testColl.count());
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the collection
- testColl.drop();
- // sanity check that the drop worked
- assert.eq(0, testColl.count());
- assert.eq(0, testColl.getIndexes().length);
-
- // restore the data
- ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the data was restored correctly
- assert.eq(15, testColl.count());
-
- // make sure the indexes were restored correctly
- var indexesPost = testColl.getIndexes();
- assert.eq(indexesPre.length, indexesPost.length);
-
- if (dump_targets === "archive") {
- jsTest.log('skipping bson file restore test while running with archiving');
- } else {
- // drop the collection again
- testColl.drop();
- // sanity check that the drop worked
- assert.eq(0, testColl.count());
-
- assert.eq(0, testColl.getIndexes().length);
-
- // restore the data, but this time mentioning the bson file specifically
- ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget(dumpTarget+"/test/coll.bson"))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the data was restored correctly
- assert.eq(15, testColl.count());
-
- // make sure the indexes were restored correctly
- indexesPost = testColl.getIndexes();
- assert.eq(indexesPre.length, indexesPost.length);
- }
-
- // success
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/invalid_dump_target.js b/src/mongo/gotools/test/qa-tests/jstests/restore/invalid_dump_target.js
deleted file mode 100644
index 89ecaca7ddc..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/invalid_dump_target.js
+++ /dev/null
@@ -1,32 +0,0 @@
-(function() {
-
- load("jstests/configs/standard_dump_targets.config.js");
-
- // Tests running mongorestore with invalid specified dumps (directories when
- // files are expected, and visa versa).
-
- jsTest.log('Testing running mongorestore with a invalid dump targets');
-
- var toolTest = new ToolTest('invalid_dump_target');
- toolTest.startDB('foo');
-
- // run restore with a file, not a directory, specified as the dump location
- var ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget('jstests/restore/testdata/blankdb/README')));
- assert.neq(0, ret);
-
- // run restore with --db specified and a file, not a directory, as the db dump
- ret = toolTest.runTool.apply(toolTest, ['restore', '--db', 'test']
- .concat(getRestoreTarget('jstests/restore/testdata/blankdb/README')));
- assert.neq(0, ret);
-
- // run restore with --collection specified and a directory, not a file,
- // as the dump file
- ret = toolTest.runTool.apply(toolTest, ['restore', '--db', 'test', '--collection', 'blank']
- .concat(getRestoreTarget('jstests/restore/testdata/blankdb')));
- assert.neq(0, ret);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/invalid_metadata.js b/src/mongo/gotools/test/qa-tests/jstests/restore/invalid_metadata.js
deleted file mode 100644
index 5630d8648e6..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/invalid_metadata.js
+++ /dev/null
@@ -1,22 +0,0 @@
-(function() {
-
- load("jstests/configs/standard_dump_targets.config.js");
-
- // Tests using mongorestore to restore data from a collection whose .metadata.json
- // file contains invalid indexes.
-
- jsTest.log('Testing restoration from a metadata file with invalid indexes');
-
- var toolTest = new ToolTest('invalid_metadata');
- toolTest.startDB('foo');
-
- // run restore, targeting a collection whose metadata file contains an invalid index
- var ret = toolTest.runTool.apply(toolTest, ['restore',
- '--db', 'dbOne',
- '--collection', 'invalid_metadata']
- .concat(getRestoreTarget('jstests/restore/testdata/dump_with_invalid/dbOne/invalid_metadata.bson')));
- assert.neq(0, ret);
-
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/keep_index_version.js b/src/mongo/gotools/test/qa-tests/jstests/restore/keep_index_version.js
deleted file mode 100644
index 498d3fc7fdb..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/keep_index_version.js
+++ /dev/null
@@ -1,90 +0,0 @@
-(function() {
-
- load('jstests/common/check_version.js');
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests that running mongorestore with --keepIndexVersion does not
- // update the index version, and that running it without
- // --keepIndexVersion does.
-
- jsTest.log('Testing mongorestore with --keepIndexVersion');
-
- var toolTest = getToolTest('keep_index_version');
- var commonToolArgs = getCommonToolArguments();
-
- // where we'll put the dump
- var dumpTarget = 'keep_index_version_dump';
- resetDbpath(dumpTarget);
-
- // the db and collection we will use
- var testDB = toolTest.db.getSiblingDB('test');
- var testColl = testDB.coll;
-
- if (isAtLeastVersion(testDB.version(), '3.1.0')) {
- jsTest.log("skipping test on "+testDB.version());
- return;
- }
- if (TestData && TestData.storageEngine === 'wiredTiger') {
- jsTest.log("skipping test on "+testDB.version()+" when storage engine is wiredTiger");
- return;
- }
-
- // create a version 0 index on the collection
- testColl.ensureIndex({num: 1}, {v: 0});
-
- // insert some data
- var data = [];
- for (var i = 0; i < 10; i++) {
- data.push({num: i});
- }
- testColl.insertMany(data);
- // sanity check the insert worked
- assert.eq(10, testColl.count());
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the db
- testDB.dropDatabase();
-
- // restore the data
- ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the data was restored correctly
- assert.eq(10, testColl.count());
-
- // make sure the index version was updated
- var indexes = testColl.getIndexes();
- assert.eq(2, indexes.length);
- assert.eq(1, indexes[1].v);
-
- // drop the db
- testDB.dropDatabase();
-
- // restore the data with --keepIndexVersion specified
- ret = toolTest.runTool.apply(toolTest, ['restore', '--keepIndexVersion']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the data was restored correctly
- assert.eq(10, testColl.count());
-
- // make sure the index version was not updated
- indexes = testColl.getIndexes();
- assert.eq(2, indexes.length);
- assert.eq(0, indexes[1].v);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/large_bulk.js b/src/mongo/gotools/test/qa-tests/jstests/restore/large_bulk.js
deleted file mode 100644
index 2ffb80bddbe..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/large_bulk.js
+++ /dev/null
@@ -1,54 +0,0 @@
-(function() {
-
- // this test tests that the bulk api doesn't create BSON documents greater then the
- // 16MB limit, as was discovered in TOOLS-939.
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- var toolTest = getToolTest('large_bulk');
- var commonToolArgs = getCommonToolArguments();
-
- var dbOne = toolTest.db.getSiblingDB('dbOne');
- // create a test collection
-
- var oneK="";
- var oneM="";
- var i;
- for (i=0; i<=1024; i++) {
- oneK+="X";
- }
- for (i=0; i<=1024; i++) {
- oneM+=oneK;
- }
-
- var data = [];
- for (i=0; i<=32; i++) {
- data.push({data: oneM});
- }
- dbOne.test.insertMany(data);
-
- // dump it
- var dumpTarget = 'large_bulk_dump';
- resetDbpath(dumpTarget);
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the database so it's empty
- dbOne.dropDatabase();
-
- // restore it
- // 32 records are well under the 1k batch size
- // so this should test wether the physcial size limit is respected
- ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret, "restore to empty DB should have returned successfully");
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/malformed_bson.js b/src/mongo/gotools/test/qa-tests/jstests/restore/malformed_bson.js
deleted file mode 100644
index 41844f58407..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/malformed_bson.js
+++ /dev/null
@@ -1,20 +0,0 @@
-(function() {
-
- load("jstests/configs/standard_dump_targets.config.js");
- // Tests using mongorestore to restore data from a malformed bson file.
-
- jsTest.log('Testing restoration from a malformed bson file');
-
- var toolTest = new ToolTest('malformed_bson');
- toolTest.startDB('foo');
-
- // run restore, targeting a malformed bson file
- var ret = toolTest.runTool.apply(toolTest, ['restore',
- '--db', 'dbOne',
- '--collection', 'malformed_coll']
- .concat(getRestoreTarget('jstests/restore/testdata/dump_with_malformed/dbOne/malformed_coll.bson')));
- assert.neq(0, ret);
-
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/malformed_metadata.js b/src/mongo/gotools/test/qa-tests/jstests/restore/malformed_metadata.js
deleted file mode 100644
index f724a15c620..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/malformed_metadata.js
+++ /dev/null
@@ -1,22 +0,0 @@
-(function() {
-
- load("jstests/configs/standard_dump_targets.config.js");
- // Tests using mongorestore to restore data from a collection with
- // a malformed metadata file.
-
- jsTest.log('Testing restoration from a malformed metadata file');
-
- var toolTest = new ToolTest('malformed_metadata');
- toolTest.startDB('foo');
-
- // run restore, targeting a collection with a malformed
- // metadata.json file.
- var ret = toolTest.runTool.apply(toolTest, ['restore',
- '--db', 'dbOne',
- '--collection', 'malformed_metadata']
- .concat(getRestoreTarget('jstests/restore/testdata/dump_with_malformed/dbOne/malformed_metadata.bson')));
- assert.neq(0, ret);
-
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/missing_dump.js b/src/mongo/gotools/test/qa-tests/jstests/restore/missing_dump.js
deleted file mode 100644
index 0d8ff685105..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/missing_dump.js
+++ /dev/null
@@ -1,32 +0,0 @@
-(function() {
-
- load("jstests/configs/standard_dump_targets.config.js");
- // Tests running mongorestore with a missing dump files and directories.
-
- jsTest.log('Testing running mongorestore with missing dump files and directories');
-
- var toolTest = new ToolTest('missing_dump');
- toolTest.startDB('foo');
-
- // run restore with a missing dump directory
- var ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget('xxxxxxxx')));
- assert.neq(0, ret);
-
- // run restore with --db and a missing dump directory
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--db', 'test']
- .concat(getRestoreTarget('xxxxxxxx')));
- assert.neq(0, ret);
-
- // specify --collection with a missing file
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--db', 'test',
- '--collection', 'data']
- .concat(getRestoreTarget('jstests/restore/testdata/blankdb/xxxxxxxx.bson')));
- assert.neq(0, ret);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/multiple_dbs.js b/src/mongo/gotools/test/qa-tests/jstests/restore/multiple_dbs.js
deleted file mode 100644
index cfd051c35a0..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/multiple_dbs.js
+++ /dev/null
@@ -1,82 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests using mongorestore to restore data to multiple dbs.
-
- jsTest.log('Testing restoration to multiple dbs');
-
- var toolTest = getToolTest('multiple_dbs');
- var commonToolArgs = getCommonToolArguments();
-
- // where we'll put the dump
- var dumpTarget = 'multiple_dbs_dump';
- resetDbpath(dumpTarget);
-
- // the dbs we will be using
- var dbOne = toolTest.db.getSiblingDB('dbOne');
- var dbTwo = toolTest.db.getSiblingDB('dbTwo');
-
- // we'll use two collections in each db, with one of
- // the collection names common across the dbs
- var oneOnlyCollName = 'dbOneColl';
- var twoOnlyCollName = 'dbTwoColl';
- var sharedCollName = 'bothColl';
-
- // insert a bunch of data
- var data = {};
- data[oneOnlyCollName] = [];
- data[twoOnlyCollName] = [];
- data[sharedCollName+'one'] = [];
- data[sharedCollName+'two'] = [];
- for (var i = 0; i < 50; i++) {
- data[oneOnlyCollName].push({_id: i+'_'+oneOnlyCollName});
- data[twoOnlyCollName].push({_id: i+'_'+twoOnlyCollName});
- data[sharedCollName+'one'].push({_id: i+'_dbOne_'+sharedCollName});
- data[sharedCollName+'two'].push({_id: i+'_dbTwo_'+sharedCollName});
- }
- dbOne[oneOnlyCollName].insertMany(data[oneOnlyCollName]);
- dbTwo[twoOnlyCollName].insertMany(data[twoOnlyCollName]);
- dbOne[sharedCollName].insertMany(data[sharedCollName+'one']);
- dbTwo[sharedCollName].insertMany(data[sharedCollName+'two']);
-
- // sanity check the insertion worked
- assert.eq(50, dbOne[oneOnlyCollName].count());
- assert.eq(50, dbTwo[twoOnlyCollName].count());
- assert.eq(50, dbOne[sharedCollName].count());
- assert.eq(50, dbTwo[sharedCollName].count());
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the databases
- dbOne.dropDatabase();
- dbTwo.dropDatabase();
-
- // restore the data
- ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the data was restored properly
- assert.eq(50, dbOne[oneOnlyCollName].count());
- assert.eq(50, dbTwo[twoOnlyCollName].count());
- assert.eq(50, dbOne[sharedCollName].count());
- assert.eq(50, dbTwo[sharedCollName].count());
- for (i = 0; i < 50; i++) {
- assert.eq(1, dbOne[oneOnlyCollName].count({_id: i+'_'+oneOnlyCollName}));
- assert.eq(1, dbTwo[twoOnlyCollName].count({_id: i+'_'+twoOnlyCollName}));
- assert.eq(1, dbOne[sharedCollName].count({_id: i+'_dbOne_'+sharedCollName}));
- assert.eq(1, dbTwo[sharedCollName].count({_id: i+'_dbTwo_'+sharedCollName}));
- }
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/namespaces.js b/src/mongo/gotools/test/qa-tests/jstests/restore/namespaces.js
deleted file mode 100644
index cdad2a667d2..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/namespaces.js
+++ /dev/null
@@ -1,152 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- jsTest.log('Testing namespaces escludes, includes, and mappings during restore');
-
- var toolTest = getToolTest('namespaces');
- var commonToolArgs = getCommonToolArguments();
-
- // where we'll put the dump
- var dumpTarget = 'namespaces_dump';
-
- // the db we will dump from
- var source1DB = toolTest.db.getSiblingDB('source1');
- var source2DB = toolTest.db.getSiblingDB('source2');
- var source3DB = toolTest.db.getSiblingDB('source3');
- // the db we will restore to
- var destDB = toolTest.db.getSiblingDB('dest');
-
- function performRestoreWithArgs(...args) {
- return toolTest.runTool.apply(toolTest, ['restore']
- .concat(args)
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- }
-
- function addTestDataTo(db, colls) {
- colls.forEach(function(coll) {
- var data = [];
- for (var i = 0; i < 500; i++) {
- data.push({_id: i+'_'+db.getName()+'.'+coll});
- }
- db[coll].insertMany(data);
- // sanity check the insertion worked
- assert.eq(500, db[coll].count());
- // Add an index
- var index = {};
- index[db.getName()+'.'+coll] = 1;
- db[coll].createIndex(index);
- });
- }
-
- function verifyDataIn(collection, sourceNS) {
- if (sourceNS === null) {
- assert.eq(0, collection.count());
- return;
- }
- assert.eq(500, collection.count());
- for (var i = 0; i < 500; i++) {
- assert.eq(1, collection.count({_id: i+'_'+sourceNS}));
- }
- assert.eq(1, collection.getIndexes()[1].key[sourceNS]);
- }
-
- addTestDataTo(source1DB, ['coll1', 'coll2', 'coll3']);
- verifyDataIn(source1DB.coll1, 'source1.coll1');
- verifyDataIn(source1DB.coll2, 'source1.coll2');
- verifyDataIn(source1DB.coll3, 'source1.coll3');
-
- addTestDataTo(source2DB, ['coll1', 'coll2', 'coll3']);
- verifyDataIn(source2DB.coll1, 'source2.coll1');
- verifyDataIn(source2DB.coll2, 'source2.coll2');
- verifyDataIn(source2DB.coll3, 'source2.coll3');
-
- addTestDataTo(source3DB, ['coll3', 'coll4']);
- verifyDataIn(source3DB.coll3, 'source3.coll3');
- verifyDataIn(source3DB.coll4, 'source3.coll4');
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // Get rid of the source databases
- source1DB.dropDatabase();
- source2DB.dropDatabase();
- source3DB.dropDatabase();
-
- // Exclude *.coll1
- ret = performRestoreWithArgs('--nsExclude', '*.coll1', '--nsFrom', 'source$db-num$.coll$coll-num$', '--nsTo', 'dest.coll_$db-num$_$coll-num$');
- assert.eq(0, ret);
-
- verifyDataIn(destDB.coll_1_1, null);
- verifyDataIn(destDB.coll_1_2, 'source1.coll2');
- verifyDataIn(destDB.coll_1_3, 'source1.coll3');
- verifyDataIn(destDB.coll_2_1, null);
- verifyDataIn(destDB.coll_2_2, 'source2.coll2');
- verifyDataIn(destDB.coll_2_3, 'source2.coll3');
- verifyDataIn(destDB.coll_3_1, null);
- verifyDataIn(destDB.coll_3_2, null);
- verifyDataIn(destDB.coll_3_3, 'source3.coll3');
- verifyDataIn(destDB.coll_3_4, 'source3.coll4');
-
- destDB.dropDatabase();
-
- // Inclode only *.coll1
- ret = performRestoreWithArgs('--nsInclude', '*.coll1', '--nsFrom', 'source$db-num$.coll$coll-num$', '--nsTo', 'dest.coll_$db-num$_$coll-num$');
- assert.eq(0, ret);
-
- verifyDataIn(destDB.coll_1_1, 'source1.coll1');
- verifyDataIn(destDB.coll_1_2, null);
- verifyDataIn(destDB.coll_1_3, null);
- verifyDataIn(destDB.coll_2_1, 'source2.coll1');
- verifyDataIn(destDB.coll_2_2, null);
- verifyDataIn(destDB.coll_2_3, null);
- verifyDataIn(destDB.coll_3_1, null);
- verifyDataIn(destDB.coll_3_2, null);
- verifyDataIn(destDB.coll_3_3, null);
- verifyDataIn(destDB.coll_3_4, null);
-
- destDB.dropDatabase();
-
- // Exclude collections beginning with 'coll' (which is all of them)
- ret = performRestoreWithArgs('--excludeCollectionsWithPrefix', 'coll', '--nsFrom', 'source$db-num$.coll$coll-num$', '--nsTo', 'dest.coll_$db-num$_$coll-num$');
- assert.eq(0, ret);
-
- verifyDataIn(destDB.coll_1_1, null);
- verifyDataIn(destDB.coll_1_2, null);
- verifyDataIn(destDB.coll_1_3, null);
- verifyDataIn(destDB.coll_2_1, null);
- verifyDataIn(destDB.coll_2_2, null);
- verifyDataIn(destDB.coll_2_3, null);
- verifyDataIn(destDB.coll_3_1, null);
- verifyDataIn(destDB.coll_3_2, null);
- verifyDataIn(destDB.coll_3_3, null);
- verifyDataIn(destDB.coll_3_4, null);
-
- destDB.dropDatabase();
-
- // Swap source1 and source2 databases
- ret = performRestoreWithArgs('--nsFrom', 'source1.*', '--nsTo', 'source2.*', '--nsFrom', 'source2.*', '--nsTo', 'source1.*');
- assert.eq(0, ret);
-
- verifyDataIn(source1DB.coll1, 'source2.coll1');
- verifyDataIn(source1DB.coll2, 'source2.coll2');
- verifyDataIn(source1DB.coll3, 'source2.coll3');
- verifyDataIn(source2DB.coll1, 'source1.coll1');
- verifyDataIn(source2DB.coll2, 'source1.coll2');
- verifyDataIn(source2DB.coll3, 'source1.coll3');
- verifyDataIn(source3DB.coll3, 'source3.coll3');
- verifyDataIn(source3DB.coll4, 'source3.coll4');
-
- source1DB.dropDatabase();
- source2DB.dropDatabase();
- source3DB.dropDatabase();
-
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/no_index_restore.js b/src/mongo/gotools/test/qa-tests/jstests/restore/no_index_restore.js
deleted file mode 100644
index 43fa829de21..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/no_index_restore.js
+++ /dev/null
@@ -1,77 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests that running mongorestore with --noIndexRestore does not
- // restore indexes.
-
- jsTest.log('Testing restoration with --noIndexRestore');
-
- var toolTest = getToolTest('no_index_restore');
- var commonToolArgs = getCommonToolArguments();
-
- // where we'll put the dump
- var dumpTarget = 'no_index_restore_dump';
- resetDbpath(dumpTarget);
-
- // the db we will use
- var testDB = toolTest.db.getSiblingDB('test');
-
- // we'll use two collections, one with no indexes, the other
- // with indexes
- var collNames = ['coll1', 'coll2'];
-
- // insert some data to be dumped
- collNames.forEach(function(collName) {
- var data = [];
- for (var i = 0; i < 10; i++) {
- data.push({_id: i, num: i+1, s: ''+i});
- }
- testDB[collName].insertMany(data);
- // sanity check the insertion worked
- assert.eq(10, testDB[collName].count());
- });
-
- // create some indexes for the second collection
- testDB.coll2.ensureIndex({num: 1});
- testDB.coll2.ensureIndex({num: 1, s: -1});
- // sanity check the indexes were created
- assert.eq(3, testDB.coll2.getIndexes().length);
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the collections
- collNames.forEach(function(collName) {
- testDB[collName].drop();
- // sanity check the drop worked
- assert.eq(0, testDB[collName].count());
- assert.eq(0, testDB[collName].getIndexes().length);
- });
-
- // restore the data, with --noIndexRestore
- ret = toolTest.runTool.apply(toolTest, ['restore', '--noIndexRestore']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the data was restored fully, and only the _id
- // indexes were restored
- collNames.forEach(function(collName) {
- assert.eq(10, testDB[collName].count());
- for (var i = 0; i < 10; i++) {
- assert.eq(1, testDB[collName].count({_id: i}));
- }
-
- assert.eq(1, testDB[collName].getIndexes().length);
- });
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/no_options_restore.js b/src/mongo/gotools/test/qa-tests/jstests/restore/no_options_restore.js
deleted file mode 100644
index 9f57dc64bd9..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/no_options_restore.js
+++ /dev/null
@@ -1,127 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Using the collection options command is the way to get full
- // collection options as of 2.8, so we use this helper to
- // pull the options from a listCollections cursor.
- var extractCollectionOptions = function(db, name) {
- var res = db.runCommand("listCollections");
- for (var i = 0; i < res.cursor.firstBatch.length; i++) {
- if (res.cursor.firstBatch[i].name === name) {
- return res.cursor.firstBatch[i].options;
- }
- }
- return {};
- };
-
- // Tests that running mongorestore with --noOptionsRestore does
- // not restore collection options, and that running it without
- // --noOptionsRestore does restore collection options.
- jsTest.log('Testing restoration with --noOptionsRestore');
-
- var toolTest = getToolTest('no_options_restore');
- var commonToolArgs = getCommonToolArguments();
-
- // where we'll put the dump
- var dumpTarget = 'no_options_restore_dump';
- resetDbpath(dumpTarget);
-
- // the db we'll use
- var testDB = toolTest.db.getSiblingDB('test');
-
- // we'll use three different collections - the first will have
- // options set, the second won't, the third will be capped.
- // TODO: why aren't these being used?
- // var collWithOptions = testDB.withOptions;
- // var collWithoutOptions = testDB.withoutOptions;
- // var collCapped = testDB.capped;
-
- // create the noPadding collection
- var noPaddingOptions = {noPadding: true};
- testDB.createCollection('withOptions', noPaddingOptions);
-
- // create the capped collection
- var cappedOptions = {capped: true, size: 4096, autoIndexId: true};
- testDB.createCollection('capped', cappedOptions);
-
- // insert some data into all three collections
- ['withOptions', 'withoutOptions', 'capped'].forEach(function(collName) {
- var data = [];
- for (var i = 0; i < 50; i++) {
- data.push({_id: i});
- }
- testDB[collName].insertMany(data);
- // sanity check the insertions worked
- assert.eq(50, testDB[collName].count());
- });
-
- // add options to the appropriate collection
- cmdRet = testDB.runCommand({'collMod': 'withOptions', usePowerOf2Sizes: true});
- assert.eq(1, cmdRet.ok);
-
- // store the default options, because they change based on storage engine
- var baseCappedOptionsFromDB = extractCollectionOptions(testDB, 'capped');
- var baseWithOptionsFromDB = extractCollectionOptions(testDB, 'withOptions');
- var baseWithoutOptionsFromDB = extractCollectionOptions(testDB, 'withoutOptions');
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the data
- testDB.dropDatabase();
-
- // restore the data
- ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the data was restored correctly
- ['withOptions', 'withoutOptions', 'capped'].forEach(function(collName) {
- assert.eq(50, testDB[collName].count());
- });
-
- // make sure the options were restored correctly
- var cappedOptionsFromDB = extractCollectionOptions(testDB, 'capped');
- assert.eq(baseCappedOptionsFromDB, cappedOptionsFromDB);
- var withOptionsFromDB = extractCollectionOptions(testDB, 'withOptions');
- assert.eq(baseWithOptionsFromDB, withOptionsFromDB);
- var withoutOptionsFromDB = extractCollectionOptions(testDB, 'withoutOptions');
- assert.eq(baseWithoutOptionsFromDB, withoutOptionsFromDB);
-
- // drop the data
- testDB.dropDatabase();
-
- // restore the data, without the options
- ret = toolTest.runTool.apply(toolTest, ['restore', '--noOptionsRestore']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the data was restored correctly
- ['withOptions', 'withoutOptions', 'capped'].forEach(function(collName) {
- assert.eq(50, testDB[collName].count());
- });
-
- // make sure the options were not restored
- cappedOptionsFromDB = extractCollectionOptions(testDB, 'capped');
- assert.eq(baseWithoutOptionsFromDB, cappedOptionsFromDB);
- withOptionsFromDB = extractCollectionOptions(testDB, 'withOptions');
- assert.eq(baseWithoutOptionsFromDB, withOptionsFromDB);
- withoutOptionsFromDB = extractCollectionOptions(testDB, 'withoutOptions');
- assert.eq(baseWithoutOptionsFromDB, withoutOptionsFromDB);
-
- // additional check that the capped collection is no longer capped
- var cappedStats = testDB.capped.stats();
- assert(!cappedStats.capped);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/nonempty_temp_users.js b/src/mongo/gotools/test/qa-tests/jstests/restore/nonempty_temp_users.js
deleted file mode 100644
index 1ab3617ff14..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/nonempty_temp_users.js
+++ /dev/null
@@ -1,46 +0,0 @@
-(function() {
-
- load("jstests/configs/standard_dump_targets.config.js");
- // Tests running mongorestore and restoring users with a nonempty temp
- // users collection.
-
- jsTest.log('Testing restoring users with a nonempty temp users collection.'+
- ' The restore should fail');
-
- var toolTest = new ToolTest('nonempty_temp_users');
- toolTest.startDB('foo');
-
- // where we'll put the dump
- var dumpTarget = 'nonempty_temp_users_dump';
- resetDbpath(dumpTarget);
-
- // the admin db
- var adminDB = toolTest.db.getSiblingDB('admin');
-
- // create a user on the admin database
- adminDB.createUser({
- user: 'adminUser',
- pwd: 'password',
- roles: [{role: 'read', db: 'admin'}],
- });
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget)));
- assert.neq(1, ret);
-
- // clear out the user
- adminDB.dropAllUsers();
-
- // insert into the tempusers collection
- adminDB.tempusers.insert({_id: 'corruption'});
-
- // restore the data. It should succeed
- ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget(dumpTarget)));
- assert.neq(1, ret);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/norestore_profile.js b/src/mongo/gotools/test/qa-tests/jstests/restore/norestore_profile.js
deleted file mode 100644
index 84ea4a5b5dc..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/norestore_profile.js
+++ /dev/null
@@ -1,58 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- var toolTest = getToolTest('norestore_profile');
- var commonToolArgs = getCommonToolArguments();
-
- var dbOne = toolTest.db.getSiblingDB('dbOne');
- // turn on the profiler
- dbOne.setProfilingLevel(2);
-
- // create some test data
- var data = [];
- for (var i=0; i<=100; i++) {
- data.push({_id: i, x: i*i});
- }
- dbOne.test.insertMany(data);
- // run some queries to end up in the profile collection
- dbOne.test.find({_id: 3});
- dbOne.test.find({_id: 30});
- dbOne.test.find({_id: 50});
-
- assert.gt(dbOne.system.profile.count(), 0, "profiler still empty after running test setup");
-
- // dump it
- var dumpTarget = 'norestore_profile';
- resetDbpath(dumpTarget);
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // turn off profiling and remove the profiler collection
- dbOne.setProfilingLevel(0);
- dbOne.system.profile.drop();
- assert.eq(dbOne.system.profile.count(), 0);
-
- // drop the database so it's empty
- dbOne.dropDatabase();
-
- // restore it, this should restore everything *except* the profile collection
- ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret, "restore to empty DB should have returned successfully");
-
- // check that the data actually got restored
- assert.gt(dbOne.test.count(), 100);
-
- // but the profile collection should still be empty
- assert.eq(dbOne.system.profile.count(), 0);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/objcheck_valid_bson.js b/src/mongo/gotools/test/qa-tests/jstests/restore/objcheck_valid_bson.js
deleted file mode 100644
index baa2d67545d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/objcheck_valid_bson.js
+++ /dev/null
@@ -1,46 +0,0 @@
-(function() {
-
- load("jstests/configs/standard_dump_targets.config.js");
- // Tests that running mongorestore with --objcheck on valid bson
- // files restores the data successfully.
-
- jsTest.log('Testing restoration with --objcheck');
-
- var toolTest = new ToolTest('objcheck_valid_bson');
- toolTest.startDB('foo');
-
- // where we'll put the dump
- var dumpTarget = 'objcheck_valid_bson_dump';
- resetDbpath(dumpTarget);
-
- // the db and collection we will use
- var testDB = toolTest.db.getSiblingDB('test');
- var testColl = testDB.coll;
-
- // insert some data
- var data = [];
- for (var i = 0; i < 50; i++) {
- data.push({_id: i});
- }
- testColl.insertMany(data);
- // sanity check the insert worked
- assert.eq(50, testColl.count());
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump'].concat(getDumpTarget(dumpTarget)));
- assert.eq(0, ret);
-
- // drop the data
- testDB.dropDatabase();
-
- // restore the data, with --objcheck
- ret = toolTest.runTool.apply(toolTest, ['restore'].concat(getRestoreTarget(dumpTarget)));
- assert.eq(0, ret);
-
- // make sure the restore completed succesfully
- assert.eq(50, testColl.count());
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_and_limit.js b/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_and_limit.js
deleted file mode 100644
index 378e018f155..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_and_limit.js
+++ /dev/null
@@ -1,78 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- if (dump_targets !== "standard") {
- print('skipping test incompatable with archiving or compression');
- return assert(true);
- }
-
- // Tests using mongorestore with the --oplogReplay and --oplogLimit flags.
-
- jsTest.log('Testing restoration with the --oplogReplay and --oplogLimit options');
-
- var toolTest = getToolTest('oplog_replay_and_limit');
- var commonToolArgs = getCommonToolArguments();
-
- // this test uses the testdata/dump_with_oplog directory. this directory contains:
- // - a test/ subdirectory, which will restore objects { _id: i } for i from
- // 0-9 to the test.data collection
- // - an oplog.bson file, which contains oplog entries for inserts of
- // objects { _id: i } for i from 10-14 to the test.data collection.
- //
- // within the oplog.bson file, the entries for i from 10-13 have timestamps
- // 1416342265:2 through 1416342265:5. the entry for { _id: i } has
- // timestamp 1500000000:1.
-
- // the db and collection we'll be using
- var testDB = toolTest.db.getSiblingDB('test');
- var testColl = testDB.data;
-
- // restore the data, without --oplogReplay. _ids 0-9, which appear in the
- // collection's bson file, should be restored.
- var ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget('jstests/restore/testdata/dump_with_oplog'))
- .concat(commonToolArgs));
- assert.eq(0, ret);
- assert.eq(10, testColl.count());
- for (var i = 0; i < 10; i++) {
- assert.eq(1, testColl.count({_id: i}));
- }
-
- // drop the db
- testDB.dropDatabase();
-
- // restore the data, with --oplogReplay. _ids 10-14, appearing
- // in the oplog.bson file, should be inserted as well.
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--oplogReplay']
- .concat(getRestoreTarget('jstests/restore/testdata/dump_with_oplog'))
- .concat(commonToolArgs));
- assert.eq(0, ret);
- assert.eq(15, testColl.count());
- for (i = 0; i < 15; i++) {
- assert.eq(1, testColl.count({_id: i}));
- }
-
- // drop the db
- testDB.dropDatabase();
-
- // restore the data, with --oplogReplay and --oplogLimit with a
- // value that will filter out { _id: 14 } from getting inserted.
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--oplogReplay',
- '--oplogLimit', '1416342266:0']
- .concat(getRestoreTarget('jstests/restore/testdata/dump_with_oplog'))
- .concat(commonToolArgs));
- assert.eq(0, ret);
- assert.eq(14, testColl.count());
- for (i = 0; i < 14; i++) {
- assert.eq(1, testColl.count({_id: i}));
- }
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_conflict.js b/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_conflict.js
deleted file mode 100644
index 4d021a656e2..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_conflict.js
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * oplog_replay_conflict.js
- *
- * This file tests mongorestore with --oplogReplay where the user provides two top priority
- * oplogs and mongorestore should exit with an error.
- */
-(function() {
- 'use strict';
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- var commonToolArgs = getCommonToolArguments();
- var restoreTarget = 'jstests/restore/testdata/dump_oplog_conflict';
-
- var toolTest = getToolTest('oplog_replay_conflict');
-
- // The test db and collections we'll be using.
- var testDB = toolTest.db.getSiblingDB('test');
- testDB.createCollection('data');
- var testColl = testDB.data;
-
- // Replay the oplog from the provided oplog
- var ret = toolTest.runTool.apply(toolTest, ['restore',
- '--oplogReplay',
- '--oplogFile', 'jstests/restore/testdata/extra_oplog.bson',
- restoreTarget].concat(commonToolArgs));
-
- assert.eq(0, testColl.count(),
- "no original entries should be restored");
- assert.eq(1, ret, "restore operation succeeded when it shouldn't have");
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_local_main.js b/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_local_main.js
deleted file mode 100644
index ad149c2a910..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_local_main.js
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * oplog_replay_local_main.js
- *
- * This file tests mongorestore with --oplogReplay where the oplog file is in the 'oplog.$main'
- * collection of the 'local' database. This occurs when using master-slave replication.
- */
-(function() {
- 'use strict';
-
- var dumpTarget = 'oplog_replay_local_main';
- var rt = new ReplTest('oplog_replay_local_main');
- var m = rt.start(true);
- // Set the test db to 'local' and collection to 'oplog.$main' to fake a replica set oplog
- var testDB = m.getDB('local');
- var testColl = testDB.oplog.$main;
- var testRestoreDB = m.getDB('test');
- var testRestoreColl = testRestoreDB.op;
- resetDbpath(dumpTarget);
-
- var lastop = function() {
- return testColl.find().sort({$natural: -1}).next();
- };
-
- var lastTS = lastop().ts.t;
- var oplogSize = 100;
-
- // Create a fake oplog consisting of 100 inserts.
- for (var i = 0; i < oplogSize; i++) {
- var op = {
- ts: new Timestamp(lastTS, i),
- op: 'i',
- o: {_id: i, x: 'a' + i},
- ns: 'test.op'
- };
- assert.commandWorked(testDB.runCommand({godinsert: 'oplog.$main', obj: op}));
- }
-
- // Dump the fake oplog.
- var ret = runMongoProgram('mongodump',
- '--port', rt.ports[0],
- '--db', 'local',
- '-c', 'oplog.$main',
- '--out', dumpTarget);
- assert.eq(0, ret, "dump operation failed");
-
- // Create the test.op collection.
- testRestoreColl.drop();
- testRestoreDB.createCollection("op");
- assert.eq(0, testRestoreColl.count());
-
- // Replay the oplog from the provided oplog
- ret = runMongoProgram('mongorestore',
- '--port', rt.ports[0],
- '--oplogReplay',
- dumpTarget);
- assert.eq(0, ret, "restore operation failed");
-
- assert.eq(oplogSize, testRestoreColl.count(), "all oplog entries should be inserted");
- rt.stop(true);
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_local_rs.js b/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_local_rs.js
deleted file mode 100644
index 03a16c4a745..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_local_rs.js
+++ /dev/null
@@ -1,67 +0,0 @@
-/**
- * oplog_replay_local_rs.js
- *
- * This file tests mongorestore with --oplogReplay where the oplog file is in the 'oplog.rs'
- * collection of the 'local' database. This occurs when using a replica-set for replication.
- */
-(function() {
- 'use strict';
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- var commonToolArgs = getCommonToolArguments();
- var dumpTarget = 'oplog_replay_local_rs';
-
- var toolTest = getToolTest('oplog_replay_local_rs');
-
- // Set the test db to 'local' and collection to 'oplog.rs' to fake a replica set oplog
- var testDB = toolTest.db.getSiblingDB('local');
- var testColl = testDB['oplog.rs'];
- var testRestoreDB = toolTest.db.getSiblingDB('test');
- var testRestoreColl = testRestoreDB.op;
- resetDbpath(dumpTarget);
-
- var oplogSize = 100;
- testDB.createCollection('oplog.rs', {capped: true, size: 100000});
-
- // Create a fake oplog consisting of 100 inserts.
- for (var i = 0; i < oplogSize; i++) {
- var r = testColl.insert({
- ts: new Timestamp(0, i),
- op: "i",
- o: {_id: i, x: 'a' + i},
- ns: "test.op",
- });
- assert.eq(1, r.nInserted, "insert failed");
- }
-
- // Dump the fake oplog.
- var ret = toolTest.runTool.apply(toolTest, ['dump',
- '--db', 'local',
- '-c', 'oplog.rs',
- '--out', dumpTarget]
- .concat(commonToolArgs));
- assert.eq(0, ret, "dump operation failed");
-
- // Dump original data.
- testColl.drop();
- assert.eq(0, testColl.count(), "all original entries should be dropped");
-
-
- // Create the test.op collection.
- testRestoreColl.drop();
- testRestoreDB.createCollection("op");
- assert.eq(0, testRestoreColl.count());
-
- // Replay the oplog from the provided oplog
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--oplogReplay',
- dumpTarget]
- .concat(commonToolArgs));
- assert.eq(0, ret, "restore operation failed");
-
- assert.eq(oplogSize, testRestoreColl.count(),
- "all oplog entries should be inserted");
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_no_oplog.js b/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_no_oplog.js
deleted file mode 100644
index eae7db1519e..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_no_oplog.js
+++ /dev/null
@@ -1,19 +0,0 @@
-(function() {
-
- load("jstests/configs/standard_dump_targets.config.js");
- // Tests using mongorestore with --oplogReplay when no oplog.bson file is present.
-
- jsTest.log('Testing restoration with --oplogReplay and no oplog.bson file');
-
- var toolTest = new ToolTest('oplog_replay_no_oplog');
- toolTest.startDB('foo');
-
- // run the restore, with a dump directory that has no oplog.bson file
- var ret = toolTest.runTool.apply(toolTest, ['restore', '--oplogReplay']
- .concat(getRestoreTarget('restore/testdata/dump_empty')));
- assert.neq(0, ret);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_noop.js b/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_noop.js
deleted file mode 100644
index 6a1f20d5cf6..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_noop.js
+++ /dev/null
@@ -1,37 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- if (dump_targets !== "standard") {
- print('skipping test incompatable with archiving or compression');
- return assert(true);
- }
-
- // Tests using mongorestore with --oplogReplay and noops in the oplog.bson,
- // making sure the noops are ignored.
-
- jsTest.log('Testing restoration with --oplogReplay and noops');
-
- var toolTest = getToolTest('oplog_replay_noop');
- var commonToolArgs = getCommonToolArguments();
-
- // the db and collection we'll be using
- var testDB = toolTest.db.getSiblingDB('test');
- var testColl = testDB.data;
-
- // restore the data, with --oplogReplay
- var ret = toolTest.runTool.apply(toolTest, ['restore', '--oplogReplay']
- .concat(getRestoreTarget('jstests/restore/testdata/dump_with_noop_in_oplog'))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the document appearing in the oplog, which shows up
- // after the noops, was added successfully
- assert.eq(1, testColl.count());
- assert.eq(1, testColl.count({a: 1}));
-
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_priority_oplog.js b/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_priority_oplog.js
deleted file mode 100644
index 6d2d873285e..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_priority_oplog.js
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * oplog_replay_priority_oplog.js
- *
- * This file tests mongorestore with --oplogReplay where the user provides two oplogs and
- * mongorestore only restores the higher priority one.
- */
-(function() {
- 'use strict';
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- var commonToolArgs = getCommonToolArguments();
- var restoreTarget = 'jstests/restore/testdata/dump_local_oplog';
-
- var toolTest = getToolTest('oplog_replay_priority_oplog');
-
- // The test db and collections we'll be using.
- var testDB = toolTest.db.getSiblingDB('test');
- testDB.createCollection('data');
- var testColl = testDB.data;
- testDB.createCollection('op');
- var restoreColl = testDB.op;
-
- // Replay the oplog from the provided oplog
- var ret = toolTest.runTool.apply(toolTest, ['restore',
- '--oplogReplay',
- '--oplogFile', 'jstests/restore/testdata/extra_oplog.bson',
- restoreTarget]
- .concat(commonToolArgs));
- assert.eq(0, ret, "restore operation failed");
-
- // Extra oplog has 5 entries as explained in oplog_replay_and_limit.js
- assert.eq(5, testColl.count(),
- "all original entries from high priority oplog should be restored");
- assert.eq(0, restoreColl.count(),
- "no original entries from low priority oplog should be restored");
- toolTest.stop();
-}());
-
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_size_safety.js b/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_size_safety.js
deleted file mode 100644
index 05543d646ae..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_size_safety.js
+++ /dev/null
@@ -1,71 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- var commonToolArgs = getCommonToolArguments();
- var dumpTarget = 'oplog_replay_sizes';
-
- // Helper for using mongorestore with --oplogReplay and a large oplog.bson
- function tryOplogReplay(oplogSize, documentSize) {
- var toolTest = getToolTest('oplog_replay_sizes');
- // the test db and collections we'll be using
- var testDB = toolTest.db.getSiblingDB('test_oplog');
- var testColl = testDB.oplog;
- var testRestoreDB = toolTest.db.getSiblingDB('test');
- var testRestoreColl = testRestoreDB.op;
- resetDbpath(dumpTarget);
-
- var debugString = 'with ' + oplogSize + ' ops of size ' + documentSize;
- jsTest.log('Testing --oplogReplay ' + debugString);
-
-
- // create a fake oplog consisting of a large number of inserts
- var xStr = new Array(documentSize).join("x"); // ~documentSize bytes string
- var data = [];
- for (var i = 0; i < oplogSize; i++) {
- data.push({
- ts: new Timestamp(0, i),
- op: "i",
- o: {_id: i, x: xStr},
- ns: "test.op"
- });
- if (data.length === 1000) {
- testColl.insertMany(data);
- data = [];
- }
- }
- testColl.insertMany(data);
-
- // dump the fake oplog
- var ret = toolTest.runTool.apply(toolTest, ['dump',
- '--db', 'test_oplog',
- '-c', 'oplog',
- '--out', dumpTarget]
- .concat(commonToolArgs));
- assert.eq(0, ret, "dump operation failed " + debugString);
-
- // create the test.op collection
- testRestoreColl.drop();
- testRestoreDB.createCollection("op");
- assert.eq(0, testRestoreColl.count());
-
- // trick restore into replaying the "oplog" we forged above
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--oplogReplay', dumpTarget+'/test_oplog']
- .concat(commonToolArgs));
- assert.eq(0, ret, "restore operation failed " + debugString);
- assert.eq(oplogSize, testRestoreColl.count(),
- "all oplog entries should be inserted " + debugString);
- toolTest.stop();
- }
-
- // run the test on various oplog and op sizes
- tryOplogReplay(1024, 1024); // sanity check
- tryOplogReplay(1024*1024, 1); // millions of micro ops
- tryOplogReplay(8, 16*1024*1023); // 8 ~16MB ops
- tryOplogReplay(32, 1024*1024); // 32 ~1MB ops
- tryOplogReplay(32*1024, 1024); // many ~1KB ops
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_specify_file.js b/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_specify_file.js
deleted file mode 100644
index cba60c35dc7..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/oplog_replay_specify_file.js
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * oplog_replay_specify_file.js
- *
- * This file tests mongorestore with --oplogReplay where the user specifies a file with the
- * --oplogFile flag.
- */
-(function() {
- 'use strict';
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- var commonToolArgs = getCommonToolArguments();
- var dumpTarget = 'oplog_replay_specify_file';
-
- var toolTest = getToolTest('oplog_replay_specify_file');
-
- // The test db and collections we'll be using.
- var testDB = toolTest.db.getSiblingDB('test_oplog');
- var testColl = testDB.foo;
- var testRestoreDB = toolTest.db.getSiblingDB('test');
- var testRestoreColl = testRestoreDB.op;
- resetDbpath(dumpTarget);
-
- var oplogSize = 100;
-
- // Create a fake oplog consisting of 100 inserts.
- var data = [];
- for (var i = 0; i < oplogSize; i++) {
- data.push({
- ts: new Timestamp(0, i),
- op: "i",
- o: {_id: i, x: 'a' + i},
- ns: "test.op"
- });
- }
- testColl.insertMany(data, {ordered: true});
-
- // Dump the fake oplog.
- var ret = toolTest.runTool.apply(toolTest, ['dump',
- '--db', 'test_oplog',
- '-c', 'foo',
- '--out', dumpTarget]
- .concat(commonToolArgs));
- assert.eq(0, ret, "dump operation failed");
-
- // Dump original data.
- testColl.drop();
- assert.eq(0, testColl.count(),
- "all original entries should be dropped");
-
- // Create the test.op collection.
- testRestoreColl.drop();
- testRestoreDB.createCollection("op");
- assert.eq(0, testRestoreColl.count());
-
- // Replay the oplog from the provided oplog
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--oplogReplay',
- '--oplogFile', dumpTarget + '/test_oplog/foo.bson',
- dumpTarget]
- .concat(commonToolArgs));
- assert.eq(0, ret, "restore operation failed");
-
- assert.eq(oplogSize, testRestoreColl.count(),
- "all oplog entries should be inserted");
- assert.eq(oplogSize, testColl.count(),
- "all original entries should be restored");
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/partial_restore.js b/src/mongo/gotools/test/qa-tests/jstests/restore/partial_restore.js
deleted file mode 100644
index b274ec56f8d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/partial_restore.js
+++ /dev/null
@@ -1,83 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests using mongorestore to restore only a subset of a dump (either a
- // single db or a single collection) from a larger dump.
-
- jsTest.log('Testing restoration of a subset of a dump');
-
- var toolTest = getToolTest('partial_restore');
- var commonToolArgs = getCommonToolArguments();
-
- // where we'll put the dump
- var dumpTarget = 'partial_restore_dump';
- resetDbpath(dumpTarget);
-
- // we'll insert data into three collections spread across two dbs
- var dbOne = toolTest.db.getSiblingDB('dbOne');
- var dbTwo = toolTest.db.getSiblingDB('dbTwo');
- var collOne = dbOne.collOne;
- var collTwo = dbOne.collTwo;
- var collThree = dbTwo.collThree;
-
- // insert a bunch of data
- var dataOne = [];
- var dataTwo = [];
- var dataThree = [];
- for (var i = 0; i < 50; i++) {
- collOne.insert({_id: i+'_collOne'});
- collTwo.insert({_id: i+'_collTwo'});
- collThree.insert({_id: i+'_collThree'});
- }
- collOne.insertMany(dataOne);
- collTwo.insertMany(dataTwo);
- collThree.insertMany(dataThree);
- // sanity check the insertion worked
- assert.eq(50, collOne.count());
- assert.eq(50, collTwo.count());
- assert.eq(50, collThree.count());
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the databases
- dbOne.dropDatabase();
- dbTwo.dropDatabase();
-
- // restore a single db
- ret = toolTest.runTool.apply(toolTest, ['restore', '--db', 'dbOne']
- .concat(getRestoreTarget(dumpTarget+'/dbOne'))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the restore worked, and nothing else but that db was restored
- assert.eq(50, collOne.count());
- assert.eq(50, collTwo.count());
- assert.eq(0, collThree.count());
-
- // drop the data
- dbOne.dropDatabase();
-
- // restore a single collection
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--db', 'dbOne',
- '--collection', 'collTwo']
- .concat(getRestoreTarget(dumpTarget+'/dbOne/collTwo.bson'))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the restore worked, and nothing else but that collection was restored
- assert.eq(0, collOne.count());
- assert.eq(50, collTwo.count());
- assert.eq(0, collThree.count());
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/preserve_oplog_structure_order.js b/src/mongo/gotools/test/qa-tests/jstests/restore/preserve_oplog_structure_order.js
deleted file mode 100644
index 785a5ad31d6..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/preserve_oplog_structure_order.js
+++ /dev/null
@@ -1,25 +0,0 @@
-(function() {
-
- load("jstests/configs/standard_dump_targets.config.js");
-
- jsTest.log('Testing that the order of fields is preserved in the oplog');
-
- var toolTest = new ToolTest('ordered_oplog');
- toolTest.startDB('foo');
-
- // run restore, with an "update" oplog with a _id field that is a subdocument with several fields
- // { "h":{"$numberLong":"7987029173745013482"},"ns":"test.foobar",
- // "o":{"_id":{"a":1,"b":2,"c":3,"d":5,"e":6,"f":7,"g":8},"foo":"bar"},
- // "o2":{"_id":{"a":1,"b":2,"c":3,"d":5,"e":6,"f":7,"g":8}},"op":"u","ts":{"$timestamp":{"t":1439225650,"i":1}},"v":NumberInt(2)
- // }
- // if the _id from the "o" and the _id from the "o2" don't match then mongod complains
- // run it several times, because with just one execution there is a chance that restore randomly selects the correct order
- // With several executions the chances of all false positives diminishes.
- for (var i=0; i<10; i++) {
- var ret = toolTest.runTool('restore', '--oplogReplay', 'jstests/restore/testdata/dump_with_complex_id_oplog');
- assert.eq(0, ret);
- }
-
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/restore_document_validation.js b/src/mongo/gotools/test/qa-tests/jstests/restore/restore_document_validation.js
deleted file mode 100644
index 511b78dd9d7..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/restore_document_validation.js
+++ /dev/null
@@ -1,180 +0,0 @@
-/**
- * restore_document_validation.js
- *
- * This file test that mongorestore works with document validation. It both checks that when
- * validation is turned on invalid documents are not restored and that when a user indicates
- * they want to bypass validation, that all documents are restored.
- */
-
-(function() {
- 'use strict';
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- /**
- * Part 1: Test that restore follows document validation rules.
- */
- jsTest.log('Testing that restore reacts well to document validation');
-
- var toolTest = getToolTest('document_validation');
- var commonToolArgs = getCommonToolArguments();
-
- // where we'll put the dump
- var dumpTarget = 'doc_validation';
- resetDbpath(dumpTarget);
-
- // the db we will use
- var testDB = toolTest.db.getSiblingDB('test');
-
- // create 1000 documents, half of which will pass the validation
- var data = [];
- for (var i = 0; i < 1000; i++) {
- if (i%2 === 0) {
- data.push({_id: i, num: i+1, s: String(i)});
- } else {
- data.push({_id: i, num: i+1, s: String(i), baz: i});
- }
- }
- testDB.bar.insertMany(data, {ordered: true});
- // sanity check the insertion worked
- assert.eq(1000, testDB.bar.count(), 'all documents should be inserted');
-
- var ret = toolTest.runTool.apply(toolTest, ['dump', '-v']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret, 'dumping should run successfully');
-
- testDB.dropDatabase();
- assert.eq(0, testDB.bar.count(), 'after the drop, no documents should be seen');
-
- // sanity check that we can restore the data without validation
- ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- assert.eq(1000, testDB.bar.count(), 'after the restore, all documents should be seen');
-
- testDB.dropDatabase();
- assert.eq(0, testDB.bar.count(), 'after the drop, no documents should be seen');
-
- // turn on validation
- var r = testDB.createCollection('bar', {validator: {baz: {$exists: true}}});
- assert.eq(r, {ok: 1}, 'create collection with validation should work');
-
- // test that it's working
- r = testDB.bar.insert({num: 10000});
- assert.eq(r.nInserted, 0, "invalid documents shouldn't be inserted");
-
- // restore the 1000 records of which only 500 are valid
- ret = toolTest.runTool.apply(toolTest, ['restore', '-v']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret, 'restoring against a collection with validation on should still succeed');
-
- assert.eq(500, testDB.bar.count(), 'only the valid documents should be restored');
-
- /**
- * Part 2: Test that restore can bypass document validation rules.
- */
- jsTest.log('Testing that bypass document validation works');
-
- testDB.dropDatabase();
-
- // turn on validation
- r = testDB.createCollection('bar', {validator: {baz: {$exists: true}}});
- assert.eq(r, {ok: 1}, 'create collection with validation should work');
-
- // test that we cannot insert an 'invalid' document
- r = testDB.bar.insert({num: 10000});
- assert.eq(r.nInserted, 0, 'invalid documents should not be inserted');
-
- // restore the 1000 records again with bypassDocumentValidation turned on
- ret = toolTest.runTool.apply(toolTest, ['restore', '--bypassDocumentValidation']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret, 'restoring documents should work with bypass document validation set');
- assert.eq(1000, testDB.bar.count(),
- 'all documents should be restored with bypass document validation set');
-
- /**
- * Part 3: Test that restore can restore the document validation rules,
- * if they're dumped with the collection.
- */
- jsTest.log('Testing that dump and restore restores the validation rules themselves');
-
- // clear out the database, including the validation rules
- testDB.dropDatabase();
- assert.eq(0, testDB.bar.count(), 'after the drop, no documents should be seen');
-
- // test that we can insert an 'invalid' document
- r = testDB.bar.insert({num: 10000});
- assert.eq(r.nInserted, 1,
- 'invalid documents should be inserted after validation rules are dropped');
-
- testDB.dropDatabase();
- assert.eq(0, testDB.bar.count(), 'after the drop, no documents should be seen');
-
- // restore the 1000 records again
- ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
- assert.eq(1000, testDB.bar.count());
-
- // turn on validation on a existing collection
- testDB.runCommand({'collMod': 'bar', 'validator': {baz: {$exists: true}}});
-
- // re-dump everything, this time dumping the validation rules themselves
- ret = toolTest.runTool.apply(toolTest, ['dump', '-v']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret, 'the dump should run successfully');
-
- // clear out the database, including the validation rules
- testDB.dropDatabase();
- assert.eq(0, testDB.bar.count(), 'after the drop, no documents should be seen');
-
- // test that we can insert an 'invalid' document
- r = testDB.bar.insert({num: 10000});
- assert.eq(r.nInserted, 1,
- 'invalid documents should be inserted after we drop validation rules');
-
- testDB.dropDatabase();
- assert.eq(0, testDB.bar.count(), 'after the drop, no documents should be seen');
-
- // restore the 1000 records again
- ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret, 'restoring rules and some invalid documents should run successfully');
- assert.eq(500, testDB.bar.count(),
- 'restoring the validation rules and documents should only restore valid documents');
-
- /**
- * Part 4: Test that restore can bypass the document validation rules,
- * even if they're dumped with the collection and restored with the collection.
- */
- jsTest.log('Testing that bypass document validation works when restoring the rules as well');
-
- // clear out the database, including the validation rules
- testDB.dropDatabase();
- assert.eq(0, testDB.bar.count(), 'after the drop, no documents should be seen');
-
- // test that we can insert an 'invalid' document
- r = testDB.bar.insert({num: 10000});
- assert.eq(r.nInserted, 1,
- 'invalid documents should be inserted after validation rules are dropped');
-
- testDB.dropDatabase();
- assert.eq(0, testDB.bar.count(), 'after the drop, no documents should be seen');
-
- // restore the 1000 records again with bypassDocumentValidation turned on
- ret = toolTest.runTool.apply(toolTest, ['restore', '--bypassDocumentValidation']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret, 'restoring documents should work with bypass document validation set');
- assert.eq(1000, testDB.bar.count(),
- 'all documents should be restored with bypass document validation set');
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/sharded_fullrestore.js b/src/mongo/gotools/test/qa-tests/jstests/restore/sharded_fullrestore.js
deleted file mode 100644
index 7096943b0c3..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/sharded_fullrestore.js
+++ /dev/null
@@ -1,45 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/sharding_28.config.js');
- }
-
- if (dump_targets === "archive") {
- print('skipping test incompatable with archiving');
- return assert(true);
- }
-
- var targetPath = 'restore_full_restore';
- var toolTest = getToolTest('fullrestore');
- var commonToolArgs = getCommonToolArguments();
-
- var sourceDB = toolTest.db.getSiblingDB('blahblah');
-
- // put in some sample data
- var data = [];
- for (var i=0; i<100; i++) {
- data.push({x: 1});
- }
- sourceDB.test.insertMany(data);
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(targetPath))
- .concat(commonToolArgs));
- assert.eq(ret, 0, "dump of full sharded system should have succeeded");
-
- // a full restore should fail
- ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget(targetPath))
- .concat(commonToolArgs));
- assert.neq(ret, 0, "restore of full sharded system should have failed");
-
- // delete the config dir
- resetDbpath(targetPath + "/config");
-
- // *now* the restore should succeed
- ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget(targetPath))
- .concat(commonToolArgs));
- assert.eq(ret, 0, "restore of sharded system without config db should have succeeded");
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/stop_on_error.js b/src/mongo/gotools/test/qa-tests/jstests/restore/stop_on_error.js
deleted file mode 100644
index 15f68466e51..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/stop_on_error.js
+++ /dev/null
@@ -1,50 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- var toolTest = getToolTest('stop_on_error');
- var commonToolArgs = getCommonToolArguments();
-
- var dbOne = toolTest.db.getSiblingDB('dbOne');
- // create a test collection
- var data = [];
- for (var i=0; i<=100; i++) {
- data.push({_id: i, x: i*i});
- }
- dbOne.test.insertMany(data);
-
- // dump it
- var dumpTarget = 'stop_on_error_dump';
- resetDbpath(dumpTarget);
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the database so it's empty
- dbOne.dropDatabase();
-
- // restore it - database was just dropped, so this should work successfully
- ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret, "restore to empty DB should have returned successfully");
-
- // restore it again with --stopOnError - this one should fail since there are dup keys
- ret = toolTest.runTool.apply(toolTest, ['restore', '--stopOnError', '-vvvv']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.neq(0, ret);
-
- // restore it one more time without --stopOnError - there are dup keys but they will be ignored
- ret = toolTest.runTool.apply(toolTest, ['restore', '-vvvv']
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/symlinks.js b/src/mongo/gotools/test/qa-tests/jstests/restore/symlinks.js
deleted file mode 100644
index a27ef8b94c3..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/symlinks.js
+++ /dev/null
@@ -1,46 +0,0 @@
-(function() {
-
- // Tests using mongorestore on a dump directory containing symlinks
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- if (dump_targets !== "standard") {
- print('skipping test incompatable with archiving or compression');
- return assert(true);
- }
-
- jsTest.log('Testing restoration from a dump containing symlinks');
-
- var toolTest = getToolTest('symlinks');
-
- // this test uses the testdata/dump_with_soft_link. within that directory,
- // the dbTwo directory is a soft link to testdata/soft_linked_db and the
- // dbOne/data.bson file is a soft link to testdata/soft_linked_collection.bson.
- // the file not_a_dir is a softlink to a bson file, and is there to make
- // sure that softlinked regular files are not treated as directories.
-
- // the two dbs we'll be using
- var dbOne = toolTest.db.getSiblingDB('dbOne');
- var dbTwo = toolTest.db.getSiblingDB('dbTwo');
- var notADir = toolTest.db.getSiblingDB('not_a_dir');
-
- // restore the data
- var ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(getRestoreTarget('jstests/restore/testdata/dump_with_soft_links')));
- assert.eq(0, ret);
-
- // make sure the data was restored properly
- assert.eq(10, dbOne.data.count());
- assert.eq(10, dbTwo.data.count());
- assert.eq(0, notADir.data.count());
- for (var i = 0; i < 10; i++) {
- assert.eq(1, dbOne.data.count({_id: i+'_dbOne'}));
- assert.eq(1, dbTwo.data.count({_id: i+'_dbTwo'}));
- }
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/blankcoll/blank.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/blankcoll/blank.bson
deleted file mode 100644
index e69de29bb2d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/blankcoll/blank.bson
+++ /dev/null
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/blankcoll/blank_metadata.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/blankcoll/blank_metadata.bson
deleted file mode 100644
index e69de29bb2d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/blankcoll/blank_metadata.bson
+++ /dev/null
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/blankcoll/blank_metadata.metadata.json b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/blankcoll/blank_metadata.metadata.json
deleted file mode 100644
index 0967ef424bc..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/blankcoll/blank_metadata.metadata.json
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/blankdb/README b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/blankdb/README
deleted file mode 100644
index 8a13ce0a00c..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/blankdb/README
+++ /dev/null
@@ -1 +0,0 @@
-This exists so that this directory can remain blank of .bson files but still be checked into version control.
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_empty/db_empty/coll_empty.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_empty/db_empty/coll_empty.bson
deleted file mode 100644
index e69de29bb2d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_empty/db_empty/coll_empty.bson
+++ /dev/null
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_extended_json_options/test/changelog.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_extended_json_options/test/changelog.bson
deleted file mode 100644
index 3799a6f04b6..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_extended_json_options/test/changelog.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_extended_json_options/test/changelog.metadata.json b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_extended_json_options/test/changelog.metadata.json
deleted file mode 100644
index 64d0433a836..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_extended_json_options/test/changelog.metadata.json
+++ /dev/null
@@ -1 +0,0 @@
-{ "options" : { "create" : "changelog", "size" : { "$numberLong" : "10000000" }, "capped" : true }, "indexes" : [ { "v" : 1, "key" : { "_id" : { "$numberLong" : "1"}}, "ns" : "config.changelog", "name" : "_id_" }, {"v":1,"key":{"pos":"2d"},"name":"position_2d","ns":"config.changelog","min":{"$numberLong":"0"},"max":{"$numberLong":"1000"},"bits":{"$numberLong":"32"}} ] }
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_local_oplog/local/oplog.rs.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_local_oplog/local/oplog.rs.bson
deleted file mode 100644
index 6051944948e..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_local_oplog/local/oplog.rs.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_local_oplog/local/oplog.rs.metadata.json b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_local_oplog/local/oplog.rs.metadata.json
deleted file mode 100644
index 9e28c8db056..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_local_oplog/local/oplog.rs.metadata.json
+++ /dev/null
@@ -1 +0,0 @@
-{"options":{"capped":true,"size":100096},"indexes":[]} \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_oplog_conflict/oplog.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_oplog_conflict/oplog.bson
deleted file mode 100644
index a9ada58715f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_oplog_conflict/oplog.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_complex_id_oplog/oplog.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_complex_id_oplog/oplog.bson
deleted file mode 100644
index 9a47fca217f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_complex_id_oplog/oplog.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_invalid/dbOne/invalid_metadata.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_invalid/dbOne/invalid_metadata.bson
deleted file mode 100644
index e69de29bb2d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_invalid/dbOne/invalid_metadata.bson
+++ /dev/null
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_invalid/dbOne/invalid_metadata.metadata.json b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_invalid/dbOne/invalid_metadata.metadata.json
deleted file mode 100644
index e0ea3257e88..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_invalid/dbOne/invalid_metadata.metadata.json
+++ /dev/null
@@ -1 +0,0 @@
-{"indexes":[{"v":1,"name":"_id_","ns":"dbOne.invalid_metadata"},{"v":1,"name":"a_1","ns":"dbOne.invalid_metadata"}]}
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_malformed/dbOne/malformed_coll.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_malformed/dbOne/malformed_coll.bson
deleted file mode 100644
index dd6d86a43dc..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_malformed/dbOne/malformed_coll.bson
+++ /dev/null
@@ -1 +0,0 @@
-XXX
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_malformed/dbOne/malformed_metadata.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_malformed/dbOne/malformed_metadata.bson
deleted file mode 100644
index e69de29bb2d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_malformed/dbOne/malformed_metadata.bson
+++ /dev/null
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_malformed/dbOne/malformed_metadata.metadata.json b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_malformed/dbOne/malformed_metadata.metadata.json
deleted file mode 100644
index dd6d86a43dc..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_malformed/dbOne/malformed_metadata.metadata.json
+++ /dev/null
@@ -1 +0,0 @@
-XXX
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_malformed/dbOne/missing_metadata.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_malformed/dbOne/missing_metadata.bson
deleted file mode 100644
index e69de29bb2d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_malformed/dbOne/missing_metadata.bson
+++ /dev/null
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_noop_in_oplog/oplog.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_noop_in_oplog/oplog.bson
deleted file mode 100644
index 29172294c0f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_noop_in_oplog/oplog.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_oplog/oplog.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_oplog/oplog.bson
deleted file mode 100644
index a9ada58715f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_oplog/oplog.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_oplog/test/data.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_oplog/test/data.bson
deleted file mode 100644
index c570d917b76..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_oplog/test/data.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_oplog/test/data.metadata.json b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_oplog/test/data.metadata.json
deleted file mode 100644
index 65e5d967f00..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_oplog/test/data.metadata.json
+++ /dev/null
@@ -1 +0,0 @@
-{"options":{"flags":1},"indexes":[{"v":1,"key":{"_id":1},"name":"_id_","ns":"test.data"}]} \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_oplog/test/system.indexes.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_oplog/test/system.indexes.bson
deleted file mode 100644
index 324f8e270df..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_oplog/test/system.indexes.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbOne/data.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbOne/data.bson
deleted file mode 100644
index ff0d2e6bc31..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbOne/data.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbOne/data.metadata.json b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbOne/data.metadata.json
deleted file mode 100644
index 98eb8799771..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbOne/data.metadata.json
+++ /dev/null
@@ -1 +0,0 @@
-{"options":{"flags":1},"indexes":[{"v":1,"key":{"_id":1},"name":"_id_","ns":"dbOne.data"}]} \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbOne/system.indexes.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbOne/system.indexes.bson
deleted file mode 100644
index f1247e928c3..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbOne/system.indexes.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbTwo/data.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbTwo/data.bson
deleted file mode 100644
index 0d5439cec2e..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbTwo/data.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbTwo/data.metadata.json b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbTwo/data.metadata.json
deleted file mode 100644
index 8fa8534bde5..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbTwo/data.metadata.json
+++ /dev/null
@@ -1 +0,0 @@
-{"options":{"flags":1},"indexes":[{"v":1,"key":{"_id":1},"name":"_id_","ns":"dbTwo.data"}]} \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbTwo/system.indexes.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbTwo/system.indexes.bson
deleted file mode 100644
index cea78cca0a5..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/dbTwo/system.indexes.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/not_a_dir b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/not_a_dir
deleted file mode 100644
index a9ada58715f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/dump_with_soft_links/not_a_dir
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/extra_oplog.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/extra_oplog.bson
deleted file mode 100644
index a9ada58715f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/extra_oplog.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/soft_linked_collection.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/soft_linked_collection.bson
deleted file mode 100644
index ff0d2e6bc31..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/soft_linked_collection.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/soft_linked_db/data.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/soft_linked_db/data.bson
deleted file mode 100644
index 0d5439cec2e..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/soft_linked_db/data.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/soft_linked_db/data.metadata.json b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/soft_linked_db/data.metadata.json
deleted file mode 100644
index 8fa8534bde5..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/soft_linked_db/data.metadata.json
+++ /dev/null
@@ -1 +0,0 @@
-{"options":{"flags":1},"indexes":[{"v":1,"key":{"_id":1},"name":"_id_","ns":"dbTwo.data"}]} \ No newline at end of file
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/soft_linked_db/system.indexes.bson b/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/soft_linked_db/system.indexes.bson
deleted file mode 100644
index cea78cca0a5..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/testdata/soft_linked_db/system.indexes.bson
+++ /dev/null
Binary files differ
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles.js b/src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles.js
deleted file mode 100644
index eae515292b7..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles.js
+++ /dev/null
@@ -1,87 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests running mongorestore with --restoreDbUsersAndRoles
-
- jsTest.log('Testing running mongorestore with --restoreDbUsersAndRoles');
-
- var toolTest = getToolTest('users_and_roles');
- var commonToolArgs = getCommonToolArguments();
-
- // where we'll put the dump
- var dumpTarget = 'users_and_roles_dump';
- resetDbpath(dumpTarget);
-
- // the db we'll be using
- var testDB = toolTest.db.getSiblingDB('test');
-
- // create some users and roles on the database
- testDB.createUser({
- user: 'userOne',
- pwd: 'pwdOne',
- roles: [{role: 'read', db: 'test'}],
- });
- testDB.createRole({
- role: 'roleOne',
- privileges: [{
- resource: {db: 'test', collection: ''},
- actions: ['find'],
- }],
- roles: [],
- });
- testDB.createUser({
- user: 'userTwo',
- pwd: 'pwdTwo',
- roles: [{role: 'roleOne', db: 'test'}],
- });
-
- // insert some data
- var data = [];
- for (var i = 0; i < 10; i++) {
- data.push({_id: i});
- }
- testDB.data.insertMany(data);
- // sanity check the insertion worked
- assert.eq(10, testDB.data.count());
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump', '--db', 'test', '--dumpDbUsersAndRoles']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the database, users, and roles
- testDB.dropDatabase();
- testDB.dropAllUsers();
- testDB.dropAllRoles();
-
- // restore the data, specifying --restoreDBUsersAndRoles
- ret = toolTest.runTool.apply(toolTest, ['restore', '--db', 'test', '--restoreDbUsersAndRoles']
- .concat(getRestoreTarget(dumpTarget+'/test'))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // make sure the data was restored
- assert.eq(10, testDB.data.count());
- for (i = 0; i < 10; i++) {
- assert.eq(1, testDB.data.count({_id: i}));
- }
-
- // make sure the users were restored
- var users = testDB.getUsers();
- assert.eq(2, users.length);
- assert(users[0].user === 'userOne' || users[1].user === 'userOne');
- assert(users[0].user === 'userTwo' || users[1].user === 'userTwo');
-
- // make sure the role was restored
- var roles = testDB.getRoles();
- assert.eq(1, roles.length);
- assert.eq('roleOne', roles[0].role);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles_26_to_28_to_26.js b/src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles_26_to_28_to_26.js
deleted file mode 100644
index add3820e25b..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles_26_to_28_to_26.js
+++ /dev/null
@@ -1,145 +0,0 @@
-// This test requires mongo 2.6.x, and mongo 3.0.0 releases
-// @tags: [requires_mongo_26, requires_mongo_30]
-(function() {
-
- load("jstests/configs/standard_dump_targets.config.js");
- // skip tests requiring wiredTiger storage engine on pre 2.8 mongod
- if (TestData && TestData.storageEngine === 'wiredTiger') {
- return;
- }
-
- // Tests using mongorestore with --restoreDbUsersAndRoles, using a dump from
- // a 2.6 mongod and restoring to a 2.8 mongod, then dumping again and
- // restoring to a 2.6 mongod.
-
- jsTest.log('Testing running mongorestore with --restoreDbUsersAndRoles,'+
- ' restoring a 2.6 dump to a 2.8 mongod, then back to a 2.6 mongod');
-
- var toolTest = new ToolTest('users_and_roles_26_to_28_to_26', {binVersion: '2.6'});
- toolTest.startDB('foo');
-
- // where we'll put the dump
- var dumpTarget = 'users_and_roles_26_to_28_to_26_dump';
- resetDbpath(dumpTarget);
-
- // the db we'll be using
- var testDB = toolTest.db.getSiblingDB('test');
-
- // create some users and roles on the database
- testDB.createUser({
- user: 'userOne',
- pwd: 'pwdOne',
- roles: [{role: 'read', db: 'test'}],
- });
- testDB.createRole({
- role: 'roleOne',
- privileges: [{
- resource: {db: 'test', collection: ''},
- actions: ['find'],
- }],
- roles: [],
- });
- testDB.createUser({
- user: 'userTwo',
- pwd: 'pwdTwo',
- roles: [{role: 'roleOne', db: 'test'}],
- });
-
- // insert some data
- var data = [];
- for (var i = 0; i < 10; i++) {
- data.push({_id: i});
- }
- testDB.data.insertMany(data);
- // sanity check the insertion worked
- assert.eq(10, testDB.data.count());
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump', '--db', 'test', '--dumpDbUsersAndRoles']
- .concat(getDumpTarget(dumpTarget)));
- assert.eq(0, ret);
-
- // drop the database, users, and roles
- testDB.dropDatabase();
- testDB.dropAllUsers();
- testDB.dropAllRoles();
-
- // restart the mongod as a 2.8
- stopMongod(toolTest.port);
- toolTest.m = null;
- toolTest.db = null;
- delete toolTest.options.binVersion;
- toolTest.startDB('foo');
-
- // refresh the db reference
- testDB = toolTest.db.getSiblingDB('test');
-
- // restore the data, specifying --restoreDBUsersAndRoles
- ret = toolTest.runTool.apply(toolTest, ['restore', '--db', 'test', '--restoreDbUsersAndRoles']
- .concat(getRestoreTarget(dumpTarget+'/test')));
- assert.eq(0, ret);
-
- // make sure the data was restored
- assert.eq(10, testDB.data.count());
- for (i = 0; i < 10; i++) {
- assert.eq(1, testDB.data.count({_id: i}));
- }
-
- // make sure the users were restored
- var users = testDB.getUsers();
- assert.eq(2, users.length);
- assert(users[0].user === 'userOne' || users[1].user === 'userOne');
- assert(users[0].user === 'userTwo' || users[1].user === 'userTwo');
-
- // make sure the role was restored
- var roles = testDB.getRoles();
- assert.eq(1, roles.length);
- assert.eq('roleOne', roles[0].role);
-
- // dump the data again, to a slightly different target
- ret = toolTest.runTool.apply(toolTest, ['dump', '--db', 'test', '--dumpDbUsersAndRoles']
- .concat(getDumpTarget(dumpTarget+'_second')));
- assert.eq(0, ret);
-
- // drop the database, users, and roles
- testDB.dropDatabase();
- testDB.dropAllUsers();
- testDB.dropAllRoles();
-
- // restart the mongod as a 2.6
- stopMongod(toolTest.port);
- toolTest.m = null;
- toolTest.db = null;
- toolTest.options = toolTest.options || {};
- toolTest.options.binVersion = '2.6';
- toolTest.startDB('foo');
-
- // refresh the db reference
- testDB = toolTest.db.getSiblingDB('test');
-
- // restore the data, specifying --restoreDBUsersAndRoles
- ret = toolTest.runTool.apply(toolTest, ['restore', '--db', 'test', '--restoreDbUsersAndRoles']
- .concat(getRestoreTarget(dumpTarget+'_second/test')));
- assert.eq(0, ret);
-
- // make sure the data was restored
- assert.eq(10, testDB.data.count());
- for (i = 0; i < 10; i++) {
- assert.eq(1, testDB.data.count({_id: i}));
- }
-
- // make sure the users were restored
- users = testDB.getUsers();
- assert.eq(2, users.length);
- assert(users[0].user === 'userOne' || users[1].user === 'userOne');
- assert(users[0].user === 'userTwo' || users[1].user === 'userTwo');
-
- // make sure the role was restored
- roles = testDB.getRoles();
- assert.eq(1, roles.length);
- assert.eq('roleOne', roles[0].role);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles_28_to_26.js b/src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles_28_to_26.js
deleted file mode 100644
index 1158ed5842f..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles_28_to_26.js
+++ /dev/null
@@ -1,159 +0,0 @@
-// This test requires mongo 2.6.x, and mongo 3.0.0 releases
-// @tags: [requires_mongo_26, requires_mongo_30]
-(function() {
-
- load("jstests/configs/standard_dump_targets.config.js");
-
- // skip tests requiring wiredTiger storage engine on pre 2.8 mongod
- if (TestData && TestData.storageEngine === 'wiredTiger') {
- return;
- }
-
- // Tests using mongorestore with --restoreDbUsersAndRoles, using a dump from
- // a 2.8 mongod and restoring to a 2.6 mongod, which should fail.
-
- jsTest.log('Testing running mongorestore with --restoreDbUsersAndRoles,'+
- ' restoring a 2.8 dump to a 2.6 mongod');
-
- var toolTest = new ToolTest('users_and_roles_28_to_26');
- resetDbpath(toolTest.dbpath);
- toolTest.startDB('foo');
-
- // where we'll put the dump
- var dumpTarget = 'users_and_roles_28_to_26_dump';
- resetDbpath(dumpTarget);
-
- // the db we'll be using
- var testDB = toolTest.db.getSiblingDB('test');
-
- // create some users and roles on the database
- testDB.createUser({
- user: 'userOne',
- pwd: 'pwdOne',
- roles: [{role: 'read', db: 'test'}],
- });
- testDB.createRole({
- role: 'roleOne',
- privileges: [{
- resource: {db: 'test', collection: ''},
- actions: ['find'],
- }],
- roles: [],
- });
- testDB.createUser({
- user: 'userTwo',
- pwd: 'pwdTwo',
- roles: [{role: 'roleOne', db: 'test'}],
- });
-
- // insert some data
- var data = [];
- for (var i = 0; i < 10; i++) {
- data.push({_id: i});
- }
- testDB.data.insertMany(data);
- // sanity check the insertion worked
- assert.eq(10, testDB.data.count());
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump', '--db', 'test', '--dumpDbUsersAndRoles']
- .concat(getDumpTarget(dumpTarget)));
- assert.eq(0, ret);
-
- // drop the database, users, and roles
- testDB.dropDatabase();
- testDB.dropAllUsers();
- testDB.dropAllRoles();
-
- // restart the mongod as a 2.6
- stopMongod(toolTest.port);
- toolTest.m = null;
- toolTest.db = null;
- toolTest.options = toolTest.options || {};
- toolTest.options.binVersion = '2.6';
- resetDbpath(toolTest.dbpath);
- toolTest.startDB('foo');
-
- // refresh the db reference
- testDB = toolTest.db.getSiblingDB('test');
-
- // restore the data, specifying --restoreDBUsersAndRoles. it should fail
- // since the auth version is too new
- ret = toolTest.runTool.apply(toolTest, ['restore', '--db', 'test', '--restoreDbUsersAndRoles']
- .concat(getRestoreTarget(dumpTarget+'/test')));
- assert.neq(0, ret);
-
- // success
- toolTest.stop();
- jsTest.log('Testing running mongorestore with --restoreDbUsersAndRoles,'+
- ' restoring a 2.8 dump to a 2.6 mongod');
-
- toolTest = new ToolTest('users_and_roles_28_to_26');
- resetDbpath(toolTest.dbpath);
- toolTest.startDB('foo');
-
- // where we'll put the dump
- dumpTarget = 'users_and_roles_28_to_26_dump';
-
- // the db we'll be using
- testDB = toolTest.db.getSiblingDB('test');
-
- // create some users and roles on the database
- testDB.createUser({
- user: 'userOne',
- pwd: 'pwdOne',
- roles: [{role: 'read', db: 'test'}],
- });
- testDB.createRole({
- role: 'roleOne',
- privileges: [{
- resource: {db: 'test', collection: ''},
- actions: ['find'],
- }],
- roles: [],
- });
- testDB.createUser({
- user: 'userTwo',
- pwd: 'pwdTwo',
- roles: [{role: 'roleOne', db: 'test'}],
- });
-
- // insert some data
- for (i = 0; i < 10; i++) {
- testDB.data.insert({_id: i});
- }
- // sanity check the insertion worked
- assert.eq(10, testDB.data.count());
-
- // dump the data
- ret = toolTest.runTool.apply(toolTest, ['dump', '--db', 'test', '--dumpDbUsersAndRoles']
- .concat(getDumpTarget(dumpTarget)));
- assert.eq(0, ret);
-
- // drop the database, users, and roles
- testDB.dropDatabase();
- testDB.dropAllUsers();
- testDB.dropAllRoles();
-
- // restart the mongod as a 2.6
- stopMongod(toolTest.port);
- toolTest.m = null;
- toolTest.db = null;
- toolTest.options = toolTest.options || {};
- toolTest.options.binVersion = '2.6';
- resetDbpath(toolTest.dbpath);
- toolTest.startDB('foo');
-
- // refresh the db reference
- testDB = toolTest.db.getSiblingDB('test');
-
- // restore the data, specifying --restoreDBUsersAndRoles. it should fail
- // since the auth version is too new
- ret = toolTest.runTool.apply(toolTest, ['restore', '--db', 'test', '--restoreDbUsersAndRoles']
- .concat(getRestoreTarget(dumpTarget+'/test')));
- assert.neq(0, ret);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles_full_dump.js b/src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles_full_dump.js
deleted file mode 100644
index 5ac03dc6c08..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles_full_dump.js
+++ /dev/null
@@ -1,144 +0,0 @@
-// This test requires mongo 2.6.x releases
-// @tags: [requires_mongo_26]
-(function() {
-
- // Tests running mongorestore with --restoreDbUsersAndRoles against
- // a full dump.
-
-
- jsTest.log('Testing running mongorestore with --restoreDbUsersAndRoles against'+
- ' a full dump');
-
- if (typeof getDumpTarget === 'undefined') {
- load('jstests/configs/standard_dump_targets.config.js');
- }
-
- if (dump_targets !== "standard") {
- print('skipping test incompatable with archiving or compression');
- return assert(true);
- }
-
- var runTest = function(sourceDBVersion, dumpVersion, restoreVersion, destDBVersion) {
-
- jsTest.log('Running with sourceDBVersion=' + (sourceDBVersion || 'latest') +
- ', dumpVersion=' + (dumpVersion || 'latest') + ', restoreVersion=' +
- (restoreVersion || 'latest') + ', and destDBVersion=' +
- (destDBVersion || 'latest'));
-
- var toolTest = new ToolTest('users_and_roles_full_dump',
- {binVersion: sourceDBVersion});
- toolTest.startDB('foo');
-
- // where we'll put the dump
- var dumpTarget = 'users_and_roles_full_dump_dump';
- resetDbpath(dumpTarget);
-
- // the db we'll be using, and the admin db
- var adminDB = toolTest.db.getSiblingDB('admin');
- var testDB = toolTest.db.getSiblingDB('test');
-
- // create a user and role on the admin database
- adminDB.createUser({
- user: 'adminUser',
- pwd: 'password',
- roles: [{role: 'read', db: 'admin'}],
- });
- adminDB.createRole({
- role: 'adminRole',
- privileges: [{
- resource: {db: 'admin', collection: ''},
- actions: ['find'],
- }],
- roles: [],
- });
-
- // create some users and roles on the database
- testDB.createUser({
- user: 'userOne',
- pwd: 'pwdOne',
- roles: [{role: 'read', db: 'test'}],
- });
- testDB.createRole({
- role: 'roleOne',
- privileges: [{
- resource: {db: 'test', collection: ''},
- actions: ['find'],
- }],
- roles: [],
- });
- testDB.createUser({
- user: 'userTwo',
- pwd: 'pwdTwo',
- roles: [{role: 'roleOne', db: 'test'}],
- });
-
- // insert some data
- var data = [];
- for (var i = 0; i < 10; i++) {
- data.push({_id: i});
- }
- testDB.data.insertMany(data);
- // sanity check the insertion worked
- assert.eq(10, testDB.data.count());
-
- // dump the data
- var args = ['mongodump' + (dumpVersion ? ('-'+dumpVersion) : ''),
- '--port', toolTest.port]
- .concat(getDumpTarget(dumpTarget));
- var ret = runMongoProgram.apply(this, args);
- assert.eq(0, ret);
-
- // restart the mongod, with a clean db path
- stopMongod(toolTest.port);
- resetDbpath(toolTest.dbpath);
- toolTest.m = null;
- toolTest.db = null;
- toolTest.options.binVersion = destDBVersion;
- toolTest.startDB('foo');
-
- // refresh the db references
- adminDB = toolTest.db.getSiblingDB('admin');
- testDB = toolTest.db.getSiblingDB('test');
-
- // do a full restore
- args = ['mongorestore' + (restoreVersion ? ('-'+restoreVersion) : ''),
- '--port', toolTest.port]
- .concat(getRestoreTarget(dumpTarget));
- ret = runMongoProgram.apply(this, args);
- assert.eq(0, ret);
-
- // make sure the data was restored
- assert.eq(10, testDB.data.count());
- for (i = 0; i < 10; i++) {
- assert.eq(1, testDB.data.count({_id: i}));
- }
-
- // make sure the users were restored
- var users = testDB.getUsers();
- assert.eq(2, users.length);
- assert(users[0].user === 'userOne' || users[1].user === 'userOne');
- assert(users[0].user === 'userTwo' || users[1].user === 'userTwo');
- var adminUsers = adminDB.getUsers();
- assert.eq(1, adminUsers.length);
- assert.eq('adminUser', adminUsers[0].user);
-
- // make sure the roles were restored
- var roles = testDB.getRoles();
- assert.eq(1, roles.length);
- assert.eq('roleOne', roles[0].role);
- var adminRoles = adminDB.getRoles();
- assert.eq(1, adminRoles.length);
- assert.eq('adminRole', adminRoles[0].role);
-
- // success
- toolTest.stop();
-
- };
-
- // 'undefined' triggers latest
- runTest('2.6', '2.6', undefined, '2.6');
- runTest('2.6', '2.6', undefined, undefined);
- runTest('2.6', undefined, undefined, undefined);
- runTest('2.6', undefined, undefined, '2.6');
- runTest(undefined, undefined, undefined, undefined);
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles_temp_collections.js b/src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles_temp_collections.js
deleted file mode 100644
index a57ab2cdfee..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/users_and_roles_temp_collections.js
+++ /dev/null
@@ -1,106 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
-
- // Tests running mongorestore with --restoreDbUsersAndRoles
-
- jsTest.log('Testing running mongorestore with --restoreDbUsersAndRoles');
-
- var toolTest = getToolTest('users_and_roles_temp_collections');
- var commonToolArgs = getCommonToolArguments();
-
- // where we'll put the dump
- var dumpTarget = 'users_and_roles_temp_collections_dump';
- resetDbpath(dumpTarget);
-
- // the db we'll be using
- var testDB = toolTest.db.getSiblingDB('test');
-
- // create some users and roles on the database
- testDB.createUser({
- user: 'userOne',
- pwd: 'pwdOne',
- roles: [{role: 'read', db: 'test'}],
- });
- testDB.createRole({
- role: 'roleOne',
- privileges: [{
- resource: {db: 'test', collection: ''},
- actions: ['find'],
- }],
- roles: [],
- });
- testDB.createUser({
- user: 'userTwo',
- pwd: 'pwdTwo',
- roles: [{role: 'roleOne', db: 'test'}],
- });
-
- // insert some data
- var data = [];
- for (var i = 0; i < 10; i++) {
- data.push({_id: i});
- }
- testDB.data.insertMany(data);
- // sanity check the insertion worked
- assert.eq(10, testDB.data.count());
-
- // dump the data
- var ret = toolTest.runTool.apply(toolTest, ['dump', '--db', 'test', '--dumpDbUsersAndRoles']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- // drop the database, users, and roles
- testDB.dropDatabase();
- testDB.dropAllUsers();
- testDB.dropAllRoles();
-
- // insert to the default temp collections so we hit them later
- var adminDB = toolTest.db.getSiblingDB('admin');
- adminDB.tempusers.insert({_id: 1});
- adminDB.temproles.insert({_id: 1});
-
- // try to restore the data
- ret = toolTest.runTool.apply(toolTest, ['restore', '--db', 'test', '--restoreDbUsersAndRoles']
- .concat(getRestoreTarget(dumpTarget+'/test'))
- .concat(commonToolArgs));
-
- // we should succeed with default temp collections
- assert.eq(0, ret);
-
- // try to restore the data with new temp collections
- ret = toolTest.runTool.apply(toolTest, ['restore',
- '--db', 'test',
- '--tempUsersColl', 'tempU',
- '--tempRolesColl', 'tempR',
- '--restoreDbUsersAndRoles']
- .concat(getRestoreTarget(dumpTarget+'/test'))
- .concat(commonToolArgs));
-
- // we should succeed with new temp collections
- assert.eq(0, ret);
-
- // make sure the data was restored
- assert.eq(10, testDB.data.count());
- for (i = 0; i < 10; i++) {
- assert.eq(1, testDB.data.count({_id: i}));
- }
-
- // make sure the users were restored
- var users = testDB.getUsers();
- assert.eq(2, users.length);
- assert(users[0].user === 'userOne' || users[1].user === 'userOne');
- assert(users[0].user === 'userTwo' || users[1].user === 'userTwo');
-
- // make sure the role was restored
- var roles = testDB.getRoles();
- assert.eq(1, roles.length);
- assert.eq('roleOne', roles[0].role);
-
- // success
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/write_concern.js b/src/mongo/gotools/test/qa-tests/jstests/restore/write_concern.js
deleted file mode 100644
index ae9cf6e0f7c..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/write_concern.js
+++ /dev/null
@@ -1,66 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- var toolTest = new ToolTest('write_concern', null);
- var commonToolArgs = getCommonToolArguments();
-
- var rs = new ReplSetTest({
- name: "rpls",
- nodes: 3,
- useHostName: true,
- settings: {chainingAllowed: false},
- });
-
- rs.startSet();
- rs.initiate();
- rs.awaitReplication();
- toolTest.port = rs.getPrimary().port;
- var dbOne = rs.getPrimary().getDB("dbOne");
-
- // create a test collection
- var data = [];
- for (var i=0; i<=100; i++) {
- data.push({_id: i, x: i*i});
- }
- dbOne.test.insertMany(data);
- rs.awaitReplication();
-
- // dump the data that we'll
- var dumpTarget = 'write_concern_dump';
- resetDbpath(dumpTarget);
- var ret = toolTest.runTool.apply(toolTest, ['dump']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- function writeConcernTestFunc(exitCode, writeConcern, name) {
- jsTest.log(name);
- var ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(writeConcern)
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(exitCode, ret, name);
- dbOne.dropDatabase();
- }
-
- function noConnectTest() {
- return startMongoProgramNoConnect.apply(null, ['mongorestore',
- '--writeConcern={w:3}', '--host', rs.getPrimary().host]
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- }
-
- // drop the database so it's empty
- dbOne.dropDatabase();
-
- // load and run the write concern suite
- load('jstests/libs/wc_framework.js');
- runWCTest("mongorestore", rs, toolTest, writeConcernTestFunc, noConnectTest);
-
- dbOne.dropDatabase();
- rs.stopSet();
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/restore/write_concern_mongos.js b/src/mongo/gotools/test/qa-tests/jstests/restore/write_concern_mongos.js
deleted file mode 100644
index 7473347f07b..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/restore/write_concern_mongos.js
+++ /dev/null
@@ -1,71 +0,0 @@
-(function() {
-
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- var toolTest = new ToolTest('write_concern', null);
- var st = new ShardingTest({
- shards: {
- rs0: {
- nodes: 3,
- useHostName: true,
- settings: {chainingAllowed: false},
- },
- },
- mongos: 1,
- config: 1,
- configReplSetTestOptions: {
- settings: {chainingAllowed: false},
- },
- });
- var rs = st.rs0;
- rs.awaitReplication();
- toolTest.port = st.s.port;
- var commonToolArgs = getCommonToolArguments();
- var dbOne = st.s.getDB("dbOne");
-
- // create a test collection
- var data = [];
- for (var i=0; i<=100; i++) {
- data.push({_id: i, x: i*i});
- }
- dbOne.test.insertMany(data);
- rs.awaitReplication();
-
- // dump the data that we'll
- var dumpTarget = 'write_concern_mongos_dump';
- resetDbpath(dumpTarget);
- var ret = toolTest.runTool.apply(toolTest, ['dump', '-d', 'dbOne']
- .concat(getDumpTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(0, ret);
-
- function writeConcernTestFunc(exitCode, writeConcern, name) {
- jsTest.log(name);
- var ret = toolTest.runTool.apply(toolTest, ['restore']
- .concat(writeConcern)
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- assert.eq(exitCode, ret, name);
- dbOne.dropDatabase();
- }
-
- function noConnectTest() {
- return startMongoProgramNoConnect.apply(null, ['mongorestore',
- '--writeConcern={w:3}', '--host', st.s.host]
- .concat(getRestoreTarget(dumpTarget))
- .concat(commonToolArgs));
- }
-
- // drop the database so it's empty
- dbOne.dropDatabase();
-
- // load and run the write concern suite
- load('jstests/libs/wc_framework.js');
- runWCTest("mongorestore", rs, toolTest, writeConcernTestFunc, noConnectTest);
-
- dbOne.dropDatabase();
- rs.stopSet();
- toolTest.stop();
-
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/ssl/ssl_with_system_ca.js b/src/mongo/gotools/test/qa-tests/jstests/ssl/ssl_with_system_ca.js
deleted file mode 100644
index 5abfe227f9d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/ssl/ssl_with_system_ca.js
+++ /dev/null
@@ -1,43 +0,0 @@
-// On OSX this test assumes that jstests/libs/trusted-ca.pem has been added as a trusted
-// certificate to the login keychain of the evergreen user. See,
-// https://github.com/10gen/buildslave-cookbooks/commit/af7cabe5b6e0885902ebd4902f7f974b64cc8961
-// for details.
-((function() {
- 'use strict';
- const HOST_TYPE = getBuildInfo().buildEnvironment.target_os;
-
- if (HOST_TYPE === "windows") {
- runProgram(
- "certutil.exe", "-addstore", "-user", "-f", "CA", "jstests\\libs\\trusted-ca.pem");
- }
-
- var testWithCerts = function(serverPem, clientPem) {
- jsTest.log(`Testing with SSL certs $ {
- serverPem
- }`);
- // allowSSL instead of requireSSL so that the non-SSL connection succeeds.
- var conn = MongoRunner.runMongod(
- {sslMode: 'requireSSL', sslPEMKeyFile: "jstests/libs/" + serverPem});
-
- // Should not be able to authenticate with x509.
- // Authenticate call will return 1 on success, 0 on error.
- var argv =
- ['./mongodump', '-v', '--ssl', '--port', conn.port, "--sslPEMKeyFile", "jstests/libs/" + clientPem];
- if (HOST_TYPE === "linux") {
- // On Linux we override the default path to the system CA store to point to our
- // "trusted" CA. On Windows, this CA will have been added to the user's trusted CA list
- argv.unshift("env", "SSL_CERT_FILE=jstests/libs/trusted-ca.pem");
- }
- var exitStatus = runMongoProgram.apply(null, argv);
- assert.eq(exitStatus, 0, "successfully connected with SSL");
-
- MongoRunner.stopMongod(conn.port);
- };
-
- assert.throws(function() {
- testWithCerts("server.pem", "client.pem");
- });
- assert.doesNotThrow(function() {
- testWithCerts("trusted-server.pem", "trusted-client.pem");
- });
-})());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/stat/stat_auth.js b/src/mongo/gotools/test/qa-tests/jstests/stat/stat_auth.js
deleted file mode 100644
index fd62745c674..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/stat/stat_auth.js
+++ /dev/null
@@ -1,31 +0,0 @@
-(function() {
- load("jstests/libs/mongostat.js");
- var port = allocatePort();
- var m = startMongod(
- "--auth",
- "--port", port,
- "--dbpath", MongoRunner.dataPath+"stat_auth"+port,
- "--nohttpinterface",
- "--bind_ip", "127.0.0.1");
-
- var db = m.getDB("admin");
- db.createUser({
- user: "foobar",
- pwd: "foobar",
- roles: jsTest.adminUserRoles
- });
-
- assert(db.auth("foobar", "foobar"), "auth failed");
-
- var args = ["mongostat",
- "--host", "127.0.0.1:" + port,
- "--rowcount", "1",
- "--authenticationDatabase", "admin",
- "--username", "foobar"];
-
- var x = runMongoProgram.apply(null, args.concat("--password", "foobar"));
- assert.eq(x, exitCodeSuccess, "mongostat should exit successfully with foobar:foobar");
-
- x = runMongoProgram.apply(null, args.concat("--password", "wrong"));
- assert.eq(x, exitCodeErr, "mongostat should exit with an error exit code with foobar:wrong");
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/stat/stat_broken_pipe.js b/src/mongo/gotools/test/qa-tests/jstests/stat/stat_broken_pipe.js
deleted file mode 100644
index 73bd0b01b83..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/stat/stat_broken_pipe.js
+++ /dev/null
@@ -1,34 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- var toolTest = getToolTest('stat_broken_pipe');
- var baseArgs = getCommonToolArguments();
- baseArgs = baseArgs.concat('--port', toolTest.port);
-
- if (toolTest.useSSL) {
- baseArgs = baseArgs.concat([
- '--ssl',
- '--sslPEMKeyFile', 'jstests/libs/server.pem',
- '--sslCAFile', 'jstests/libs/ca.pem',
- '--sslAllowInvalidHostnames']);
- }
- var statArgs = ['mongostat', '--rowcount=5'].concat(baseArgs);
- var ddArgs = ['dd', 'count=1000000', 'bs=1024', 'of=/dev/null'];
- if (_isWindows()) {
- statArgs[0] += '.exe';
- }
- statArgs.unshift('set -o pipefail', '&&', 'PATH=.:$PATH');
-
- var ret = runProgram('bash', '-c', statArgs.concat('|', ddArgs).join(' '));
- assert.eq(0, ret, "bash execution should succeed");
-
- ddArgs = ['dd', 'count=100', 'bs=1', 'of=/dev/null'];
- ret = runProgram('bash', '-c', statArgs.concat('|', ddArgs).join(' '));
- assert.neq(0, ret, "bash execution should fail");
- assert.soon(function() {
- return rawMongoProgramOutput().search(/broken pipe|The pipe is being closed/);
- }, 'should print an error message');
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/stat/stat_custom_headers.js b/src/mongo/gotools/test/qa-tests/jstests/stat/stat_custom_headers.js
deleted file mode 100644
index 89cb539bc34..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/stat/stat_custom_headers.js
+++ /dev/null
@@ -1,107 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- load("jstests/libs/mongostat.js");
- load("jstests/libs/extended_assert.js");
- var assert = extendedAssert;
-
- var toolTest = getToolTest("stat_custom_headers");
- var port = toolTest.port;
-
- var x, rows;
- x = runMongoProgram("mongostat", "--port", port,
- "-o", "host,conn,time", "-O", "metrics.record.moves");
- assert.eq(x, exitCodeBadOptions, "mongostat should fail with both -o and -O options");
- clearRawMongoProgramOutput();
-
- // basic -o --humanReadable=false
- x = runMongoProgram("mongostat", "--port", port,
- "-o", "host,conn,time", "-n", 4, "--humanReadable=false");
- assert.eq(x, 0, "mongostat should succeed with -o and -n options");
- assert.eq.soon(5, function() {
- rows = statRows();
- return rows.length;
- }, "expected 5 rows in mongostat output");
- assert.eq(statFields(rows[0]).join(), "host,conn,time",
- "first row doesn't match 'host conn time'");
- assert.eq(statFields(rows[1]).length, 3,
- "there should be exactly three entries for a row of this stat output");
- clearRawMongoProgramOutput();
-
- // basic -o
- x = runMongoProgram("mongostat", "--port", port,
- "-o", "host,conn,time", "-n", 4);
- assert.eq(x, 0, "mongostat should succeed with -o and -n options");
- assert.eq.soon(5, function() {
- rows = statRows();
- return rows.length;
- }, "expected 5 rows in mongostat output");
- assert.eq(statFields(rows[0]).join(), "host,conn,time",
- "first row doesn't match 'host conn time'");
- assert.eq(statFields(rows[1]).length, 5,
- "there should be exactly five entries for a row of this stat output (time counts as three)");
- clearRawMongoProgramOutput();
-
- // basic -O
- x = runMongoProgram("mongostat", "--port", port,
- "-O", "host", "-n", 4);
- assert.eq(x, 0, "mongostat should succeed with -o and -n options");
- rows = statRows();
- var fields = statFields(rows[0]);
- assert.eq(fields[fields.length-1], "host",
- "first row should end with added 'host' field");
- clearRawMongoProgramOutput();
-
- // named
- x = runMongoProgram("mongostat", "--port", port,
- "-o", "host=H,conn=C,time=MYTiME", "-n", 4);
- assert.eq(x, 0, "mongostat should succeed with -o and -n options");
- assert.eq.soon(5, function() {
- rows = statRows();
- return rows.length;
- }, "expected 5 rows in mongostat output");
- assert.eq(statFields(rows[0]).join(), "H,C,MYTiME",
- "first row doesn't match 'H C MYTiME'");
- assert.eq(statFields(rows[1]).length, 5,
- "there should be exactly five entries for a row of this stat output (time counts as three)");
- clearRawMongoProgramOutput();
-
- // serverStatus custom field
- x = runMongoProgram("mongostat", "--port", port,
- "-o", "host,conn,mem.bits", "-n", 4);
- assert.eq(x, 0, "mongostat should succeed with -o and -n options");
- assert.eq.soon(5, function() {
- rows = statRows();
- return rows.length;
- }, "expected 5 rows in mongostat output");
- assert.eq(statFields(rows[0]).join(), "host,conn,mem.bits",
- "first row doesn't match 'host time mem.bits'");
- fields = statFields(rows[1]);
- assert.eq(fields.length, 3,
- "there should be exactly three entries for a row of this stat output");
- assert(fields[2] === "32" || fields[2] === "64",
- "mem.bits didn't yield valid output (should be one of 32 or 64, was '"
- +fields[2]+"')");
- clearRawMongoProgramOutput();
-
- // serverStatus named field
- x = runMongoProgram("mongostat", "--port", port,
- "-o", "host,conn=MYCoNN,mem.bits=BiTs", "-n", 4);
- assert.eq(x, 0, "mongostat should succeed with -o and -n options");
- assert.eq.soon(5, function() {
- rows = statRows();
- return rows.length;
- }, "expected 5 rows in mongostat output");
- assert.eq(statFields(rows[0]).join(), "host,MYCoNN,BiTs",
- "first row doesn't match 'host MYTiME BiTs'");
- fields = statFields(rows[1]);
- assert.eq(fields.length, 3,
- "there should be exactly three entries for a row of this stat output");
- assert(fields[2] === "32" || fields[2] === "64",
- "mem.bits didn't yield valid output (should be one of 32 or 64, was '"
- +fields[2]+"')");
- clearRawMongoProgramOutput();
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/stat/stat_discover.js b/src/mongo/gotools/test/qa-tests/jstests/stat/stat_discover.js
deleted file mode 100644
index 5b4aa3a0f6a..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/stat/stat_discover.js
+++ /dev/null
@@ -1,60 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- load("jstests/libs/mongostat.js");
-
- var toolTest = getToolTest("stat_discover");
- var rs = new ReplSetTest({
- name: "rpls",
- nodes: 4,
- useHostName: true,
- });
-
- rs.startSet();
- rs.initiate();
- rs.awaitReplication();
-
- worked = statCheck(["mongostat",
- "--port", rs.liveNodes.master.port,
- "--discover"],
- hasOnlyPorts(rs.ports));
- assert(worked, "when only port is used, each host still only appears once");
-
- assert(discoverTest(rs.ports, rs.liveNodes.master.host), "--discover against a replset master sees all members");
-
- assert(discoverTest(rs.ports, rs.liveNodes.slaves[0].host), "--discover against a replset slave sees all members");
-
- hosts = [rs.liveNodes.master.host, rs.liveNodes.slaves[0].host, rs.liveNodes.slaves[1].host];
- ports = [rs.liveNodes.master.port, rs.liveNodes.slaves[0].port, rs.liveNodes.slaves[1].port];
- worked = statCheck(['mongostat',
- '--host', hosts.join(',')],
- hasOnlyPorts(ports));
- assert(worked, "replica set specifiers are correctly used");
-
- assert(discoverTest([toolTest.port], toolTest.m.host), "--discover against a stand alone-sees just the stand-alone");
-
- // Test discovery with nodes cutting in and out
- clearRawMongoProgramOutput();
- pid = startMongoProgramNoConnect("mongostat", "--host", rs.liveNodes.slaves[1].host, "--discover");
-
- assert.soon(hasPort(rs.liveNodes.slaves[0].port), "discovered host is seen");
- assert.soon(hasPort(rs.liveNodes.slaves[1].port), "specified host is seen");
-
- rs.stop(rs.liveNodes.slaves[0]);
- assert.soon(lacksPort(rs.liveNodes.slaves[0].port), "after discovered host is stopped, it is not seen");
- assert.soon(hasPort(rs.liveNodes.slaves[1].port), "after discovered host is stopped, specified host is still seen");
-
- rs.start(rs.liveNodes.slaves[0]);
- assert.soon(hasPort(rs.liveNodes.slaves[0].port), "after discovered is restarted, discovered host is seen again");
- assert.soon(hasPort(rs.liveNodes.slaves[1].port), "after discovered is restarted, specified host is still seen");
-
- rs.stop(rs.liveNodes.slaves[1]);
- assert.soon(lacksPort(rs.liveNodes.slaves[1].port), "after specified host is stopped, specified host is not seen");
- assert.soon(hasPort(rs.liveNodes.slaves[0].port), "after specified host is stopped, the discovered host is still seen");
-
- stopMongoProgramByPid(pid);
-
- rs.stopSet();
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/stat/stat_discover_shard.js b/src/mongo/gotools/test/qa-tests/jstests/stat/stat_discover_shard.js
deleted file mode 100644
index 621cdcdc388..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/stat/stat_discover_shard.js
+++ /dev/null
@@ -1,14 +0,0 @@
-(function() {
- load("jstests/libs/mongostat.js");
-
- var st = new ShardingTest({name: "shard1", shards: 2});
- shardPorts = [st._mongos[0].port, st._connections[0].port, st._connections[1].port];
-
- clearRawMongoProgramOutput();
- pid = startMongoProgramNoConnect("mongostat", "--host", st._mongos[0].host, "--discover");
- assert.soon(hasOnlyPorts(shardPorts), "--discover against a mongos sees all shards");
-
- st.stop();
- assert.soon(hasOnlyPorts([]), "stops showing data when hosts come down");
- assert.eq(exitCodeStopped, stopMongoProgramByPid(pid), "mongostat --discover against a sharded cluster shouldn't error when the cluster goes down");
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/stat/stat_header.js b/src/mongo/gotools/test/qa-tests/jstests/stat/stat_header.js
deleted file mode 100644
index e9fe39957d5..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/stat/stat_header.js
+++ /dev/null
@@ -1,27 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- load('jstests/libs/mongostat.js');
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var toolTest = getToolTest('stat_header');
-
- function outputIncludesHeader() {
- return rawMongoProgramOutput()
- .split("\n").some(function(line) {
- return line.match(/^sh\d+\| insert/);
- });
- }
-
- clearRawMongoProgramOutput();
- x = runMongoProgram("mongostat", "--port", toolTest.port, "--rowcount", 1);
- assert.soon(outputIncludesHeader, "normally a header appears");
-
- clearRawMongoProgramOutput();
- x = runMongoProgram("mongostat", "--port", toolTest.port, "--rowcount", 1, "--noheaders");
- assert.eq.soon(false, outputIncludesHeader, "--noheaders suppresses the header");
-
- toolTest.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/stat/stat_mixed_storage_engine.js b/src/mongo/gotools/test/qa-tests/jstests/stat/stat_mixed_storage_engine.js
deleted file mode 100644
index 56c5a10d30d..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/stat/stat_mixed_storage_engine.js
+++ /dev/null
@@ -1,45 +0,0 @@
-// @tags: [requires_mmap_available]
-(function() {
- if (TestData && TestData.storageEngine === 'wiredTiger') {
- return;
- }
- load("jstests/libs/mongostat.js");
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
-
- var mmap_options = {storageEngine: "mmapv1"};
- var wt_options = {storageEngine: "wiredTiger"};
- var replTest = new ReplSetTest({
- nodes: {
- node0: mmap_options,
- node1: mmap_options,
- node2: wt_options,
- },
- });
-
- replTest.startSet();
- replTest.initiate();
- replTest.awaitReplication();
-
- clearRawMongoProgramOutput();
- assert(discoverTest(replTest.ports, replTest.nodes[0].host), "mongostat against a heterogenous storage engine replica set sees all hosts");
-
- clearRawMongoProgramOutput();
- runMongoProgram("mongostat", "--host", replTest.nodes[0].host, "--rowcount", 7, "--discover");
- assert.strContains.soon("used flushes mapped", rawMongoProgramOutput, "against replset has fields for both engines");
-
- replTest.stopSet();
-
- st = new ShardingTest({shards: [wt_options, mmap_options], options: {nopreallocj: true}});
- stdb = st.getDB("test");
- shardPorts = [st._mongos[0].port, st._connections[0].port, st._connections[1].port];
-
- clearRawMongoProgramOutput();
- assert(discoverTest(shardPorts, st._mongos[0].host, "mongostat reports on a heterogenous storage engine sharded cluster"));
-
- clearRawMongoProgramOutput();
- runMongoProgram("mongostat", "--host", st._mongos[0].host, "--rowcount", 7, "--discover");
- assert.strContains.soon("used flushes mapped", rawMongoProgramOutput, "against sharded cluster has fields for both engines");
-
- st.stop();
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/stat/stat_rowcount.js b/src/mongo/gotools/test/qa-tests/jstests/stat/stat_rowcount.js
deleted file mode 100644
index 3af09f723ea..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/stat/stat_rowcount.js
+++ /dev/null
@@ -1,56 +0,0 @@
-(function() {
- if (typeof getToolTest === 'undefined') {
- load('jstests/configs/plain_28.config.js');
- }
- load("jstests/libs/mongostat.js");
- load('jstests/libs/extended_assert.js');
- var assert = extendedAssert;
- var commonToolArgs = getCommonToolArguments();
- print("common tool sargs");
- printjson(commonToolArgs);
-
- var toolTest = getToolTest('stat_rowcount');
- var x, pid;
- clearRawMongoProgramOutput();
-
- x = runMongoProgram("mongostat", "--host", toolTest.m.host, "--rowcount", 7, "--noheaders");
- assert.eq.soon(7, function() {
- return rawMongoProgramOutput().split("\n").filter(function(r) {
- return r.match(rowRegex);
- }).length;
- }, "--rowcount value is respected correctly");
-
- startTime = new Date();
- x = runMongoProgram("mongostat", "--host", toolTest.m.host, "--rowcount", 3, "--noheaders", 3);
- endTime = new Date();
- duration = Math.floor((endTime - startTime) / 1000);
- assert.gte(duration, 9, "sleep time affects the total time to produce a number or results");
-
- clearRawMongoProgramOutput();
-
- pid = startMongoProgramNoConnect.apply(null, ["mongostat", "--port", toolTest.port].concat(commonToolArgs));
- assert.strContains.soon('sh'+pid+'| ', rawMongoProgramOutput, "should produce some output");
- assert.eq(exitCodeStopped, stopMongoProgramByPid(pid), "stopping should cause mongostat exit with a 'stopped' code");
-
- x = runMongoProgram.apply(null, ["mongostat", "--port", toolTest.port - 1, "--rowcount", 1].concat(commonToolArgs));
- assert.neq(exitCodeSuccess, x, "can't connect causes an error exit code");
-
- x = runMongoProgram.apply(null, ["mongostat", "--rowcount", "-1"].concat(commonToolArgs));
- assert.eq(exitCodeBadOptions, x, "mongostat --rowcount specified with bad input: negative value");
-
- x = runMongoProgram.apply(null, ["mongostat", "--rowcount", "foobar"].concat(commonToolArgs));
- assert.eq(exitCodeBadOptions, x, "mongostat --rowcount specified with bad input: non-numeric value");
-
- x = runMongoProgram.apply(null, ["mongostat", "--host", "badreplset/127.0.0.1:" + toolTest.port, "--rowcount", 1].concat(commonToolArgs));
- assert.eq(exitCodeErr, x, "--host used with a replica set string for nodes not in a replica set");
-
- pid = startMongoProgramNoConnect.apply(null, ["mongostat", "--host", "127.0.0.1:" + toolTest.port].concat(commonToolArgs));
- assert.strContains.soon('sh'+pid+'| ', rawMongoProgramOutput, "should produce some output");
-
- MongoRunner.stopMongod(toolTest.port);
- assert.gte.soon(10, function() {
- var rows = statRows();
- return statFields(rows[rows.length - 1]).length;
- }, "should stop showing new stat lines");
- assert.eq(exitCodeStopped, stopMongoProgramByPid(pid), "mongostat shouldn't error out when the server goes down");
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/top/mongotop_json.js b/src/mongo/gotools/test/qa-tests/jstests/top/mongotop_json.js
deleted file mode 100644
index cdf92788f94..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/top/mongotop_json.js
+++ /dev/null
@@ -1,44 +0,0 @@
-// mongotop_json.js; ensure that running mongotop using the --json flag works as
-// expected
-var testName = 'mongotop_json';
-(function() {
- jsTest.log('Testing mongotop --json option');
- load('jstests/top/util/mongotop_common.js');
- var assert = extendedAssert;
-
- var runTests = function(topology, passthrough) {
- jsTest.log('Using ' + passthrough.name + ' passthrough');
- var t = topology.init(passthrough);
- var conn = t.connection();
-
- // clear the output buffer
- clearRawMongoProgramOutput();
-
- // ensure tool runs without error with --rowcount = 1
- var ret = executeProgram(['mongotop', '--port', conn.port, '--json', '--rowcount', 1].concat(passthrough.args));
- assert.eq(ret.exitCode, 0, 'failed 1');
- assert.eq.soon('object', function() {
- return typeof JSON.parse(extractJSON(ret.getOutput()));
- }, 'invalid JSON 1');
-
- // ensure tool runs without error with --rowcount > 1
- var rowcount = 5;
- clearRawMongoProgramOutput();
- ret = executeProgram(['mongotop', '--port', conn.port, '--json', '--rowcount', rowcount].concat(passthrough.args));
- assert.eq(ret.exitCode, 0, 'failed 2');
- assert.eq.soon(rowcount, function() {
- return ret.getOutput().split('\n').length;
- }, "expected " + rowcount + " top results");
- ret.getOutput().split('\n').forEach(function(line) {
- assert(typeof JSON.parse(extractJSON(line)) === 'object', 'invalid JSON 2');
- });
-
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/top/mongotop_reports.js b/src/mongo/gotools/test/qa-tests/jstests/top/mongotop_reports.js
deleted file mode 100644
index 853669dc853..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/top/mongotop_reports.js
+++ /dev/null
@@ -1,151 +0,0 @@
-// mongotop_reports.js; ensure that running mongotop reports accurately on operations
-// going on in namespaces
-var testName = 'mongotop_reports';
-load('jstests/top/util/mongotop_common.js');
-
-(function() {
- jsTest.log('Testing mongotop\'s reporting fidelity');
- var assert = extendedAssert;
- var read = 'read';
- var write = 'write';
-
- var runReportTest = function(topology, passthrough, test) {
- jsTest.log('Using ' + passthrough.name + ' passthrough on ' + test.name + ' shell');
- var t = topology.init(passthrough);
- var conn = t.connection();
- db = conn.getDB('foo'); // eslint-disable-line no-native-reassign
- db.dropDatabase();
- assert.eq(db.bar.count(), 0, 'drop failed');
-
- // start the parallel shell command
- if (passthrough.name === auth.name) {
- var authCommand = '\n db.getSiblingDB(\'admin\').auth(\'' + authUser + '\',\'' + authPassword + '\'); \n';
- test.shellCommand = authCommand + test.shellCommand;
- }
- var shellWorkload = startParallelShell(test.shellCommand);
-
- // allow for command to actually start
- sleep(5000);
-
- // ensure tool runs without error
- clearRawMongoProgramOutput();
- var ret = executeProgram(['mongotop', '--port', conn.port, '--json', '--rowcount', 1].concat(passthrough.args));
- assert.eq(ret.exitCode, 0, 'failed 1');
- var parsedOutput;
- assert.eq.soon('object', function() {
- parsedOutput = JSON.parse(extractJSON(ret.getOutput()));
- return typeof parsedOutput;
- }, 'invalid JSON 1');
-
- // ensure only the active namespaces reports a non-zero value
- for (var namespace in parsedOutput.totals) {
- if (!parsedOutput.totals.hasOwnProperty(namespace)) {
- continue;
- }
- var isAuthActivity = namespace.indexOf('.system.') !== -1;
- var isReplActivity = namespace.indexOf('local.') !== -1;
-
- // authentication and replication activity should be ignored
- if (isAuthActivity || isReplActivity) {
- continue;
- }
-
- var nsDetails = parsedOutput.totals[namespace];
- assert.neq(nsDetails, undefined, 'no details reported for namespace ' + namespace);
-
- var comparator = 'eq';
- var shouldHaveActivity = test.namespaces.filter(function(testSpace) { // eslint-disable-line no-loop-func
- return testSpace === namespace;
- });
-
- // return the opposite comparator if this namespace should have activity
- if (shouldHaveActivity.length !== 0) {
- comparator = 'neq';
- }
-
- test.indicators.forEach(function(indicator) { // eslint-disable-line no-loop-func
- ['count', 'time'].forEach(function(metric) {
- assert[comparator](nsDetails[indicator][metric], 0, 'unexpected ' + indicator + ' activity on ' + namespace + '; ' + metric + ': ' + nsDetails[indicator][metric]);
- if (test.indicators.length === 1) {
- // read or write shell
- var opposite = read;
- if (test.name === read) {
- opposite = write;
- }
- // ensure there's no activity on the inactive metric
- // sometimes the readings are a bit out of sync - making some
- // allowance to prevent test flakiness
- assert.between(0, nsDetails[opposite][metric], 1, 'unexpected ' + opposite + ' (opposite) activity on ' + namespace + '; ' + metric + ': ' + nsDetails[opposite][metric]);
- } else {
- // read/write shell should have read and write activity
- assert[comparator](nsDetails[read][metric], 0, 'unexpected ' + read + ' activity (read/write) on ' + namespace + '; ' + metric + ': ' + nsDetails[read][metric]);
- assert[comparator](nsDetails[write][metric], 0, 'unexpected ' + write + ' activity (read/write) on ' + namespace + '; ' + metric + ': ' + nsDetails[write][metric]);
- }
- var calculatedSum = nsDetails[read][metric] + nsDetails[write][metric];
- var expectedSum = nsDetails['total'][metric];
-
- // sometimes the total isn't exact - making some allowance to prevent
- // test flakiness
- assert.between(0, expectedSum - calculatedSum, 1, 'unexpected sum for metric ' + metric + ': expected ' + expectedSum + ' but got ' + calculatedSum);
- });
- });
- }
- t.stop();
-
- // Swallow the exit code for the shell per SERVER-25777.
- shellWorkload();
- };
-
- var runTests = function(topology, passthrough) {
- var readShell = '\nprint(\'starting read\'); \n' +
- 'for (var i = 0; i < 1000000; ++i) \n{ ' +
- ' db.getSiblingDB(\'foo\').bar.find({ x: i }).forEach(function(){}); \n' +
- ' sleep(1); \n' +
- '}\n';
-
- var writeShell = '\nprint(\'starting write\'); \n' +
- 'for (var i = 0; i < 1000000; ++i) { \n' +
- ' db.getSiblingDB(\'foo\').bar.insert({ x: i }); \n' +
- ' sleep(1); \n' +
- '}\n';
-
- var readWriteShell = '\nprint(\'starting read/write\'); \n' +
- 'for (var i = 0; i < 1000000; ++i) \n{ ' +
- ' db.getSiblingDB(\'foo\').bar.insert({ x: i }); \n' +
- ' db.getSiblingDB(\'foo\').bar.find({ x: i }).forEach(function(){}); \n' +
- ' sleep(1); \n' +
- '}\n';
-
- var testSpaces = [
- ['foo.bar'],
- ['foo.bar', 'bar.foo'],
- ];
-
- var tests = [{
- name: read,
- indicators: [read],
- shellCommand: readShell,
- }, {
- name: write,
- indicators: [write],
- shellCommand: writeShell,
- }, {
- name: read + '/' + write,
- indicators: [read, write],
- shellCommand: readWriteShell,
- }];
-
- tests.forEach(function(test) {
- testSpaces.forEach(function(testSpace) {
- test.namespaces = testSpace;
- runReportTest(topology, passthrough, test);
- });
- });
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/top/mongotop_sharded.js b/src/mongo/gotools/test/qa-tests/jstests/top/mongotop_sharded.js
deleted file mode 100644
index 6bce18a5e9b..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/top/mongotop_sharded.js
+++ /dev/null
@@ -1,47 +0,0 @@
-// mongotop_sharded.js; ensure that running mongotop against a sharded cluster
-// fails with a useful error message
-var testName = 'mongotop_sharded';
-(function() {
- jsTest.log('Testing mongotop against sharded cluster');
- load('jstests/top/util/mongotop_common.js');
- var assert = extendedAssert;
-
- var expectedError = 'cannot run mongotop against a mongos';
- var verifyOutput = function(getOutput) {
- assert.strContains.soon(expectedError, getOutput, 'error message must appear at least once');
- var shellOutput = getOutput();
- jsTest.log('shell output: ' + shellOutput);
- shellOutput.split('\n').forEach(function(line) {
- // check the displayed error message
- assert.neq(line.match(expectedError), null, 'unexpeced error message');
- });
- };
-
- var runTests = function(topology, passthrough) {
- jsTest.log('Using ' + passthrough.name + ' passthrough');
- var t = topology.init(passthrough);
- var conn = t.connection();
-
- // getting the version should work without error
- assert.eq(runMongoProgram.apply(this, ['mongotop', '--port', conn.port, '--version'].concat(passthrough.args)), 0, 'failed 1');
-
- // getting the help text should work without error
- assert.eq(runMongoProgram.apply(this, ['mongotop', '--port', conn.port, '--help'].concat(passthrough.args)), 0, 'failed 2');
-
- // anything that runs against the mongos server should fail
- var result = executeProgram(['mongotop', '--port', conn.port].concat(passthrough.args));
- assert.neq(result.exitCode, 0, 'expected failure against a mongos');
- verifyOutput(result.getOutput);
-
- result = executeProgram(['mongotop', '--port', conn.port, '2'].concat(passthrough.args));
- assert.neq(result.exitCode, 0, 'expected failure against a mongos');
- verifyOutput(result.getOutput);
-
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(shardedClusterTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/top/mongotop_stress.js b/src/mongo/gotools/test/qa-tests/jstests/top/mongotop_stress.js
deleted file mode 100644
index 5693a1b8254..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/top/mongotop_stress.js
+++ /dev/null
@@ -1,56 +0,0 @@
-// mongotop_stress.js; ensure that running mongotop, even when the server is
-// under heavy load, works as expected
-var testName = 'mongotop_stress';
-load('jstests/top/util/mongotop_common.js');
-
-(function() {
- jsTest.log('Testing mongotop\'s performance under load');
-
- var runTests = function(topology, passthrough) {
- jsTest.log('Using ' + passthrough.name + ' passthrough');
- var t = topology.init(passthrough);
- var conn = t.connection();
- db = conn.getDB('foo'); // eslint-disable-line no-native-reassign
-
- // concurrently insert documents into thousands of collections
- var stressShell = '\nprint(\'starting read/write stress test\'); \n' +
- ' if (\'' + passthrough.name + '\' === \'auth\')' +
- ' db.getSiblingDB(\'admin\').auth(\'' + authUser + '\',\'' + authPassword + '\'); ' +
- ' var dbName = (Math.random() + 1).toString(36).substring(7); ' +
- ' var clName = (Math.random() + 1).toString(36).substring(7); ' +
- ' for (var i = 0; i < 10000; ++i) { ' +
- ' db.getSiblingDB(dbName).getCollection(clName).find({ x: i }).forEach(); \n' +
- ' sleep(1); \n' +
- ' db.getSiblingDB(dbName).getCollection(clName).insert({ x: i }); \n' +
- ' sleep(1);\n' +
- ' }\n';
-
- var shells = [];
- for (var i = 0; i < 10; ++i) {
- shells.push(startParallelShell(stressShell));
- }
-
- // wait a bit for the stress to kick in
- sleep(5000);
- jsTest.log('Current operation(s)');
- printjson(db.currentOp());
-
- // ensure tool runs without error
- clearRawMongoProgramOutput();
- assert.eq(runMongoProgram.apply(this, ['mongotop', '--port', conn.port, '--json', '--rowcount', 1].concat(passthrough.args)), 0, 'failed 1');
-
- t.stop();
-
- // Wait for all the shells to finish per SERVER-25777.
- shells.forEach(function(join) {
- join();
- });
-
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/top/mongotop_validation.js b/src/mongo/gotools/test/qa-tests/jstests/top/mongotop_validation.js
deleted file mode 100644
index 0abe5a4c74b..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/top/mongotop_validation.js
+++ /dev/null
@@ -1,46 +0,0 @@
-// mongotop_validation.js; ensure that running mongotop using invalid arguments
-// fail as expected
-var testName = 'mongotop_validation';
-load('jstests/top/util/mongotop_common.js');
-
-(function() {
- jsTest.log('Testing mongotop with invalid arguments');
-
- var runTests = function(topology, passthrough) {
- jsTest.log('Using ' + passthrough.name + ' passthrough');
- var t = topology.init(passthrough);
- var conn = t.connection();
-
- // checking the version should not return an error
- assert.eq(runMongoProgram.apply(this, ['mongotop', '--port', conn.port, '--version'].concat(passthrough.args)), 0, '--version assertion failure 1');
-
-
- // ensure tool returns an error...
-
- // when used with an invalid port
- assert.neq(runMongoProgram.apply(this, ['mongotop', '--port', 55555].concat(passthrough.args)), 0, '--port assertion failure 1');
- assert.neq(runMongoProgram.apply(this, ['mongotop', '--port', 'hello'].concat(passthrough.args)), 0, '--port assertion failure 2');
- assert.neq(runMongoProgram.apply(this, ['mongotop', '--port', ''].concat(passthrough.args)), 0, '--port assertion failure 3');
-
- // when supplied invalid row counts
- assert.neq(runMongoProgram.apply(this, ['mongotop', '--port', conn.port, '--rowcount', '-2'].concat(passthrough.args)), 0, '--rowcount assertion failure 1');
- assert.neq(runMongoProgram.apply(this, ['mongotop', '--port', conn.port, '--rowcount', 'hello'].concat(passthrough.args)), 0, '--rowcount assertion failure 2');
- assert.neq(runMongoProgram.apply(this, ['mongotop', '--port', conn.port, '--rowcount', ''].concat(passthrough.args)), 0, '--rowcount assertion failure 3');
-
- // when supplied invalid sleep times
- assert.neq(runMongoProgram.apply(this, ['mongotop', '--port', conn.port, '-4'].concat(passthrough.args)), 0, 'sleep time assertion failure 1');
- assert.neq(runMongoProgram.apply(this, ['mongotop', '--port', conn.port, 'forever'].concat(passthrough.args)), 0, 'sleep time assertion failure 2');
-
- // when supplied invalid options
- assert.neq(runMongoProgram.apply(this, ['mongotop', '--port', conn.port, '--elder'].concat(passthrough.args)), 0, 'invalid options failure 1');
- assert.neq(runMongoProgram.apply(this, ['mongotop', '--port', conn.port, '--price'].concat(passthrough.args)), 0, 'invalid options failure 2');
-
- t.stop();
- };
-
- // run with plain and auth passthroughs
- passthroughs.forEach(function(passthrough) {
- runTests(standaloneTopology, passthrough);
- runTests(replicaSetTopology, passthrough);
- });
-}());
diff --git a/src/mongo/gotools/test/qa-tests/jstests/top/util/mongotop_common.js b/src/mongo/gotools/test/qa-tests/jstests/top/util/mongotop_common.js
deleted file mode 100644
index 5d4e002fb92..00000000000
--- a/src/mongo/gotools/test/qa-tests/jstests/top/util/mongotop_common.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// mongotop_common.js; contains variables used by mongotop tests
-/* exported executeProgram */
-/* exported extractJSON */
-load('jstests/common/topology_helper.js');
-load('jstests/libs/extended_assert.js');
-
-var executeProgram = function(args) {
- clearRawMongoProgramOutput();
- var pid = startMongoProgramNoConnect.apply(this, args);
- var exitCode = waitProgram(pid);
- var prefix = 'sh'+pid+'| ';
- var getOutput = function() {
- return rawMongoProgramOutput().split('\n').filter(function(line) {
- return line.indexOf(prefix) === 0;
- }).join('\n');
- };
- return {
- exitCode: exitCode,
- getOutput: getOutput,
- };
-};
-
-var extractJSON = function(shellOutput) {
- return shellOutput.substring(shellOutput.indexOf('{'), shellOutput.lastIndexOf('}') + 1);
-};
diff --git a/src/mongo/gotools/vendor/src/github.com/10gen/llmgo/socket.go b/src/mongo/gotools/vendor/src/github.com/10gen/llmgo/socket.go
index bfac9082cc7..a8bad09d528 100644
--- a/src/mongo/gotools/vendor/src/github.com/10gen/llmgo/socket.go
+++ b/src/mongo/gotools/vendor/src/github.com/10gen/llmgo/socket.go
@@ -446,9 +446,11 @@ func (socket *MongoSocket) SimpleQuery(op *QueryOp) (data []byte, replyOp *Reply
if !replyDone {
replyDone = true
replyErr = err
- replyOp = rfl.op
- if err == nil {
- replyData = rfl.docData
+ if rfl != nil {
+ replyOp = rfl.op
+ if err == nil {
+ replyData = rfl.docData
+ }
}
}
change.Unlock()