Diffstat (limited to 'jstests/tool')
-rw-r--r--  jstests/tool/command_line_quotes.js | 10
-rw-r--r--  jstests/tool/csv1.js | 61
-rw-r--r--  jstests/tool/csvexport1.js | 69
-rw-r--r--  jstests/tool/csvexport2.js | 39
-rw-r--r--  jstests/tool/csvimport1.js | 75
-rw-r--r--  jstests/tool/dumpauth.js | 31
-rw-r--r--  jstests/tool/dumpfilename1.js | 13
-rw-r--r--  jstests/tool/dumprestore1.js | 34
-rw-r--r--  jstests/tool/dumprestore10.js | 10
-rw-r--r--  jstests/tool/dumprestore3.js | 14
-rw-r--r--  jstests/tool/dumprestore4.js | 29
-rw-r--r--  jstests/tool/dumprestore6.js | 30
-rw-r--r--  jstests/tool/dumprestore7.js | 36
-rw-r--r--  jstests/tool/dumprestore8.js | 109
-rw-r--r--  jstests/tool/dumprestore9.js | 138
-rw-r--r--  jstests/tool/dumprestoreWithNoOptions.js | 99
-rw-r--r--  jstests/tool/dumprestore_auth.js | 139
-rw-r--r--  jstests/tool/dumprestore_auth2.js | 216
-rw-r--r--  jstests/tool/dumprestore_auth3.js | 91
-rw-r--r--  jstests/tool/dumprestore_excludecollections.js | 101
-rw-r--r--  jstests/tool/dumpsecondary.js | 32
-rw-r--r--  jstests/tool/exportimport1.js | 59
-rw-r--r--  jstests/tool/exportimport3.js | 30
-rw-r--r--  jstests/tool/exportimport4.js | 43
-rw-r--r--  jstests/tool/exportimport5.js | 69
-rw-r--r--  jstests/tool/exportimport6.js | 27
-rw-r--r--  jstests/tool/exportimport_bigarray.js | 35
-rw-r--r--  jstests/tool/exportimport_date.js | 20
-rw-r--r--  jstests/tool/exportimport_minkey_maxkey.js | 20
-rw-r--r--  jstests/tool/files1.js | 16
-rw-r--r--  jstests/tool/gridfs.js | 17
-rw-r--r--  jstests/tool/oplog1.js | 19
-rw-r--r--  jstests/tool/oplog_all_ops.js | 46
-rw-r--r--  jstests/tool/restorewithauth.js | 66
-rw-r--r--  jstests/tool/stat1.js | 30
-rw-r--r--  jstests/tool/tool1.js | 61
-rw-r--r--  jstests/tool/tool_replset.js | 43
-rw-r--r--  jstests/tool/tsv1.js | 63
38 files changed, 1169 insertions, 871 deletions
diff --git a/jstests/tool/command_line_quotes.js b/jstests/tool/command_line_quotes.js
index d7b618a3406..35f7305ff4a 100644
--- a/jstests/tool/command_line_quotes.js
+++ b/jstests/tool/command_line_quotes.js
@@ -8,14 +8,8 @@ coll.insert({a: 2});
var query = "{\"a\": {\"$gt\": 1} }";
assert(!MongoRunner.runMongoTool(
- "mongodump",
- {
- "host": "127.0.0.1:" + mongod.port,
- "db": "spaces",
- "collection": "coll",
- "query": query
- }
-));
+ "mongodump",
+ {"host": "127.0.0.1:" + mongod.port, "db": "spaces", "collection": "coll", "query": query}));
MongoRunner.stopMongod(mongod);
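A side note on the helper being reformatted above: MongoRunner.runMongoTool takes the tool name plus a single options object whose keys map to command-line flags. A minimal sketch of that calling convention, with a placeholder host/port (the test derives it from mongod.port):

  // Sketch only: the options-object form of runMongoTool shown in the hunk above.
  var query = "{\"a\": {\"$gt\": 1}}";
  var ret = MongoRunner.runMongoTool(
      "mongodump",
      {"host": "127.0.0.1:27017", "db": "spaces", "collection": "coll", "query": query});
  // In this vintage of the harness the return value is falsy on success, which is
  // why the test above wraps the call in assert(!...).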
diff --git a/jstests/tool/csv1.js b/jstests/tool/csv1.js
index 3338d500fdf..7a5690062f8 100644
--- a/jstests/tool/csv1.js
+++ b/jstests/tool/csv1.js
@@ -1,42 +1,57 @@
// csv1.js
-t = new ToolTest( "csv1" );
+t = new ToolTest("csv1");
-c = t.startDB( "foo" );
+c = t.startDB("foo");
-base = { a : 1 , b : "foo,bar\"baz,qux" , c: 5, 'd d': -6 , e: '-', f : "."};
+base = {
+ a: 1,
+ b: "foo,bar\"baz,qux",
+ c: 5, 'd d': -6,
+ e: '-',
+ f: "."
+};
-assert.eq( 0 , c.count() , "setup1" );
-c.insert( base );
+assert.eq(0, c.count(), "setup1");
+c.insert(base);
delete base._id;
-assert.eq( 1 , c.count() , "setup2" );
+assert.eq(1, c.count(), "setup2");
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv" , "-f" , "a,b,c,d d,e,f" );
+t.runTool(
+ "export", "--out", t.extFile, "-d", t.baseName, "-c", "foo", "--csv", "-f", "a,b,c,d d,e,f");
c.drop();
-assert.eq( 0 , c.count() , "after drop" );
-
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "-f" , "a,b,c,d d,e,f" );
-assert.soon( "2 == c.count()" , "restore 2" );
-
-a = c.find().sort( { a : 1 } ).toArray();
+assert.eq(0, c.count(), "after drop");
+
+t.runTool("import",
+ "--file",
+ t.extFile,
+ "-d",
+ t.baseName,
+ "-c",
+ "foo",
+ "--type",
+ "csv",
+ "-f",
+ "a,b,c,d d,e,f");
+assert.soon("2 == c.count()", "restore 2");
+
+a = c.find().sort({a: 1}).toArray();
delete a[0]._id;
delete a[1]._id;
-assert.docEq( { a : "a" , b : "b" , c : "c" , 'd d': "d d", e: 'e', f : "f"}, a[1], "csv parse 1" );
-assert.docEq( base, a[0], "csv parse 0" );
+assert.docEq({a: "a", b: "b", c: "c", 'd d': "d d", e: 'e', f: "f"}, a[1], "csv parse 1");
+assert.docEq(base, a[0], "csv parse 0");
c.drop();
-assert.eq( 0 , c.count() , "after drop 2" );
+assert.eq(0, c.count(), "after drop 2");
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
+t.runTool(
+ "import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--type", "csv", "--headerline");
+assert.soon("c.findOne()", "no data after sleep");
+assert.eq(1, c.count(), "after restore 2");
x = c.findOne();
delete x._id;
-assert.docEq( base, x, "csv parse 2" );
-
-
-
+assert.docEq(base, x, "csv parse 2");
t.stop();
diff --git a/jstests/tool/csvexport1.js b/jstests/tool/csvexport1.js
index f01acbcd6fc..afea559b2b0 100644
--- a/jstests/tool/csvexport1.js
+++ b/jstests/tool/csvexport1.js
@@ -1,64 +1,81 @@
// csvexport1.js
-t = new ToolTest( "csvexport1" );
+t = new ToolTest("csvexport1");
-c = t.startDB( "foo" );
+c = t.startDB("foo");
-assert.eq( 0 , c.count() , "setup1" );
+assert.eq(0, c.count(), "setup1");
objId = ObjectId();
-c.insert({ a : new NumberInt(1) , b : objId , c: [1, 2, 3], d : {a : "hello", b : "world"} , e: '-'});
-c.insert({ a : -2.0, c : MinKey, d : "Then he said, \"Hello World!\"", e : new NumberLong(3)});
-c.insert({ a : new BinData(0, "1234"), b : ISODate("2009-08-27T12:34:56.789"),
- c : new Timestamp(1234, 9876), d : /foo*\"bar\"/i,
- e : function foo() { print("Hello World!"); }});
-
-assert.eq( 3 , c.count() , "setup2" );
+c.insert({a: new NumberInt(1), b: objId, c: [1, 2, 3], d: {a: "hello", b: "world"}, e: '-'});
+c.insert({a: -2.0, c: MinKey, d: "Then he said, \"Hello World!\"", e: new NumberLong(3)});
+c.insert({
+ a: new BinData(0, "1234"),
+ b: ISODate("2009-08-27T12:34:56.789"),
+ c: new Timestamp(1234, 9876),
+ d: /foo*\"bar\"/i,
+ e: function foo() {
+ print("Hello World!");
+ }
+});
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv", "-f", "a,b,c,d,e");
+assert.eq(3, c.count(), "setup2");
+t.runTool("export", "--out", t.extFile, "-d", t.baseName, "-c", "foo", "--csv", "-f", "a,b,c,d,e");
c.drop();
-assert.eq( 0 , c.count() , "after drop" );
+assert.eq(0, c.count(), "after drop");
-t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--type", "csv", "--headerline");
+t.runTool(
+ "import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--type", "csv", "--headerline");
-assert.soon ( 3 + " == c.count()", "after import");
+assert.soon(3 + " == c.count()", "after import");
// Note: Exporting and Importing to/from CSV is not designed to be round-trippable
expected = [];
-expected.push({ a : 1, b : "ObjectId(" + objId.valueOf() + ")", c : [ 1, 2, 3 ], d : { "a" : "hello", "b" : "world" }, e : "-"});
-expected.push({ a : -2.0, b : "", c : "$MinKey", d : "Then he said, \"Hello World!\"", e : 3});
+expected.push({
+ a: 1,
+ b: "ObjectId(" + objId.valueOf() + ")",
+ c: [1, 2, 3],
+ d: {"a": "hello", "b": "world"},
+ e: "-"
+});
+expected.push({a: -2.0, b: "", c: "$MinKey", d: "Then he said, \"Hello World!\"", e: 3});
// "t" should be 1234, but the shell interprets the first field of timestamps as milliseconds while
// they are stored as seconds. See SERVER-7718.
-expected.push({ a : "D76DF8", b : "2009-08-27T12:34:56.789Z",
- c : { "$timestamp" : { "t" : 1234, "i" : 9876 } },
- d : "/foo*\\\"bar\\\"/i", e : tojson(function foo() { print("Hello World!"); })});
+expected.push({
+ a: "D76DF8",
+ b: "2009-08-27T12:34:56.789Z",
+ c: {"$timestamp": {"t": 1234, "i": 9876}},
+ d: "/foo*\\\"bar\\\"/i",
+ e: tojson(function foo() {
+ print("Hello World!");
+ })
+});
actual = [];
-actual.push(c.find({a : 1}).toArray()[0]);
-actual.push(c.find({a : -2.0}).toArray()[0]);
-actual.push(c.find({a : "D76DF8"}).toArray()[0]);
+actual.push(c.find({a: 1}).toArray()[0]);
+actual.push(c.find({a: -2.0}).toArray()[0]);
+actual.push(c.find({a: "D76DF8"}).toArray()[0]);
for (i = 0; i < expected.length; i++) {
delete actual[i]._id;
assert.eq(Object.keys(expected[i]).length, Object.keys(actual[i]).length);
keys = Object.keys(expected[i]);
- for(var j=0;j<keys.length;j++){
+ for (var j = 0; j < keys.length; j++) {
expectedVal = expected[i][keys[j]];
- if((typeof expectedVal)== "object"){
+ if ((typeof expectedVal) == "object") {
// For fields which contain arrays or objects, they have been
// exported as JSON - parse the JSON in the output and verify
// that it matches the original document's value
assert.docEq(expectedVal, JSON.parse(actual[i][keys[j]]), "CSV export " + i);
- }else{
+ } else {
// Otherwise just compare the values directly
assert.eq(expectedVal, actual[i][keys[j]], "CSV export " + i);
}
}
}
-
t.stop();
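A short illustration of the round-trip caveat noted in the hunk above: after a CSV export/import cycle, typed BSON values come back either as plain strings or as JSON text that must be re-parsed. A sketch against the collection handle c from the test:

  // Objects and arrays survive only as JSON strings in the imported documents;
  // ObjectId becomes the string "ObjectId(...)" and BinData a hex string.
  var doc = c.findOne({a: 1});
  var d = JSON.parse(doc.d);  // was exported as {"a": "hello", "b": "world"}
  assert.eq("hello", d.a);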
diff --git a/jstests/tool/csvexport2.js b/jstests/tool/csvexport2.js
index dc12288e83b..7ced84a953c 100644
--- a/jstests/tool/csvexport2.js
+++ b/jstests/tool/csvexport2.js
@@ -1,31 +1,34 @@
// csvexport2.js
-t = new ToolTest( "csvexport2" );
+t = new ToolTest("csvexport2");
-c = t.startDB( "foo" );
+c = t.startDB("foo");
// This test is designed to test exporting of a CodeWithScope object.
-// However, due to SERVER-3391, it is not possible to create a CodeWithScope object in the mongo shell,
-// therefore this test does not work. Once SERVER-3391 is resolved, this test should be un-commented out
+// However, due to SERVER-3391, it is not possible to create a CodeWithScope object in the mongo
+// shell,
+// therefore this test does not work. Once SERVER-3391 is resolved, this test should be
+// un-commented out
-//assert.eq( 0 , c.count() , "setup1" );
+// assert.eq( 0 , c.count() , "setup1" );
-//c.insert({ a : 1 , b : Code("print(\"Hello \" + x);", {"x" : "World!"})})
-//assert.eq( 1 , c.count() , "setup2" );
-//t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv", "-f", "a,b")
+// c.insert({ a : 1 , b : Code("print(\"Hello \" + x);", {"x" : "World!"})})
+// assert.eq( 1 , c.count() , "setup2" );
+// t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv", "-f",
+// "a,b")
+// c.drop()
-//c.drop()
+// assert.eq( 0 , c.count() , "after drop" )
+// t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--type", "csv",
+// "--headerline");
+// assert.soon ( 1 + " == c.count()", "after import");
-//assert.eq( 0 , c.count() , "after drop" )
-//t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--type", "csv", "--headerline");
-//assert.soon ( 1 + " == c.count()", "after import");
-
-//expected = { a : 1, b : "\"{ \"$code\" : print(\"Hello \" + x); , \"$scope\" : { \"x\" : \"World!\" } }"};
-//actual = c.findOne()
-
-//delete actual._id;
-//assert.eq( expected, actual );
+// expected = { a : 1, b : "\"{ \"$code\" : print(\"Hello \" + x); , \"$scope\" : { \"x\" :
+// \"World!\" } }"};
+// actual = c.findOne()
+// delete actual._id;
+// assert.eq( expected, actual );
t.stop();
\ No newline at end of file
diff --git a/jstests/tool/csvimport1.js b/jstests/tool/csvimport1.js
index a85470f2c11..28258bbe37f 100644
--- a/jstests/tool/csvimport1.js
+++ b/jstests/tool/csvimport1.js
@@ -1,40 +1,71 @@
// csvimport1.js
-t = new ToolTest( "csvimport1" );
+t = new ToolTest("csvimport1");
-c = t.startDB( "foo" );
+c = t.startDB("foo");
base = [];
-base.push({ a : 1, b : "this is some text.\nThis text spans multiple lines, and just for fun\ncontains a comma", "c" : "This has leading and trailing whitespace!" });
-base.push({a : 2, b : "When someone says something you \"put it in quotes\"", "c" : "I like embedded quotes/slashes\\backslashes" });
-base.push({a : 3, b : " This line contains the empty string and has leading and trailing whitespace inside the quotes! ", "c" : "" });
-base.push({a : 4, b : "", "c" : "How are empty entries handled?" });
-base.push({a : 5, b : "\"\"", c : "\"This string is in quotes and contains empty quotes (\"\")\""});
-base.push({ a : "a" , b : "b" , c : "c"});
+base.push({
+ a: 1,
+ b: "this is some text.\nThis text spans multiple lines, and just for fun\ncontains a comma",
+ "c": "This has leading and trailing whitespace!"
+});
+base.push({
+ a: 2,
+ b: "When someone says something you \"put it in quotes\"",
+ "c": "I like embedded quotes/slashes\\backslashes"
+});
+base.push({
+ a: 3,
+ b:
+ " This line contains the empty string and has leading and trailing whitespace inside the quotes! ",
+ "c": ""
+});
+base.push({a: 4, b: "", "c": "How are empty entries handled?"});
+base.push({a: 5, b: "\"\"", c: "\"This string is in quotes and contains empty quotes (\"\")\""});
+base.push({a: "a", b: "b", c: "c"});
-assert.eq( 0 , c.count() , "setup" );
+assert.eq(0, c.count(), "setup");
-t.runTool( "import" , "--file" , "jstests/tool/data/csvimport1.csv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "-f" , "a,b,c" );
-assert.soon( base.length + " == c.count()" , "after import 1 " );
+t.runTool("import",
+ "--file",
+ "jstests/tool/data/csvimport1.csv",
+ "-d",
+ t.baseName,
+ "-c",
+ "foo",
+ "--type",
+ "csv",
+ "-f",
+ "a,b,c");
+assert.soon(base.length + " == c.count()", "after import 1 ");
-a = c.find().sort( { a : 1 } ).toArray();
-for (i = 0; i < base.length; i++ ) {
+a = c.find().sort({a: 1}).toArray();
+for (i = 0; i < base.length; i++) {
delete a[i]._id;
- assert.docEq( base[i], a[i], "csv parse " + i);
+ assert.docEq(base[i], a[i], "csv parse " + i);
}
c.drop();
-assert.eq( 0 , c.count() , "after drop" );
+assert.eq(0, c.count(), "after drop");
-t.runTool( "import" , "--file" , "jstests/tool/data/csvimport1.csv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( base.length - 1 , c.count() , "after import 2" );
+t.runTool("import",
+ "--file",
+ "jstests/tool/data/csvimport1.csv",
+ "-d",
+ t.baseName,
+ "-c",
+ "foo",
+ "--type",
+ "csv",
+ "--headerline");
+assert.soon("c.findOne()", "no data after sleep");
+assert.eq(base.length - 1, c.count(), "after import 2");
-x = c.find().sort( { a : 1 } ).toArray();
-for (i = 0; i < base.length - 1; i++ ) {
+x = c.find().sort({a: 1}).toArray();
+for (i = 0; i < base.length - 1; i++) {
delete x[i]._id;
- assert.docEq( base[i], x[i], "csv parse with headerline " + i);
+ assert.docEq(base[i], x[i], "csv parse with headerline " + i);
}
-
t.stop();
diff --git a/jstests/tool/dumpauth.js b/jstests/tool/dumpauth.js
index 86caf260328..7be119a9f54 100644
--- a/jstests/tool/dumpauth.js
+++ b/jstests/tool/dumpauth.js
@@ -8,8 +8,8 @@ var profileName = "system.profile";
var dumpDir = MongoRunner.dataPath + "jstests_tool_dumprestore_dump_system_profile/";
db = m.getDB(dbName);
-db.createUser({user: "testuser" , pwd: "testuser", roles: jsTest.adminUserRoles});
-assert( db.auth( "testuser" , "testuser" ) , "auth failed" );
+db.createUser({user: "testuser", pwd: "testuser", roles: jsTest.adminUserRoles});
+assert(db.auth("testuser", "testuser"), "auth failed");
t = db[colName];
t.drop();
@@ -20,25 +20,30 @@ profile.drop();
db.setProfilingLevel(2);
// Populate the database
-for(var i = 0; i < 100; i++) {
- t.save({ "x": i });
+for (var i = 0; i < 100; i++) {
+ t.save({"x": i});
}
assert.gt(profile.count(), 0, "admin.system.profile should have documents");
assert.eq(t.count(), 100, "testcol should have documents");
// Create a user with backup permissions
-db.createUser({user: "backup" , pwd: "password", roles: ["backup"]});
+db.createUser({user: "backup", pwd: "password", roles: ["backup"]});
// Backup the database with the backup user
-x = runMongoProgram( "mongodump",
- "--db", dbName,
- "--out", dumpDir,
- "--authenticationDatabase=admin",
- "-u", "backup",
- "-p", "password",
- "-h", "127.0.0.1:"+m.port);
+x = runMongoProgram("mongodump",
+ "--db",
+ dbName,
+ "--out",
+ dumpDir,
+ "--authenticationDatabase=admin",
+ "-u",
+ "backup",
+ "-p",
+ "password",
+ "-h",
+ "127.0.0.1:" + m.port);
assert.eq(x, 0, "mongodump should succeed with authentication");
// Assert that a BSON document for admin.system.profile has been produced
-x = runMongoProgram( "bsondump", dumpDir + "/" + dbName + "/" + profileName + ".bson" );
+x = runMongoProgram("bsondump", dumpDir + "/" + dbName + "/" + profileName + ".bson");
assert.eq(x, 0, "bsondump should succeed parsing the profile data");
diff --git a/jstests/tool/dumpfilename1.js b/jstests/tool/dumpfilename1.js
index 4a79a11bdb1..3e826952c4c 100644
--- a/jstests/tool/dumpfilename1.js
+++ b/jstests/tool/dumpfilename1.js
@@ -1,13 +1,12 @@
-//dumpfilename1.js
+// dumpfilename1.js
-//Test designed to make sure error that dumping a collection with "/" fails
+// Test designed to make sure error that dumping a collection with "/" fails
-t = new ToolTest( "dumpfilename1" );
+t = new ToolTest("dumpfilename1");
-t.startDB( "foo" );
+t.startDB("foo");
c = t.db;
-assert.writeOK(c.getCollection("df/").insert({ a: 3 }));
-assert(t.runTool( "dump" , "--out" , t.ext ) != 0, "dump should fail with non-zero return code");
+assert.writeOK(c.getCollection("df/").insert({a: 3}));
+assert(t.runTool("dump", "--out", t.ext) != 0, "dump should fail with non-zero return code");
t.stop();
-
diff --git a/jstests/tool/dumprestore1.js b/jstests/tool/dumprestore1.js
index aabe441244f..dad1eb65a48 100644
--- a/jstests/tool/dumprestore1.js
+++ b/jstests/tool/dumprestore1.js
@@ -1,31 +1,31 @@
// dumprestore1.js
-t = new ToolTest( "dumprestore1" );
+t = new ToolTest("dumprestore1");
-c = t.startDB( "foo" );
-assert.eq( 0 , c.count() , "setup1" );
-c.save( { a : 22 } );
-assert.eq( 1 , c.count() , "setup2" );
+c = t.startDB("foo");
+assert.eq(0, c.count(), "setup1");
+c.save({a: 22});
+assert.eq(1, c.count(), "setup2");
-t.runTool( "dump" , "--out" , t.ext );
+t.runTool("dump", "--out", t.ext);
c.drop();
-assert.eq( 0 , c.count() , "after drop" );
+assert.eq(0, c.count(), "after drop");
-t.runTool( "restore" , "--dir" , t.ext );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-assert.eq( 22 , c.findOne().a , "after restore 2" );
+t.runTool("restore", "--dir", t.ext);
+assert.soon("c.findOne()", "no data after sleep");
+assert.eq(1, c.count(), "after restore 2");
+assert.eq(22, c.findOne().a, "after restore 2");
// ensure that --collection is used with --db. See SERVER-7721
-var ret = t.runTool( "dump" , "--collection" , "col" );
-assert.neq( ret, 0, "mongodump should return failure code" );
+var ret = t.runTool("dump", "--collection", "col");
+assert.neq(ret, 0, "mongodump should return failure code");
t.stop();
// Ensure that --db and --collection are provided when filename is "-" (stdin).
-ret = t.runTool( "restore" , "--collection" , "coll", "--dir", "-" );
-assert.neq( ret, 0, "mongorestore should return failure code" );
+ret = t.runTool("restore", "--collection", "coll", "--dir", "-");
+assert.neq(ret, 0, "mongorestore should return failure code");
t.stop();
-ret = t.runTool( "restore" , "--db" , "db", "--dir", "-" );
-assert.neq( ret, 0, "mongorestore should return failure code" );
+ret = t.runTool("restore", "--db", "db", "--dir", "-");
+assert.neq(ret, 0, "mongorestore should return failure code");
t.stop();
diff --git a/jstests/tool/dumprestore10.js b/jstests/tool/dumprestore10.js
index 6cf3cbbbfa1..7c8cc0ada58 100644
--- a/jstests/tool/dumprestore10.js
+++ b/jstests/tool/dumprestore10.js
@@ -10,7 +10,7 @@ function step(msg) {
step();
-var replTest = new ReplSetTest( {name: name, nodes: 2} );
+var replTest = new ReplSetTest({name: name, nodes: 2});
var nodes = replTest.startSet();
replTest.initiate();
var master = replTest.getPrimary();
@@ -20,7 +20,7 @@ var total = 1000;
step("store data");
var foo = master.getDB("foo");
for (i = 0; i < total; i++) {
- foo.bar.insert({ x: i, y: "abc" });
+ foo.bar.insert({x: i, y: "abc"});
}
}
@@ -33,8 +33,7 @@ step("mongodump from replset");
var data = MongoRunner.dataDir + "/dumprestore10-dump1/";
-runMongoProgram( "mongodump", "--host", "127.0.0.1:"+master.port, "--out", data );
-
+runMongoProgram("mongodump", "--host", "127.0.0.1:" + master.port, "--out", data);
{
step("remove data after dumping");
@@ -48,7 +47,8 @@ runMongoProgram( "mongodump", "--host", "127.0.0.1:"+master.port, "--out", data
step("try mongorestore with write concern");
-runMongoProgram( "mongorestore", "--writeConcern", "2", "--host", "127.0.0.1:"+master.port, "--dir", data );
+runMongoProgram(
+ "mongorestore", "--writeConcern", "2", "--host", "127.0.0.1:" + master.port, "--dir", data);
var x = 0;
diff --git a/jstests/tool/dumprestore3.js b/jstests/tool/dumprestore3.js
index f6a8735d5f6..6ac6ae76c3f 100644
--- a/jstests/tool/dumprestore3.js
+++ b/jstests/tool/dumprestore3.js
@@ -3,7 +3,7 @@
var name = "dumprestore3";
-var replTest = new ReplSetTest( {name: name, nodes: 2} );
+var replTest = new ReplSetTest({name: name, nodes: 2});
var nodes = replTest.startSet();
replTest.initiate();
var primary = replTest.getPrimary();
@@ -12,7 +12,7 @@ var secondary = replTest.getSecondary();
jsTestLog("populate primary");
var foo = primary.getDB("foo");
for (i = 0; i < 20; i++) {
- foo.bar.insert({ x: i, y: "abc" });
+ foo.bar.insert({x: i, y: "abc"});
}
jsTestLog("wait for secondary");
@@ -21,21 +21,21 @@ replTest.awaitReplication();
jsTestLog("mongodump from primary");
var data = MongoRunner.dataDir + "/dumprestore3-other1/";
resetDbpath(data);
-var ret = runMongoProgram( "mongodump", "--host", primary.host, "--out", data );
+var ret = runMongoProgram("mongodump", "--host", primary.host, "--out", data);
assert.eq(ret, 0, "mongodump should exit w/ 0 on primary");
jsTestLog("try mongorestore to secondary");
-ret = runMongoProgram( "mongorestore", "--host", secondary.host, "--dir", data );
+ret = runMongoProgram("mongorestore", "--host", secondary.host, "--dir", data);
assert.neq(ret, 0, "mongorestore should exit w/ 1 on secondary");
jsTestLog("mongoexport from primary");
dataFile = MongoRunner.dataDir + "/dumprestore3-other2.json";
-ret = runMongoProgram( "mongoexport", "--host", primary.host, "--out",
- dataFile, "--db", "foo", "--collection", "bar" );
+ret = runMongoProgram(
+ "mongoexport", "--host", primary.host, "--out", dataFile, "--db", "foo", "--collection", "bar");
assert.eq(ret, 0, "mongoexport should exit w/ 0 on primary");
jsTestLog("mongoimport from secondary");
-ret = runMongoProgram( "mongoimport", "--host", secondary.host, "--file", dataFile );
+ret = runMongoProgram("mongoimport", "--host", secondary.host, "--file", dataFile);
assert.neq(ret, 0, "mongoreimport should exit w/ 1 on secondary");
jsTestLog("stopSet");
diff --git a/jstests/tool/dumprestore4.js b/jstests/tool/dumprestore4.js
index a4d33df7deb..58595f62383 100644
--- a/jstests/tool/dumprestore4.js
+++ b/jstests/tool/dumprestore4.js
@@ -1,6 +1,5 @@
// dumprestore4.js -- see SERVER-2186
-
// The point of this test is to ensure that mongorestore successfully
// constructs indexes when the database being restored into has a
// different name than the database dumped from. There are 2
@@ -9,35 +8,35 @@
// some reason you have another database called "A" at the time of the
// restore, mongorestore shouldn't touch it.
-t = new ToolTest( "dumprestore4" );
+t = new ToolTest("dumprestore4");
-c = t.startDB( "dumprestore4" );
+c = t.startDB("dumprestore4");
-db=t.db;
+db = t.db;
dbname = db.getName();
-dbname2 = "NOT_"+dbname;
+dbname2 = "NOT_" + dbname;
-db2=db.getSisterDB( dbname2 );
+db2 = db.getSisterDB(dbname2);
-db.dropDatabase(); // make sure it's empty
-db2.dropDatabase(); // make sure everybody's empty
+db.dropDatabase(); // make sure it's empty
+db2.dropDatabase(); // make sure everybody's empty
-assert.eq( 0 , c.getIndexes().length , "setup1" );
-c.ensureIndex({ x : 1} );
-assert.eq( 2 , c.getIndexes().length , "setup2" ); // _id and x_1
+assert.eq(0, c.getIndexes().length, "setup1");
+c.ensureIndex({x: 1});
+assert.eq(2, c.getIndexes().length, "setup2"); // _id and x_1
-assert.eq( 0, t.runTool( "dump" , "-d" , dbname, "--out", t.ext ), "dump");
+assert.eq(0, t.runTool("dump", "-d", dbname, "--out", t.ext), "dump");
// to ensure issue (2), we have to clear out the first db.
// By inspection, db.dropIndexes() doesn't get rid of the _id index on c,
// so we have to drop the collection.
c.drop();
-assert.eq( 0, t.runTool( "restore" , "--dir" , t.ext + "/" + dbname, "-d", dbname2 ), "restore" );
+assert.eq(0, t.runTool("restore", "--dir", t.ext + "/" + dbname, "-d", dbname2), "restore");
// issue (1)
-assert.eq( 2 , db2.dumprestore4.getIndexes().length , "after restore 1" );
+assert.eq(2, db2.dumprestore4.getIndexes().length, "after restore 1");
// issue (2)
-assert.eq( 0 , db.dumprestore4.getIndexes().length , "after restore 2" );
+assert.eq(0, db.dumprestore4.getIndexes().length, "after restore 2");
t.stop();
diff --git a/jstests/tool/dumprestore6.js b/jstests/tool/dumprestore6.js
index e342e71f3f1..653dd256895 100644
--- a/jstests/tool/dumprestore6.js
+++ b/jstests/tool/dumprestore6.js
@@ -1,26 +1,30 @@
// Test restoring from a dump with v:0 indexes.
-// mongodump strips the 'v' property from the index specification by default. When using
+// mongodump strips the 'v' property from the index specification by default. When using
// --keepIndexVersion, the 'v' property is not stripped, but index creation will fail.
-var toolTest = new ToolTest( "dumprestore6" );
-var col = toolTest.startDB( "foo" );
+var toolTest = new ToolTest("dumprestore6");
+var col = toolTest.startDB("foo");
var testDb = toolTest.db;
-assert.eq( 0 , col.count() , "setup1" );
+assert.eq(0, col.count(), "setup1");
// Normal restore should succeed and convert v:1 index.
-toolTest.runTool("restore", "--dir", "jstests/tool/data/dumprestore6", "--db",
- "jstests_tool_dumprestore6");
-assert.soon( "col.findOne()" , "no data after sleep" );
-assert.eq( 1 , col.count() , "after restore" );
+toolTest.runTool(
+ "restore", "--dir", "jstests/tool/data/dumprestore6", "--db", "jstests_tool_dumprestore6");
+assert.soon("col.findOne()", "no data after sleep");
+assert.eq(1, col.count(), "after restore");
var indexes = col.getIndexes();
-assert.eq( 2, indexes.length, "there aren't the correct number of indexes" );
+assert.eq(2, indexes.length, "there aren't the correct number of indexes");
// Try with --keepIndexVersion, should fail to restore v:0 index.
testDb.dropDatabase();
-assert.eq( 0 , col.count() , "after drop" );
-toolTest.runTool("restore", "--dir", "jstests/tool/data/dumprestore6", "--db",
- "jstests_tool_dumprestore6", "--keepIndexVersion");
+assert.eq(0, col.count(), "after drop");
+toolTest.runTool("restore",
+ "--dir",
+ "jstests/tool/data/dumprestore6",
+ "--db",
+ "jstests_tool_dumprestore6",
+ "--keepIndexVersion");
indexes = col.getIndexes();
-assert.eq( 1, indexes.length, "there aren't the correct number of indexes" );
+assert.eq(1, indexes.length, "there aren't the correct number of indexes");
toolTest.stop();
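For context on what the two restore runs above assert: index specs on the restored collection carry a 'v' field that can be inspected directly. A sketch using the col handle from the test:

  // Without --keepIndexVersion the dump strips 'v' and the server rebuilds the
  // index at its current version; with --keepIndexVersion the v:0 spec is kept
  // verbatim, index creation fails, and only the implicit _id index remains.
  col.getIndexes().forEach(function(spec) {
      print(spec.name + " v:" + tojson(spec.v));
  });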
diff --git a/jstests/tool/dumprestore7.js b/jstests/tool/dumprestore7.js
index 9a7d09665ef..0598e73c0a8 100644
--- a/jstests/tool/dumprestore7.js
+++ b/jstests/tool/dumprestore7.js
@@ -8,7 +8,7 @@ function step(msg) {
step();
-var replTest = new ReplSetTest( {name: name, nodes: 1} );
+var replTest = new ReplSetTest({name: name, nodes: 1});
var nodes = replTest.startSet();
replTest.initiate();
var master = replTest.getPrimary();
@@ -17,14 +17,20 @@ var master = replTest.getPrimary();
step("first chunk of data");
var foo = master.getDB("foo");
for (i = 0; i < 20; i++) {
- foo.bar.insert({ x: i, y: "abc" });
+ foo.bar.insert({x: i, y: "abc"});
}
}
{
step("wait");
replTest.awaitReplication();
- var time = replTest.getPrimary().getDB("local").getCollection("oplog.rs").find().limit(1).sort({$natural:-1}).next();
+ var time = replTest.getPrimary()
+ .getDB("local")
+ .getCollection("oplog.rs")
+ .find()
+ .limit(1)
+ .sort({$natural: -1})
+ .next();
step(time.ts.t);
}
@@ -32,26 +38,29 @@ var master = replTest.getPrimary();
step("second chunk of data");
var foo = master.getDB("foo");
for (i = 30; i < 50; i++) {
- foo.bar.insert({ x: i, y: "abc" });
+ foo.bar.insert({x: i, y: "abc"});
}
}
-{
- var conn = MongoRunner.runMongod({});
-}
+{ var conn = MongoRunner.runMongod({}); }
step("try mongodump with $timestamp");
var data = MongoRunner.dataDir + "/dumprestore7-dump1/";
-var query = "{\"ts\":{\"$gt\":{\"$timestamp\":{\"t\":"+ time.ts.t + ",\"i\":" + time.ts.i +"}}}}";
+var query = "{\"ts\":{\"$gt\":{\"$timestamp\":{\"t\":" + time.ts.t + ",\"i\":" + time.ts.i + "}}}}";
-MongoRunner.runMongoTool( "mongodump",
- { "host": "127.0.0.1:"+replTest.ports[0],
- "db": "local", "collection": "oplog.rs",
- "query": query, "out": data });
+MongoRunner.runMongoTool("mongodump",
+ {
+ "host": "127.0.0.1:" + replTest.ports[0],
+ "db": "local",
+ "collection": "oplog.rs",
+ "query": query,
+ "out": data
+ });
step("try mongorestore from $timestamp");
-runMongoProgram( "mongorestore", "--host", "127.0.0.1:"+conn.port, "--dir", data, "--writeConcern", 1);
+runMongoProgram(
+ "mongorestore", "--host", "127.0.0.1:" + conn.port, "--dir", data, "--writeConcern", 1);
var x = 9;
x = conn.getDB("local").getCollection("oplog.rs").count();
@@ -61,4 +70,3 @@ step("stopSet");
replTest.stopSet();
step("SUCCESS");
-
diff --git a/jstests/tool/dumprestore8.js b/jstests/tool/dumprestore8.js
index edc1a874343..9cdae87df80 100644
--- a/jstests/tool/dumprestore8.js
+++ b/jstests/tool/dumprestore8.js
@@ -1,107 +1,110 @@
// dumprestore8.js
-
// This file tests that indexes and capped collection options get properly dumped and restored.
-// It checks that this works both when doing a full database dump/restore and when doing it just for a single db or collection
+// It checks that this works both when doing a full database dump/restore and when doing it just for
+// a single db or collection
-t = new ToolTest( "dumprestore8" );
+t = new ToolTest("dumprestore8");
-t.startDB( "foo" );
+t.startDB("foo");
db = t.db;
dbname = db.getName();
-dbname2 = "NOT_"+dbname;
+dbname2 = "NOT_" + dbname;
db.dropDatabase();
-assert.eq( 0 , db.foo.count() , "setup1" );
-db.foo.save( { a : 1, b : 1 } );
-db.foo.ensureIndex({a:1});
-db.foo.ensureIndex({b:1, _id:-1});
-assert.eq( 1 , db.foo.count() , "setup2" );
-
+assert.eq(0, db.foo.count(), "setup1");
+db.foo.save({a: 1, b: 1});
+db.foo.ensureIndex({a: 1});
+db.foo.ensureIndex({b: 1, _id: -1});
+assert.eq(1, db.foo.count(), "setup2");
-assert.eq( 0 , db.bar.count() , "setup3" );
-db.createCollection("bar", {capped:true, size:1000, max:10});
+assert.eq(0, db.bar.count(), "setup3");
+db.createCollection("bar", {capped: true, size: 1000, max: 10});
for (var i = 0; i < 1000; i++) {
- db.bar.save( { x : i } );
+ db.bar.save({x: i});
}
-db.bar.ensureIndex({x:1});
+db.bar.ensureIndex({x: 1});
barDocCount = db.bar.count();
-assert.gt( barDocCount, 0 , "No documents inserted" );
-assert.lt( db.bar.count(), 1000 , "Capped collection didn't evict documents" );
-assert.eq( 5 , db.foo.getIndexes().length + db.bar.getIndexes().length, "Indexes weren't created right" );
-
+assert.gt(barDocCount, 0, "No documents inserted");
+assert.lt(db.bar.count(), 1000, "Capped collection didn't evict documents");
+assert.eq(5,
+ db.foo.getIndexes().length + db.bar.getIndexes().length,
+ "Indexes weren't created right");
// Full dump/restore
-t.runTool( "dump" , "--out" , t.ext );
+t.runTool("dump", "--out", t.ext);
db.dropDatabase();
-assert.eq( 0 , db.foo.count() , "foo not dropped" );
-assert.eq( 0 , db.bar.count() , "bar not dropped" );
-assert.eq( 0 , db.bar.getIndexes().length , "indexes on bar not dropped" );
-assert.eq( 0 , db.foo.getIndexes().length , "indexes on foo not dropped" );
+assert.eq(0, db.foo.count(), "foo not dropped");
+assert.eq(0, db.bar.count(), "bar not dropped");
+assert.eq(0, db.bar.getIndexes().length, "indexes on bar not dropped");
+assert.eq(0, db.foo.getIndexes().length, "indexes on foo not dropped");
-t.runTool( "restore" , "--dir" , t.ext );
+t.runTool("restore", "--dir", t.ext);
-assert.soon( "db.foo.findOne()" , "no data after sleep" );
-assert.eq( 1 , db.foo.count() , "wrong number of docs restored to foo" );
-assert.eq( barDocCount, db.bar.count(), "wrong number of docs restored to bar" );
+assert.soon("db.foo.findOne()", "no data after sleep");
+assert.eq(1, db.foo.count(), "wrong number of docs restored to foo");
+assert.eq(barDocCount, db.bar.count(), "wrong number of docs restored to bar");
for (var i = 0; i < 10; i++) {
- db.bar.save({x:i});
+ db.bar.save({x: i});
}
-assert.eq( barDocCount, db.bar.count(), "Capped collection didn't evict documents after restore." );
-assert.eq( 5 , db.foo.getIndexes().length + db.bar.getIndexes().length, "Indexes weren't created correctly by restore");
+assert.eq(barDocCount, db.bar.count(), "Capped collection didn't evict documents after restore.");
+assert.eq(5,
+ db.foo.getIndexes().length + db.bar.getIndexes().length,
+ "Indexes weren't created correctly by restore");
// Dump/restore single DB
dumppath = t.ext + "singledbdump/";
mkdir(dumppath);
-t.runTool( "dump" , "-d", dbname, "--out" , dumppath );
+t.runTool("dump", "-d", dbname, "--out", dumppath);
db.dropDatabase();
-assert.eq( 0 , db.foo.count() , "foo not dropped2" );
-assert.eq( 0 , db.bar.count() , "bar not dropped2" );
-assert.eq( 0 , db.foo.getIndexes().length , "indexes on foo not dropped2" );
-assert.eq( 0 , db.bar.getIndexes().length , "indexes on bar not dropped2" );
+assert.eq(0, db.foo.count(), "foo not dropped2");
+assert.eq(0, db.bar.count(), "bar not dropped2");
+assert.eq(0, db.foo.getIndexes().length, "indexes on foo not dropped2");
+assert.eq(0, db.bar.getIndexes().length, "indexes on bar not dropped2");
-t.runTool( "restore" , "-d", dbname2, "--dir" , dumppath + dbname );
+t.runTool("restore", "-d", dbname2, "--dir", dumppath + dbname);
db = db.getSiblingDB(dbname2);
-assert.soon( "db.foo.findOne()" , "no data after sleep 2" );
-assert.eq( 1 , db.foo.count() , "wrong number of docs restored to foo 2" );
-assert.eq( barDocCount, db.bar.count(), "wrong number of docs restored to bar 2" );
+assert.soon("db.foo.findOne()", "no data after sleep 2");
+assert.eq(1, db.foo.count(), "wrong number of docs restored to foo 2");
+assert.eq(barDocCount, db.bar.count(), "wrong number of docs restored to bar 2");
for (var i = 0; i < 10; i++) {
- db.bar.save({x:i});
+ db.bar.save({x: i});
}
-assert.eq( barDocCount, db.bar.count(), "Capped collection didn't evict documents after restore 2." );
-assert.eq( 5 , db.foo.getIndexes().length + db.bar.getIndexes().length, "Indexes weren't created correctly by restore 2");
-
+assert.eq(barDocCount, db.bar.count(), "Capped collection didn't evict documents after restore 2.");
+assert.eq(5,
+ db.foo.getIndexes().length + db.bar.getIndexes().length,
+ "Indexes weren't created correctly by restore 2");
// Dump/restore single collection
dumppath = t.ext + "singlecolldump/";
mkdir(dumppath);
-t.runTool( "dump" , "-d", dbname2, "-c", "bar", "--out" , dumppath );
+t.runTool("dump", "-d", dbname2, "-c", "bar", "--out", dumppath);
db.dropDatabase();
-assert.eq( 0 , db.bar.count() , "bar not dropped3" );
-assert.eq( 0 , db.bar.getIndexes().length , "indexes not dropped3" );
+assert.eq(0, db.bar.count(), "bar not dropped3");
+assert.eq(0, db.bar.getIndexes().length, "indexes not dropped3");
-t.runTool( "restore" , "-d", dbname, "-c", "baz", "--dir" , dumppath + dbname2 + "/bar.bson" );
+t.runTool("restore", "-d", dbname, "-c", "baz", "--dir", dumppath + dbname2 + "/bar.bson");
db = db.getSiblingDB(dbname);
-assert.soon( "db.baz.findOne()" , "no data after sleep 2" );
-assert.eq( barDocCount, db.baz.count(), "wrong number of docs restored to bar 2" );
+assert.soon("db.baz.findOne()", "no data after sleep 2");
+assert.eq(barDocCount, db.baz.count(), "wrong number of docs restored to bar 2");
for (var i = 0; i < 10; i++) {
- db.baz.save({x:i});
+ db.baz.save({x: i});
}
-assert.eq( barDocCount, db.baz.count(), "Capped collection didn't evict documents after restore 3." );
-assert.eq( 2 , db.baz.getIndexes().length , "Indexes weren't created correctly by restore 3" );
+assert.eq(barDocCount, db.baz.count(), "Capped collection didn't evict documents after restore 3.");
+assert.eq(2, db.baz.getIndexes().length, "Indexes weren't created correctly by restore 3");
t.stop();
diff --git a/jstests/tool/dumprestore9.js b/jstests/tool/dumprestore9.js
index 5a36c54efd5..7db1f817b24 100644
--- a/jstests/tool/dumprestore9.js
+++ b/jstests/tool/dumprestore9.js
@@ -1,93 +1,103 @@
// Test disabled until SERVER-3853 is finished
-if(0) {
+if (0) {
+ (function() {
-(function() {
+ var name = "dumprestore9";
+ function step(msg) {
+ msg = msg || "";
+ this.x = (this.x || 0) + 1;
+ print('\n' + name + ".js step " + this.x + ' ' + msg);
+ }
-var name = "dumprestore9";
-function step(msg) {
- msg = msg || "";
- this.x = (this.x || 0) + 1;
- print('\n' + name + ".js step " + this.x + ' ' + msg);
-}
-
-var s = new ShardingTest({ name: "dumprestore9a",
- shards: 2,
- mongos: 3,
- other: { chunkSize: 1, enableBalancer : 1 } });
+ var s = new ShardingTest({
+ name: "dumprestore9a",
+ shards: 2,
+ mongos: 3,
+ other: {chunkSize: 1, enableBalancer: 1}
+ });
-step("Shard collection");
+ step("Shard collection");
-s.adminCommand( { enablesharding : "aaa" } ); // Make this db alphabetically before 'config' so it gets restored first
-s.ensurePrimaryShard('aaa', 'shard0001');
-s.adminCommand( { shardcollection : "aaa.foo" , key : { x : 1 } } );
+ s.adminCommand({
+ enablesharding: "aaa"
+ }); // Make this db alphabetically before 'config' so it gets restored first
+ s.ensurePrimaryShard('aaa', 'shard0001');
+ s.adminCommand({shardcollection: "aaa.foo", key: {x: 1}});
-db = s.getDB( "aaa" );
-coll = db.foo;
+ db = s.getDB("aaa");
+ coll = db.foo;
-step("insert data");
+ step("insert data");
-str = 'a';
-while (str.length < 1024*512) {
- str += str;
-}
+ str = 'a';
+ while (str.length < 1024 * 512) {
+ str += str;
+ }
-numDocs = 20;
-for (var i = 0; i < numDocs; i++) {
- coll.insert({x:i, str:str});
-}
+ numDocs = 20;
+ for (var i = 0; i < numDocs; i++) {
+ coll.insert({x: i, str: str});
+ }
-step("Wait for balancing");
+ step("Wait for balancing");
-assert.soon( function(){ var x = s.chunkDiff( "foo" , "aaa" ); print( "chunk diff: " + x ); return x < 2; } , "no balance happened" , 8 * 60 * 1000 , 2000 );
+ assert.soon(function() {
+ var x = s.chunkDiff("foo", "aaa");
+ print("chunk diff: " + x);
+ return x < 2;
+ }, "no balance happened", 8 * 60 * 1000, 2000);
-assert.eq(numDocs, coll.count(), "Documents weren't inserted correctly");
+ assert.eq(numDocs, coll.count(), "Documents weren't inserted correctly");
-step("dump cluster");
+ step("dump cluster");
-dumpdir = MongoRunner.dataDir + "/dumprestore9-dump1/";
-resetDbpath(dumpdir);
-runMongoProgram( "mongodump", "--host", s._mongos[0].host, "--out", dumpdir );
+ dumpdir = MongoRunner.dataDir + "/dumprestore9-dump1/";
+ resetDbpath(dumpdir);
+ runMongoProgram("mongodump", "--host", s._mongos[0].host, "--out", dumpdir);
-step("Shutting down cluster");
+ step("Shutting down cluster");
-s.stop();
+ s.stop();
-step("Starting up clean cluster");
-s = new ShardingTest({ name: "dumprestore9b",
- shards: 2,
- mongos: 3,
- other: {chunkSize:1} });
+ step("Starting up clean cluster");
+ s = new ShardingTest({name: "dumprestore9b", shards: 2, mongos: 3, other: {chunkSize: 1}});
-db = s.getDB( "aaa" );
-coll = db.foo;
+ db = s.getDB("aaa");
+ coll = db.foo;
-assert.eq(0, coll.count(), "Data wasn't cleaned up by restarting sharding test");
+ assert.eq(0, coll.count(), "Data wasn't cleaned up by restarting sharding test");
-step("Restore data and config");
+ step("Restore data and config");
-runMongoProgram( "mongorestore", dumpdir, "--host", s._mongos[1].host, "--restoreShardingConfig", "--forceConfigRestore");
+ runMongoProgram("mongorestore",
+ dumpdir,
+ "--host",
+ s._mongos[1].host,
+ "--restoreShardingConfig",
+ "--forceConfigRestore");
-config = s.getDB("config");
-assert(config.databases.findOne({_id:'aaa'}).partitioned, "Config data wasn't restored properly");
+ config = s.getDB("config");
+ assert(config.databases.findOne({_id: 'aaa'}).partitioned,
+ "Config data wasn't restored properly");
-assert( s.chunkDiff( "foo" , "aaa" ) < 2, "Chunk data wasn't restored properly");
+ assert(s.chunkDiff("foo", "aaa") < 2, "Chunk data wasn't restored properly");
-assert.eq(numDocs, coll.count(), "Didn't restore all documents properly2");
-assert.eq(numDocs, coll.find().itcount(), "Didn't restore all documents properly");
+ assert.eq(numDocs, coll.count(), "Didn't restore all documents properly2");
+ assert.eq(numDocs, coll.find().itcount(), "Didn't restore all documents properly");
-for (var i = 0; i < numDocs; i++) {
- doc = coll.findOne({x:i});
- assert.eq(i, doc.x, "Doc missing from the shard it should be on");
-}
-
-for (var i = 0; i < s._connections.length; i++) {
- assert(s._connections[i].getDB("aaa").foo.count() > 0, "No data on shard: " + s._connections[i].host);
-}
+ for (var i = 0; i < numDocs; i++) {
+ doc = coll.findOne({x: i});
+ assert.eq(i, doc.x, "Doc missing from the shard it should be on");
+ }
-step("Stop cluster");
-s.stop();
-step("SUCCESS");
+ for (var i = 0; i < s._connections.length; i++) {
+ assert(s._connections[i].getDB("aaa").foo.count() > 0,
+ "No data on shard: " + s._connections[i].host);
+ }
-})();
+ step("Stop cluster");
+ s.stop();
+ step("SUCCESS");
+ })();
}
diff --git a/jstests/tool/dumprestoreWithNoOptions.js b/jstests/tool/dumprestoreWithNoOptions.js
index b822deb93e8..1062abd1e94 100644
--- a/jstests/tool/dumprestoreWithNoOptions.js
+++ b/jstests/tool/dumprestoreWithNoOptions.js
@@ -8,107 +8,122 @@
// database dump/restore and when doing it just for a
// single db or collection.
+t = new ToolTest("dumprestoreWithNoOptions");
-t = new ToolTest( "dumprestoreWithNoOptions" );
-
-t.startDB( "foo" );
+t.startDB("foo");
db = t.db;
// We turn this off to prevent the server from touching the 'options' field in system.namespaces.
// This is important because we check exact values of the 'options' field in this test.
-db.adminCommand({setParameter:1, newCollectionsUsePowerOf2Sizes: false});
+db.adminCommand({setParameter: 1, newCollectionsUsePowerOf2Sizes: false});
dbname = db.getName();
-dbname2 = "NOT_"+dbname;
+dbname2 = "NOT_" + dbname;
db.dropDatabase();
var defaultFlags = {};
-var options = { capped: true, size: 4096, autoIndexId: true };
+var options = {
+ capped: true,
+ size: 4096,
+ autoIndexId: true
+};
db.createCollection('capped', options);
-assert.eq( 1, db.capped.getIndexes().length, "auto index not created" );
+assert.eq(1, db.capped.getIndexes().length, "auto index not created");
var cappedOptions = db.capped.exists().options;
-for ( var opt in options ) {
- assert.eq(options[opt], cappedOptions[opt],
+for (var opt in options) {
+ assert.eq(options[opt],
+ cappedOptions[opt],
'invalid option:' + tojson(options) + " " + tojson(cappedOptions));
}
-assert.writeOK(db.capped.insert({ x: 1 }));
+assert.writeOK(db.capped.insert({x: 1}));
// Full dump/restore
-t.runTool( "dump" , "--out" , t.ext );
+t.runTool("dump", "--out", t.ext);
db.dropDatabase();
-assert.eq( 0, db.capped.count(), "capped not dropped");
-assert.eq( 0, db.capped.getIndexes().length, "indexes not dropped" );
+assert.eq(0, db.capped.count(), "capped not dropped");
+assert.eq(0, db.capped.getIndexes().length, "indexes not dropped");
-t.runTool( "restore" , "--dir" , t.ext , "--noOptionsRestore");
+t.runTool("restore", "--dir", t.ext, "--noOptionsRestore");
-assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
+assert.eq(1, db.capped.count(), "wrong number of docs restored to capped");
assert(true !== db.capped.stats().capped, "restore options were not ignored");
-assert.eq( defaultFlags, db.capped.exists().options,
- "restore options not ignored: " + tojson( db.capped.exists() ) );
+assert.eq(defaultFlags,
+ db.capped.exists().options,
+ "restore options not ignored: " + tojson(db.capped.exists()));
// Dump/restore single DB
db.dropDatabase();
-var options = { capped: true, size: 4096, autoIndexId: true };
+var options = {
+ capped: true,
+ size: 4096,
+ autoIndexId: true
+};
db.createCollection('capped', options);
-assert.eq( 1, db.capped.getIndexes().length, "auto index not created" );
+assert.eq(1, db.capped.getIndexes().length, "auto index not created");
var cappedOptions = db.capped.exists().options;
-for ( var opt in options ) {
- assert.eq(options[opt], cappedOptions[opt], 'invalid option');
+for (var opt in options) {
+ assert.eq(options[opt], cappedOptions[opt], 'invalid option');
}
-assert.writeOK(db.capped.insert({ x: 1 }));
+assert.writeOK(db.capped.insert({x: 1}));
dumppath = t.ext + "noOptionsSingleDump/";
mkdir(dumppath);
-t.runTool( "dump" , "-d", dbname, "--out" , dumppath );
+t.runTool("dump", "-d", dbname, "--out", dumppath);
db.dropDatabase();
-assert.eq( 0, db.capped.count(), "capped not dropped");
-assert.eq( 0, db.capped.getIndexes().length, "indexes not dropped" );
+assert.eq(0, db.capped.count(), "capped not dropped");
+assert.eq(0, db.capped.getIndexes().length, "indexes not dropped");
-t.runTool( "restore" , "-d", dbname2, "--dir" , dumppath + dbname, "--noOptionsRestore");
+t.runTool("restore", "-d", dbname2, "--dir", dumppath + dbname, "--noOptionsRestore");
db = db.getSiblingDB(dbname2);
-assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
+assert.eq(1, db.capped.count(), "wrong number of docs restored to capped");
assert(true !== db.capped.stats().capped, "restore options were not ignored");
-assert.eq( defaultFlags, db.capped.exists().options,
- "restore options not ignored: " + tojson( db.capped.exists() ) );
+assert.eq(defaultFlags,
+ db.capped.exists().options,
+ "restore options not ignored: " + tojson(db.capped.exists()));
// Dump/restore single collection
db.dropDatabase();
-var options = { capped: true, size: 4096, autoIndexId: true };
+var options = {
+ capped: true,
+ size: 4096,
+ autoIndexId: true
+};
db.createCollection('capped', options);
-assert.eq( 1, db.capped.getIndexes().length, "auto index not created" );
+assert.eq(1, db.capped.getIndexes().length, "auto index not created");
var cappedOptions = db.capped.exists().options;
-for ( var opt in options ) {
- assert.eq(options[opt], cappedOptions[opt], 'invalid option');
+for (var opt in options) {
+ assert.eq(options[opt], cappedOptions[opt], 'invalid option');
}
-assert.writeOK(db.capped.insert({ x: 1 }));
+assert.writeOK(db.capped.insert({x: 1}));
dumppath = t.ext + "noOptionsSingleColDump/";
mkdir(dumppath);
dbname = db.getName();
-t.runTool( "dump" , "-d", dbname, "-c", "capped", "--out" , dumppath );
+t.runTool("dump", "-d", dbname, "-c", "capped", "--out", dumppath);
db.dropDatabase();
-assert.eq( 0, db.capped.count(), "capped not dropped");
-assert.eq( 0, db.capped.getIndexes().length, "indexes not dropped" );
+assert.eq(0, db.capped.count(), "capped not dropped");
+assert.eq(0, db.capped.getIndexes().length, "indexes not dropped");
-t.runTool( "restore", "-d", dbname, "--drop", "--noOptionsRestore", dumppath + dbname );
+t.runTool("restore", "-d", dbname, "--drop", "--noOptionsRestore", dumppath + dbname);
db = db.getSiblingDB(dbname);
-assert.eq( 1, db.capped.count() , "wrong number of docs restored to capped" );
-assert( true !== db.capped.stats().capped, "restore options were not ignored" );
-assert.eq( defaultFlags, db.capped.exists().options,
- "restore options not ignored: " + tojson( db.capped.exists() ) );
+assert.eq(1, db.capped.count(), "wrong number of docs restored to capped");
+assert(true !== db.capped.stats().capped, "restore options were not ignored");
+assert.eq(defaultFlags,
+ db.capped.exists().options,
+ "restore options not ignored: " + tojson(db.capped.exists()));
t.stop();
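The pattern exercised three times above can be condensed as follows (a sketch reusing the harness handles t and db from the test; the capped options are the ones it checks):

  // With --noOptionsRestore, collection options such as capped/size/autoIndexId
  // are deliberately ignored, so the restored collection comes back as a plain
  // collection with default (empty) options.
  db.createCollection('capped', {capped: true, size: 4096, autoIndexId: true});
  assert.writeOK(db.capped.insert({x: 1}));
  t.runTool("dump", "--out", t.ext);
  db.dropDatabase();
  t.runTool("restore", "--dir", t.ext, "--noOptionsRestore");
  assert.eq({}, db.capped.exists().options, "options were not ignored");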
diff --git a/jstests/tool/dumprestore_auth.js b/jstests/tool/dumprestore_auth.js
index 4bda54a5bdc..a4a19650e77 100644
--- a/jstests/tool/dumprestore_auth.js
+++ b/jstests/tool/dumprestore_auth.js
@@ -1,48 +1,64 @@
// dumprestore_auth.js
-
-t = new ToolTest("dumprestore_auth", { auth : "" });
+t = new ToolTest("dumprestore_auth", {auth: ""});
c = t.startDB("foo");
var dbName = c.getDB().toString();
-print("DB is ",dbName);
+print("DB is ", dbName);
adminDB = c.getDB().getSiblingDB('admin');
adminDB.createUser({user: 'admin', pwd: 'password', roles: ['root']});
-adminDB.auth('admin','password');
+adminDB.auth('admin', 'password');
adminDB.createUser({user: 'backup', pwd: 'password', roles: ['backup']});
adminDB.createUser({user: 'restore', pwd: 'password', roles: ['restore']});
// Add user defined roles & users with those roles
var testUserAdmin = c.getDB().getSiblingDB(dbName);
-var backupActions = ["find","listCollections", "listIndexes"];
-testUserAdmin.createRole({role: "backupFoo",
- privileges: [{resource: {db: dbName, collection: "foo"}, actions:backupActions},
- {resource: {db: dbName, collection: "" },
- actions: backupActions}],
- roles: []});
+var backupActions = ["find", "listCollections", "listIndexes"];
+testUserAdmin.createRole({
+ role: "backupFoo",
+ privileges: [
+ {resource: {db: dbName, collection: "foo"}, actions: backupActions},
+ {resource: {db: dbName, collection: ""}, actions: backupActions}
+ ],
+ roles: []
+});
testUserAdmin.createUser({user: 'backupFoo', pwd: 'password', roles: ['backupFoo']});
-var restoreActions = ["collMod", "createCollection","createIndex","dropCollection","insert","listCollections","listIndexes"];
+var restoreActions = [
+ "collMod",
+ "createCollection",
+ "createIndex",
+ "dropCollection",
+ "insert",
+ "listCollections",
+ "listIndexes"
+];
var restoreActionsFind = restoreActions;
restoreActionsFind.push("find");
-testUserAdmin.createRole({role: "restoreChester",
- privileges: [{resource: {db: dbName, collection: "chester"}, actions: restoreActions},
- {resource: {db: dbName, collection: ""}, actions:["listCollections","listIndexes"]},
- ],
- roles: []});
-testUserAdmin.createRole({role: "restoreFoo",
- privileges: [{resource: {db: dbName, collection: "foo"}, actions:restoreActions},
- {resource: {db: dbName, collection: ""}, actions:["listCollections","listIndexes"]},
- ],
- roles: []});
+testUserAdmin.createRole({
+ role: "restoreChester",
+ privileges: [
+ {resource: {db: dbName, collection: "chester"}, actions: restoreActions},
+ {resource: {db: dbName, collection: ""}, actions: ["listCollections", "listIndexes"]},
+ ],
+ roles: []
+});
+testUserAdmin.createRole({
+ role: "restoreFoo",
+ privileges: [
+ {resource: {db: dbName, collection: "foo"}, actions: restoreActions},
+ {resource: {db: dbName, collection: ""}, actions: ["listCollections", "listIndexes"]},
+ ],
+ roles: []
+});
testUserAdmin.createUser({user: 'restoreChester', pwd: 'password', roles: ['restoreChester']});
testUserAdmin.createUser({user: 'restoreFoo', pwd: 'password', roles: ['restoreFoo']});
var sysUsers = adminDB.system.users.count();
-assert.eq(0 , c.count() , "setup1");
-c.save({ a : 22 });
-assert.eq(1 , c.count() , "setup2");
+assert.eq(0, c.count(), "setup1");
+c.save({a: 22});
+assert.eq(1, c.count(), "setup2");
assert.commandWorked(c.runCommand("collMod", {usePowerOf2Sizes: false}));
@@ -56,20 +72,28 @@ collections.forEach(function(coll) {
assert.neq(null, fooColl, "foo collection doesn't exist");
assert(!fooColl.options.flags, "find namespaces 1");
-t.runTool("dump" , "--out" , t.ext, "--username", "backup", "--password", "password");
+t.runTool("dump", "--out", t.ext, "--username", "backup", "--password", "password");
c.drop();
-assert.eq(0 , c.count() , "after drop");
+assert.eq(0, c.count(), "after drop");
// Restore should fail without user & pass
-t.runTool("restore" , "--dir" , t.ext, "--writeConcern" ,"0");
-assert.eq(0 , c.count() , "after restore without auth");
+t.runTool("restore", "--dir", t.ext, "--writeConcern", "0");
+assert.eq(0, c.count(), "after restore without auth");
// Restore should pass with authorized user
-t.runTool("restore" , "--dir" , t.ext, "--username", "restore", "--password", "password", "--writeConcern", "0");
-assert.soon("c.findOne()" , "no data after sleep");
-assert.eq(1 , c.count() , "after restore 2");
-assert.eq(22 , c.findOne().a , "after restore 2");
+t.runTool("restore",
+ "--dir",
+ t.ext,
+ "--username",
+ "restore",
+ "--password",
+ "password",
+ "--writeConcern",
+ "0");
+assert.soon("c.findOne()", "no data after sleep");
+assert.eq(1, c.count(), "after restore 2");
+assert.eq(22, c.findOne().a, "after restore 2");
collections = c.getDB().getCollectionInfos();
fooColl = null;
@@ -84,23 +108,52 @@ assert(!fooColl.options.flags, "find namespaces 2");
assert.eq(sysUsers, adminDB.system.users.count());
// Dump & restore DB/colection with user defined roles
-t.runTool("dump" , "--out" , t.ext, "--username", "backupFoo", "--password", "password",
- "--db", dbName, "--collection", "foo");
+t.runTool("dump",
+ "--out",
+ t.ext,
+ "--username",
+ "backupFoo",
+ "--password",
+ "password",
+ "--db",
+ dbName,
+ "--collection",
+ "foo");
c.drop();
-assert.eq(0 , c.count() , "after drop");
+assert.eq(0, c.count(), "after drop");
// Restore with wrong user
-t.runTool("restore" , "--username", "restoreChester", "--password", "password",
- "--db", dbName, "--collection", "foo", t.ext+dbName+"/foo.bson", "--writeConcern", "0");
-assert.eq(0 , c.count() , "after restore with wrong user");
+t.runTool("restore",
+ "--username",
+ "restoreChester",
+ "--password",
+ "password",
+ "--db",
+ dbName,
+ "--collection",
+ "foo",
+ t.ext + dbName + "/foo.bson",
+ "--writeConcern",
+ "0");
+assert.eq(0, c.count(), "after restore with wrong user");
// Restore with proper user
-t.runTool("restore" , "--username", "restoreFoo", "--password", "password",
- "--db", dbName, "--collection", "foo", t.ext+dbName+"/foo.bson", "--writeConcern", "0");
-assert.soon("c.findOne()" , "no data after sleep");
-assert.eq(1 , c.count() , "after restore 3");
-assert.eq(22 , c.findOne().a , "after restore 3");
+t.runTool("restore",
+ "--username",
+ "restoreFoo",
+ "--password",
+ "password",
+ "--db",
+ dbName,
+ "--collection",
+ "foo",
+ t.ext + dbName + "/foo.bson",
+ "--writeConcern",
+ "0");
+assert.soon("c.findOne()", "no data after sleep");
+assert.eq(1, c.count(), "after restore 3");
+assert.eq(22, c.findOne().a, "after restore 3");
collections = c.getDB().getCollectionInfos();
fooColl = null;
diff --git a/jstests/tool/dumprestore_auth2.js b/jstests/tool/dumprestore_auth2.js
index 4d410d34ca9..275b47ceac6 100644
--- a/jstests/tool/dumprestore_auth2.js
+++ b/jstests/tool/dumprestore_auth2.js
@@ -4,110 +4,118 @@
var dumpRestoreAuth2 = function(backup_role, restore_role) {
- t = new ToolTest("dumprestore_auth2", {auth: ""});
-
- coll = t.startDB("foo");
- admindb = coll.getDB().getSiblingDB("admin");
-
- // Create the relevant users and roles.
- admindb.createUser({user: "root", pwd: "pass", roles: ["root"]});
- admindb.auth("root", "pass");
-
- admindb.createUser({user: "backup", pwd: "pass", roles: [backup_role]});
- admindb.createUser({user: "restore", pwd: "pass", roles: [restore_role]});
-
- admindb.createRole({role: "customRole",
- privileges:[{resource: {db: "jstests_tool_dumprestore_auth2",
- collection: "foo"},
- actions: ["find"]}],
- roles:[]});
- admindb.createUser({user: "test", pwd: "pass", roles: ["customRole"]});
-
- coll.insert({word: "tomato"});
- assert.eq(1, coll.count());
-
- assert.eq(4, admindb.system.users.count(), "setup users");
- assert.eq(2, admindb.system.users.getIndexes().length,
- "setup2: " + tojson( admindb.system.users.getIndexes() ) );
- assert.eq(1, admindb.system.roles.count(), "setup3");
- assert.eq(2, admindb.system.roles.getIndexes().length, "setup4");
- assert.eq(1, admindb.system.version.count());
- var versionDoc = admindb.system.version.findOne();
-
- // Logout root user.
- admindb.logout();
-
- // Verify that the custom role works as expected.
- admindb.auth("test", "pass");
- assert.eq("tomato", coll.findOne().word);
- admindb.logout();
-
- // Dump the database.
- t.runTool("dump", "--out", t.ext, "--username", "backup", "--password", "pass");
-
- // Drop the relevant data in the database.
- admindb.auth("root", "pass");
- coll.getDB().dropDatabase();
- admindb.dropUser("backup");
- admindb.dropUser("test");
- admindb.dropRole("customRole");
-
- assert.eq(2, admindb.system.users.count(), "didn't drop backup and test users");
- assert.eq(0, admindb.system.roles.count(), "didn't drop roles");
- assert.eq(0, coll.count(), "didn't drop foo coll");
-
- // This test depends on W=0 to mask unique index violations.
- // This should be fixed once we implement TOOLS-341
- t.runTool("restore",
- "--dir", t.ext,
- "--username", "restore",
- "--password", "pass",
- "--writeConcern", "0");
-
- assert.soon("admindb.system.users.findOne()", "no data after restore");
- assert.eq(4, admindb.system.users.count(), "didn't restore users");
- assert.eq(2, admindb.system.users.getIndexes().length,
- "didn't restore user indexes");
- assert.eq(1, admindb.system.roles.find({role:'customRole'}).count(), "didn't restore roles");
- assert.eq(2, admindb.system.roles.getIndexes().length,
- "didn't restore role indexes");
-
- admindb.logout();
-
- // Login as user with customRole to verify privileges are restored.
- admindb.auth("test", "pass");
- assert.eq("tomato", coll.findOne().word);
- admindb.logout();
-
- admindb.auth("root", "pass");
- admindb.createUser({user: "root2", pwd: "pass", roles: ["root"]});
- admindb.dropRole("customRole");
- admindb.createRole({role: "customRole2", roles: [], privileges:[]});
- admindb.dropUser("root");
- admindb.logout();
-
- t.runTool("restore",
- "--dir", t.ext,
- "--username", "restore",
- "--password", "pass",
- "--drop",
- "--writeConcern", "0");
-
- admindb.auth("root", "pass");
- assert.soon("1 == admindb.system.users.find({user:'root'}).count()", "didn't restore users 2");
- assert.eq(0, admindb.system.users.find({user:'root2'}).count(), "didn't drop users");
- assert.eq(0, admindb.system.roles.find({role:'customRole2'}).count(), "didn't drop roles");
- assert.eq(1, admindb.system.roles.find({role:'customRole'}).count(), "didn't restore roles");
- assert.eq(2, admindb.system.users.getIndexes().length,
- "didn't maintain user indexes");
- assert.eq(2, admindb.system.roles.getIndexes().length,
- "didn't maintain role indexes");
- assert.eq(1, admindb.system.version.count(), "didn't restore version");
- assert.docEq(versionDoc, admindb.system.version.findOne(),
- "version doc wasn't restored properly");
- admindb.logout();
-
- t.stop();
+ t = new ToolTest("dumprestore_auth2", {auth: ""});
+
+ coll = t.startDB("foo");
+ admindb = coll.getDB().getSiblingDB("admin");
+
+ // Create the relevant users and roles.
+ admindb.createUser({user: "root", pwd: "pass", roles: ["root"]});
+ admindb.auth("root", "pass");
+
+ admindb.createUser({user: "backup", pwd: "pass", roles: [backup_role]});
+ admindb.createUser({user: "restore", pwd: "pass", roles: [restore_role]});
+
+ admindb.createRole({
+ role: "customRole",
+ privileges: [{
+ resource: {db: "jstests_tool_dumprestore_auth2", collection: "foo"},
+ actions: ["find"]
+ }],
+ roles: []
+ });
+ admindb.createUser({user: "test", pwd: "pass", roles: ["customRole"]});
+
+ coll.insert({word: "tomato"});
+ assert.eq(1, coll.count());
+
+ assert.eq(4, admindb.system.users.count(), "setup users");
+ assert.eq(2,
+ admindb.system.users.getIndexes().length,
+ "setup2: " + tojson(admindb.system.users.getIndexes()));
+ assert.eq(1, admindb.system.roles.count(), "setup3");
+ assert.eq(2, admindb.system.roles.getIndexes().length, "setup4");
+ assert.eq(1, admindb.system.version.count());
+ var versionDoc = admindb.system.version.findOne();
+
+ // Logout root user.
+ admindb.logout();
+
+ // Verify that the custom role works as expected.
+ admindb.auth("test", "pass");
+ assert.eq("tomato", coll.findOne().word);
+ admindb.logout();
+
+ // Dump the database.
+ t.runTool("dump", "--out", t.ext, "--username", "backup", "--password", "pass");
+
+ // Drop the relevant data in the database.
+ admindb.auth("root", "pass");
+ coll.getDB().dropDatabase();
+ admindb.dropUser("backup");
+ admindb.dropUser("test");
+ admindb.dropRole("customRole");
+
+ assert.eq(2, admindb.system.users.count(), "didn't drop backup and test users");
+ assert.eq(0, admindb.system.roles.count(), "didn't drop roles");
+ assert.eq(0, coll.count(), "didn't drop foo coll");
+
+ // This test depends on W=0 to mask unique index violations.
+ // This should be fixed once we implement TOOLS-341
+ t.runTool("restore",
+ "--dir",
+ t.ext,
+ "--username",
+ "restore",
+ "--password",
+ "pass",
+ "--writeConcern",
+ "0");
+
+ assert.soon("admindb.system.users.findOne()", "no data after restore");
+ assert.eq(4, admindb.system.users.count(), "didn't restore users");
+ assert.eq(2, admindb.system.users.getIndexes().length, "didn't restore user indexes");
+ assert.eq(1, admindb.system.roles.find({role: 'customRole'}).count(), "didn't restore roles");
+ assert.eq(2, admindb.system.roles.getIndexes().length, "didn't restore role indexes");
+
+ admindb.logout();
+
+ // Login as user with customRole to verify privileges are restored.
+ admindb.auth("test", "pass");
+ assert.eq("tomato", coll.findOne().word);
+ admindb.logout();
+
+ admindb.auth("root", "pass");
+ admindb.createUser({user: "root2", pwd: "pass", roles: ["root"]});
+ admindb.dropRole("customRole");
+ admindb.createRole({role: "customRole2", roles: [], privileges: []});
+ admindb.dropUser("root");
+ admindb.logout();
+
+ t.runTool("restore",
+ "--dir",
+ t.ext,
+ "--username",
+ "restore",
+ "--password",
+ "pass",
+ "--drop",
+ "--writeConcern",
+ "0");
+
+ admindb.auth("root", "pass");
+ assert.soon("1 == admindb.system.users.find({user:'root'}).count()", "didn't restore users 2");
+ assert.eq(0, admindb.system.users.find({user: 'root2'}).count(), "didn't drop users");
+ assert.eq(0, admindb.system.roles.find({role: 'customRole2'}).count(), "didn't drop roles");
+ assert.eq(1, admindb.system.roles.find({role: 'customRole'}).count(), "didn't restore roles");
+ assert.eq(2, admindb.system.users.getIndexes().length, "didn't maintain user indexes");
+ assert.eq(2, admindb.system.roles.getIndexes().length, "didn't maintain role indexes");
+ assert.eq(1, admindb.system.version.count(), "didn't restore version");
+ assert.docEq(
+ versionDoc, admindb.system.version.findOne(), "version doc wasn't restored properly");
+ admindb.logout();
+
+ t.stop();
};
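(The test above verifies the restored customRole indirectly, by logging in as
the "test" user and reading. As a hedged alternative sketch: the role's
privileges can also be inspected directly with the rolesInfo command, run
while authenticated as a sufficiently privileged user.)

// rolesInfo with showPrivileges returns the role document including its
// privilege list, so the restored "find" action can be checked explicitly.
var info = admindb.runCommand({rolesInfo: "customRole", showPrivileges: true});
assert.commandWorked(info);
assert.eq(1, info.roles.length, "customRole missing after restore");
assert.eq("find", info.roles[0].privileges[0].actions[0], "find privilege missing");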
diff --git a/jstests/tool/dumprestore_auth3.js b/jstests/tool/dumprestore_auth3.js
index 62eed2e7d84..6157020c2dd 100644
--- a/jstests/tool/dumprestore_auth3.js
+++ b/jstests/tool/dumprestore_auth3.js
@@ -4,7 +4,9 @@
// Runs the tool with the given name against the given mongod.
function runTool(toolName, mongod, options) {
- var opts = {host: mongod.host};
+ var opts = {
+ host: mongod.host
+ };
Object.extend(opts, options);
MongoRunner.runMongoTool(toolName, opts);
}
@@ -19,13 +21,15 @@ var dumpRestoreAuth3 = function(backup_role, restore_role) {
admindb.createUser({user: 'root', pwd: 'pass', roles: ['root']});
admindb.createUser({user: 'backup', pwd: 'pass', roles: ['backup']});
admindb.createUser({user: 'restore', pwd: 'pass', roles: ['restore']});
- admindb.createRole({role: "dummyRole", roles: [], privileges:[]});
+ admindb.createRole({role: "dummyRole", roles: [], privileges: []});
db.createUser({user: 'user', pwd: 'pass', roles: jsTest.basicUserRoles});
- db.createRole({role: 'role', roles: [], privileges:[]});
+ db.createRole({role: 'role', roles: [], privileges: []});
var backupActions = ['find'];
- db.createRole({role: 'backupFooChester',
- privileges: [{resource: {db: 'foo', collection: 'chester'}, actions: backupActions}],
- roles: []});
+ db.createRole({
+ role: 'backupFooChester',
+ privileges: [{resource: {db: 'foo', collection: 'chester'}, actions: backupActions}],
+ roles: []
+ });
db.createUser({user: 'backupFooChester', pwd: 'pass', roles: ['backupFooChester']});
var userCount = db.getUsers().length;
@@ -35,7 +39,7 @@ var dumpRestoreAuth3 = function(backup_role, restore_role) {
var systemUsersCount = admindb.system.users.count();
var systemVersionCount = admindb.system.version.count();
- db.bar.insert({a:1});
+ db.bar.insert({a: 1});
assert.eq(1, db.bar.findOne().a);
assert.eq(userCount, db.getUsers().length, "setup");
@@ -43,7 +47,7 @@ var dumpRestoreAuth3 = function(backup_role, restore_role) {
assert.eq(adminUsersCount, admindb.getUsers().length, "setup3");
assert.eq(adminRolesCount, admindb.getRoles().length, "setup4");
assert.eq(systemUsersCount, admindb.system.users.count(), "setup5");
- assert.eq(systemVersionCount, admindb.system.version.count(),"system version");
+ assert.eq(systemVersionCount, admindb.system.version.count(), "system version");
assert.eq(1, admindb.system.users.count({user: "restore"}), "Restore user is missing");
assert.eq(1, admindb.system.users.count({user: "backup"}), "Backup user is missing");
var versionDoc = admindb.system.version.findOne();
@@ -60,21 +64,22 @@ var dumpRestoreAuth3 = function(backup_role, restore_role) {
jsTestLog("Restore foo database from dump that doesn't contain user data ");
// This test depends on W=0 to mask unique index violations.
// This should be fixed once we implement TOOLS-341
- runTool("mongorestore",
- mongod,
- {dir: dumpDir + "foo/", db: 'foo', restoreDbUsersAndRoles: "", writeConcern: "0"}
- );
+ runTool("mongorestore",
+ mongod,
+ {dir: dumpDir + "foo/", db: 'foo', restoreDbUsersAndRoles: "", writeConcern: "0"});
db = mongod.getDB('foo');
- assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+ assert.soon(function() {
+ return db.bar.findOne();
+ }, "no data after restore");
assert.eq(1, db.bar.findOne().a);
assert.eq(0, db.getUsers().length, "Restore created users somehow");
assert.eq(0, db.getRoles().length, "Restore created roles somehow");
// Re-create user data
db.createUser({user: 'user', pwd: 'password', roles: jsTest.basicUserRoles});
- db.createRole({role: 'role', roles: [], privileges:[]});
+ db.createRole({role: 'role', roles: [], privileges: []});
userCount = 1;
rolesCount = 1;
@@ -98,25 +103,28 @@ var dumpRestoreAuth3 = function(backup_role, restore_role) {
runTool("mongorestore", mongod, {dir: dumpDir + "foo/", db: 'foo', writeConcern: "0"});
db = mongod.getDB('foo');
- assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+ assert.soon(function() {
+ return db.bar.findOne();
+ }, "no data after restore");
assert.eq(1, db.bar.findOne().a);
assert.eq(0, db.getUsers().length, "Restored users even though it shouldn't have");
assert.eq(0, db.getRoles().length, "Restored roles even though it shouldn't have");
jsTestLog("Restore foo database *with* user data");
- runTool("mongorestore",
- mongod,
- {dir: dumpDir + "foo/", db: 'foo', restoreDbUsersAndRoles: "", writeConcern: "0"}
- );
+ runTool("mongorestore",
+ mongod,
+ {dir: dumpDir + "foo/", db: 'foo', restoreDbUsersAndRoles: "", writeConcern: "0"});
db = mongod.getDB('foo');
admindb = mongod.getDB('admin');
- assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+ assert.soon(function() {
+ return db.bar.findOne();
+ }, "no data after restore");
assert.eq(1, db.bar.findOne().a);
assert.eq(userCount, db.getUsers().length, "didn't restore users");
assert.eq(rolesCount, db.getRoles().length, "didn't restore roles");
- assert.eq(1, admindb.system.users.count({user: "restore", db: "admin"}),
- "Restore user is missing");
+ assert.eq(
+ 1, admindb.system.users.count({user: "restore", db: "admin"}), "Restore user is missing");
assert.docEq(versionDoc,
db.getSiblingDB('admin').system.version.findOne(),
"version doc was changed by restore");
@@ -125,18 +133,25 @@ var dumpRestoreAuth3 = function(backup_role, restore_role) {
db.dropUser('user');
db.createUser({user: 'user2', pwd: 'password2', roles: jsTest.basicUserRoles});
db.dropRole('role');
- db.createRole({role: 'role2', roles: [], privileges:[]});
+ db.createRole({role: 'role2', roles: [], privileges: []});
jsTestLog("Restore foo database (and user data) with --drop so it overrides the changes made");
// Restore with --drop to override the changes to user data
- runTool("mongorestore",
- mongod,
- {dir: dumpDir + "foo/", db: 'foo', drop: "", restoreDbUsersAndRoles: "", writeConcern: "0"}
- );
+ runTool("mongorestore",
+ mongod,
+ {
+ dir: dumpDir + "foo/",
+ db: 'foo',
+ drop: "",
+ restoreDbUsersAndRoles: "",
+ writeConcern: "0"
+ });
db = mongod.getDB('foo');
admindb = mongod.getDB('admin');
- assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+ assert.soon(function() {
+ return db.bar.findOne();
+ }, "no data after restore");
assert.eq(adminUsersCount, admindb.getUsers().length, "Admin users were dropped");
assert.eq(adminRolesCount, admindb.getRoles().length, "Admin roles were dropped");
assert.eq(1, db.bar.findOne().a);
@@ -148,7 +163,6 @@ var dumpRestoreAuth3 = function(backup_role, restore_role) {
db.getSiblingDB('admin').system.version.findOne(),
"version doc was changed by restore");
-
jsTestLog("Dump just the admin database. User data should be dumped by default");
// Make a user in another database to make sure it is properly captured
db.getSiblingDB('bar').createUser({user: "user", pwd: 'pwd', roles: []});
@@ -163,15 +177,16 @@ var dumpRestoreAuth3 = function(backup_role, restore_role) {
db.getSiblingDB('admin').dropAllUsers();
jsTestLog("Restore just the admin database. User data should be restored by default");
- runTool("mongorestore",
- mongod,
- {dir: dumpDir + "admin/", db: 'admin', drop: "", writeConcern: "0"}
- );
+ runTool("mongorestore",
+ mongod,
+ {dir: dumpDir + "admin/", db: 'admin', drop: "", writeConcern: "0"});
db = mongod.getDB('foo');
var otherdb = db.getSiblingDB('bar');
var admindb = db.getSiblingDB('admin');
- assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+ assert.soon(function() {
+ return db.bar.findOne();
+ }, "no data after restore");
assert.eq(1, db.bar.findOne().a);
assert.eq(userCount, db.getUsers().length, "didn't restore users");
assert.eq("user", db.getUser('user').user, "didn't restore user");
@@ -179,8 +194,8 @@ var dumpRestoreAuth3 = function(backup_role, restore_role) {
assert.eq("role", db.getRole('role').role, "didn't restore role");
assert.eq(1, otherdb.getUsers().length, "didn't restore users for bar database");
assert.eq("user", otherdb.getUsers()[0].user, "didn't restore user for bar database");
- assert.eq(adminUsersCount, admindb.getUsers().length,
- "didn't restore users for admin database");
+ assert.eq(
+ adminUsersCount, admindb.getUsers().length, "didn't restore users for admin database");
assert.eq("user", admindb.getUser("user").user, "didn't restore user for admin database");
assert.eq(6, admindb.system.users.count(), "has the wrong # of users for the whole server");
assert.eq(2, admindb.system.roles.count(), "has the wrong # of roles for the whole server");
@@ -204,7 +219,9 @@ var dumpRestoreAuth3 = function(backup_role, restore_role) {
runTool("mongorestore", mongod, {dir: dumpDir, writeConcern: "0"});
db = mongod.getDB('foo');
- assert.soon(function() { return db.bar.findOne(); }, "no data after restore");
+ assert.soon(function() {
+ return db.bar.findOne();
+ }, "no data after restore");
assert.eq(1, db.bar.findOne().a);
assert.eq(1, db.getUsers().length, "didn't restore users");
assert.eq(1, db.getRoles().length, "didn't restore roles");
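(For reference, the option objects passed to runTool() above map onto
mongorestore flags; valueless entries such as restoreDbUsersAndRoles: ""
become bare flags. A rough sketch of an equivalent direct invocation — the
dump path is illustrative, and --restoreDbUsersAndRoles is only valid
together with --db.)

// Same restore as runTool("mongorestore", mongod, {...}), spelled out as a
// plain command line with unacknowledged writes.
runMongoProgram("mongorestore",
                "--host", mongod.host,
                "--db", "foo",
                "--restoreDbUsersAndRoles",
                "--writeConcern", "0",
                "--dir", dumpDir + "foo/");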
diff --git a/jstests/tool/dumprestore_excludecollections.js b/jstests/tool/dumprestore_excludecollections.js
index ac2059838a8..4563b8ffc03 100644
--- a/jstests/tool/dumprestore_excludecollections.js
+++ b/jstests/tool/dumprestore_excludecollections.js
@@ -1,7 +1,5 @@
// Tests for mongodump options for excluding collections
-
-
var testBaseName = "jstests_tool_dumprestore_excludecollections";
var dumpDir = MongoRunner.dataPath + testBaseName + "_dump_external/";
@@ -12,51 +10,58 @@ var mongodDest = MongoRunner.runMongod();
var destDB = mongodDest.getDB(testBaseName);
jsTest.log("Inserting documents into source mongod");
-sourceDB.test.insert({x:1});
-sourceDB.test2.insert({x:2});
-sourceDB.test3.insert({x:3});
-sourceDB.foo.insert({f:1});
-sourceDB.foo2.insert({f:2});
+sourceDB.test.insert({x: 1});
+sourceDB.test2.insert({x: 2});
+sourceDB.test3.insert({x: 3});
+sourceDB.foo.insert({f: 1});
+sourceDB.foo2.insert({f: 2});
jsTest.log("Testing incompabible option combinations");
resetDbpath(dumpDir);
-ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
- excludeCollection : "test",
- host : mongodSource.host });
+ret = MongoRunner.runMongoTool("mongodump",
+ {out: dumpDir, excludeCollection: "test", host: mongodSource.host});
assert.neq(ret, 0, "mongodump started successfully with --excludeCollection but no --db option");
resetDbpath(dumpDir);
-ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
- db : testBaseName,
- collection : "foo",
- excludeCollection : "test",
- host : mongodSource.host });
+ret = MongoRunner.runMongoTool("mongodump",
+ {
+ out: dumpDir,
+ db: testBaseName,
+ collection: "foo",
+ excludeCollection: "test",
+ host: mongodSource.host
+ });
assert.neq(ret, 0, "mongodump started successfully with --excludeCollection and --collection");
resetDbpath(dumpDir);
-ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
- excludeCollectionsWithPrefix : "test",
- host : mongodSource.host });
-assert.neq(ret, 0, "mongodump started successfully with --excludeCollectionsWithPrefix but " +
- "no --db option");
+ret = MongoRunner.runMongoTool(
+ "mongodump", {out: dumpDir, excludeCollectionsWithPrefix: "test", host: mongodSource.host});
+assert.neq(ret,
+ 0,
+ "mongodump started successfully with --excludeCollectionsWithPrefix but " +
+ "no --db option");
resetDbpath(dumpDir);
-ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
- db : testBaseName,
- collection : "foo",
- excludeCollectionsWithPrefix : "test",
- host : mongodSource.host });
-assert.neq(ret, 0, "mongodump started successfully with --excludeCollectionsWithPrefix and " +
- "--collection");
+ret = MongoRunner.runMongoTool("mongodump",
+ {
+ out: dumpDir,
+ db: testBaseName,
+ collection: "foo",
+ excludeCollectionsWithPrefix: "test",
+ host: mongodSource.host
+ });
+assert.neq(ret,
+ 0,
+ "mongodump started successfully with --excludeCollectionsWithPrefix and " +
+ "--collection");
jsTest.log("Testing proper behavior of collection exclusion");
resetDbpath(dumpDir);
-ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
- db : testBaseName,
- excludeCollection : "test",
- host : mongodSource.host });
+ret = MongoRunner.runMongoTool(
+ "mongodump",
+ {out: dumpDir, db: testBaseName, excludeCollection: "test", host: mongodSource.host});
-ret = MongoRunner.runMongoTool("mongorestore", { dir : dumpDir, host : mongodDest.host });
+ret = MongoRunner.runMongoTool("mongorestore", {dir: dumpDir, host: mongodDest.host});
assert.eq(ret, 0, "failed to run mongodump on expected successful call");
assert.eq(destDB.test.count(), 0, "Found documents in collection that we excluded");
assert.eq(destDB.test2.count(), 1, "Did not find document in collection that we did not exclude");
@@ -70,12 +75,15 @@ assert.eq(destDB.foo2.findOne().f, 2, "Wrong value in document");
destDB.dropDatabase();
resetDbpath(dumpDir);
-ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
- db : testBaseName,
- excludeCollectionsWithPrefix : "test",
- host : mongodSource.host });
-
-ret = MongoRunner.runMongoTool("mongorestore", { dir : dumpDir, host : mongodDest.host });
+ret = MongoRunner.runMongoTool("mongodump",
+ {
+ out: dumpDir,
+ db: testBaseName,
+ excludeCollectionsWithPrefix: "test",
+ host: mongodSource.host
+ });
+
+ret = MongoRunner.runMongoTool("mongorestore", {dir: dumpDir, host: mongodDest.host});
assert.eq(ret, 0, "failed to run mongodump on expected successful call");
assert.eq(destDB.test.count(), 0, "Found documents in collection that we excluded");
assert.eq(destDB.test2.count(), 0, "Found documents in collection that we excluded");
@@ -87,13 +95,16 @@ assert.eq(destDB.foo2.findOne().f, 2, "Wrong value in document");
destDB.dropDatabase();
resetDbpath(dumpDir);
-ret = MongoRunner.runMongoTool("mongodump", { out : dumpDir,
- db : testBaseName,
- excludeCollection : "foo",
- excludeCollectionsWithPrefix : "test",
- host : mongodSource.host });
-
-ret = MongoRunner.runMongoTool("mongorestore", { dir : dumpDir, host : mongodDest.host });
+ret = MongoRunner.runMongoTool("mongodump",
+ {
+ out: dumpDir,
+ db: testBaseName,
+ excludeCollection: "foo",
+ excludeCollectionsWithPrefix: "test",
+ host: mongodSource.host
+ });
+
+ret = MongoRunner.runMongoTool("mongorestore", {dir: dumpDir, host: mongodDest.host});
assert.eq(ret, 0, "failed to run mongodump on expected successful call");
assert.eq(destDB.test.count(), 0, "Found documents in collection that we excluded");
assert.eq(destDB.test2.count(), 0, "Found documents in collection that we excluded");
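(As the assertions imply, --excludeCollectionsWithPrefix is a plain name-prefix
match: prefix "test" skips test, test2, and test3. A hedged sketch of the end
state the combined exclusions above should produce, using the same destDB
handle; the expected counts follow from the five inserts at the top of the
test.)

// With prefix "test" plus --excludeCollection foo, only foo2 should survive
// the dump/restore round trip.
assert.eq(0, destDB.test3.count(), "test3 should have been excluded by prefix");
assert.eq(0, destDB.foo.count(), "foo should have been excluded by name");
assert.eq(1, destDB.foo2.count(), "foo2 should have been dumped and restored");
assert.eq(2, destDB.foo2.findOne().f, "foo2 contents should round-trip intact");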
diff --git a/jstests/tool/dumpsecondary.js b/jstests/tool/dumpsecondary.js
index 31feacba674..9abe8d7476e 100644
--- a/jstests/tool/dumpsecondary.js
+++ b/jstests/tool/dumpsecondary.js
@@ -1,4 +1,4 @@
-var replTest = new ReplSetTest( {name: 'testSet', nodes: 2} );
+var replTest = new ReplSetTest({name: 'testSet', nodes: 2});
var nodes = replTest.startSet();
replTest.initiate();
@@ -9,29 +9,41 @@ db.foo.save({a: 1000});
replTest.awaitReplication();
replTest.awaitSecondaryNodes();
-assert.eq( 1 , db.foo.count() , "setup" );
+assert.eq(1, db.foo.count(), "setup");
var slaves = replTest.liveNodes.slaves;
-assert( slaves.length == 1, "Expected 1 slave but length was " + slaves.length );
+assert(slaves.length == 1, "Expected 1 slave but length was " + slaves.length);
slave = slaves[0];
-var args = ['mongodump', '-h', slave.host, '--out', MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external/'];
-var authargs = ['--username', jsTest.options().authUser, '--password', jsTest.options().authPassword];
+var args = [
+ 'mongodump',
+ '-h',
+ slave.host,
+ '--out',
+ MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external/'
+];
+var authargs =
+ ['--username', jsTest.options().authUser, '--password', jsTest.options().authPassword];
if (jsTest.options().keyFile) {
args = args.concat(authargs);
}
runMongoProgram.apply(null, args);
db.foo.drop();
-assert.eq( 0 , db.foo.count() , "after drop" );
-args = ['mongorestore', '-h', master.host, MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external/'];
+assert.eq(0, db.foo.count(), "after drop");
+args = [
+ 'mongorestore',
+ '-h',
+ master.host,
+ MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external/'
+];
if (jsTest.options().keyFile) {
args = args.concat(authargs);
}
runMongoProgram.apply(null, args);
-assert.soon( "db.foo.findOne()" , "no data after sleep" );
-assert.eq( 1 , db.foo.count() , "after restore" );
-assert.eq( 1000 , db.foo.findOne().a , "after restore 2" );
+assert.soon("db.foo.findOne()", "no data after sleep");
+assert.eq(1, db.foo.count(), "after restore");
+assert.eq(1000, db.foo.findOne().a, "after restore 2");
resetDbpath(MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external');
diff --git a/jstests/tool/exportimport1.js b/jstests/tool/exportimport1.js
index 61379379fa4..69124b1f6b2 100644
--- a/jstests/tool/exportimport1.js
+++ b/jstests/tool/exportimport1.js
@@ -1,56 +1,55 @@
// exportimport1.js
-t = new ToolTest( "exportimport1" );
+t = new ToolTest("exportimport1");
-c = t.startDB( "foo" );
-assert.eq( 0 , c.count() , "setup1" );
+c = t.startDB("foo");
+assert.eq(0, c.count(), "setup1");
var arr = ["x", undefined, "y", undefined];
-c.save( { a : 22 , b : arr} );
-assert.eq( 1 , c.count() , "setup2" );
+c.save({a: 22, b: arr});
+assert.eq(1, c.count(), "setup2");
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+t.runTool("export", "--out", t.extFile, "-d", t.baseName, "-c", "foo");
c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
+t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo");
+assert.soon("c.findOne()", "no data after sleep");
+assert.eq(1, c.count(), "after restore 2");
var doc = c.findOne();
-assert.eq( 22 , doc.a , "after restore 2" );
-for (var i=0; i<arr.length; i++) {
- assert.eq( arr[i], doc.b[i] , "after restore array: "+i );
+assert.eq(22, doc.a, "after restore 2");
+for (var i = 0; i < arr.length; i++) {
+ assert.eq(arr[i], doc.b[i], "after restore array: " + i);
}
// now with --jsonArray
-t.runTool( "export" , "--jsonArray" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+t.runTool("export", "--jsonArray", "--out", t.extFile, "-d", t.baseName, "-c", "foo");
c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
-t.runTool( "import" , "--jsonArray" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
-assert.eq( 22 , c.findOne().a , "after restore 2" );
+t.runTool("import", "--jsonArray", "--file", t.extFile, "-d", t.baseName, "-c", "foo");
+assert.soon("c.findOne()", "no data after sleep");
+assert.eq(1, c.count(), "after restore 2");
+assert.eq(22, c.findOne().a, "after restore 2");
c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
arr = ["a", undefined, "c"];
-c.save({a : arr});
-assert.eq( 1 , c.count() , "setup2" );
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+c.save({a: arr});
+assert.eq(1, c.count(), "setup2");
+t.runTool("export", "--out", t.extFile, "-d", t.baseName, "-c", "foo");
c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
+t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo");
+assert.soon("c.findOne()", "no data after sleep");
+assert.eq(1, c.count(), "after restore 2");
var doc = c.findOne();
-for (var i=0; i<arr.length; i++) {
- assert.eq( arr[i], doc.a[i] , "after restore array: "+i );
+for (var i = 0; i < arr.length; i++) {
+ assert.eq(arr[i], doc.a[i], "after restore array: " + i);
}
-
t.stop();
diff --git a/jstests/tool/exportimport3.js b/jstests/tool/exportimport3.js
index 686ff467a6f..481db797964 100644
--- a/jstests/tool/exportimport3.js
+++ b/jstests/tool/exportimport3.js
@@ -1,27 +1,25 @@
// exportimport3.js
-t = new ToolTest( "exportimport3" );
+t = new ToolTest("exportimport3");
-c = t.startDB( "foo" );
-assert.eq( 0 , c.count() , "setup1" );
-c.save({a:1});
-c.save({a:2});
-c.save({a:3});
-c.save({a:4});
-c.save({a:5});
+c = t.startDB("foo");
+assert.eq(0, c.count(), "setup1");
+c.save({a: 1});
+c.save({a: 2});
+c.save({a: 3});
+c.save({a: 4});
+c.save({a: 5});
-assert.eq( 5 , c.count() , "setup2" );
+assert.eq(5, c.count(), "setup2");
-
-t.runTool( "export" , "--jsonArray" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+t.runTool("export", "--jsonArray", "--out", t.extFile, "-d", t.baseName, "-c", "foo");
c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
-
-t.runTool( "import" , "--jsonArray" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 5 , c.count() , "after restore 2" );
+t.runTool("import", "--jsonArray", "--file", t.extFile, "-d", t.baseName, "-c", "foo");
+assert.soon("c.findOne()", "no data after sleep");
+assert.eq(5, c.count(), "after restore 2");
t.stop();
diff --git a/jstests/tool/exportimport4.js b/jstests/tool/exportimport4.js
index 605e21b7337..9c6f6d70b0a 100644
--- a/jstests/tool/exportimport4.js
+++ b/jstests/tool/exportimport4.js
@@ -1,56 +1,57 @@
// exportimport4.js
-t = new ToolTest( "exportimport4" );
-c = t.startDB( "foo" );
+t = new ToolTest("exportimport4");
+c = t.startDB("foo");
install_test_data = function() {
c.drop();
- assert.eq( 0 , c.count() , "setup1" );
+ assert.eq(0, c.count(), "setup1");
- c.save( { a : [1, 2, 3, NaN, 4, null, 5] } );
- c.save( { a : [1, 2, 3, 4, 5] } );
- c.save( { a : [ NaN ] } );
- c.save( { a : [1, 2, 3, 4, NaN, NaN, 5, NaN] } );
- c.save( { a : [1, 2, 3, 4, null, null, 5, null] } );
+ c.save({a: [1, 2, 3, NaN, 4, null, 5]});
+ c.save({a: [1, 2, 3, 4, 5]});
+ c.save({a: [NaN]});
+ c.save({a: [1, 2, 3, 4, NaN, NaN, 5, NaN]});
+ c.save({a: [1, 2, 3, 4, null, null, 5, null]});
- assert.eq( 5 , c.count() , "setup2" );
+ assert.eq(5, c.count(), "setup2");
};
// attempt to export fields without NaN
install_test_data();
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[NaN]}}" );
+t.runTool(
+ "export", "--out", t.extFile, "-d", t.baseName, "-c", "foo", "-q", "{a:{\"$nin\":[NaN]}}");
c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--drop");
-assert.eq( 2 , c.count() , "after restore 1" );
+assert.eq(2, c.count(), "after restore 1");
// attempt to export fields with NaN
install_test_data();
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:NaN}" );
+t.runTool("export", "--out", t.extFile, "-d", t.baseName, "-c", "foo", "-q", "{a:NaN}");
c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--drop");
-assert.eq( 3 , c.count() , "after restore 2" );
+assert.eq(3, c.count(), "after restore 2");
// attempt to export everything
install_test_data();
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+t.runTool("export", "--out", t.extFile, "-d", t.baseName, "-c", "foo");
c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--drop");
-assert.eq( 5 , c.count() , "after restore 3" );
+assert.eq(5, c.count(), "after restore 3");
t.stop();
diff --git a/jstests/tool/exportimport5.js b/jstests/tool/exportimport5.js
index 427b03f0232..380e9391118 100644
--- a/jstests/tool/exportimport5.js
+++ b/jstests/tool/exportimport5.js
@@ -1,81 +1,90 @@
// exportimport5.js
-t = new ToolTest( "exportimport5" );
-c = t.startDB( "foo" );
+t = new ToolTest("exportimport5");
+c = t.startDB("foo");
install_test_data = function() {
c.drop();
- assert.eq( 0 , c.count() , "setup1" );
+ assert.eq(0, c.count(), "setup1");
- c.save( { a : [1, 2, 3, Infinity, 4, null, 5] } );
- c.save( { a : [1, 2, 3, 4, 5] } );
- c.save( { a : [ Infinity ] } );
- c.save( { a : [1, 2, 3, 4, Infinity, Infinity, 5, -Infinity] } );
- c.save( { a : [1, 2, 3, 4, null, null, 5, null] } );
- c.save( { a : [ -Infinity ] } );
+ c.save({a: [1, 2, 3, Infinity, 4, null, 5]});
+ c.save({a: [1, 2, 3, 4, 5]});
+ c.save({a: [Infinity]});
+ c.save({a: [1, 2, 3, 4, Infinity, Infinity, 5, -Infinity]});
+ c.save({a: [1, 2, 3, 4, null, null, 5, null]});
+ c.save({a: [-Infinity]});
- assert.eq( 6 , c.count() , "setup2" );
+ assert.eq(6, c.count(), "setup2");
};
// attempt to export fields without Infinity
install_test_data();
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[Infinity]}}" );
+t.runTool(
+ "export", "--out", t.extFile, "-d", t.baseName, "-c", "foo", "-q", "{a:{\"$nin\":[Infinity]}}");
c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--drop");
-assert.eq( 3 , c.count() , "after restore 1" );
+assert.eq(3, c.count(), "after restore 1");
// attempt to export fields with Infinity
install_test_data();
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:Infinity}" );
+t.runTool("export", "--out", t.extFile, "-d", t.baseName, "-c", "foo", "-q", "{a:Infinity}");
c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--drop");
-assert.eq( 3 , c.count() , "after restore 2" );
+assert.eq(3, c.count(), "after restore 2");
// attempt to export fields without -Infinity
install_test_data();
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:{\"$nin\":[-Infinity]}}" );
+t.runTool("export",
+ "--out",
+ t.extFile,
+ "-d",
+ t.baseName,
+ "-c",
+ "foo",
+ "-q",
+ "{a:{\"$nin\":[-Infinity]}}");
c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--drop");
-assert.eq( 4 , c.count() , "after restore 3" );
+assert.eq(4, c.count(), "after restore 3");
// attempt to export fields with -Infinity
install_test_data();
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo", "-q", "{a:-Infinity}" );
+t.runTool("export", "--out", t.extFile, "-d", t.baseName, "-c", "foo", "-q", "{a:-Infinity}");
c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--drop");
-assert.eq( 2 , c.count() , "after restore 4" );
+assert.eq(2, c.count(), "after restore 4");
// attempt to export everything
install_test_data();
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+t.runTool("export", "--out", t.extFile, "-d", t.baseName, "-c", "foo");
c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );
+assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo", "--drop" );
+t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--drop");
-assert.eq( 6 , c.count() , "after restore 5" );
+assert.eq(6, c.count(), "after restore 5");
t.stop();
diff --git a/jstests/tool/exportimport6.js b/jstests/tool/exportimport6.js
index 0924638e628..71d89baf01c 100644
--- a/jstests/tool/exportimport6.js
+++ b/jstests/tool/exportimport6.js
@@ -5,17 +5,28 @@ t = new ToolTest("exportimport6");
c = t.startDB("foo");
assert.eq(0, c.count(), "setup1");
-c.save({a:1, b:1});
-c.save({a:1, b:2});
-c.save({a:2, b:3});
-c.save({a:2, b:3});
-c.save({a:3, b:4});
-c.save({a:3, b:5});
+c.save({a: 1, b: 1});
+c.save({a: 1, b: 2});
+c.save({a: 2, b: 3});
+c.save({a: 2, b: 3});
+c.save({a: 3, b: 4});
+c.save({a: 3, b: 5});
assert.eq(6, c.count(), "setup2");
-t.runTool("export", "--out", t.extFile, "-d", t.baseName, "-c", "foo",
- "--sort", "{a:1, b:-1}", "--skip", "4", "--limit", "1");
+t.runTool("export",
+ "--out",
+ t.extFile,
+ "-d",
+ t.baseName,
+ "-c",
+ "foo",
+ "--sort",
+ "{a:1, b:-1}",
+ "--skip",
+ "4",
+ "--limit",
+ "1");
c.drop();
assert.eq(0, c.count(), "after drop", "-d", t.baseName, "-c", "foo");
diff --git a/jstests/tool/exportimport_bigarray.js b/jstests/tool/exportimport_bigarray.js
index 75d508b1ff4..0b801699d1b 100644
--- a/jstests/tool/exportimport_bigarray.js
+++ b/jstests/tool/exportimport_bigarray.js
@@ -11,19 +11,22 @@ dst.drop();
// Calculate the number of documents it takes to get above 16MB (here using 20MB just to be safe)
var bigString = new Array(1025).toString();
-var doc = {_id: new ObjectId(), x:bigString};
+var doc = {
+ _id: new ObjectId(),
+ x: bigString
+};
var docSize = Object.bsonsize(doc);
-var numDocs = Math.floor(20*1024*1024 / docSize);
+var numDocs = Math.floor(20 * 1024 * 1024 / docSize);
print('Size of one document: ' + docSize);
print('Number of documents to exceed maximum BSON size: ' + numDocs);
-print('About to insert ' + numDocs + ' documents into ' +
- exportimport_db.getName() + '.' + src.getName());
+print('About to insert ' + numDocs + ' documents into ' + exportimport_db.getName() + '.' +
+ src.getName());
var i;
var bulk = src.initializeUnorderedBulkOp();
for (i = 0; i < numDocs; ++i) {
- bulk.insert({ x: bigString });
+ bulk.insert({x: bigString});
}
assert.writeOK(bulk.execute());
@@ -31,27 +34,29 @@ data = 'data/exportimport_array_test.json';
print('About to call mongoexport on: ' + exportimport_db.getName() + '.' + src.getName() +
' with file: ' + data);
-tt.runTool('export', '--out' , data, '-d', exportimport_db.getName(), '-c', src.getName(),
- '--jsonArray');
+tt.runTool(
+ 'export', '--out', data, '-d', exportimport_db.getName(), '-c', src.getName(), '--jsonArray');
print('About to call mongoimport on: ' + exportimport_db.getName() + '.' + dst.getName() +
' with file: ' + data);
-tt.runTool('import', '--file', data, '-d', exportimport_db.getName(), '-c', dst.getName(),
- '--jsonArray');
+tt.runTool(
+ 'import', '--file', data, '-d', exportimport_db.getName(), '-c', dst.getName(), '--jsonArray');
print('About to verify that source and destination collections match');
-src_cursor = src.find().sort({ _id : 1 });
-dst_cursor = dst.find().sort({ _id : 1 });
+src_cursor = src.find().sort({_id: 1});
+dst_cursor = dst.find().sort({_id: 1});
var documentCount = 0;
while (src_cursor.hasNext()) {
- assert(dst_cursor.hasNext(), 'Source has more documents than destination. ' +
- 'Destination has ' + documentCount + ' documents.');
+ assert(dst_cursor.hasNext(),
+ 'Source has more documents than destination. ' +
+ 'Destination has ' + documentCount + ' documents.');
assert.eq(src_cursor.next(), dst_cursor.next(), 'Mismatch on document ' + documentCount);
++documentCount;
}
-assert(!dst_cursor.hasNext(), 'Destination has more documents than source. ' +
- 'Source has ' + documentCount + ' documents.');
+assert(!dst_cursor.hasNext(),
+ 'Destination has more documents than source. ' +
+ 'Source has ' + documentCount + ' documents.');
print('Verified that source and destination collections match');
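(The 20 MB target in this test is deliberately above the 16 MB BSON maximum,
so importing the whole array as one document would fail while --jsonArray
streams the documents individually. A quick sanity check of the arithmetic,
reusing the same document shape as the test.)

// ~1 KB of string per document plus BSON overhead; 20 MB of such documents
// is comfortably past the 16 MB single-document limit.
var probe = {_id: new ObjectId(), x: new Array(1025).toString()};
var probeSize = Object.bsonsize(probe);
var n = Math.floor(20 * 1024 * 1024 / probeSize);
assert.gt(n * probeSize, 16 * 1024 * 1024, "target size must exceed BSON max");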
diff --git a/jstests/tool/exportimport_date.js b/jstests/tool/exportimport_date.js
index 57a860ca1a8..ab51e0a2458 100644
--- a/jstests/tool/exportimport_date.js
+++ b/jstests/tool/exportimport_date.js
@@ -11,12 +11,12 @@ dst.drop();
// Insert a date that we can format
var formatable = ISODate("1970-01-02T05:00:00Z");
assert.eq(formatable.valueOf(), 104400000);
-src.insert({ "_id" : formatable });
+src.insert({"_id": formatable});
// Insert a date that we cannot format as an ISODate string
var nonformatable = ISODate("3001-01-01T00:00:00Z");
assert.eq(nonformatable.valueOf(), 32535216000000);
-src.insert({ "_id" : nonformatable });
+src.insert({"_id": nonformatable});
// Verify number of documents inserted
assert.eq(2, src.find().itcount());
@@ -25,7 +25,7 @@ data = 'data/exportimport_date_test.json';
print('About to call mongoexport on: ' + exportimport_db.getName() + '.' + src.getName() +
' with file: ' + data);
-tt.runTool('export', '--out' , data, '-d', exportimport_db.getName(), '-c', src.getName());
+tt.runTool('export', '--out', data, '-d', exportimport_db.getName(), '-c', src.getName());
print('About to call mongoimport on: ' + exportimport_db.getName() + '.' + dst.getName() +
' with file: ' + data);
@@ -33,17 +33,19 @@ tt.runTool('import', '--file', data, '-d', exportimport_db.getName(), '-c', dst.
print('About to verify that source and destination collections match');
-src_cursor = src.find().sort({ _id : 1 });
-dst_cursor = dst.find().sort({ _id : 1 });
+src_cursor = src.find().sort({_id: 1});
+dst_cursor = dst.find().sort({_id: 1});
var documentCount = 0;
while (src_cursor.hasNext()) {
- assert(dst_cursor.hasNext(), 'Source has more documents than destination. ' +
- 'Destination has ' + documentCount + ' documents.');
+ assert(dst_cursor.hasNext(),
+ 'Source has more documents than destination. ' +
+ 'Destination has ' + documentCount + ' documents.');
assert.eq(src_cursor.next(), dst_cursor.next(), 'Mismatch on document ' + documentCount);
++documentCount;
}
-assert(!dst_cursor.hasNext(), 'Destination has more documents than source. ' +
- 'Source has ' + documentCount + ' documents.');
+assert(!dst_cursor.hasNext(),
+ 'Destination has more documents than source. ' +
+ 'Source has ' + documentCount + ' documents.');
print('Verified that source and destination collections match');
diff --git a/jstests/tool/exportimport_minkey_maxkey.js b/jstests/tool/exportimport_minkey_maxkey.js
index 3e91b04e0c1..c6e1d5b7ea2 100644
--- a/jstests/tool/exportimport_minkey_maxkey.js
+++ b/jstests/tool/exportimport_minkey_maxkey.js
@@ -8,12 +8,12 @@ var dst = exportimport_db.dst;
src.drop();
dst.drop();
-src.insert({ "_id" : MaxKey });
-src.insert({ "_id" : MinKey });
+src.insert({"_id": MaxKey});
+src.insert({"_id": MinKey});
print('About to call mongoexport on: ' + exportimport_db.getName() + '.' + src.getName() +
' with file: ' + tt.extFile);
-tt.runTool('export', '--out' , tt.extFile, '-d', exportimport_db.getName(), '-c', src.getName());
+tt.runTool('export', '--out', tt.extFile, '-d', exportimport_db.getName(), '-c', src.getName());
print('About to call mongoimport on: ' + exportimport_db.getName() + '.' + dst.getName() +
' with file: ' + tt.extFile);
@@ -21,17 +21,19 @@ tt.runTool('import', '--file', tt.extFile, '-d', exportimport_db.getName(), '-c'
print('About to verify that source and destination collections match');
-src_cursor = src.find().sort({ _id : 1 });
-dst_cursor = dst.find().sort({ _id : 1 });
+src_cursor = src.find().sort({_id: 1});
+dst_cursor = dst.find().sort({_id: 1});
var documentCount = 0;
while (src_cursor.hasNext()) {
- assert(dst_cursor.hasNext(), 'Source has more documents than destination. ' +
- 'Destination has ' + documentCount + ' documents.');
+ assert(dst_cursor.hasNext(),
+ 'Source has more documents than destination. ' +
+ 'Destination has ' + documentCount + ' documents.');
assert.eq(src_cursor.next(), dst_cursor.next(), 'Mismatch on document ' + documentCount);
++documentCount;
}
-assert(!dst_cursor.hasNext(), 'Destination has more documents than source. ' +
- 'Source has ' + documentCount + ' documents.');
+assert(!dst_cursor.hasNext(),
+ 'Destination has more documents than source. ' +
+ 'Source has ' + documentCount + ' documents.');
print('Verified that source and destination collections match');
diff --git a/jstests/tool/files1.js b/jstests/tool/files1.js
index bd8ec971ad6..190ac983dae 100644
--- a/jstests/tool/files1.js
+++ b/jstests/tool/files1.js
@@ -1,27 +1,27 @@
// files1.js
-t = new ToolTest( "files1" );
+t = new ToolTest("files1");
db = t.startDB();
filename = 'mongod';
-if ( _isWindows() )
+if (_isWindows())
filename += '.exe';
-t.runTool( "files" , "-d" , t.baseName , "put" , filename );
+t.runTool("files", "-d", t.baseName, "put", filename);
md5 = md5sumFile(filename);
file_obj = db.fs.files.findOne();
-assert( file_obj , "A 0" );
+assert(file_obj, "A 0");
md5_stored = file_obj.md5;
md5_computed = db.runCommand({filemd5: file_obj._id}).md5;
-assert.eq( md5 , md5_stored , "A 1" );
-assert.eq( md5 , md5_computed, "A 2" );
+assert.eq(md5, md5_stored, "A 1");
+assert.eq(md5, md5_computed, "A 2");
mkdir(t.ext);
-t.runTool( "files" , "-d" , t.baseName , "get" , filename , '-l' , t.extFile );
+t.runTool("files", "-d", t.baseName, "get", filename, '-l', t.extFile);
md5 = md5sumFile(t.extFile);
-assert.eq( md5 , md5_stored , "B" );
+assert.eq(md5, md5_stored, "B");
t.stop();
diff --git a/jstests/tool/gridfs.js b/jstests/tool/gridfs.js
index c144563c45b..5fbf6e6036b 100644
--- a/jstests/tool/gridfs.js
+++ b/jstests/tool/gridfs.js
@@ -1,15 +1,10 @@
// tests gridfs with a sharded fs.chunks collection.
-var test = new ShardingTest({shards: 3,
- mongos: 1,
- config: 1,
- verbose: 2,
- other: {chunkSize:1}});
+var test = new ShardingTest({shards: 3, mongos: 1, config: 1, verbose: 2, other: {chunkSize: 1}});
var mongos = test.s0;
-
-var filename = "mongod"; // A large file we are guaranteed to have
+var filename = "mongod"; // A large file we are guaranteed to have
if (_isWindows())
filename += ".exe";
@@ -28,7 +23,7 @@ function testGridFS(name) {
assert.eq(d.fs.files.count(), 1);
var fileObj = d.fs.files.findOne();
print("fileObj: " + tojson(fileObj));
- assert.eq(rawmd5, fileObj.md5); //check that mongofiles inserted the correct md5
+ assert.eq(rawmd5, fileObj.md5); // check that mongofiles inserted the correct md5
// Call filemd5 ourself and check results.
var res = d.runCommand({filemd5: fileObj._id});
@@ -37,7 +32,7 @@ function testGridFS(name) {
assert.eq(rawmd5, res.md5);
var numChunks = d.fs.chunks.find({files_id: fileObj._id}).itcount();
- //var numChunks = d.fs.chunks.count({files_id: fileObj._id}) // this is broken for now
+ // var numChunks = d.fs.chunks.count({files_id: fileObj._id}) // this is broken for now
assert.eq(numChunks, res.numChunks);
}
@@ -53,13 +48,13 @@ testGridFS(name);
print('\n\n\t**** sharded collection on files_id ****\n\n');
name = 'sharded_files_id';
test.adminCommand({enablesharding: name});
-test.adminCommand({shardcollection: name+'.fs.chunks', key: {files_id:1}});
+test.adminCommand({shardcollection: name + '.fs.chunks', key: {files_id: 1}});
testGridFS(name);
print('\n\n\t**** sharded collection on files_id,n ****\n\n');
name = 'sharded_files_id_n';
test.adminCommand({enablesharding: name});
-test.adminCommand({shardcollection: name+'.fs.chunks', key: {files_id:1, n:1}});
+test.adminCommand({shardcollection: name + '.fs.chunks', key: {files_id: 1, n: 1}});
testGridFS(name);
test.stop();
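(The testGridFS() helper above compares three md5 values: the client-side
md5sumFile() result, the md5 that mongofiles stored in fs.files, and the
digest the server recomputes from fs.chunks. A minimal standalone sketch of
the server-side half of that check; the db handle d and the default "fs" root
are assumed from the test.)

// filemd5 reads the chunks for the given files_id in order and recomputes
// the digest, so it should agree with the stored fs.files.md5 value.
var fileObj = d.fs.files.findOne();
var res = d.runCommand({filemd5: fileObj._id, root: "fs"});
assert.commandWorked(res);
assert.eq(fileObj.md5, res.md5, "server-side md5 disagrees with fs.files");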
diff --git a/jstests/tool/oplog1.js b/jstests/tool/oplog1.js
index bbee73d7f80..ad8146c080e 100644
--- a/jstests/tool/oplog1.js
+++ b/jstests/tool/oplog1.js
@@ -3,26 +3,27 @@
// Very basic test for mongooplog.
// More coverage is needed, but verify that it functions at all.
-t = new ToolTest( "oplog1" );
+t = new ToolTest("oplog1");
db = t.startDB();
output = db.output;
-doc = { _id : 5 , x : 17 };
+doc = {
+ _id: 5,
+ x: 17
+};
assert.commandWorked(db.createCollection(output.getName()));
-db.oplog.insert( { ts : new Timestamp() , "op" : "i" , "ns" : output.getFullName() , "o" : doc } );
+db.oplog.insert({ts: new Timestamp(), "op": "i", "ns": output.getFullName(), "o": doc});
-assert.eq( 0 , output.count() , "before" );
+assert.eq(0, output.count(), "before");
-t.runTool( "oplog" , "--oplogns" , db.getName() + ".oplog" , "--from" , "127.0.0.1:" + t.port , "-vv" );
+t.runTool("oplog", "--oplogns", db.getName() + ".oplog", "--from", "127.0.0.1:" + t.port, "-vv");
-assert.eq( 1 , output.count() , "after" );
+assert.eq(1, output.count(), "after");
-assert.docEq( doc , output.findOne() , "after check" );
+assert.docEq(doc, output.findOne(), "after check");
t.stop();
-
-
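(The hand-built entry above uses the oplog's standard shape: ts, op, ns, o.
For reference, a hedged sketch of the other common op codes — an update,
which adds an o2 selector, and a delete; the field values here are
illustrative.)

// 'i' = insert, 'u' = update (o2 selects the document, o is the mod),
// 'd' = delete, 'n' = no-op.
db.oplog.insert({ts: new Timestamp(), op: "u", ns: output.getFullName(),
                 o2: {_id: 5}, o: {$set: {x: 18}}});
db.oplog.insert({ts: new Timestamp(), op: "d", ns: output.getFullName(),
                 o: {_id: 5}});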
diff --git a/jstests/tool/oplog_all_ops.js b/jstests/tool/oplog_all_ops.js
index fb51f2b0b53..fb988174d24 100644
--- a/jstests/tool/oplog_all_ops.js
+++ b/jstests/tool/oplog_all_ops.js
@@ -4,10 +4,10 @@
* Correctness is verified using the dbhash command.
*/
-var repl1 = new ReplSetTest({ name: 'rs1', nodes: [{ nopreallocj: '' },
- { arbiter: true }, { arbiter: true }]});
+var repl1 =
+ new ReplSetTest({name: 'rs1', nodes: [{nopreallocj: ''}, {arbiter: true}, {arbiter: true}]});
-repl1.startSet({ oplogSize: 10 });
+repl1.startSet({oplogSize: 10});
repl1.initiate();
repl1.awaitSecondaryNodes();
@@ -16,52 +16,44 @@ var testDB = repl1Conn.getDB('test');
var testColl = testDB.user;
// op i
-testColl.insert({ x: 1 });
-testColl.insert({ x: 2 });
+testColl.insert({x: 1});
+testColl.insert({x: 2});
// op c
testDB.dropDatabase();
-testColl.insert({ y: 1 });
-testColl.insert({ y: 2 });
-testColl.insert({ y: 3 });
+testColl.insert({y: 1});
+testColl.insert({y: 2});
+testColl.insert({y: 3});
// op u
-testColl.update({}, { $inc: { z: 1 }}, true, true);
+testColl.update({}, {$inc: {z: 1}}, true, true);
// op d
-testColl.remove({ y: 2 });
+testColl.remove({y: 2});
// op n
var oplogColl = repl1Conn.getCollection('local.oplog.rs');
-oplogColl.insert({ ts: new Timestamp(), op: 'n', ns: testColl.getFullName(), 'o': { x: 'noop' }});
-
-var repl2 = new ReplSetTest({
- name: 'rs2',
- nodes: [
- {nopreallocj: ''},
- {arbiter: true},
- {arbiter: true}
- ]
-});
-
-repl2.startSet({ oplogSize: 10 });
+oplogColl.insert({ts: new Timestamp(), op: 'n', ns: testColl.getFullName(), 'o': {x: 'noop'}});
+
+var repl2 =
+ new ReplSetTest({name: 'rs2', nodes: [{nopreallocj: ''}, {arbiter: true}, {arbiter: true}]});
+
+repl2.startSet({oplogSize: 10});
repl2.initiate();
repl2.awaitSecondaryNodes();
var srcConn = repl1.getPrimary();
-runMongoProgram('mongooplog', '--from', repl1.getPrimary().host,
- '--host', repl2.getPrimary().host);
+runMongoProgram('mongooplog', '--from', repl1.getPrimary().host, '--host', repl2.getPrimary().host);
-var repl1Hash = testDB.runCommand({ dbhash: 1 });
+var repl1Hash = testDB.runCommand({dbhash: 1});
var repl2Conn = new Mongo(repl2.getURL());
var testDB2 = repl2Conn.getDB(testDB.getName());
-var repl2Hash = testDB2.runCommand({ dbhash: 1 });
+var repl2Hash = testDB2.runCommand({dbhash: 1});
assert(repl1Hash.md5);
assert.eq(repl1Hash.md5, repl2Hash.md5);
repl1.stopSet();
repl2.stopSet();
-
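(The correctness check in this test rests on dbhash: the command returns an
md5 per collection plus one for the whole database, so equal top-level md5
values mean the two replica sets converged to identical contents. A condensed
sketch of the comparison, using the handles defined above.)

// Equal md5 fields imply the databases' contents match after mongooplog.
var h1 = testDB.runCommand({dbhash: 1});
var h2 = testDB2.runCommand({dbhash: 1});
assert.commandWorked(h1);
assert.commandWorked(h2);
assert.eq(h1.md5, h2.md5, "databases diverged after applying the oplog");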
diff --git a/jstests/tool/restorewithauth.js b/jstests/tool/restorewithauth.js
index 6db4b0bf359..0fd29706ee1 100644
--- a/jstests/tool/restorewithauth.js
+++ b/jstests/tool/restorewithauth.js
@@ -1,5 +1,5 @@
/* SERVER-4972
- * Test for mongorestore on server with --auth allows restore without credentials of colls
+ * Test that mongorestore on a server with --auth allows restore without credentials of colls
* with no index
*/
/*
@@ -14,14 +14,13 @@
* 9) Try restore with correct auth credentials. The restore should succeed this time.
*/
-
baseName = "jstests_restorewithauth";
var conn = MongoRunner.runMongod({nojournal: "", bind_ip: "127.0.0.1"});
// write to ns foo.bar
-var foo = conn.getDB( "foo" );
-for( var i = 0; i < 4; i++ ) {
- foo["bar"].save( { "x": i } );
+var foo = conn.getDB("foo");
+for (var i = 0; i < 4; i++) {
+ foo["bar"].save({"x": i});
foo["baz"].save({"x": i});
}
@@ -29,18 +28,18 @@ for( var i = 0; i < 4; i++ ) {
var collNames = foo.getCollectionNames();
assert.neq(-1, collNames.indexOf("bar"), "bar collection doesn't exist");
-//make sure it has no index except _id
+// make sure it has no index except _id
assert.eq(foo.bar.getIndexes().length, 1);
assert.eq(foo.baz.getIndexes().length, 1);
-foo.bar.createIndex({x:1});
+foo.bar.createIndex({x: 1});
assert.eq(foo.bar.getIndexes().length, 2);
assert.eq(foo.baz.getIndexes().length, 1);
// get data dump
var dumpdir = MongoRunner.dataDir + "/restorewithauth-dump1/";
-resetDbpath( dumpdir );
-x = runMongoProgram("mongodump", "--db", "foo", "-h", "127.0.0.1:"+ conn.port, "--out", dumpdir);
+resetDbpath(dumpdir);
+x = runMongoProgram("mongodump", "--db", "foo", "-h", "127.0.0.1:" + conn.port, "--out", dumpdir);
// now drop the db
foo.dropDatabase();
@@ -52,11 +51,11 @@ MongoRunner.stopMongod(conn);
conn = MongoRunner.runMongod({auth: "", nojournal: "", bind_ip: "127.0.0.1"});
// admin user
-var admin = conn.getDB( "admin" );
-admin.createUser({user: "admin" , pwd: "admin", roles: jsTest.adminUserRoles});
-admin.auth( "admin" , "admin" );
+var admin = conn.getDB("admin");
+admin.createUser({user: "admin", pwd: "admin", roles: jsTest.adminUserRoles});
+admin.auth("admin", "admin");
-var foo = conn.getDB( "foo" );
+var foo = conn.getDB("foo");
// make sure no collection with the same name exists
collNames = foo.getCollectionNames();
@@ -64,7 +63,7 @@ assert.eq(-1, collNames.indexOf("bar"), "bar collection already exists");
assert.eq(-1, collNames.indexOf("baz"), "baz collection already exists");
// now try to restore dump
-x = runMongoProgram( "mongorestore", "-h", "127.0.0.1:" + conn.port, "--dir" , dumpdir, "-vvvvv" );
+x = runMongoProgram("mongorestore", "-h", "127.0.0.1:" + conn.port, "--dir", dumpdir, "-vvvvv");
// make sure that the collection isn't restored
collNames = foo.getCollectionNames();
@@ -72,14 +71,19 @@ assert.eq(-1, collNames.indexOf("bar"), "bar collection was restored");
assert.eq(-1, collNames.indexOf("baz"), "baz collection was restored");
// now try to restore dump with correct credentials
-x = runMongoProgram( "mongorestore",
- "-h", "127.0.0.1:" + conn.port,
- "-d", "foo",
- "--authenticationDatabase=admin",
- "-u", "admin",
- "-p", "admin",
- "--dir", dumpdir + "foo/",
- "-vvvvv");
+x = runMongoProgram("mongorestore",
+ "-h",
+ "127.0.0.1:" + conn.port,
+ "-d",
+ "foo",
+ "--authenticationDatabase=admin",
+ "-u",
+ "admin",
+ "-p",
+ "admin",
+ "--dir",
+ dumpdir + "foo/",
+ "-vvvvv");
// make sure that the collection was restored
collNames = foo.getCollectionNames();
@@ -96,11 +100,16 @@ foo.createUser({user: 'user', pwd: 'password', roles: jsTest.basicUserRoles});
// now try to restore dump with foo database credentials
x = runMongoProgram("mongorestore",
- "-h", "127.0.0.1:" + conn.port,
- "-d", "foo",
- "-u", "user",
- "-p", "password",
- "--dir", dumpdir + "foo/",
+ "-h",
+ "127.0.0.1:" + conn.port,
+ "-d",
+ "foo",
+ "-u",
+ "user",
+ "-p",
+ "password",
+ "--dir",
+ dumpdir + "foo/",
"-vvvvv");
// make sure that the collection was restored
@@ -109,6 +118,7 @@ assert.neq(-1, collNames.indexOf("bar"), "bar collection was not restored");
assert.neq(-1, collNames.indexOf("baz"), "baz collection was not restored");
assert.eq(foo.bar.count(), 4);
assert.eq(foo.baz.count(), 4);
-assert.eq(foo.bar.getIndexes().length + foo.baz.getIndexes().length, 3); // _id on foo, _id on bar, x on foo
+assert.eq(foo.bar.getIndexes().length + foo.baz.getIndexes().length,
+          3);  // _id and x on bar, _id on baz
MongoRunner.stopMongod(conn);
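(The two credentialed restores above differ only in where the user lives: the
admin user must name its credential database explicitly via
--authenticationDatabase=admin, while the foo-local user authenticates
against the -d database by default. A condensed sketch of the two forms,
using the same host and dump directory as the test.)

// Admin-database credentials need an explicit authentication database.
runMongoProgram("mongorestore", "-h", "127.0.0.1:" + conn.port, "-d", "foo",
                "--authenticationDatabase=admin", "-u", "admin", "-p", "admin",
                "--dir", dumpdir + "foo/");
// Database-local credentials default to authenticating against -d.
runMongoProgram("mongorestore", "-h", "127.0.0.1:" + conn.port, "-d", "foo",
                "-u", "user", "-p", "password", "--dir", dumpdir + "foo/");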
diff --git a/jstests/tool/stat1.js b/jstests/tool/stat1.js
index 0b5bf7f02b0..efdbcb0f376 100644
--- a/jstests/tool/stat1.js
+++ b/jstests/tool/stat1.js
@@ -3,13 +3,33 @@
baseName = "tool_stat1";
var m = MongoRunner.runMongod({auth: "", bind_ip: "127.0.0.1"});
-db = m.getDB( "admin" );
+db = m.getDB("admin");
-db.createUser({user: "eliot" , pwd: "eliot", roles: jsTest.adminUserRoles});
-assert( db.auth( "eliot" , "eliot" ) , "auth failed" );
+db.createUser({user: "eliot", pwd: "eliot", roles: jsTest.adminUserRoles});
+assert(db.auth("eliot", "eliot"), "auth failed");
-x = runMongoProgram( "mongostat", "--host", "127.0.0.1:"+m.port, "--username", "eliot", "--password", "eliot", "--rowcount", "1", "--authenticationDatabase", "admin" );
+x = runMongoProgram("mongostat",
+ "--host",
+ "127.0.0.1:" + m.port,
+ "--username",
+ "eliot",
+ "--password",
+ "eliot",
+ "--rowcount",
+ "1",
+ "--authenticationDatabase",
+ "admin");
assert.eq(x, 0, "mongostat should exit successfully with eliot:eliot");
-x = runMongoProgram( "mongostat", "--host", "127.0.0.1:"+m.port, "--username", "eliot", "--password", "wrong", "--rowcount", "1", "--authenticationDatabase", "admin" );
+x = runMongoProgram("mongostat",
+ "--host",
+ "127.0.0.1:" + m.port,
+ "--username",
+ "eliot",
+ "--password",
+ "wrong",
+ "--rowcount",
+ "1",
+ "--authenticationDatabase",
+ "admin");
assert.neq(x, 0, "mongostat should exit with a nonzero status with eliot:wrong");
diff --git a/jstests/tool/tool1.js b/jstests/tool/tool1.js
index 6fb0a1f0f02..ce5e880b4ba 100644
--- a/jstests/tool/tool1.js
+++ b/jstests/tool/tool1.js
@@ -6,37 +6,52 @@ externalPath = MongoRunner.dataPath + baseName + "_external/";
externalBaseName = "export.json";
externalFile = externalPath + externalBaseName;
-function fileSize(){
- var l = listFiles( externalPath );
- for ( var i=0; i<l.length; i++ ){
- if ( l[i].baseName == externalBaseName )
+function fileSize() {
+ var l = listFiles(externalPath);
+ for (var i = 0; i < l.length; i++) {
+ if (l[i].baseName == externalBaseName)
return l[i].size;
}
return -1;
}
-
-resetDbpath( externalPath );
+resetDbpath(externalPath);
var m = MongoRunner.runMongod({dbpath: dbPath, noprealloc: "", bind_ip: "127.0.0.1"});
-c = m.getDB( baseName ).getCollection( baseName );
-c.save( { a: 1 } );
-assert( c.findOne() );
+c = m.getDB(baseName).getCollection(baseName);
+c.save({a: 1});
+assert(c.findOne());
-runMongoProgram( "mongodump", "--host", "127.0.0.1:" + m.port, "--out", externalPath );
+runMongoProgram("mongodump", "--host", "127.0.0.1:" + m.port, "--out", externalPath);
c.drop();
-runMongoProgram( "mongorestore", "--host", "127.0.0.1:" + m.port, "--dir", externalPath );
-assert.soon( "c.findOne()" , "mongodump then restore has no data w/sleep" );
-assert( c.findOne() , "mongodump then restore has no data" );
-assert.eq( 1 , c.findOne().a , "mongodump then restore has no broken data" );
-
-resetDbpath( externalPath );
-
-assert.eq( -1 , fileSize() , "mongoexport prep invalid" );
-runMongoProgram( "mongoexport", "--host", "127.0.0.1:" + m.port, "-d", baseName, "-c", baseName, "--out", externalFile );
-assert.lt( 10 , fileSize() , "file size changed" );
+runMongoProgram("mongorestore", "--host", "127.0.0.1:" + m.port, "--dir", externalPath);
+assert.soon("c.findOne()", "mongodump then restore has no data w/sleep");
+assert(c.findOne(), "mongodump then restore has no data");
+assert.eq(1, c.findOne().a, "mongodump then restore has no broken data");
+
+resetDbpath(externalPath);
+
+assert.eq(-1, fileSize(), "mongoexport prep invalid");
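+// Export the collection to a JSON file, confirm the file grew, then drop and re-import it.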
+runMongoProgram("mongoexport",
+ "--host",
+ "127.0.0.1:" + m.port,
+ "-d",
+ baseName,
+ "-c",
+ baseName,
+ "--out",
+ externalFile);
+assert.lt(10, fileSize(), "file size changed");
c.drop();
-runMongoProgram( "mongoimport", "--host", "127.0.0.1:" + m.port, "-d", baseName, "-c", baseName, "--file", externalFile );
-assert.soon( "c.findOne()" , "mongo import json A" );
-assert( c.findOne() && 1 == c.findOne().a , "mongo import json B" );
+runMongoProgram("mongoimport",
+ "--host",
+ "127.0.0.1:" + m.port,
+ "-d",
+ baseName,
+ "-c",
+ baseName,
+ "--file",
+ externalFile);
+assert.soon("c.findOne()", "mongo import json A");
+assert(c.findOne() && 1 == c.findOne().a, "mongo import json B");
diff --git a/jstests/tool/tool_replset.js b/jstests/tool/tool_replset.js
index 90560c9ee2c..efe55b46605 100644
--- a/jstests/tool/tool_replset.js
+++ b/jstests/tool/tool_replset.js
@@ -17,7 +17,7 @@
(function() {
"use strict";
- var replTest = new ReplSetTest({ name: 'tool_replset', nodes: 2, oplogSize: 5 });
+ var replTest = new ReplSetTest({name: 'tool_replset', nodes: 2, oplogSize: 5});
var nodes = replTest.startSet();
var config = replTest.getReplSetConfig();
config.members[0].priority = 3;
@@ -26,12 +26,12 @@
var master = replTest.getPrimary();
assert.eq(nodes[0], master, "incorrect master elected");
for (var i = 0; i < 100; i++) {
- assert.writeOK(master.getDB("foo").bar.insert({ a: i }));
+ assert.writeOK(master.getDB("foo").bar.insert({a: i}));
}
replTest.awaitReplication();
- var replSetConnString = "tool_replset/127.0.0.1:" + replTest.ports[0] +
- ",127.0.0.1:" + replTest.ports[1];
+ var replSetConnString =
+ "tool_replset/127.0.0.1:" + replTest.ports[0] + ",127.0.0.1:" + replTest.ports[1];
// Test with mongodump/mongorestore
print("dump the db");
@@ -54,33 +54,44 @@
// Test with mongoexport/mongoimport
print("export the collection");
var extFile = MongoRunner.dataDir + "/tool_replset/export";
- runMongoProgram("mongoexport", "--host", replSetConnString, "--out", extFile,
- "-d", "foo", "-c", "bar");
+ runMongoProgram(
+ "mongoexport", "--host", replSetConnString, "--out", extFile, "-d", "foo", "-c", "bar");
print("collection successfully exported, dropping now");
master.getDB("foo").getCollection("bar").drop();
replTest.awaitReplication();
print("import the collection");
- runMongoProgram("mongoimport", "--host", replSetConnString, "--file", extFile,
- "-d", "foo", "-c", "bar");
+ runMongoProgram(
+ "mongoimport", "--host", replSetConnString, "--file", extFile, "-d", "foo", "-c", "bar");
var x = master.getDB("foo").getCollection("bar").count();
assert.eq(x, 100, "mongoimport should have successfully imported the collection");
- var doc = {_id: 5, x: 17};
- var oplogEntry = {ts: new Timestamp(), "op": "i", "ns": "foo.bar", "o": doc, "v": NumberInt(2)};
+ var doc = {
+ _id: 5,
+ x: 17
+ };
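+ // Hand-craft a raw insert oplog entry for foo.bar; mongooplog will replay it below.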
+ var oplogEntry = {
+ ts: new Timestamp(),
+ "op": "i",
+ "ns": "foo.bar",
+ "o": doc,
+ "v": NumberInt(2)
+ };
assert.writeOK(master.getDB("local").oplog.rs.insert(oplogEntry));
- assert.eq(100, master.getDB("foo").getCollection("bar").count(), "count before running " +
- "mongooplog was not 100 as expected");
+ assert.eq(100,
+ master.getDB("foo").getCollection("bar").count(),
+ "count before running " + "mongooplog was not 100 as expected");
- runMongoProgram("mongooplog" , "--from", "127.0.0.1:" + replTest.ports[0],
- "--host", replSetConnString);
+ runMongoProgram(
+ "mongooplog", "--from", "127.0.0.1:" + replTest.ports[0], "--host", replSetConnString);
print("finished running mongooplog to replay the oplog");
- assert.eq(101, master.getDB("foo").getCollection("bar").count(), "count after running " +
- "mongooplog was not 101 as expected");
+ assert.eq(101,
+ master.getDB("foo").getCollection("bar").count(),
+ "count after running " + "mongooplog was not 101 as expected");
print("all tests successful, stopping replica set");
diff --git a/jstests/tool/tsv1.js b/jstests/tool/tsv1.js
index 8395a77c711..62316401521 100644
--- a/jstests/tool/tsv1.js
+++ b/jstests/tool/tsv1.js
@@ -1,32 +1,55 @@
// tsv1.js
-t = new ToolTest( "tsv1" );
-
-c = t.startDB( "foo" );
-
-base = { a : "", b : 1 , c : "foobar" , d: 5, e: -6 };
-
-t.runTool( "import" , "--file" , "jstests/tool/data/a.tsv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "tsv" , "-f" , "a,b,c,d,e" );
-assert.soon( "2 == c.count()" , "restore 2" );
-
-a = c.find().sort( { a : 1 } ).toArray();
+t = new ToolTest("tsv1");
+
+c = t.startDB("foo");
+
+base = {
+ a: "",
+ b: 1,
+ c: "foobar",
+ d: 5,
+ e: -6
+};
+
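+// Import the TSV with an explicit field list; the header row is parsed as data, so two docs land.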
+t.runTool("import",
+ "--file",
+ "jstests/tool/data/a.tsv",
+ "-d",
+ t.baseName,
+ "-c",
+ "foo",
+ "--type",
+ "tsv",
+ "-f",
+ "a,b,c,d,e");
+assert.soon("2 == c.count()", "restore 2");
+
+a = c.find().sort({a: 1}).toArray();
delete a[0]._id;
delete a[1]._id;
-assert.docEq( { a : "a" , b : "b" , c : "c" , d: "d", e: "e"} , a[1] , "tsv parse 1" );
-assert.docEq( base , a[0] , "tsv parse 0" );
+assert.docEq({a: "a", b: "b", c: "c", d: "d", e: "e"}, a[1], "tsv parse 1");
+assert.docEq(base, a[0], "tsv parse 0");
c.drop();
-assert.eq( 0 , c.count() , "after drop 2" );
-
-t.runTool( "import" , "--file" , "jstests/tool/data/a.tsv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "tsv" , "--headerline" );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 1 , c.count() , "after restore 2" );
+assert.eq(0, c.count(), "after drop 2");
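+// Re-import with --headerline: the first row supplies the field names, leaving one document.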
+
+t.runTool("import",
+ "--file",
+ "jstests/tool/data/a.tsv",
+ "-d",
+ t.baseName,
+ "-c",
+ "foo",
+ "--type",
+ "tsv",
+ "--headerline");
+assert.soon("c.findOne()", "no data after sleep");
+assert.eq(1, c.count(), "after restore 2");
x = c.findOne();
delete x._id;
-assert.docEq( base , x , "tsv parse 2" );
-
-
+assert.docEq(base, x, "tsv parse 2");
t.stop();