| author | Mike Grundy <michael.grundy@10gen.com> | 2016-02-04 12:31:43 -0500 |
| --- | --- | --- |
| committer | Mike Grundy <michael.grundy@10gen.com> | 2016-02-05 15:00:52 -0500 |
| commit | 55b5ad87239ba35e542add1e7402fe775dc19b8e (patch) | |
| tree | 64e8ac3964c3c8d4c3e970737ebcc57ab604969c | |
| parent | f541080e8a4283a79cf21c5d62ffac325f7dad05 (diff) | |
| download | mongo-55b5ad87239ba35e542add1e7402fe775dc19b8e.tar.gz | |
SERVER-22341 fix jslint errors in jstests/tool with eslint --fix
| mode | path | lines changed |
| --- | --- | --- |
| -rw-r--r-- | jstests/tool/csv1.js | 28 |
| -rw-r--r-- | jstests/tool/csvexport1.js | 40 |
| -rw-r--r-- | jstests/tool/csvexport2.js | 4 |
| -rw-r--r-- | jstests/tool/csvimport1.js | 32 |
| -rw-r--r-- | jstests/tool/dumpauth.js | 6 |
| -rw-r--r-- | jstests/tool/dumpfilename1.js | 2 |
| -rw-r--r-- | jstests/tool/dumprestore4.js | 4 |
| -rw-r--r-- | jstests/tool/dumprestoreWithNoOptions.js | 6 |
| -rw-r--r-- | jstests/tool/dumprestore_auth2.js | 10 |
| -rw-r--r-- | jstests/tool/dumprestore_auth3.js | 6 |
| -rw-r--r-- | jstests/tool/dumpsecondary.js | 10 |
| -rw-r--r-- | jstests/tool/exportimport1.js | 6 |
| -rw-r--r-- | jstests/tool/exportimport3.js | 12 |
| -rw-r--r-- | jstests/tool/exportimport6.js | 12 |
| -rw-r--r-- | jstests/tool/exportimport_bigarray.js | 4 |
| -rw-r--r-- | jstests/tool/files1.js | 10 |
| -rw-r--r-- | jstests/tool/gridfs.js | 72 |
| -rw-r--r-- | jstests/tool/oplog1.js | 4 |
| -rw-r--r-- | jstests/tool/restorewithauth.js | 4 |
| -rw-r--r-- | jstests/tool/tool_replset.js | 12 |
| -rw-r--r-- | jstests/tool/tsv1.js | 20 |
21 files changed, 152 insertions, 152 deletions
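
Every hunk below is the same mechanical change: a missing statement-terminating semicolon added by `eslint --fix` (plus a few stray `;;` doubles collapsed in the exportimport tests). For orientation only, here is a minimal sketch of an ESLint configuration that would drive exactly these fixes; this `.eslintrc.js` is hypothetical, is not part of this commit, and the repository's real lint setup may differ:

```js
// Hypothetical .eslintrc.js -- a minimal sketch, assuming only the two
// fixable rules these hunks exercise; the repo's actual config may differ.
module.exports = {
    rules: {
        // Require a semicolon at the end of every statement; `eslint --fix`
        // appends the missing ones automatically.
        semi: ["error", "always"],
        // Collapse accidental double semicolons such as `...);;`.
        "no-extra-semi": "error"
    }
};
```

With a config like this, running `eslint --fix` over `jstests/tool` rewrites the files in place, which matches how the commit subject says this patch was produced.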
```diff
diff --git a/jstests/tool/csv1.js b/jstests/tool/csv1.js
index 4366dd0bb69..3338d500fdf 100644
--- a/jstests/tool/csv1.js
+++ b/jstests/tool/csv1.js
@@ -1,6 +1,6 @@
 // csv1.js

-t = new ToolTest( "csv1" )
+t = new ToolTest( "csv1" );

 c = t.startDB( "foo" );

@@ -8,35 +8,35 @@ base = { a : 1 , b : "foo,bar\"baz,qux" , c: 5, 'd d': -6 , e: '-', f : "."};
 assert.eq( 0 , c.count() , "setup1" );

 c.insert( base );
-delete base._id
+delete base._id;
 assert.eq( 1 , c.count() , "setup2" );

-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv" , "-f" , "a,b,c,d d,e,f" )
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv" , "-f" , "a,b,c,d d,e,f" );

-c.drop()
-assert.eq( 0 , c.count() , "after drop" )
+c.drop();
+assert.eq( 0 , c.count() , "after drop" );

 t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "-f" , "a,b,c,d d,e,f" );
 assert.soon( "2 == c.count()" , "restore 2" );

 a = c.find().sort( { a : 1 } ).toArray();
-delete a[0]._id
-delete a[1]._id
+delete a[0]._id;
+delete a[1]._id;

 assert.docEq( { a : "a" , b : "b" , c : "c" , 'd d': "d d", e: 'e', f : "f"}, a[1], "csv parse 1" );
-assert.docEq( base, a[0], "csv parse 0" )
+assert.docEq( base, a[0], "csv parse 0" );

-c.drop()
-assert.eq( 0 , c.count() , "after drop 2" )
+c.drop();
+assert.eq( 0 , c.count() , "after drop 2" );

-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" )
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" );
 assert.soon( "c.findOne()" , "no data after sleep" );
 assert.eq( 1 , c.count() , "after restore 2" );

-x = c.findOne()
+x = c.findOne();
 delete x._id;
-assert.docEq( base, x, "csv parse 2" )
+assert.docEq( base, x, "csv parse 2" );

-t.stop()
+t.stop();
diff --git a/jstests/tool/csvexport1.js b/jstests/tool/csvexport1.js
index 628ccb56994..f01acbcd6fc 100644
--- a/jstests/tool/csvexport1.js
+++ b/jstests/tool/csvexport1.js
@@ -1,64 +1,64 @@
 // csvexport1.js

-t = new ToolTest( "csvexport1" )
+t = new ToolTest( "csvexport1" );

 c = t.startDB( "foo" );

 assert.eq( 0 , c.count() , "setup1" );

-objId = ObjectId()
+objId = ObjectId();

-c.insert({ a : new NumberInt(1) , b : objId , c: [1, 2, 3], d : {a : "hello", b : "world"} , e: '-'})
-c.insert({ a : -2.0, c : MinKey, d : "Then he said, \"Hello World!\"", e : new NumberLong(3)})
+c.insert({ a : new NumberInt(1) , b : objId , c: [1, 2, 3], d : {a : "hello", b : "world"} , e: '-'});
+c.insert({ a : -2.0, c : MinKey, d : "Then he said, \"Hello World!\"", e : new NumberLong(3)});
 c.insert({ a : new BinData(0, "1234"), b : ISODate("2009-08-27T12:34:56.789"),
            c : new Timestamp(1234, 9876), d : /foo*\"bar\"/i,
-           e : function foo() { print("Hello World!"); }})
+           e : function foo() { print("Hello World!"); }});

 assert.eq( 3 , c.count() , "setup2" );

-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv", "-f", "a,b,c,d,e")
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv", "-f", "a,b,c,d,e");

-c.drop()
+c.drop();

-assert.eq( 0 , c.count() , "after drop" )
+assert.eq( 0 , c.count() , "after drop" );

 t.runTool("import", "--file", t.extFile, "-d", t.baseName, "-c", "foo", "--type", "csv", "--headerline");
 assert.soon ( 3 + " == c.count()", "after import");

 // Note: Exporting and Importing to/from CSV is not designed to be round-trippable
-expected = []
-expected.push({ a : 1, b : "ObjectId(" + objId.valueOf() + ")", c : [ 1, 2, 3 ], d : { "a" : "hello", "b" : "world" }, e : "-"})
-expected.push({ a : -2.0, b : "", c : "$MinKey", d : "Then he said, \"Hello World!\"", e : 3})
+expected = [];
+expected.push({ a : 1, b : "ObjectId(" + objId.valueOf() + ")", c : [ 1, 2, 3 ], d : { "a" : "hello", "b" : "world" }, e : "-"});
+expected.push({ a : -2.0, b : "", c : "$MinKey", d : "Then he said, \"Hello World!\"", e : 3});
 // "t" should be 1234, but the shell interprets the first field of timestamps as milliseconds while
 // they are stored as seconds. See SERVER-7718.
 expected.push({ a : "D76DF8", b : "2009-08-27T12:34:56.789Z",
                 c : { "$timestamp" : { "t" : 1234, "i" : 9876 } },
-                d : "/foo*\\\"bar\\\"/i", e : tojson(function foo() { print("Hello World!"); })})
+                d : "/foo*\\\"bar\\\"/i", e : tojson(function foo() { print("Hello World!"); })});

-actual = []
+actual = [];
 actual.push(c.find({a : 1}).toArray()[0]);
 actual.push(c.find({a : -2.0}).toArray()[0]);
 actual.push(c.find({a : "D76DF8"}).toArray()[0]);

 for (i = 0; i < expected.length; i++) {
-    delete actual[i]._id
-    assert.eq(Object.keys(expected[i]).length, Object.keys(actual[i]).length)
-    keys = Object.keys(expected[i])
+    delete actual[i]._id;
+    assert.eq(Object.keys(expected[i]).length, Object.keys(actual[i]).length);
+    keys = Object.keys(expected[i]);
     for(var j=0;j<keys.length;j++){
-        expectedVal = expected[i][keys[j]]
+        expectedVal = expected[i][keys[j]];
         if((typeof expectedVal)== "object"){
             // For fields which contain arrays or objects, they have been
             // exported as JSON - parse the JSON in the output and verify
             // that it matches the original document's value
-            assert.docEq(expectedVal, JSON.parse(actual[i][keys[j]]), "CSV export " + i)
+            assert.docEq(expectedVal, JSON.parse(actual[i][keys[j]]), "CSV export " + i);
         }else{
             // Otherwise just compare the values directly
-            assert.eq(expectedVal, actual[i][keys[j]], "CSV export " + i)
+            assert.eq(expectedVal, actual[i][keys[j]], "CSV export " + i);
         }
     }
 }

-t.stop()
+t.stop();
diff --git a/jstests/tool/csvexport2.js b/jstests/tool/csvexport2.js
index 3e0dd2c6829..dc12288e83b 100644
--- a/jstests/tool/csvexport2.js
+++ b/jstests/tool/csvexport2.js
@@ -1,6 +1,6 @@
 // csvexport2.js

-t = new ToolTest( "csvexport2" )
+t = new ToolTest( "csvexport2" );

 c = t.startDB( "foo" );

@@ -28,4 +28,4 @@ c = t.startDB( "foo" );

 //assert.eq( expected, actual );

-t.stop()
\ No newline at end of file
+t.stop();
\ No newline at end of file
diff --git a/jstests/tool/csvimport1.js b/jstests/tool/csvimport1.js
index f518e31e29b..a85470f2c11 100644
--- a/jstests/tool/csvimport1.js
+++ b/jstests/tool/csvimport1.js
@@ -1,16 +1,16 @@
 // csvimport1.js

-t = new ToolTest( "csvimport1" )
+t = new ToolTest( "csvimport1" );

 c = t.startDB( "foo" );

-base = []
-base.push({ a : 1, b : "this is some text.\nThis text spans multiple lines, and just for fun\ncontains a comma", "c" : "This has leading and trailing whitespace!" })
-base.push({a : 2, b : "When someone says something you \"put it in quotes\"", "c" : "I like embedded quotes/slashes\\backslashes" })
-base.push({a : 3, b : " This line contains the empty string and has leading and trailing whitespace inside the quotes! ", "c" : "" })
-base.push({a : 4, b : "", "c" : "How are empty entries handled?" })
-base.push({a : 5, b : "\"\"", c : "\"This string is in quotes and contains empty quotes (\"\")\""})
-base.push({ a : "a" , b : "b" , c : "c"})
+base = [];
+base.push({ a : 1, b : "this is some text.\nThis text spans multiple lines, and just for fun\ncontains a comma", "c" : "This has leading and trailing whitespace!" });
+base.push({a : 2, b : "When someone says something you \"put it in quotes\"", "c" : "I like embedded quotes/slashes\\backslashes" });
+base.push({a : 3, b : " This line contains the empty string and has leading and trailing whitespace inside the quotes! ", "c" : "" });
+base.push({a : 4, b : "", "c" : "How are empty entries handled?" });
+base.push({a : 5, b : "\"\"", c : "\"This string is in quotes and contains empty quotes (\"\")\""});
+base.push({ a : "a" , b : "b" , c : "c"});

 assert.eq( 0 , c.count() , "setup" );

@@ -19,22 +19,22 @@ assert.soon( base.length + " == c.count()" , "after import 1 " );

 a = c.find().sort( { a : 1 } ).toArray();
 for (i = 0; i < base.length; i++ ) {
-    delete a[i]._id
-    assert.docEq( base[i], a[i], "csv parse " + i)
+    delete a[i]._id;
+    assert.docEq( base[i], a[i], "csv parse " + i);
 }

-c.drop()
-assert.eq( 0 , c.count() , "after drop" )
+c.drop();
+assert.eq( 0 , c.count() , "after drop" );

-t.runTool( "import" , "--file" , "jstests/tool/data/csvimport1.csv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" )
+t.runTool( "import" , "--file" , "jstests/tool/data/csvimport1.csv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" );
 assert.soon( "c.findOne()" , "no data after sleep" );
 assert.eq( base.length - 1 , c.count() , "after import 2" );

 x = c.find().sort( { a : 1 } ).toArray();
 for (i = 0; i < base.length - 1; i++ ) {
-    delete x[i]._id
-    assert.docEq( base[i], x[i], "csv parse with headerline " + i)
+    delete x[i]._id;
+    assert.docEq( base[i], x[i], "csv parse with headerline " + i);
 }

-t.stop()
+t.stop();
diff --git a/jstests/tool/dumpauth.js b/jstests/tool/dumpauth.js
index ba8ee9a5f81..86caf260328 100644
--- a/jstests/tool/dumpauth.js
+++ b/jstests/tool/dumpauth.js
@@ -2,9 +2,9 @@
 // test mongodump with authentication

 var m = MongoRunner.runMongod({auth: "", bind_ip: "127.0.0.1"});
-var dbName = "admin"
-var colName = "testcol"
-var profileName = "system.profile"
+var dbName = "admin";
+var colName = "testcol";
+var profileName = "system.profile";
 var dumpDir = MongoRunner.dataPath + "jstests_tool_dumprestore_dump_system_profile/";

 db = m.getDB(dbName);
diff --git a/jstests/tool/dumpfilename1.js b/jstests/tool/dumpfilename1.js
index 38b430896bf..4a79a11bdb1 100644
--- a/jstests/tool/dumpfilename1.js
+++ b/jstests/tool/dumpfilename1.js
@@ -8,6 +8,6 @@ t.startDB( "foo" );

 c = t.db;
 assert.writeOK(c.getCollection("df/").insert({ a: 3 }));
-assert(t.runTool( "dump" , "--out" , t.ext ) != 0, "dump should fail with non-zero return code")
+assert(t.runTool( "dump" , "--out" , t.ext ) != 0, "dump should fail with non-zero return code");

 t.stop();
diff --git a/jstests/tool/dumprestore4.js b/jstests/tool/dumprestore4.js
index 337d9c34265..a4d33df7deb 100644
--- a/jstests/tool/dumprestore4.js
+++ b/jstests/tool/dumprestore4.js
@@ -13,7 +13,7 @@ t = new ToolTest( "dumprestore4" );

 c = t.startDB( "dumprestore4" );

-db=t.db
+db=t.db;

 dbname = db.getName();
 dbname2 = "NOT_"+dbname;
@@ -27,7 +27,7 @@ assert.eq( 0 , c.getIndexes().length , "setup1" );
 c.ensureIndex({ x : 1} );
 assert.eq( 2 , c.getIndexes().length , "setup2" ); // _id and x_1

-assert.eq( 0, t.runTool( "dump" , "-d" , dbname, "--out", t.ext ), "dump")
+assert.eq( 0, t.runTool( "dump" , "-d" , dbname, "--out", t.ext ), "dump");

 // to ensure issue (2), we have to clear out the first db.
 // By inspection, db.dropIndexes() doesn't get rid of the _id index on c,
diff --git a/jstests/tool/dumprestoreWithNoOptions.js b/jstests/tool/dumprestoreWithNoOptions.js
index bfd6f4fa579..b822deb93e8 100644
--- a/jstests/tool/dumprestoreWithNoOptions.js
+++ b/jstests/tool/dumprestoreWithNoOptions.js
@@ -23,7 +23,7 @@ dbname2 = "NOT_"+dbname;

 db.dropDatabase();

-var defaultFlags = {}
+var defaultFlags = {};

 var options = { capped: true, size: 4096, autoIndexId: true };
 db.createCollection('capped', options);
@@ -58,7 +58,7 @@ db.createCollection('capped', options);
 assert.eq( 1, db.capped.getIndexes().length, "auto index not created" );
 var cappedOptions = db.capped.exists().options;
 for ( var opt in options ) {
-    assert.eq(options[opt], cappedOptions[opt], 'invalid option')
+    assert.eq(options[opt], cappedOptions[opt], 'invalid option');
 }

 assert.writeOK(db.capped.insert({ x: 1 }));
@@ -87,7 +87,7 @@ db.createCollection('capped', options);
 assert.eq( 1, db.capped.getIndexes().length, "auto index not created" );
 var cappedOptions = db.capped.exists().options;
 for ( var opt in options ) {
-    assert.eq(options[opt], cappedOptions[opt], 'invalid option')
+    assert.eq(options[opt], cappedOptions[opt], 'invalid option');
 }

 assert.writeOK(db.capped.insert({ x: 1 }));
diff --git a/jstests/tool/dumprestore_auth2.js b/jstests/tool/dumprestore_auth2.js
index 85f73ee442a..4d410d34ca9 100644
--- a/jstests/tool/dumprestore_auth2.js
+++ b/jstests/tool/dumprestore_auth2.js
@@ -7,7 +7,7 @@ var dumpRestoreAuth2 = function(backup_role, restore_role) {

     t = new ToolTest("dumprestore_auth2", {auth: ""});
     coll = t.startDB("foo");
-    admindb = coll.getDB().getSiblingDB("admin")
+    admindb = coll.getDB().getSiblingDB("admin");

     // Create the relevant users and roles.
     admindb.createUser({user: "root", pwd: "pass", roles: ["root"]});
@@ -26,11 +26,11 @@ var dumpRestoreAuth2 = function(backup_role, restore_role) {
     coll.insert({word: "tomato"});
     assert.eq(1, coll.count());

-    assert.eq(4, admindb.system.users.count(), "setup users")
+    assert.eq(4, admindb.system.users.count(), "setup users");
     assert.eq(2, admindb.system.users.getIndexes().length,
               "setup2: " + tojson( admindb.system.users.getIndexes() ) );
-    assert.eq(1, admindb.system.roles.count(), "setup3")
-    assert.eq(2, admindb.system.roles.getIndexes().length, "setup4")
+    assert.eq(1, admindb.system.roles.count(), "setup3");
+    assert.eq(2, admindb.system.roles.getIndexes().length, "setup4");
     assert.eq(1, admindb.system.version.count());
     var versionDoc = admindb.system.version.findOne();
@@ -109,7 +109,7 @@ var dumpRestoreAuth2 = function(backup_role, restore_role) {

     t.stop();
-}
+};

 // Tests that the default auth roles of backup and restore work properly.
 dumpRestoreAuth2("backup", "restore");
diff --git a/jstests/tool/dumprestore_auth3.js b/jstests/tool/dumprestore_auth3.js
index 051c3c03699..62eed2e7d84 100644
--- a/jstests/tool/dumprestore_auth3.js
+++ b/jstests/tool/dumprestore_auth3.js
@@ -122,9 +122,9 @@ var dumpRestoreAuth3 = function(backup_role, restore_role) {
               "version doc was changed by restore");

     jsTestLog("Make modifications to user data that should be overridden by the restore");
-    db.dropUser('user')
+    db.dropUser('user');
     db.createUser({user: 'user2', pwd: 'password2', roles: jsTest.basicUserRoles});
-    db.dropRole('role')
+    db.dropRole('role');
     db.createRole({role: 'role2', roles: [], privileges:[]});

     jsTestLog("Restore foo database (and user data) with --drop so it overrides the changes made");
@@ -213,7 +213,7 @@ var dumpRestoreAuth3 = function(backup_role, restore_role) {
               "version doc was changed by restore");

     MongoRunner.stopMongod(mongod);
-}
+};

 // Tests that the default auth roles of backup and restore work properly.
 dumpRestoreAuth3("backup", "restore");
diff --git a/jstests/tool/dumpsecondary.js b/jstests/tool/dumpsecondary.js
index 7a641542498..31feacba674 100644
--- a/jstests/tool/dumpsecondary.js
+++ b/jstests/tool/dumpsecondary.js
@@ -4,7 +4,7 @@ var nodes = replTest.startSet();
 replTest.initiate();

 var master = replTest.getPrimary();
-db = master.getDB("foo")
+db = master.getDB("foo");
 db.foo.save({a: 1000});
 replTest.awaitReplication();
 replTest.awaitSecondaryNodes();
@@ -21,18 +21,18 @@ if (jsTest.options().keyFile) {
     args = args.concat(authargs);
 }
 runMongoProgram.apply(null, args);
-db.foo.drop()
+db.foo.drop();

 assert.eq( 0 , db.foo.count() , "after drop" );
 args = ['mongorestore', '-h', master.host, MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external/'];
 if (jsTest.options().keyFile) {
     args = args.concat(authargs);
 }
-runMongoProgram.apply(null, args)
+runMongoProgram.apply(null, args);
 assert.soon( "db.foo.findOne()" , "no data after sleep" );
 assert.eq( 1 , db.foo.count() , "after restore" );
 assert.eq( 1000 , db.foo.findOne().a , "after restore 2" );

-resetDbpath(MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external')
+resetDbpath(MongoRunner.dataDir + '/jstests_tool_dumpsecondary_external');

-replTest.stopSet(15)
+replTest.stopSet(15);
diff --git a/jstests/tool/exportimport1.js b/jstests/tool/exportimport1.js
index 451078e1b95..61379379fa4 100644
--- a/jstests/tool/exportimport1.js
+++ b/jstests/tool/exportimport1.js
@@ -11,7 +11,7 @@ assert.eq( 1 , c.count() , "setup2" );
 t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );

 c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );

 t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
 assert.soon( "c.findOne()" , "no data after sleep" );
@@ -27,7 +27,7 @@ for (var i=0; i<arr.length; i++) {
 t.runTool( "export" , "--jsonArray" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );

 c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );

 t.runTool( "import" , "--jsonArray" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
 assert.soon( "c.findOne()" , "no data after sleep" );
@@ -42,7 +42,7 @@ c.save({a : arr});
 assert.eq( 1 , c.count() , "setup2" );
 t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
 c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );

 t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
 assert.soon( "c.findOne()" , "no data after sleep" );
diff --git a/jstests/tool/exportimport3.js b/jstests/tool/exportimport3.js
index f18ba6cbd4b..686ff467a6f 100644
--- a/jstests/tool/exportimport3.js
+++ b/jstests/tool/exportimport3.js
@@ -4,11 +4,11 @@ t = new ToolTest( "exportimport3" );
 c = t.startDB( "foo" );
 assert.eq( 0 , c.count() , "setup1" );
-c.save({a:1})
-c.save({a:2})
-c.save({a:3})
-c.save({a:4})
-c.save({a:5})
+c.save({a:1});
+c.save({a:2});
+c.save({a:3});
+c.save({a:4});
+c.save({a:5});

 assert.eq( 5 , c.count() , "setup2" );

@@ -16,7 +16,7 @@ assert.eq( 5 , c.count() , "setup2" );
 t.runTool( "export" , "--jsonArray" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );

 c.drop();
-assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );

 t.runTool( "import" , "--jsonArray" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
diff --git a/jstests/tool/exportimport6.js b/jstests/tool/exportimport6.js
index a01d49a9c8b..0924638e628 100644
--- a/jstests/tool/exportimport6.js
+++ b/jstests/tool/exportimport6.js
@@ -5,12 +5,12 @@ t = new ToolTest("exportimport6");
 c = t.startDB("foo");
 assert.eq(0, c.count(), "setup1");
-c.save({a:1, b:1})
-c.save({a:1, b:2})
-c.save({a:2, b:3})
-c.save({a:2, b:3})
-c.save({a:3, b:4})
-c.save({a:3, b:5})
+c.save({a:1, b:1});
+c.save({a:1, b:2});
+c.save({a:2, b:3});
+c.save({a:2, b:3});
+c.save({a:3, b:4});
+c.save({a:3, b:5});

 assert.eq(6, c.count(), "setup2");
diff --git a/jstests/tool/exportimport_bigarray.js b/jstests/tool/exportimport_bigarray.js
index dbdd2a9a6d7..75d508b1ff4 100644
--- a/jstests/tool/exportimport_bigarray.js
+++ b/jstests/tool/exportimport_bigarray.js
@@ -15,8 +15,8 @@ var doc = {_id: new ObjectId(), x:bigString};
 var docSize = Object.bsonsize(doc);
 var numDocs = Math.floor(20*1024*1024 / docSize);

-print('Size of one document: ' + docSize)
-print('Number of documents to exceed maximum BSON size: ' + numDocs)
+print('Size of one document: ' + docSize);
+print('Number of documents to exceed maximum BSON size: ' + numDocs);

 print('About to insert ' + numDocs + ' documents into ' +
       exportimport_db.getName() + '.' + src.getName());
diff --git a/jstests/tool/files1.js b/jstests/tool/files1.js
index acfcc16dcc3..bd8ec971ad6 100644
--- a/jstests/tool/files1.js
+++ b/jstests/tool/files1.js
@@ -1,17 +1,17 @@
 // files1.js

-t = new ToolTest( "files1" )
+t = new ToolTest( "files1" );

 db = t.startDB();

-filename = 'mongod'
+filename = 'mongod';
 if ( _isWindows() )
-    filename += '.exe'
+    filename += '.exe';

 t.runTool( "files" , "-d" , t.baseName , "put" , filename );
 md5 = md5sumFile(filename);

-file_obj = db.fs.files.findOne()
+file_obj = db.fs.files.findOne();
 assert( file_obj , "A 0" );
 md5_stored = file_obj.md5;
 md5_computed = db.runCommand({filemd5: file_obj._id}).md5;
@@ -24,4 +24,4 @@ t.runTool( "files" , "-d" , t.baseName , "get" , filename , '-l' , t.extFile );
 md5 = md5sumFile(t.extFile);
 assert.eq( md5 , md5_stored , "B" );

-t.stop()
+t.stop();
diff --git a/jstests/tool/gridfs.js b/jstests/tool/gridfs.js
index cea92c812f7..fba1654d1eb 100644
--- a/jstests/tool/gridfs.js
+++ b/jstests/tool/gridfs.js
@@ -4,62 +4,62 @@
 var test = new ShardingTest({shards: 3,
                              mongos: 1,
                              config: 1,
                              verbose: 2,
-                             other: {chunkSize:1}})
+                             other: {chunkSize:1}});

-var mongos = test.s0
+var mongos = test.s0;

-var filename = "mongod" // A large file we are guaranteed to have
+var filename = "mongod"; // A large file we are guaranteed to have
 if (_isWindows())
-    filename += ".exe"
+    filename += ".exe";

 function testGridFS(name) {
-    var d = mongos.getDB(name)
+    var d = mongos.getDB(name);

     // this function should be called on a clean db
-    assert.eq(d.name.files.count(), 0)
-    assert.eq(d.fs.chunks.count(), 0)
+    assert.eq(d.name.files.count(), 0);
+    assert.eq(d.fs.chunks.count(), 0);

-    var rawmd5 = md5sumFile(filename)
+    var rawmd5 = md5sumFile(filename);

     // upload file (currently calls filemd5 internally)
     runMongoProgram.apply(null, ["mongofiles", "--port", mongos.port, "put", filename, '--db', name]);

-    assert.eq(d.fs.files.count(), 1)
-    var fileObj = d.fs.files.findOne()
-    print("fileObj: " + tojson(fileObj))
-    assert.eq(rawmd5, fileObj.md5) //check that mongofiles inserted the correct md5
+    assert.eq(d.fs.files.count(), 1);
+    var fileObj = d.fs.files.findOne();
+    print("fileObj: " + tojson(fileObj));
+    assert.eq(rawmd5, fileObj.md5); //check that mongofiles inserted the correct md5

     // Call filemd5 ourself and check results.
-    var res = d.runCommand({filemd5: fileObj._id})
-    print("filemd5 output: " + tojson(res))
-    assert(res.ok)
-    assert.eq(rawmd5, res.md5)
+    var res = d.runCommand({filemd5: fileObj._id});
+    print("filemd5 output: " + tojson(res));
+    assert(res.ok);
+    assert.eq(rawmd5, res.md5);

-    var numChunks = d.fs.chunks.find({files_id: fileObj._id}).itcount()
+    var numChunks = d.fs.chunks.find({files_id: fileObj._id}).itcount();
     //var numChunks = d.fs.chunks.count({files_id: fileObj._id}) // this is broken for now
-    assert.eq(numChunks, res.numChunks)
+    assert.eq(numChunks, res.numChunks);
 }

-print('\n\n\t**** unsharded ****\n\n')
-name = 'unsharded'
-testGridFS(name)
+print('\n\n\t**** unsharded ****\n\n');
+name = 'unsharded';
+testGridFS(name);

-print('\n\n\t**** sharded db, unsharded collection ****\n\n')
-name = 'sharded_db'
-test.adminCommand({enablesharding: name})
-testGridFS(name)
+print('\n\n\t**** sharded db, unsharded collection ****\n\n');
+name = 'sharded_db';
+test.adminCommand({enablesharding: name});
+testGridFS(name);

-print('\n\n\t**** sharded collection on files_id ****\n\n')
-name = 'sharded_files_id'
-test.adminCommand({enablesharding: name})
-test.adminCommand({shardcollection: name+'.fs.chunks', key: {files_id:1}})
-testGridFS(name)
+print('\n\n\t**** sharded collection on files_id ****\n\n');
+name = 'sharded_files_id';
+test.adminCommand({enablesharding: name});
+test.adminCommand({shardcollection: name+'.fs.chunks', key: {files_id:1}});
+testGridFS(name);

-print('\n\n\t**** sharded collection on files_id,n ****\n\n')
-name = 'sharded_files_id_n'
-test.adminCommand({enablesharding: name})
-test.adminCommand({shardcollection: name+'.fs.chunks', key: {files_id:1, n:1}})
-testGridFS(name)
+print('\n\n\t**** sharded collection on files_id,n ****\n\n');
+name = 'sharded_files_id_n';
+test.adminCommand({enablesharding: name});
+test.adminCommand({shardcollection: name+'.fs.chunks', key: {files_id:1, n:1}});
+testGridFS(name);

-test.stop()
+test.stop();
diff --git a/jstests/tool/oplog1.js b/jstests/tool/oplog1.js
index 765257c8d62..bbee73d7f80 100644
--- a/jstests/tool/oplog1.js
+++ b/jstests/tool/oplog1.js
@@ -7,7 +7,7 @@ t = new ToolTest( "oplog1" );

 db = t.startDB();

-output = db.output
+output = db.output;

 doc = { _id : 5 , x : 17 };

@@ -15,7 +15,7 @@ assert.commandWorked(db.createCollection(output.getName()));

 db.oplog.insert( { ts : new Timestamp() , "op" : "i" , "ns" : output.getFullName() , "o" : doc } );

-assert.eq( 0 , output.count() , "before" )
+assert.eq( 0 , output.count() , "before" );

 t.runTool( "oplog" , "--oplogns" , db.getName() + ".oplog" , "--from" , "127.0.0.1:" + t.port , "-vv" );
diff --git a/jstests/tool/restorewithauth.js b/jstests/tool/restorewithauth.js
index aec3d6859bc..6db4b0bf359 100644
--- a/jstests/tool/restorewithauth.js
+++ b/jstests/tool/restorewithauth.js
@@ -52,11 +52,11 @@ MongoRunner.stopMongod(conn);
 conn = MongoRunner.runMongod({auth: "", nojournal: "", bind_ip: "127.0.0.1"});

 // admin user
-var admin = conn.getDB( "admin" )
+var admin = conn.getDB( "admin" );
 admin.createUser({user: "admin" , pwd: "admin", roles: jsTest.adminUserRoles});
 admin.auth( "admin" , "admin" );

-var foo = conn.getDB( "foo" )
+var foo = conn.getDB( "foo" );

 // make sure no collection with the same name exists
 collNames = foo.getCollectionNames();
diff --git a/jstests/tool/tool_replset.js b/jstests/tool/tool_replset.js
index af5c7981482..90560c9ee2c 100644
--- a/jstests/tool/tool_replset.js
+++ b/jstests/tool/tool_replset.js
@@ -45,7 +45,7 @@
     print("restore the db");
     runMongoProgram("mongorestore", "--host", replSetConnString, "--dir", data);

-    print("db successfully restored, checking count")
+    print("db successfully restored, checking count");

     var x = master.getDB("foo").getCollection("bar").count();
     assert.eq(x, 100, "mongorestore should have successfully restored the collection");
@@ -68,7 +68,7 @@
     var x = master.getDB("foo").getCollection("bar").count();
     assert.eq(x, 100, "mongoimport should have successfully imported the collection");

     var doc = {_id: 5, x: 17};
-    var oplogEntry = {ts: new Timestamp(), "op": "i", "ns": "foo.bar", "o": doc, "v": NumberInt(2)}
+    var oplogEntry = {ts: new Timestamp(), "op": "i", "ns": "foo.bar", "o": doc, "v": NumberInt(2)};
     assert.writeOK(master.getDB("local").oplog.rs.insert(oplogEntry));

     assert.eq(100, master.getDB("foo").getCollection("bar").count(), "count before running " +
@@ -77,14 +77,14 @@
     runMongoProgram("mongooplog" , "--from", "127.0.0.1:" + replTest.ports[0],
                     "--host", replSetConnString);

-    print("finished running mongooplog to replay the oplog")
+    print("finished running mongooplog to replay the oplog");

     assert.eq(101, master.getDB("foo").getCollection("bar").count(), "count after running " +
-              "mongooplog was not 101 as expected")
+              "mongooplog was not 101 as expected");

-    print("all tests successful, stopping replica set")
+    print("all tests successful, stopping replica set");

     replTest.stopSet();

-    print("replica set stopped, test complete")
+    print("replica set stopped, test complete");
 }());
diff --git a/jstests/tool/tsv1.js b/jstests/tool/tsv1.js
index 9317ce89c02..8395a77c711 100644
--- a/jstests/tool/tsv1.js
+++ b/jstests/tool/tsv1.js
@@ -1,6 +1,6 @@
 // tsv1.js

-t = new ToolTest( "tsv1" )
+t = new ToolTest( "tsv1" );

 c = t.startDB( "foo" );

@@ -10,23 +10,23 @@ t.runTool( "import" , "--file" , "jstests/tool/data/a.tsv" , "-d" , t.baseName ,
 assert.soon( "2 == c.count()" , "restore 2" );

 a = c.find().sort( { a : 1 } ).toArray();
-delete a[0]._id
-delete a[1]._id
+delete a[0]._id;
+delete a[1]._id;

 assert.docEq( { a : "a" , b : "b" , c : "c" , d: "d", e: "e"} , a[1] , "tsv parse 1" );
-assert.docEq( base , a[0] , "tsv parse 0" )
+assert.docEq( base , a[0] , "tsv parse 0" );

-c.drop()
-assert.eq( 0 , c.count() , "after drop 2" )
+c.drop();
+assert.eq( 0 , c.count() , "after drop 2" );

-t.runTool( "import" , "--file" , "jstests/tool/data/a.tsv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "tsv" , "--headerline" )
+t.runTool( "import" , "--file" , "jstests/tool/data/a.tsv" , "-d" , t.baseName , "-c" , "foo" , "--type" , "tsv" , "--headerline" );
 assert.soon( "c.findOne()" , "no data after sleep" );
 assert.eq( 1 , c.count() , "after restore 2" );

-x = c.findOne()
+x = c.findOne();
 delete x._id;
-assert.docEq( base , x , "tsv parse 2" )
+assert.docEq( base , x , "tsv parse 2" );

-t.stop()
+t.stop();
```
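
As background for why a semicolons-only patch is worth 152 changed lines: an unterminated statement leaves the parser free to join consecutive lines through automatic semicolon insertion, which can silently change behavior. A self-contained illustration of the hazard (not taken from the patch above):

```js
// ASI hazard sketch. The hazardous form is kept in a comment because it
// throws at runtime:
//
//     var total = 1 + 2          // ASI does NOT end the statement here,
//     [3, 4].forEach(...)        // so this parses as (1 + 2)[3, 4].forEach(...)
//                                // and fails with a TypeError.
//
// With the terminators eslint --fix inserts, the statements stay separate:
var total = 1 + 2;
[3, 4].forEach(function (n) {
    total += n;
});
print(total); // 10 -- print() as used by the shell tests above
```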