Diffstat (limited to 'jstests')
20 files changed, 242 insertions, 242 deletions
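Note: the hunks below appear to be a purely mechanical JavaScript style cleanup of the noPassthrough tests — statements and function-expression assignments gain a terminating semicolon, while stray semicolons after function declarations and block statements are dropped; test behavior looks unchanged. A minimal sketch of the pattern, using hypothetical names rather than code taken from the diff:

    function MockConn() {}                      // function declaration: trailing ';' removed
    var save = function(doc) { return doc; };   // function expression assigned to a var: ';' added
    db.coll.find({a: 1}).toArray();             // ordinary statement: ';' added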
diff --git a/jstests/noPassthrough/command_line_parsing.js b/jstests/noPassthrough/command_line_parsing.js index 81acf2f8359..91aa2e5c077 100644 --- a/jstests/noPassthrough/command_line_parsing.js +++ b/jstests/noPassthrough/command_line_parsing.js @@ -5,7 +5,7 @@ var baseName = "jstests_slowNightly_command_line_parsing"; // test notablescan var m = MongoRunner.runMongod({notablescan: ""}); m.getDB( baseName ).getCollection( baseName ).save( {a:1} ); -assert.throws( function() { m.getDB( baseName ).getCollection( baseName ).find( {a:1} ).toArray() } ); +assert.throws( function() { m.getDB( baseName ).getCollection( baseName ).find( {a:1} ).toArray(); } ); // test config file var m2 = MongoRunner.runMongod({config: "jstests/libs/testconfig"}); diff --git a/jstests/noPassthrough/count_helper_read_preference.js b/jstests/noPassthrough/count_helper_read_preference.js index b888107ccd6..e2701139126 100644 --- a/jstests/noPassthrough/count_helper_read_preference.js +++ b/jstests/noPassthrough/count_helper_read_preference.js @@ -5,7 +5,7 @@ var commandsRan = []; // Create a new DB object backed by a mock connection. - function MockMongo() {}; + function MockMongo() {} MockMongo.prototype = Mongo.prototype; MockMongo.prototype.runCommand = function(db, cmd, opts) { commandsRan.push({db: db, cmd: cmd, opts:opts}); diff --git a/jstests/noPassthrough/exit_logging.js b/jstests/noPassthrough/exit_logging.js index 2c69cdc211b..33102667dcd 100644 --- a/jstests/noPassthrough/exit_logging.js +++ b/jstests/noPassthrough/exit_logging.js @@ -13,7 +13,7 @@ data: { how: crashHow } })); admin.shutdownServer(); - } + }; } function makeRegExMatchFn(pattern) { @@ -24,7 +24,7 @@ print("--- END LOG CONTENTS ---"); doassert("Log contents did not match " + pattern); } - } + }; } function testShutdownLogging(launcher, crashFn, matchFn, expectedExitCode) { @@ -46,7 +46,7 @@ const SIGABRT = 6; testShutdownLogging( launcher, - function (conn) { conn.getDB('admin').shutdownServer() }, + function (conn) { conn.getDB('admin').shutdownServer(); }, makeRegExMatchFn(/shutdown command received[\s\S]*dbexit:/), MongoRunner.EXIT_CLEAN); @@ -96,7 +96,7 @@ }); var mongosLauncher = { start: function (opts) { - var actualOpts = { configdb: st._configDB } + var actualOpts = { configdb: st._configDB }; Object.extend(actualOpts, opts); return MongoRunner.runMongos(actualOpts); }, diff --git a/jstests/noPassthrough/geo_full.js b/jstests/noPassthrough/geo_full.js index 60d18f96c68..2fe9d8d7438 100644 --- a/jstests/noPassthrough/geo_full.js +++ b/jstests/noPassthrough/geo_full.js @@ -17,8 +17,8 @@ // test fails, and hard-wiring that as the test number. 
// -load( "jstests/libs/slow_weekly_util.js" ) -testServer = new SlowWeeklyMongod( "geo_full" ) +load( "jstests/libs/slow_weekly_util.js" ); +testServer = new SlowWeeklyMongod( "geo_full" ); db = testServer.getDB( "test" ); var randEnvironment = function(){ @@ -32,78 +32,78 @@ var randEnvironment = function(){ bucketSize : 360 / ( 4 * 1024 * 1024 * 1024 ) }; } - var scales = [ 0.0001, 0.001, 0.01, 0.1, 1, 10, 100, 1000, 10000, 100000 ] - var scale = scales[ Math.floor( Random.rand() * scales.length ) ] - var offset = Random.rand() * scale + var scales = [ 0.0001, 0.001, 0.01, 0.1, 1, 10, 100, 1000, 10000, 100000 ]; + var scale = scales[ Math.floor( Random.rand() * scales.length ) ]; + var offset = Random.rand() * scale; - var max = Random.rand() * scale + offset - var min = - Random.rand() * scale + offset - var bits = Math.floor( Random.rand() * 32 ) + 1 - var bits = Math.floor( Random.rand() * 32 ) + 1 - var range = max - min - var bucketSize = range / ( 4 * 1024 * 1024 * 1024 ) + var max = Random.rand() * scale + offset; + var min = - Random.rand() * scale + offset; + var bits = Math.floor( Random.rand() * 32 ) + 1; + var bits = Math.floor( Random.rand() * 32 ) + 1; + var range = max - min; + var bucketSize = range / ( 4 * 1024 * 1024 * 1024 ); return { max : max, min : min, bits : bits, earth : false, - bucketSize : bucketSize } + bucketSize : bucketSize }; }; var randPoint = function( env, query ) { if( query && Random.rand() > 0.5 ) - return query.exact + return query.exact; if( env.earth ) - return [ Random.rand() * 360 - 180, Random.rand() * 180 - 90 ] + return [ Random.rand() * 360 - 180, Random.rand() * 180 - 90 ]; - var range = env.max - env.min + var range = env.max - env.min; return [ Random.rand() * range + env.min, Random.rand() * range + env.min ]; -} +}; var randLocType = function( loc, wrapIn ){ - return randLocTypes( [ loc ], wrapIn )[0] -} + return randLocTypes( [ loc ], wrapIn )[0]; +}; var randLocTypes = function( locs, wrapIn ) { - var rLocs = [] + var rLocs = []; for( var i = 0; i < locs.length; i++ ){ - rLocs.push( locs[i] ) + rLocs.push( locs[i] ); } if( wrapIn ){ - var wrappedLocs = [] + var wrappedLocs = []; for( var i = 0; i < rLocs.length; i++ ){ - var wrapper = {} - wrapper[wrapIn] = rLocs[i] - wrappedLocs.push( wrapper ) + var wrapper = {}; + wrapper[wrapIn] = rLocs[i]; + wrappedLocs.push( wrapper ); } - return wrappedLocs + return wrappedLocs; } - return rLocs + return rLocs; }; var randDataType = function() { - var scales = [ 1, 10, 100, 1000, 10000 ] - var docScale = scales[ Math.floor( Random.rand() * scales.length ) ] - var locScale = scales[ Math.floor( Random.rand() * scales.length ) ] + var scales = [ 1, 10, 100, 1000, 10000 ]; + var docScale = scales[ Math.floor( Random.rand() * scales.length ) ]; + var locScale = scales[ Math.floor( Random.rand() * scales.length ) ]; - var numDocs = 40000 - var maxLocs = 40000 + var numDocs = 40000; + var maxLocs = 40000; // Make sure we don't blow past our test resources while( numDocs * maxLocs > 40000 ){ - numDocs = Math.floor( Random.rand() * docScale ) + 1 - maxLocs = Math.floor( Random.rand() * locScale ) + 1 + numDocs = Math.floor( Random.rand() * docScale ) + 1; + maxLocs = Math.floor( Random.rand() * locScale ) + 1; } return { numDocs : numDocs, - maxLocs : maxLocs } + maxLocs : maxLocs }; }; function deg2rad(arg) { return arg * Math.PI / 180.0; } @@ -152,40 +152,40 @@ function pointIsOK(startPoint, radius, env) { } var randQuery = function( env ) { - var center = randPoint( env ) + var center = randPoint( 
env ); - var sphereRadius = -1 - var sphereCenter = null + var sphereRadius = -1; + var sphereCenter = null; if( env.earth ){ // Get a start point that doesn't require wrapping // TODO: Are we a bit too aggressive with wrapping issues? - var i + var i; for( i = 0; i < 5; i++ ){ - sphereRadius = Random.rand() * 45 * Math.PI / 180 - sphereCenter = randPoint( env ) + sphereRadius = Random.rand() * 45 * Math.PI / 180; + sphereCenter = randPoint( env ); if (pointIsOK(sphereCenter, sphereRadius, env)) { break; } } if( i == 5 ) sphereRadius = -1; } - var box = [ randPoint( env ), randPoint( env ) ] + var box = [ randPoint( env ), randPoint( env ) ]; var boxPoly = [[ box[0][0], box[0][1] ], [ box[0][0], box[1][1] ], [ box[1][0], box[1][1] ], - [ box[1][0], box[0][1] ] ] + [ box[1][0], box[0][1] ] ]; if( box[0][0] > box[1][0] ){ - var swap = box[0][0] - box[0][0] = box[1][0] - box[1][0] = swap + var swap = box[0][0]; + box[0][0] = box[1][0]; + box[1][0] = swap; } if( box[0][1] > box[1][1] ){ - var swap = box[0][1] - box[0][1] = box[1][1] - box[1][1] = swap + var swap = box[0][1]; + box[0][1] = box[1][1]; + box[1][1] = swap; } return { center : center, @@ -194,19 +194,19 @@ var randQuery = function( env ) { sphereCenter : sphereCenter, sphereRadius : sphereRadius, box : box, - boxPoly : boxPoly } + boxPoly : boxPoly }; }; var resultTypes = { "exact" : function( loc ){ - return query.exact[0] == loc[0] && query.exact[1] == loc[1] + return query.exact[0] == loc[0] && query.exact[1] == loc[1]; }, "center" : function( loc ){ - return Geo.distance( query.center, loc ) <= query.radius + return Geo.distance( query.center, loc ) <= query.radius; }, "box" : function( loc ){ return loc[0] >= query.box[0][0] && loc[0] <= query.box[1][0] && - loc[1] >= query.box[0][1] && loc[1] <= query.box[1][1] + loc[1] >= query.box[0][1] && loc[1] <= query.box[1][1]; }, "sphere" : function( loc ){ @@ -216,7 +216,7 @@ var resultTypes = { "poly" : function( loc ){ return loc[0] >= query.box[0][0] && loc[0] <= query.box[1][0] && loc[1] >= query.box[0][1] && loc[1] <= query.box[1][1]; -}} +}}; var queryResults = function( locs, query, results ){ @@ -227,55 +227,55 @@ var queryResults = function( locs, query, results ){ docsOut : 0, locsIn : 0, locsOut : 0 - } + }; } } - var indResults = {} + var indResults = {}; for( var type in resultTypes ){ indResults[type] = { docIn : false, locsIn : 0, locsOut : 0 - } + }; } for( var type in resultTypes ){ - var docIn = false + var docIn = false; for( var i = 0; i < locs.length; i++ ){ if( resultTypes[type]( locs[i] ) ){ - results[type].locsIn++ - indResults[type].locsIn++ - indResults[type].docIn = true + results[type].locsIn++; + indResults[type].locsIn++; + indResults[type].docIn = true; } else{ - results[type].locsOut++ - indResults[type].locsOut++ + results[type].locsOut++; + indResults[type].locsOut++; } } - if( indResults[type].docIn ) results[type].docsIn++ - else results[type].docsOut++ + if( indResults[type].docIn ) results[type].docsIn++; + else results[type].docsOut++; } - return indResults -} + return indResults; +}; var randQueryAdditions = function( doc, indResults ){ for( var type in resultTypes ){ - var choice = Random.rand() + var choice = Random.rand(); if( Random.rand() < 0.25 ) - doc[type] = ( indResults[type].docIn ? { docIn : "yes" } : { docIn : "no" } ) + doc[type] = ( indResults[type].docIn ? { docIn : "yes" } : { docIn : "no" } ); else if( Random.rand() < 0.5 ) - doc[type] = ( indResults[type].docIn ? 
{ docIn : [ "yes" ] } : { docIn : [ "no" ] } ) + doc[type] = ( indResults[type].docIn ? { docIn : [ "yes" ] } : { docIn : [ "no" ] } ); else if( Random.rand() < 0.75 ) - doc[type] = ( indResults[type].docIn ? [ { docIn : "yes" } ] : [ { docIn : "no" } ] ) + doc[type] = ( indResults[type].docIn ? [ { docIn : "yes" } ] : [ { docIn : "no" } ] ); else doc[type] = ( indResults[type].docIn ? [{ docIn: [ "yes" ] }] : [{ docIn: [ "no" ] }]); } -} +}; var randIndexAdditions = function( indexDoc ){ @@ -283,9 +283,9 @@ var randIndexAdditions = function( indexDoc ){ if( Random.rand() < 0.5 ) continue; - var choice = Random.rand() + var choice = Random.rand(); if( Random.rand() < 0.5 ) - indexDoc[type] = 1 + indexDoc[type] = 1; else indexDoc[type + ".docIn"] = 1; } @@ -293,68 +293,68 @@ var randIndexAdditions = function( indexDoc ){ var randYesQuery = function(){ - var choice = Math.floor( Random.rand() * 7 ) + var choice = Math.floor( Random.rand() * 7 ); if( choice == 0 ) - return { $ne : "no" } + return { $ne : "no" }; else if( choice == 1 ) - return "yes" + return "yes"; else if( choice == 2 ) - return /^yes/ + return /^yes/; else if( choice == 3 ) - return { $in : [ "good", "yes", "ok" ] } + return { $in : [ "good", "yes", "ok" ] }; else if( choice == 4 ) - return { $exists : true } + return { $exists : true }; else if( choice == 5 ) - return { $nin : [ "bad", "no", "not ok" ] } + return { $nin : [ "bad", "no", "not ok" ] }; else if( choice == 6 ) - return { $not : /^no/ } -} + return { $not : /^no/ }; +}; var locArray = function( loc ){ - if( loc.x ) return [ loc.x, loc.y ] - if( ! loc.length ) return [ loc[0], loc[1] ] - return loc -} + if( loc.x ) return [ loc.x, loc.y ]; + if( ! loc.length ) return [ loc[0], loc[1] ]; + return loc; +}; var locsArray = function( locs ){ if( locs.loc ){ - arr = [] - for( var i = 0; i < locs.loc.length; i++ ) arr.push( locArray( locs.loc[i] ) ) - return arr + arr = []; + for( var i = 0; i < locs.loc.length; i++ ) arr.push( locArray( locs.loc[i] ) ); + return arr; } else{ - arr = [] - for( var i = 0; i < locs.length; i++ ) arr.push( locArray( locs[i].loc ) ) - return arr + arr = []; + for( var i = 0; i < locs.length; i++ ) arr.push( locArray( locs[i].loc ) ); + return arr; } -} +}; var minBoxSize = function( env, box ){ - return env.bucketSize * Math.pow( 2, minBucketScale( env, box ) ) -} + return env.bucketSize * Math.pow( 2, minBucketScale( env, box ) ); +}; var minBucketScale = function( env, box ){ if( box.length && box[0].length ) - box = [ box[0][0] - box[1][0], box[0][1] - box[1][1] ] + box = [ box[0][0] - box[1][0], box[0][1] - box[1][1] ]; if( box.length ) - box = Math.max( box[0], box[1] ) + box = Math.max( box[0], box[1] ); - print( box ) - print( env.bucketSize ) + print( box ); + print( env.bucketSize ); - return Math.ceil( Math.log( box / env.bucketSize ) / Math.log( 2 ) ) + return Math.ceil( Math.log( box / env.bucketSize ) / Math.log( 2 ) ); -} +}; // TODO: Add spherical $uniqueDocs tests -var numTests = 100 +var numTests = 100; // Our seed will change every time this is run, but // each individual test will be reproducible given // that seed and test number -var seed = new Date().getTime() +var seed = new Date().getTime(); //seed = 175 + 288 + 12 for ( var test = 0; test < numTests; test++ ) { @@ -362,43 +362,43 @@ for ( var test = 0; test < numTests; test++ ) { Random.srand( seed + test ); //Random.srand( 42240 ) //Random.srand( 7344 ) - var t = db.testAllGeo - t.drop() + var t = db.testAllGeo; + t.drop(); - print( "Generating test environment 
#" + test ) - var env = randEnvironment() + print( "Generating test environment #" + test ); + var env = randEnvironment(); //env.bits = 11 - var query = randQuery( env ) - var data = randDataType() + var query = randQuery( env ); + var data = randDataType(); //data.numDocs = 5; data.maxLocs = 1; - var paddingSize = Math.floor( Random.rand() * 10 + 1 ) - var results = {} - var totalPoints = 0 - print( "Calculating target results for " + data.numDocs + " docs with max " + data.maxLocs + " locs " ) + var paddingSize = Math.floor( Random.rand() * 10 + 1 ); + var results = {}; + var totalPoints = 0; + print( "Calculating target results for " + data.numDocs + " docs with max " + data.maxLocs + " locs " ); var bulk = t.initializeUnorderedBulkOp(); for ( var i = 0; i < data.numDocs; i++ ) { - var numLocs = Math.floor( Random.rand() * data.maxLocs + 1 ) - totalPoints += numLocs + var numLocs = Math.floor( Random.rand() * data.maxLocs + 1 ); + totalPoints += numLocs; - var multiPoint = [] + var multiPoint = []; for ( var p = 0; p < numLocs; p++ ) { - var point = randPoint( env, query ) - multiPoint.push( point ) + var point = randPoint( env, query ); + multiPoint.push( point ); } - var indResults = queryResults( multiPoint, query, results ) + var indResults = queryResults( multiPoint, query, results ); - var doc + var doc; // Nest the keys differently if( Random.rand() < 0.5 ) - doc = { locs : { loc : randLocTypes( multiPoint ) } } + doc = { locs : { loc : randLocTypes( multiPoint ) } }; else - doc = { locs : randLocTypes( multiPoint, "loc" ) } + doc = { locs : randLocTypes( multiPoint, "loc" ) }; - randQueryAdditions( doc, indResults ) + randQueryAdditions( doc, indResults ); - doc._id = i + doc._id = i; bulk.insert( doc ); } assert.writeOK(bulk.execute()); @@ -408,10 +408,10 @@ for ( var test = 0; test < numTests; test++ ) { t.ensureIndex( indexDoc, env ); assert.isnull( db.getLastError() ); - var padding = "x" - for( var i = 0; i < paddingSize; i++ ) padding = padding + padding + var padding = "x"; + for( var i = 0; i < paddingSize; i++ ) padding = padding + padding; - print( padding ) + print( padding ); printjson( { seed : seed, test: test, @@ -419,18 +419,18 @@ for ( var test = 0; test < numTests; test++ ) { query : query, data : data, results : results, - paddingSize : paddingSize } ) + paddingSize : paddingSize } ); // exact - print( "Exact query..." ) - assert.eq( results.exact.docsIn, t.find( { "locs.loc" : randLocType( query.exact ), "exact.docIn" : randYesQuery() } ).count() ) + print( "Exact query..." ); + assert.eq( results.exact.docsIn, t.find( { "locs.loc" : randLocType( query.exact ), "exact.docIn" : randYesQuery() } ).count() ); // $center - print( "Center query..." ) - print( "Min box : " + minBoxSize( env, query.radius ) ) - assert.eq( results.center.docsIn, t.find( { "locs.loc" : { $within : { $center : [ query.center, query.radius ], $uniqueDocs : 1 } }, "center.docIn" : randYesQuery() } ).count() ) + print( "Center query..." ); + print( "Min box : " + minBoxSize( env, query.radius ) ); + assert.eq( results.center.docsIn, t.find( { "locs.loc" : { $within : { $center : [ query.center, query.radius ], $uniqueDocs : 1 } }, "center.docIn" : randYesQuery() } ).count() ); - print( "Center query update..." ) + print( "Center query update..." 
); var res = t.update({ "locs.loc": { $within: { $center: [ query.center, query.radius ], $uniqueDocs: true }}, "center.docIn": randYesQuery() }, @@ -439,11 +439,11 @@ for ( var test = 0; test < numTests; test++ ) { if( query.sphereRadius >= 0 ){ - print( "Center sphere query...") + print( "Center sphere query..."); // $centerSphere - assert.eq( results.sphere.docsIn, t.find( { "locs.loc" : { $within : { $centerSphere : [ query.sphereCenter, query.sphereRadius ] } }, "sphere.docIn" : randYesQuery() } ).count() ) + assert.eq( results.sphere.docsIn, t.find( { "locs.loc" : { $within : { $centerSphere : [ query.sphereCenter, query.sphereRadius ] } }, "sphere.docIn" : randYesQuery() } ).count() ); - print( "Center sphere query update..." ) + print( "Center sphere query update..." ); res = t.update({ "locs.loc": { $within: { $centerSphere: [ query.sphereCenter, query.sphereRadius ], $uniqueDocs: true } }, @@ -453,34 +453,34 @@ for ( var test = 0; test < numTests; test++ ) { } // $box - print( "Box query..." ) - assert.eq( results.box.docsIn, t.find( { "locs.loc" : { $within : { $box : query.box, $uniqueDocs : true } }, "box.docIn" : randYesQuery() } ).count() ) + print( "Box query..." ); + assert.eq( results.box.docsIn, t.find( { "locs.loc" : { $within : { $box : query.box, $uniqueDocs : true } }, "box.docIn" : randYesQuery() } ).count() ); // $polygon - print( "Polygon query..." ) - assert.eq( results.poly.docsIn, t.find( { "locs.loc" : { $within : { $polygon : query.boxPoly } }, "poly.docIn" : randYesQuery() } ).count() ) + print( "Polygon query..." ); + assert.eq( results.poly.docsIn, t.find( { "locs.loc" : { $within : { $polygon : query.boxPoly } }, "poly.docIn" : randYesQuery() } ).count() ); var defaultDocLimit = 100; // $near - print( "Near query..." ) + print( "Near query..." ); assert.eq( results.center.docsIn, t.find( { "locs.loc" : { $near : query.center, $maxDistance : query.radius } } ).count( true ), "Near query: center: " + query.center + "; radius: " + query.radius + "; docs: " + results.center.docsIn + - "; locs: " + results.center.locsIn ) + "; locs: " + results.center.locsIn ); if( query.sphereRadius >= 0 ){ - print( "Near sphere query...") + print( "Near sphere query..."); // $centerSphere assert.eq( results.sphere.docsIn, t.find( { "locs.loc" : { $nearSphere : query.sphereCenter, $maxDistance : query.sphereRadius } } ).count( true ), "Near sphere query: sphere center: " + query.sphereCenter + "; radius: " + query.sphereRadius + - "; docs: " + results.sphere.docsIn + "; locs: " + results.sphere.locsIn ) + "; docs: " + results.sphere.docsIn + "; locs: " + results.sphere.locsIn ); } // geoNear @@ -488,14 +488,14 @@ for ( var test = 0; test < numTests; test++ ) { if( data.maxLocs < defaultDocLimit ){ // GeoNear query - print( "GeoNear query..." ) + print( "GeoNear query..." ); // GeoNear command has a default doc limit 100. 
assert.eq( Math.min( defaultDocLimit, results.center.docsIn ), t.getDB().runCommand( { geoNear : "testAllGeo", near : query.center, maxDistance : query.radius } ).results.length, "GeoNear query: center: " + query.center + "; radius: " + query.radius + - "; docs: " + results.center.docsIn + "; locs: " + results.center.locsIn ) + "; docs: " + results.center.docsIn + "; locs: " + results.center.locsIn ); var num = Math.min( 2* defaultDocLimit, 2 * results.center.docsIn); @@ -505,41 +505,41 @@ for ( var test = 0; test < numTests; test++ ) { near : query.center, maxDistance : query.radius , includeLocs : true, - num : num } ).results + num : num } ).results; assert.eq( Math.min( num, results.center.docsIn ), output.length, "GeoNear query with limit of " + num + ": center: " + query.center + "; radius: " + query.radius + - "; docs: " + results.center.docsIn + "; locs: " + results.center.locsIn ) + "; docs: " + results.center.docsIn + "; locs: " + results.center.locsIn ); var distance = 0; for ( var i = 0; i < output.length; i++ ) { - var retDistance = output[i].dis - var retLoc = locArray( output[i].loc ) + var retDistance = output[i].dis; + var retLoc = locArray( output[i].loc ); - var arrLocs = locsArray( output[i].obj.locs ) + var arrLocs = locsArray( output[i].obj.locs ); - assert.contains( retLoc, arrLocs ) + assert.contains( retLoc, arrLocs ); - var distInObj = false + var distInObj = false; for ( var j = 0; j < arrLocs.length && distInObj == false; j++ ) { - var newDistance = Geo.distance( locArray( query.center ) , arrLocs[j] ) - distInObj = ( newDistance >= retDistance - 0.0001 && newDistance <= retDistance + 0.0001 ) + var newDistance = Geo.distance( locArray( query.center ) , arrLocs[j] ); + distInObj = ( newDistance >= retDistance - 0.0001 && newDistance <= retDistance + 0.0001 ); } - assert( distInObj ) - assert.between( retDistance - 0.0001 , Geo.distance( locArray( query.center ), retLoc ), retDistance + 0.0001 ) - assert.lte( retDistance, query.radius ) - assert.gte( retDistance, distance ) - distance = retDistance + assert( distInObj ); + assert.between( retDistance - 0.0001 , Geo.distance( locArray( query.center ), retLoc ), retDistance + 0.0001 ); + assert.lte( retDistance, query.radius ); + assert.gte( retDistance, distance ); + distance = retDistance; } } // $polygon - print( "Polygon remove..." ) + print( "Polygon remove..." 
); res = t.remove({ "locs.loc": { $within: { $polygon: query.boxPoly }}, "poly.docIn": randYesQuery() }); assert.eq( results.poly.docsIn, res.nRemoved ); diff --git a/jstests/noPassthrough/geo_mnypts_plus_fields.js b/jstests/noPassthrough/geo_mnypts_plus_fields.js index 7c5e23d4b97..7058d2795c1 100644 --- a/jstests/noPassthrough/geo_mnypts_plus_fields.js +++ b/jstests/noPassthrough/geo_mnypts_plus_fields.js @@ -1,42 +1,42 @@ // Test sanity of geo queries with a lot of points -load( "jstests/libs/slow_weekly_util.js" ) -testServer = new SlowWeeklyMongod( "geo_mnypts_plus_fields" ) +load( "jstests/libs/slow_weekly_util.js" ); +testServer = new SlowWeeklyMongod( "geo_mnypts_plus_fields" ); db = testServer.getDB( "test" ); var maxFields = 3; for( var fields = 1; fields < maxFields; fields++ ){ - var coll = db.testMnyPts - coll.drop() + var coll = db.testMnyPts; + coll.drop(); - var totalPts = 500 * 1000 + var totalPts = 500 * 1000; var bulk = coll.initializeUnorderedBulkOp(); // Add points in a 100x100 grid for( var i = 0; i < totalPts; i++ ){ - var ii = i % 10000 + var ii = i % 10000; - var doc = { loc : [ ii % 100, Math.floor( ii / 100 ) ] } + var doc = { loc : [ ii % 100, Math.floor( ii / 100 ) ] }; // Add fields with different kinds of data for( var j = 0; j < fields; j++ ){ - var field = null + var field = null; if( j % 3 == 0 ){ // Make half the points not searchable - field = "abcdefg" + ( i % 2 == 0 ? "h" : "" ) + field = "abcdefg" + ( i % 2 == 0 ? "h" : "" ); } else if( j % 3 == 1 ){ - field = new Date() + field = new Date(); } else{ - field = true + field = true; } - doc[ "field" + j ] = field + doc[ "field" + j ] = field; } bulk.insert( doc ); @@ -44,61 +44,61 @@ for( var fields = 1; fields < maxFields; fields++ ){ assert.writeOK(bulk.execute()); // Create the query for the additional fields - queryFields = {} + queryFields = {}; for( var j = 0; j < fields; j++ ){ - var field = null + var field = null; if( j % 3 == 0 ){ - field = "abcdefg" + field = "abcdefg"; } else if( j % 3 == 1 ){ - field = { $lte : new Date() } + field = { $lte : new Date() }; } else{ - field = true + field = true; } - queryFields[ "field" + j ] = field + queryFields[ "field" + j ] = field; } - coll.ensureIndex({ loc : "2d" }) + coll.ensureIndex({ loc : "2d" }); // Check that quarter of points in each quadrant for( var i = 0; i < 4; i++ ){ - var x = i % 2 - var y = Math.floor( i / 2 ) + var x = i % 2; + var y = Math.floor( i / 2 ); - var box = [[0, 0], [49, 49]] - box[0][0] += ( x == 1 ? 50 : 0 ) - box[1][0] += ( x == 1 ? 50 : 0 ) - box[0][1] += ( y == 1 ? 50 : 0 ) - box[1][1] += ( y == 1 ? 50 : 0 ) + var box = [[0, 0], [49, 49]]; + box[0][0] += ( x == 1 ? 50 : 0 ); + box[1][0] += ( x == 1 ? 50 : 0 ); + box[0][1] += ( y == 1 ? 50 : 0 ); + box[1][1] += ( y == 1 ? 50 : 0 ); // Now only half of each result comes back - assert.eq( totalPts / ( 4 * 2 ), coll.find(Object.extend( { loc : { $within : { $box : box } } }, queryFields ) ).count() ) - assert.eq( totalPts / ( 4 * 2 ), coll.find(Object.extend( { loc : { $within : { $box : box } } }, queryFields ) ).itcount() ) + assert.eq( totalPts / ( 4 * 2 ), coll.find(Object.extend( { loc : { $within : { $box : box } } }, queryFields ) ).count() ); + assert.eq( totalPts / ( 4 * 2 ), coll.find(Object.extend( { loc : { $within : { $box : box } } }, queryFields ) ).itcount() ); } // Check that half of points in each half for( var i = 0; i < 2; i++ ){ - var box = [[0, 0], [49, 99]] - box[0][0] += ( i == 1 ? 50 : 0 ) - box[1][0] += ( i == 1 ? 
50 : 0 ) + var box = [[0, 0], [49, 99]]; + box[0][0] += ( i == 1 ? 50 : 0 ); + box[1][0] += ( i == 1 ? 50 : 0 ); - assert.eq( totalPts / ( 2 * 2 ), coll.find(Object.extend( { loc : { $within : { $box : box } } }, queryFields ) ).count() ) - assert.eq( totalPts / ( 2 * 2 ), coll.find(Object.extend( { loc : { $within : { $box : box } } }, queryFields ) ).itcount() ) + assert.eq( totalPts / ( 2 * 2 ), coll.find(Object.extend( { loc : { $within : { $box : box } } }, queryFields ) ).count() ); + assert.eq( totalPts / ( 2 * 2 ), coll.find(Object.extend( { loc : { $within : { $box : box } } }, queryFields ) ).itcount() ); } // Check that all but corner set of points in radius - var circle = [[0, 0], (100 - 1) * Math.sqrt( 2 ) - 0.25 ] + var circle = [[0, 0], (100 - 1) * Math.sqrt( 2 ) - 0.25 ]; // All [99,x] pts are field0 : "abcdefg" - assert.eq( totalPts / 2 - totalPts / ( 100 * 100 ), coll.find(Object.extend( { loc : { $within : { $center : circle } } }, queryFields ) ).count() ) - assert.eq( totalPts / 2 - totalPts / ( 100 * 100 ), coll.find(Object.extend( { loc : { $within : { $center : circle } } }, queryFields ) ).itcount() ) + assert.eq( totalPts / 2 - totalPts / ( 100 * 100 ), coll.find(Object.extend( { loc : { $within : { $center : circle } } }, queryFields ) ).count() ); + assert.eq( totalPts / 2 - totalPts / ( 100 * 100 ), coll.find(Object.extend( { loc : { $within : { $center : circle } } }, queryFields ) ).itcount() ); } diff --git a/jstests/noPassthrough/geo_near_random1.js b/jstests/noPassthrough/geo_near_random1.js index b401e6447cd..77f7fdb28bf 100644 --- a/jstests/noPassthrough/geo_near_random1.js +++ b/jstests/noPassthrough/geo_near_random1.js @@ -1,8 +1,8 @@ // this tests all points using $near load("jstests/libs/geo_near_random.js"); -load( "jstests/libs/slow_weekly_util.js" ) +load( "jstests/libs/slow_weekly_util.js" ); -testServer = new SlowWeeklyMongod( "geo_near_random1" ) +testServer = new SlowWeeklyMongod( "geo_near_random1" ); db = testServer.getDB( "test" ); diff --git a/jstests/noPassthrough/geo_near_random2.js b/jstests/noPassthrough/geo_near_random2.js index 94f874812d6..030a6b8c4ac 100644 --- a/jstests/noPassthrough/geo_near_random2.js +++ b/jstests/noPassthrough/geo_near_random2.js @@ -1,8 +1,8 @@ // this tests 1% of all points using $near and $nearSphere load("jstests/libs/geo_near_random.js"); -load( "jstests/libs/slow_weekly_util.js" ) +load( "jstests/libs/slow_weekly_util.js" ); -testServer = new SlowWeeklyMongod( "geo_near_random2" ) +testServer = new SlowWeeklyMongod( "geo_near_random2" ); db = testServer.getDB( "test" ); var test = new GeoNearRandomTest("weekly.geo_near_random2"); @@ -16,7 +16,7 @@ test.testPt(test.mkPt(), opts); test.testPt(test.mkPt(), opts); test.testPt(test.mkPt(), opts); -opts.sphere = 1 +opts.sphere = 1; test.testPt([0,0], opts); test.testPt(test.mkPt(0.8), opts); test.testPt(test.mkPt(0.8), opts); diff --git a/jstests/noPassthrough/initial_sync_cloner_dups.js b/jstests/noPassthrough/initial_sync_cloner_dups.js index d6d74744aa1..f43c2cf43c4 100644 --- a/jstests/noPassthrough/initial_sync_cloner_dups.js +++ b/jstests/noPassthrough/initial_sync_cloner_dups.js @@ -23,7 +23,7 @@ var contains = function(logLines, func) { } } return false; -} +}; var replTest = new ReplSetTest({name: 'cloner', nodes: 3, oplogSize: 150 /*~1.5x data size*/}); replTest.startSet(); @@ -70,7 +70,7 @@ var insertAndRemove = function(host) { } jsTestLog("finished bg writes on " + host); -} +}; var worker = new ScopedThread(insertAndRemove, primary.host); 
worker.start(); @@ -104,7 +104,7 @@ if (!droppedDups) { jsTestLog("Warning: Test did not trigger duplicate documents, this run will be a false negative"); } -jsTestLog("stopping writes and waiting for replica set to coalesce") +jsTestLog("stopping writes and waiting for replica set to coalesce"); primary.getDB('test').stop.insert({}); worker.join(); //make sure all secondaries are caught up, after init sync diff --git a/jstests/noPassthrough/lock_stats.js b/jstests/noPassthrough/lock_stats.js index 2445b333435..d32cd9f3af1 100644 --- a/jstests/noPassthrough/lock_stats.js +++ b/jstests/noPassthrough/lock_stats.js @@ -57,4 +57,4 @@ var db = conn.getDB('test'); printjson([1, 10, 100, 500, 1000, 1500].map(testBlockTime)); MongoRunner.stopMongod(conn); -})() +})(); diff --git a/jstests/noPassthrough/minvalid.js b/jstests/noPassthrough/minvalid.js index cbaf26e1b71..8686152ec56 100644 --- a/jstests/noPassthrough/minvalid.js +++ b/jstests/noPassthrough/minvalid.js @@ -2,7 +2,7 @@ // this tests that members will stay in RECOVERING state on startup if they have not reached // their stored minvalid -var name = "minvalid" +var name = "minvalid"; var replTest = new ReplSetTest({name: name, nodes: 1, oplogSize:1}); var host = getHostName(); diff --git a/jstests/noPassthrough/minvalid2.js b/jstests/noPassthrough/minvalid2.js index c12d735cccf..148e6f4fc4d 100644 --- a/jstests/noPassthrough/minvalid2.js +++ b/jstests/noPassthrough/minvalid2.js @@ -19,7 +19,7 @@ */ print("1. make 3-member set w/arb (2)"); -var name = "minvalid" +var name = "minvalid"; var replTest = new ReplSetTest({name: name, nodes: 3, oplogSize:1}); var host = getHostName(); @@ -61,7 +61,7 @@ replTest.stop(masterId); print("6: start up slave"); replTest.restart(slaveId); -print("7: writes on former slave") +print("7: writes on former slave"); master = replTest.getPrimary(); mdb1 = master.getDB("foo"); mdb1.foo.save({a:1002}); diff --git a/jstests/noPassthrough/ns1.js b/jstests/noPassthrough/ns1.js index 0eb07935bbc..ae5c6efd2f5 100644 --- a/jstests/noPassthrough/ns1.js +++ b/jstests/noPassthrough/ns1.js @@ -1,7 +1,7 @@ -load( "jstests/libs/slow_weekly_util.js" ) +load( "jstests/libs/slow_weekly_util.js" ); -testServer = new SlowWeeklyMongod( "ns1" ) +testServer = new SlowWeeklyMongod( "ns1" ); mydb = testServer.getDB( "test_ns1" ); check = function( n , isNew ){ @@ -13,7 +13,7 @@ check = function( n , isNew ){ assert.eq( 1 , coll.count() , "pop b: " + n ); assert.eq( n , coll.findOne()._id , "pop c: " + n ); return coll; -} +}; max = 0; @@ -46,6 +46,6 @@ for ( i=0; i<its; i++ ){ print( i + "/" + its ); } } -print( "yay" ) +print( "yay" ); mydb.dropDatabase(); diff --git a/jstests/noPassthrough/query_yield1.js b/jstests/noPassthrough/query_yield1.js index 7c168c1e208..50b7d4bf60d 100644 --- a/jstests/noPassthrough/query_yield1.js +++ b/jstests/noPassthrough/query_yield1.js @@ -1,16 +1,16 @@ if (0) { // Test disabled until SERVER-8579 is finished. 
Reminder ticket: SERVER-8342 -load( "jstests/libs/slow_weekly_util.js" ) -testServer = new SlowWeeklyMongod( "query_yield1" ) +load( "jstests/libs/slow_weekly_util.js" ); +testServer = new SlowWeeklyMongod( "query_yield1" ); db = testServer.getDB( "test" ); t = db.query_yield1; -t.drop() +t.drop(); N = 20000; i = 0; -q = function(){ var x=this.n; for ( var i=0; i<250; i++ ){ x = x * 2; } return false; } +q = function(){ var x=this.n; for ( var i=0; i<250; i++ ){ x = x * 2; } return false; }; while ( true ){ function fill(){ @@ -45,7 +45,7 @@ while ( true ){ assert.eq( 0, db.currentOp().inprog.length , "setup broken" ); -join = startParallelShell( "print( 0 == db.query_yield1.find( function(){ var x=this.n; for ( var i=0; i<500; i++ ){ x = x * 2; } return false; } ).itcount() ); " ) +join = startParallelShell( "print( 0 == db.query_yield1.find( function(){ var x=this.n; for ( var i=0; i<500; i++ ){ x = x * 2; } return false; } ).itcount() ); " ); assert.soon( function(){ @@ -61,7 +61,7 @@ start = new Date(); biggestMe = 0; while ( ( (new Date()).getTime() - start ) < ( time * 2 ) ){ var me = Date.timeFunc( function(){ t.insert( { x : 1 } ); }); - var x = db.currentOp() + var x = db.currentOp(); if ( num++ == 0 ){ assert.eq( 1 , x.inprog.length , "nothing in prog" ); @@ -81,7 +81,7 @@ while ( ( (new Date()).getTime() - start ) < ( time * 2 ) ){ join(); -var x = db.currentOp() +var x = db.currentOp(); assert.eq( 0 , x.inprog.length , "weird 2" ); testServer.stop(); diff --git a/jstests/noPassthrough/query_yield2.js b/jstests/noPassthrough/query_yield2.js index e1fc2a7ed75..5997fa15ec8 100644 --- a/jstests/noPassthrough/query_yield2.js +++ b/jstests/noPassthrough/query_yield2.js @@ -20,7 +20,7 @@ t.drop(); N = 200; i = 0; -q = function() { var x=this.n; for ( var i=0; i<25000; i++ ) { x = x * 2; } return false; } +q = function() { var x=this.n; for ( var i=0; i<25000; i++ ) { x = x * 2; } return false; }; print( "Shell ==== Creating test.query_yield2 collection ..." ); print( "Shell ==== Adding documents until a time-wasting query takes over 2 seconds to complete" ); @@ -68,7 +68,7 @@ if ( len ) { print( "Shell ==== The test is working so far: db.currentOp().inprog.length is " + len ); print( "Shell ==== Starting parallel shell to test if slow query will yield to write" ); -join = startParallelShell( "print( 0 == db.query_yield2.find( function(){ var x=this.n; for ( var i=0; i<50000; i++ ){ x = x * 2; } return false; } ).itcount() ); " ) +join = startParallelShell( "print( 0 == db.query_yield2.find( function(){ var x=this.n; for ( var i=0; i<50000; i++ ){ x = x * 2; } return false; } ).itcount() ); " ); print( "Shell ==== Waiting until db.currentOp().inprog becomes non-empty" ); assert.soon( diff --git a/jstests/noPassthrough/read_majority.js b/jstests/noPassthrough/read_majority.js index 9c35711f7c6..b1f1a5801f8 100644 --- a/jstests/noPassthrough/read_majority.js +++ b/jstests/noPassthrough/read_majority.js @@ -107,7 +107,7 @@ assert.eq(getReadMajorityAggCursor().itcount(), 10); getReadMajorityAggCursor().forEach(function(doc) { // Note: agg uses internal batching so can't reliably test flipping snapshot. However, it uses // the same mechanism as find, so if one works, both should. 
- assert.eq(doc.version, 3) + assert.eq(doc.version, 3); }); assert.eq(getReadMajorityCursor().itcount(), 10); diff --git a/jstests/noPassthrough/repair2.js b/jstests/noPassthrough/repair2.js index 9db45146a0b..ec3e1f8a299 100644 --- a/jstests/noPassthrough/repair2.js +++ b/jstests/noPassthrough/repair2.js @@ -2,8 +2,8 @@ baseName = "jstests_repair2"; -load( "jstests/libs/slow_weekly_util.js" ) -testServer = new SlowWeeklyMongod( baseName ) +load( "jstests/libs/slow_weekly_util.js" ); +testServer = new SlowWeeklyMongod( baseName ); t = testServer.getDB( baseName )[ baseName ]; t.drop(); diff --git a/jstests/noPassthrough/update_server-5552.js b/jstests/noPassthrough/update_server-5552.js index c164ba67694..9459cfcc95e 100644 --- a/jstests/noPassthrough/update_server-5552.js +++ b/jstests/noPassthrough/update_server-5552.js @@ -1,9 +1,9 @@ -load( "jstests/libs/slow_weekly_util.js" ) -testServer = new SlowWeeklyMongod( "update_server-5552" ) +load( "jstests/libs/slow_weekly_util.js" ); +testServer = new SlowWeeklyMongod( "update_server-5552" ); db = testServer.getDB( "test" ); t = db.foo; -t.drop() +t.drop(); N = 10000; @@ -13,7 +13,7 @@ for ( i=0; i<N; i++ ) { } assert.writeOK(bulk.execute()); -join = startParallelShell( "while( db.foo.findOne( { _id : 0 } ).x == 1 ); db.foo.ensureIndex( { x : 1 } );" ) +join = startParallelShell( "while( db.foo.findOne( { _id : 0 } ).x == 1 ); db.foo.ensureIndex( { x : 1 } );" ); t.update( { $where : function(){ sleep(1); return true; } } , { $set : { x : 5 } } , false , true ); db.getLastError(); diff --git a/jstests/noPassthrough/write_local.js b/jstests/noPassthrough/write_local.js index 66b50f968ca..2f9cedba080 100644 --- a/jstests/noPassthrough/write_local.js +++ b/jstests/noPassthrough/write_local.js @@ -1,6 +1,6 @@ // SERVER-22011: Deadlock in ticket distribution (function() { - 'use strict' + 'use strict'; // Limit concurrent WiredTiger transactions to maximize locking issues, harmless for other SEs. var options = { verbose: 1 }; @@ -37,11 +37,11 @@ for (var i = 0; i < 1000; i++) { print(local.stats().objects); sleep(1); - }; + } // Wait for parallel shells to terminate and stop our replset. shells.forEach((function(f) { f(); })); replTest.stopSet(); -}()) +}()); diff --git a/jstests/noPassthrough/wt_nojournal_fsync.js b/jstests/noPassthrough/wt_nojournal_fsync.js index 1e2f84b7e36..a6e4f5b07ff 100644 --- a/jstests/noPassthrough/wt_nojournal_fsync.js +++ b/jstests/noPassthrough/wt_nojournal_fsync.js @@ -14,7 +14,7 @@ function writeDataAndRestart(doFsync) { } if (doFsync) { - jsTestLog("run fsync on the node") + jsTestLog("run fsync on the node"); assert.commandWorked(conn.getDB("admin").runCommand({fsync : 1})); } diff --git a/jstests/noPassthrough/wt_nojournal_repl.js b/jstests/noPassthrough/wt_nojournal_repl.js index 71cf78c5afc..5525db9526b 100644 --- a/jstests/noPassthrough/wt_nojournal_repl.js +++ b/jstests/noPassthrough/wt_nojournal_repl.js @@ -23,7 +23,7 @@ var contains = function(logLines, func) { } } return false; -} +}; // This test can only be run if the storageEngine is wiredTiger if (jsTest.options().storageEngine && jsTest.options().storageEngine !== "wiredTiger") { @@ -50,7 +50,7 @@ else { replTest.awaitReplication(); assert.eq(secondary1.getDB("test").foo.count(), 100); - jsTestLog("run fsync on the secondary to ensure it remains after restart") + jsTestLog("run fsync on the secondary to ensure it remains after restart"); assert.commandWorked(secondary1.getDB("admin").runCommand({fsync : 1})); jsTestLog("kill -9 secondary 1"); |