Diffstat (limited to 'jstests')
-rw-r--r--  jstests/aggregation/bugs/cond.js                  |  138
-rw-r--r--  jstests/aggregation/bugs/firstlast.js             |  225
-rw-r--r--  jstests/aggregation/bugs/server10176.js           |    2
-rw-r--r--  jstests/aggregation/bugs/server11675.js           |  182
-rw-r--r--  jstests/aggregation/bugs/server12015.js           |   34
-rw-r--r--  jstests/aggregation/bugs/server17943.js           |   10
-rw-r--r--  jstests/aggregation/bugs/server4588.js            |   24
-rw-r--r--  jstests/aggregation/bugs/server4899.js            |   16
-rw-r--r--  jstests/aggregation/bugs/server5012.js            |   19
-rw-r--r--  jstests/aggregation/bugs/server6127.js            |   57
-rw-r--r--  jstests/aggregation/bugs/server6147.js            |   84
-rw-r--r--  jstests/aggregation/bugs/server6185.js            |   25
-rw-r--r--  jstests/aggregation/bugs/server6779.js            |   27
-rw-r--r--  jstests/aggregation/bugs/sort_arrays.js           |    9
-rw-r--r--  jstests/aggregation/expressions/date_to_parts.js  |   50
-rw-r--r--  jstests/aggregation/expressions/size.js           |   23
-rw-r--r--  jstests/aggregation/testall.js                    | 1773
17 files changed, 1404 insertions, 1294 deletions
diff --git a/jstests/aggregation/bugs/cond.js b/jstests/aggregation/bugs/cond.js index c48c6b724f0..313316f4418 100644 --- a/jstests/aggregation/bugs/cond.js +++ b/jstests/aggregation/bugs/cond.js @@ -1,82 +1,88 @@ // $cond returns the evaluated second argument if the first evaluates to true but the evaluated // third argument if the first evaluates to false. -load('jstests/aggregation/extras/utils.js'); +(function() { + "use strict"; + load('jstests/aggregation/extras/utils.js'); -t = db.jstests_aggregation_cond; -t.drop(); + const coll = db.jstests_aggregation_cond; + coll.drop(); -t.save({}); + coll.save({}); -function assertError(expectedErrorCode, condSpec) { - assertErrorCode(t, {$project: {a: {$cond: condSpec}}}, expectedErrorCode); -} + function assertError(expectedErrorCode, condSpec) { + assertErrorCode(coll, {$project: {a: {$cond: condSpec}}}, expectedErrorCode); + } -function assertResult(expectedResult, arg) { - assert.eq(expectedResult, t.aggregate({$project: {a: {$cond: arg}}}).toArray()[0].a); -} + function assertResult(expectedResult, arg) { + assert.eq(expectedResult, coll.aggregate({$project: {a: {$cond: arg}}}).toArray()[0].a); + } -// Wrong number of args. -assertError(16020, []); -assertError(16020, [1]); -assertError(16020, [false]); -assertError(16020, [1, 1]); -assertError(16020, [1, 1, null, 1]); -assertError(16020, [1, 1, 1, undefined]); + // Wrong number of args. + assertError(16020, []); + assertError(16020, [1]); + assertError(16020, [false]); + assertError(16020, [1, 1]); + assertError(16020, [1, 1, null, 1]); + assertError(16020, [1, 1, 1, undefined]); -// Bad object cases -assertError(17080, {"else": 1, then: 1}); -assertError(17081, {"if": 1, "else": 1}); -assertError(17082, {"if": 1, then: 1}); -assertError(17083, {asdf: 1, then: 1}); + // Bad object cases. + assertError(17080, {"else": 1, then: 1}); + assertError(17081, {"if": 1, "else": 1}); + assertError(17082, {"if": 1, then: 1}); + assertError(17083, {asdf: 1, then: 1}); -// Literal expressions. -assertResult(1, [true, 1, 2]); -assertResult(2, [false, 1, 2]); + // Literal expressions. + assertResult(1, [true, 1, 2]); + assertResult(2, [false, 1, 2]); -// Order independence for object case -assertResult(1, {"if": true, "then": 1, "else": 2}); -assertResult(1, {"if": true, "else": 2, "then": 1}); -assertResult(1, {"then": 1, "if": true, "else": 2}); -assertResult(1, {"then": 1, "else": 2, "if": true}); -assertResult(1, {"else": 2, "then": 1, "if": true}); -assertResult(1, {"else": 2, "if": true, "then": 1}); + // Order independence for object case. + assertResult(1, {"if": true, "then": 1, "else": 2}); + assertResult(1, {"if": true, "else": 2, "then": 1}); + assertResult(1, {"then": 1, "if": true, "else": 2}); + assertResult(1, {"then": 1, "else": 2, "if": true}); + assertResult(1, {"else": 2, "then": 1, "if": true}); + assertResult(1, {"else": 2, "if": true, "then": 1}); -// Computed expressions. -assertResult(1, [{$and: []}, {$add: [1]}, {$add: [1, 1]}]); -assertResult(2, [{$or: []}, {$add: [1]}, {$add: [1, 1]}]); + // Computed expressions. + assertResult(1, [{$and: []}, {$add: [1]}, {$add: [1, 1]}]); + assertResult(2, [{$or: []}, {$add: [1]}, {$add: [1, 1]}]); -t.drop(); -t.save({t: true, f: false, x: 'foo', y: 'bar'}); + assert(coll.drop()); + assert.writeOK(coll.insert({t: true, f: false, x: 'foo', y: 'bar'})); -// Field path expressions. -assertResult('foo', ['$t', '$x', '$y']); -assertResult('bar', ['$f', '$x', '$y']); + // Field path expressions. 
+ assertResult('foo', ['$t', '$x', '$y']); + assertResult('bar', ['$f', '$x', '$y']); -t.drop(); -t.save({}); + assert(coll.drop()); + assert.writeOK(coll.insert({})); -// Coerce to bool. -assertResult('a', [1, 'a', 'b']); -assertResult('a', ['', 'a', 'b']); -assertResult('b', [0, 'a', 'b']); + // Coerce to bool. + assertResult('a', [1, 'a', 'b']); + assertResult('a', ['', 'a', 'b']); + assertResult('b', [0, 'a', 'b']); -// Nested. -t.drop(); -t.save({noonSense: 'am', mealCombined: 'no'}); -t.save({noonSense: 'am', mealCombined: 'yes'}); -t.save({noonSense: 'pm', mealCombined: 'yes'}); -t.save({noonSense: 'pm', mealCombined: 'no'}); -assert.eq(['breakfast', 'brunch', 'linner', 'dinner'], - t.aggregate({ - $project: { - a: { - $cond: [ - {$eq: ['$noonSense', 'am']}, - {$cond: [{$eq: ['$mealCombined', 'yes']}, 'brunch', 'breakfast']}, - {$cond: [{$eq: ['$mealCombined', 'yes']}, 'linner', 'dinner']} - ] - } - } - }).map(function(x) { - return x.a; - })); + // Nested. + assert(coll.drop()); + assert.writeOK(coll.insert({noonSense: 'am', mealCombined: 'no'})); + assert.writeOK(coll.insert({noonSense: 'am', mealCombined: 'yes'})); + assert.writeOK(coll.insert({noonSense: 'pm', mealCombined: 'yes'})); + assert.writeOK(coll.insert({noonSense: 'pm', mealCombined: 'no'})); + assert.eq( + ['breakfast', 'brunch', 'dinner', 'linner'], + coll.aggregate([ + { + $project: { + meal: { + $cond: [ + {$eq: ['$noonSense', 'am']}, + {$cond: [{$eq: ['$mealCombined', 'yes']}, 'brunch', 'breakfast']}, + {$cond: [{$eq: ['$mealCombined', 'yes']}, 'linner', 'dinner']} + ] + } + } + }, + {$sort: {meal: 1}} + ]) + .map(doc => doc.meal)); +}()); diff --git a/jstests/aggregation/bugs/firstlast.js b/jstests/aggregation/bugs/firstlast.js index 54f0f8be0e9..aa360a25b7e 100644 --- a/jstests/aggregation/bugs/firstlast.js +++ b/jstests/aggregation/bugs/firstlast.js @@ -1,108 +1,121 @@ -// Check $first/$last group accumulators. SERVER-3862 -// $first/$last select first/last value for a group key from the previous pipeline. - -t = db.jstests_aggregation_firstlast; -t.drop(); - -/** Check expected $first and $last result values. */ -function assertFirstLast(expectedFirst, expectedLast, pipeline, expression) { - pipeline = pipeline || []; - expression = expression || '$b'; - pipeline.push({$group: {_id: '$a', first: {$first: expression}, last: {$last: expression}}}); - result = t.aggregate(pipeline).toArray(); - for (var i = 0; i < result.length; ++i) { - if (result[i]._id == 1) { - // Check results for group _id 1. - assert.eq(expectedFirst, result[i].first); - assert.eq(expectedLast, result[i].last); - return; +/** + * Tests the $first and $last accumulators in $group. + */ +(function() { + 'use strict'; + const coll = db.jstests_aggregation_firstlast; + coll.drop(); + + /** Check expected $first and $last result values. */ + function assertFirstLast(expectedFirst, expectedLast, stages, expression) { + let pipeline = [{$sort: {_id: 1}}]; + if (stages) { + pipeline = pipeline.concat(stages); + } + + expression = expression || '$b'; + pipeline.push( + {$group: {_id: '$a', first: {$first: expression}, last: {$last: expression}}}); + + const result = coll.aggregate(pipeline).toArray(); + for (let i = 0; i < result.length; ++i) { + if (result[i]._id === 1) { + // Check results for group _id 1. + assert.eq(expectedFirst, result[i].first); + assert.eq(expectedLast, result[i].last); + return; + } } + throw new Error('Expected $group _id "1" is missing'); } - assert(false, "Expected group _id '1' missing."); -} - -// One document. 
-t.save({a: 1, b: 1}); -assertFirstLast(1, 1); - -// Two documents. -t.save({a: 1, b: 2}); -assertFirstLast(1, 2); - -// Three documents. -t.save({a: 1, b: 3}); -assertFirstLast(1, 3); - -// Another 'a' key value does not affect outcome. -t.drop(); -t.save({a: 3, b: 0}); -t.save({a: 1, b: 1}); -t.save({a: 1, b: 2}); -t.save({a: 1, b: 3}); -t.save({a: 2, b: 0}); -assertFirstLast(1, 3); - -// Additional pipeline stages do not affect outcome if order is maintained. -assertFirstLast(1, 3, [{$project: {x: '$a', y: '$b'}}, {$project: {a: '$x', b: '$y'}}]); - -// Additional pipeline stages affect outcome if order is modified. -assertFirstLast(3, 1, [{$sort: {b: -1}}]); - -// Skip and limit affect the results seen. -t.drop(); -t.save({a: 1, b: 1}); -t.save({a: 1, b: 2}); -t.save({a: 1, b: 3}); -assertFirstLast(1, 2, [{$limit: 2}]); -assertFirstLast(2, 3, [{$skip: 1}, {$limit: 2}]); -assertFirstLast(2, 2, [{$skip: 1}, {$limit: 1}]); - -// Mixed type values. -t.save({a: 1, b: 'foo'}); -assertFirstLast(1, 'foo'); - -t.drop(); -t.save({a: 1, b: 'bar'}); -t.save({a: 1, b: true}); -assertFirstLast('bar', true); - -// Value null. -t.drop(); -t.save({a: 1, b: null}); -t.save({a: 1, b: 2}); -assertFirstLast(null, 2); -t.drop(); -t.save({a: 1, b: 2}); -t.save({a: 1, b: null}); -assertFirstLast(2, null); -t.drop(); -t.save({a: 1, b: null}); -t.save({a: 1, b: null}); -assertFirstLast(null, null); - -// Value missing. -t.drop(); -t.save({a: 1}); -t.save({a: 1, b: 2}); -assertFirstLast(undefined, 2); -t.drop(); -t.save({a: 1, b: 2}); -t.save({a: 1}); -assertFirstLast(2, undefined); -t.drop(); -t.save({a: 1}); -t.save({a: 1}); -assertFirstLast(undefined, undefined); - -// Dotted field. -t.drop(); -t.save({a: 1, b: [{c: 1}, {c: 2}]}); -t.save({a: 1, b: [{c: 6}, {}]}); -assertFirstLast([1, 2], [6], [], '$b.c'); - -// Computed expressions. -t.drop(); -t.save({a: 1, b: 1}); -t.save({a: 1, b: 2}); -assertFirstLast(1, 0, [], {$mod: ['$b', 2]}); -assertFirstLast(0, 1, [], {$mod: [{$add: ['$b', 1]}, 2]}); + + // One document. + assert.writeOK(coll.insert({a: 1, b: 1})); + assertFirstLast(1, 1); + + // Two documents. + assert.writeOK(coll.insert({a: 1, b: 2})); + assertFirstLast(1, 2); + + // Three documents. + assert.writeOK(coll.insert({a: 1, b: 3})); + assertFirstLast(1, 3); + + // Another 'a' key value does not affect outcome. + assert(coll.drop()); + assert.writeOK(coll.insert({a: 3, b: 0})); + assert.writeOK(coll.insert({a: 1, b: 1})); + assert.writeOK(coll.insert({a: 1, b: 2})); + assert.writeOK(coll.insert({a: 1, b: 3})); + assert.writeOK(coll.insert({a: 2, b: 0})); + assertFirstLast(1, 3); + + // Additional pipeline stages do not affect outcome if order is maintained. + assertFirstLast(1, 3, [{$project: {x: '$a', y: '$b'}}, {$project: {a: '$x', b: '$y'}}]); + + // Additional pipeline stages affect outcome if order is modified. + assertFirstLast(3, 1, [{$sort: {b: -1}}]); + + // Skip and limit affect the results seen. + assert(coll.drop()); + assert.writeOK(coll.insert({a: 1, b: 1})); + assert.writeOK(coll.insert({a: 1, b: 2})); + assert.writeOK(coll.insert({a: 1, b: 3})); + assertFirstLast(1, 2, [{$limit: 2}]); + assertFirstLast(2, 3, [{$skip: 1}, {$limit: 2}]); + assertFirstLast(2, 2, [{$skip: 1}, {$limit: 1}]); + + // Mixed type values. + assert.writeOK(coll.insert({a: 1, b: 'foo'})); + assertFirstLast(1, 'foo'); + + assert(coll.drop()); + assert.writeOK(coll.insert({a: 1, b: 'bar'})); + assert.writeOK(coll.insert({a: 1, b: true})); + assertFirstLast('bar', true); + + // Value null. 
+ assert(coll.drop()); + assert.writeOK(coll.insert({a: 1, b: null})); + assert.writeOK(coll.insert({a: 1, b: 2})); + assertFirstLast(null, 2); + + assert(coll.drop()); + assert.writeOK(coll.insert({a: 1, b: 2})); + assert.writeOK(coll.insert({a: 1, b: null})); + assertFirstLast(2, null); + + assert(coll.drop()); + assert.writeOK(coll.insert({a: 1, b: null})); + assert.writeOK(coll.insert({a: 1, b: null})); + assertFirstLast(null, null); + + // Value missing. + assert(coll.drop()); + assert.writeOK(coll.insert({a: 1})); + assert.writeOK(coll.insert({a: 1, b: 2})); + assertFirstLast(undefined, 2); + + assert(coll.drop()); + assert.writeOK(coll.insert({a: 1, b: 2})); + assert.writeOK(coll.insert({a: 1})); + assertFirstLast(2, undefined); + + assert(coll.drop()); + assert.writeOK(coll.insert({a: 1})); + assert.writeOK(coll.insert({a: 1})); + assertFirstLast(undefined, undefined); + + // Dotted field. + assert(coll.drop()); + assert.writeOK(coll.insert({a: 1, b: [{c: 1}, {c: 2}]})); + assert.writeOK(coll.insert({a: 1, b: [{c: 6}, {}]})); + assertFirstLast([1, 2], [6], [], '$b.c'); + + // Computed expressions. + assert(coll.drop()); + assert.writeOK(coll.insert({a: 1, b: 1})); + assert.writeOK(coll.insert({a: 1, b: 2})); + assertFirstLast(1, 0, [], {$mod: ['$b', 2]}); + assertFirstLast(0, 1, [], {$mod: [{$add: ['$b', 1]}, 2]}); +}()); diff --git a/jstests/aggregation/bugs/server10176.js b/jstests/aggregation/bugs/server10176.js index b04db0c4319..988beb24f13 100644 --- a/jstests/aggregation/bugs/server10176.js +++ b/jstests/aggregation/bugs/server10176.js @@ -32,7 +32,7 @@ load('jstests/aggregation/extras/utils.js'); // valid use of $abs: numbers become positive, null/undefined/nonexistent become null - var results = coll.aggregate([{$project: {a: {$abs: "$a"}}}]).toArray(); + var results = coll.aggregate([{$project: {a: {$abs: "$a"}}}, {$sort: {_id: 1}}]).toArray(); assert.eq(results, [ {_id: 0, a: 5}, {_id: 1, a: 5}, diff --git a/jstests/aggregation/bugs/server11675.js b/jstests/aggregation/bugs/server11675.js index c80f9a38af5..ee677aa085a 100644 --- a/jstests/aggregation/bugs/server11675.js +++ b/jstests/aggregation/bugs/server11675.js @@ -1,28 +1,29 @@ // SERVER-11675 Text search integration with aggregation load('jstests/aggregation/extras/utils.js'); -var server11675 = function() { - var t = db.server11675; - t.drop(); +const server11675 = function() { + const coll = db.server11675; + coll.drop(); if (typeof(RUNNING_IN_SHARDED_AGG_TEST) != 'undefined') { // see end of testshard1.js - db.adminCommand({shardcollection: t.getFullName(), key: {"_id": 1}}); + assert.commandWorked( + db.adminCommand({shardcollection: coll.getFullName(), key: {"_id": 1}})); } - t.insert({_id: 1, text: "apple", words: 1}); - t.insert({_id: 2, text: "banana", words: 1}); - t.insert({_id: 3, text: "apple banana", words: 2}); - t.insert({_id: 4, text: "cantaloupe", words: 1}); + assert.writeOK(coll.insert({_id: 1, text: "apple", words: 1})); + assert.writeOK(coll.insert({_id: 2, text: "banana", words: 1})); + assert.writeOK(coll.insert({_id: 3, text: "apple banana", words: 2})); + assert.writeOK(coll.insert({_id: 4, text: "cantaloupe", words: 1})); - t.ensureIndex({text: "text"}); + assert.commandWorked(coll.createIndex({text: "text"})); // query should have subfields query, project, sort, skip and limit. All but query are optional. 
- var assertSameAsFind = function(query) { - var cursor = t.find(query.query); - var pipeline = [{$match: query.query}]; + const assertSameAsFind = function(query) { + let cursor = coll.find(query.query); + const pipeline = [{$match: query.query}]; if ('project' in query) { - cursor = t.find(query.query, query.project); // no way to add to constructed cursor + cursor = coll.find(query.query, query.project); // no way to add to constructed cursor pipeline.push({$project: query.project}); } @@ -41,8 +42,8 @@ var server11675 = function() { pipeline.push({$limit: query.limit}); } - var findRes = cursor.toArray(); - var aggRes = t.aggregate(pipeline).toArray(); + const findRes = cursor.toArray(); + const aggRes = coll.aggregate(pipeline).toArray(); // If the query doesn't specify its own sort, there is a possibility that find() and // aggregate() will return the same results in different orders. We sort by _id on the @@ -95,128 +96,137 @@ var server11675 = function() { // $meta sort specification should be rejected if it has additional keys. assert.throws(function() { - t.aggregate([ - {$match: {$text: {$search: 'apple banana'}}}, - {$sort: {textScore: {$meta: 'textScore', extra: 1}}} - ]).itcount(); + coll.aggregate([ + {$match: {$text: {$search: 'apple banana'}}}, + {$sort: {textScore: {$meta: 'textScore', extra: 1}}} + ]) + .itcount(); }); // $meta sort specification should be rejected if the type of meta sort is not known. assert.throws(function() { - t.aggregate([ - {$match: {$text: {$search: 'apple banana'}}}, - {$sort: {textScore: {$meta: 'unknown'}}} - ]).itcount(); + coll.aggregate([ + {$match: {$text: {$search: 'apple banana'}}}, + {$sort: {textScore: {$meta: 'unknown'}}} + ]) + .itcount(); }); // Sort specification should be rejected if a $-keyword other than $meta is used. assert.throws(function() { - t.aggregate([ - {$match: {$text: {$search: 'apple banana'}}}, - {$sort: {textScore: {$notMeta: 'textScore'}}} - ]).itcount(); + coll.aggregate([ + {$match: {$text: {$search: 'apple banana'}}}, + {$sort: {textScore: {$notMeta: 'textScore'}}} + ]) + .itcount(); }); // Sort specification should be rejected if it is a string, not an object with $meta. assert.throws(function() { - t.aggregate([ - {$match: {$text: {$search: 'apple banana'}}}, - {$sort: {textScore: 'textScore'}} - ]).itcount(); + coll.aggregate( + [{$match: {$text: {$search: 'apple banana'}}}, {$sort: {textScore: 'textScore'}}]) + .itcount(); }); // sharded find requires projecting the score to sort, but sharded agg does not. 
- var findRes = t.find({$text: {$search: "apple banana"}}, {textScore: {$meta: 'textScore'}}) + var findRes = coll.find({$text: {$search: "apple banana"}}, {textScore: {$meta: 'textScore'}}) .sort({textScore: {$meta: 'textScore'}}) .map(function(obj) { delete obj.textScore; // remove it to match agg output return obj; }); - var res = t.aggregate([ - {$match: {$text: {$search: 'apple banana'}}}, - {$sort: {textScore: {$meta: 'textScore'}}} - ]).toArray(); + let res = coll.aggregate([ + {$match: {$text: {$search: 'apple banana'}}}, + {$sort: {textScore: {$meta: 'textScore'}}} + ]) + .toArray(); assert.eq(res, findRes); // Make sure {$meta: 'textScore'} can be used as a sub-expression - var res = t.aggregate([ - {$match: {_id: 1, $text: {$search: 'apple'}}}, - { - $project: { - words: 1, - score: {$meta: 'textScore'}, - wordsTimesScore: {$multiply: ['$words', {$meta: 'textScore'}]} - } - } - ]).toArray(); + res = coll.aggregate([ + {$match: {_id: 1, $text: {$search: 'apple'}}}, + { + $project: { + words: 1, + score: {$meta: 'textScore'}, + wordsTimesScore: {$multiply: ['$words', {$meta: 'textScore'}]} + } + } + ]) + .toArray(); assert.eq(res[0].wordsTimesScore, res[0].words * res[0].score, tojson(res)); // And can be used in $group - var res = t.aggregate([ - {$match: {_id: 1, $text: {$search: 'apple banana'}}}, - {$group: {_id: {$meta: 'textScore'}, score: {$first: {$meta: 'textScore'}}}} - ]).toArray(); + res = coll.aggregate([ + {$match: {_id: 1, $text: {$search: 'apple banana'}}}, + {$group: {_id: {$meta: 'textScore'}, score: {$first: {$meta: 'textScore'}}}} + ]) + .toArray(); assert.eq(res[0]._id, res[0].score, tojson(res)); // Make sure metadata crosses shard -> merger boundary - var res = t.aggregate([ - {$match: {_id: 1, $text: {$search: 'apple'}}}, - {$project: {scoreOnShard: {$meta: 'textScore'}}}, - {$limit: 1} // force a split. later stages run on merger - , - {$project: {scoreOnShard: 1, scoreOnMerger: {$meta: 'textScore'}}} - ]).toArray(); + res = coll.aggregate([ + {$match: {_id: 1, $text: {$search: 'apple'}}}, + {$project: {scoreOnShard: {$meta: 'textScore'}}}, + {$limit: 1}, // force a split. later stages run on merger + {$project: {scoreOnShard: 1, scoreOnMerger: {$meta: 'textScore'}}} + ]) + .toArray(); assert.eq(res[0].scoreOnMerger, res[0].scoreOnShard); - var score = res[0].scoreOnMerger; // save for later tests + let score = res[0].scoreOnMerger; // save for later tests // Make sure metadata crosses shard -> merger boundary even if not used on shard - var res = t.aggregate([ - {$match: {_id: 1, $text: {$search: 'apple'}}}, - {$limit: 1} // force a split. later stages run on merger - , - {$project: {scoreOnShard: 1, scoreOnMerger: {$meta: 'textScore'}}} - ]).toArray(); + res = coll.aggregate([ + {$match: {_id: 1, $text: {$search: 'apple'}}}, + {$limit: 1}, // force a split. later stages run on merger + {$project: {scoreOnShard: 1, scoreOnMerger: {$meta: 'textScore'}}} + ]) + .toArray(); assert.eq(res[0].scoreOnMerger, score); // Make sure metadata works if first $project doesn't use it. - var res = t.aggregate([ - {$match: {_id: 1, $text: {$search: 'apple'}}}, - {$project: {_id: 1}}, - {$project: {_id: 1, score: {$meta: 'textScore'}}} - ]).toArray(); + res = coll.aggregate([ + {$match: {_id: 1, $text: {$search: 'apple'}}}, + {$project: {_id: 1}}, + {$project: {_id: 1, score: {$meta: 'textScore'}}} + ]) + .toArray(); assert.eq(res[0].score, score); // Make sure the pipeline fails if it tries to reference the text score and it doesn't exist. 
- var res = t.runCommand( - {aggregate: t.getName(), pipeline: [{$project: {_id: 1, score: {$meta: 'textScore'}}}]}); + res = coll.runCommand( + {aggregate: coll.getName(), pipeline: [{$project: {_id: 1, score: {$meta: 'textScore'}}}]}); assert.commandFailed(res); // Make sure the metadata is 'missing()' when it doesn't exist because the document changed - var res = t.aggregate([ - {$match: {_id: 1, $text: {$search: 'apple banana'}}}, - {$group: {_id: 1, score: {$first: {$meta: 'textScore'}}}}, - {$project: {_id: 1, scoreAgain: {$meta: 'textScore'}}}, - ]).toArray(); + res = coll.aggregate([ + {$match: {_id: 1, $text: {$search: 'apple banana'}}}, + {$group: {_id: 1, score: {$first: {$meta: 'textScore'}}}}, + {$project: {_id: 1, scoreAgain: {$meta: 'textScore'}}}, + ]) + .toArray(); assert(!("scoreAgain" in res[0])); // Make sure metadata works after a $unwind - t.insert({_id: 5, text: 'mango', words: [1, 2, 3]}); - var res = t.aggregate([ - {$match: {$text: {$search: 'mango'}}}, - {$project: {score: {$meta: "textScore"}, _id: 1, words: 1}}, - {$unwind: '$words'}, - {$project: {scoreAgain: {$meta: "textScore"}, score: 1}} - ]).toArray(); + assert.writeOK(coll.insert({_id: 5, text: 'mango', words: [1, 2, 3]})); + res = coll.aggregate([ + {$match: {$text: {$search: 'mango'}}}, + {$project: {score: {$meta: "textScore"}, _id: 1, words: 1}}, + {$unwind: '$words'}, + {$project: {scoreAgain: {$meta: "textScore"}, score: 1}} + ]) + .toArray(); assert.eq(res[0].scoreAgain, res[0].score); // Error checking // $match, but wrong position - assertErrorCode(t, [{$sort: {text: 1}}, {$match: {$text: {$search: 'apple banana'}}}], 17313); + assertErrorCode( + coll, [{$sort: {text: 1}}, {$match: {$text: {$search: 'apple banana'}}}], 17313); // wrong $stage, but correct position - assertErrorCode(t, + assertErrorCode(coll, [{$project: {searchValue: {$text: {$search: 'apple banana'}}}}], ErrorCodes.InvalidPipelineOperator); - assertErrorCode(t, [{$sort: {$text: {$search: 'apple banana'}}}], 17312); + assertErrorCode(coll, [{$sort: {$text: {$search: 'apple banana'}}}], 17312); }; server11675(); diff --git a/jstests/aggregation/bugs/server12015.js b/jstests/aggregation/bugs/server12015.js index c237e4f6f90..150ff30e708 100644 --- a/jstests/aggregation/bugs/server12015.js +++ b/jstests/aggregation/bugs/server12015.js @@ -10,9 +10,9 @@ load("jstests/aggregation/extras/utils.js"); // For orderedArrayEq. (function() { "use strict"; - var coll = db.server12015; + const coll = db.server12015; coll.drop(); - var indexSpec = {a: 1, b: 1}; + const indexSpec = {a: 1, b: 1}; assert.writeOK(coll.insert({_id: 0, a: 0, b: 0})); assert.writeOK(coll.insert({_id: 1, a: 0, b: 1})); @@ -21,10 +21,11 @@ load("jstests/aggregation/extras/utils.js"); // For orderedArrayEq. /** * Helper to test that for a given pipeline, the same results are returned whether or not an - * index is present. + * index is present. If 'ignoreSortOrder' is present, test for result parity without assuming + * the order of results. */ - function assertResultsMatch(pipeline) { - // Add a match stage to ensure index scans are considerd for planning (workaround for + function assertResultsMatch(pipeline, ignoreSortOrder) { + // Add a match stage to ensure index scans are considered for planning (workaround for // SERVER-20066). pipeline = [{$match: {a: {$gte: 0}}}].concat(pipeline); @@ -36,18 +37,27 @@ load("jstests/aggregation/extras/utils.js"); // For orderedArrayEq. 
assert.commandWorked(coll.dropIndex(indexSpec)); var resultsWithoutIndex = coll.aggregate(pipeline).toArray(); - assert(orderedArrayEq(resultsWithIndex, resultsWithoutIndex)); + if (ignoreSortOrder) { + assert(arrayEq(resultsWithIndex, resultsWithoutIndex), tojson({ + resultsWithIndex: resultsWithIndex, + resultsWithoutIndex: resultsWithoutIndex + })); + } else { + assert.eq(resultsWithIndex, resultsWithoutIndex); + } } // Uncovered $project, no $sort. - assertResultsMatch([{$project: {_id: 1, a: 1, b: 1}}]); + const ignoreSortOrder = true; + assertResultsMatch([{$project: {_id: 1, a: 1, b: 1}}], ignoreSortOrder); // Covered $project, no $sort. - assertResultsMatch([{$project: {_id: 0, a: 1}}]); - assertResultsMatch([{$project: {_id: 0, a: 1, b: 1}}]); - assertResultsMatch([{$project: {_id: 0, a: 1, b: 1, c: {$literal: 1}}}]); - assertResultsMatch([{$project: {_id: 0, a: 1, b: 1}}, {$project: {a: 1}}]); - assertResultsMatch([{$project: {_id: 0, a: 1, b: 1}}, {$group: {_id: null, a: {$sum: "$a"}}}]); + assertResultsMatch([{$project: {_id: 0, a: 1}}], ignoreSortOrder); + assertResultsMatch([{$project: {_id: 0, a: 1, b: 1}}], ignoreSortOrder); + assertResultsMatch([{$project: {_id: 0, a: 1, b: 1, c: {$literal: 1}}}], ignoreSortOrder); + assertResultsMatch([{$project: {_id: 0, a: 1, b: 1}}, {$project: {a: 1}}], ignoreSortOrder); + assertResultsMatch([{$project: {_id: 0, a: 1, b: 1}}, {$group: {_id: null, a: {$sum: "$a"}}}], + ignoreSortOrder); // Non-blocking $sort, uncovered $project. assertResultsMatch([{$sort: {a: -1, b: -1}}, {$project: {_id: 1, a: 1, b: 1}}]); diff --git a/jstests/aggregation/bugs/server17943.js b/jstests/aggregation/bugs/server17943.js index 35d67d4fe20..075623c705d 100644 --- a/jstests/aggregation/bugs/server17943.js +++ b/jstests/aggregation/bugs/server17943.js @@ -28,7 +28,8 @@ load('jstests/aggregation/extras/utils.js'); {_id: 5, b: null}, {_id: 6, b: null}, ]; - var results = coll.aggregate([{$project: {b: {$filter: filterDoc}}}]).toArray(); + var results = + coll.aggregate([{$project: {b: {$filter: filterDoc}}}, {$sort: {_id: 1}}]).toArray(); assert.eq(results, expectedResults); // create filter that uses the default variable name in 'cond' @@ -42,14 +43,11 @@ load('jstests/aggregation/extras/utils.js'); {_id: 5, b: null}, {_id: 6, b: null}, ]; - results = coll.aggregate([{$project: {b: {$filter: filterDoc}}}]).toArray(); + results = coll.aggregate([{$project: {b: {$filter: filterDoc}}}, {$sort: {_id: 1}}]).toArray(); assert.eq(results, expectedResults); // Invalid filter expressions. - // Insert a document so that the initial cursor doesn't immediately return EOF. - coll.insert({_id: 0, a: [1, 2]}); - // '$filter' is not a document. 
var filterDoc = 'string'; assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 28646); @@ -78,7 +76,7 @@ load('jstests/aggregation/extras/utils.js'); filterDoc = {input: '$a', cond: {$eq: [1, '$$var']}}; assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 17276); - coll.drop(); + assert(coll.drop()); assert.writeOK(coll.insert({a: 'string'})); filterDoc = {input: '$a', as: 'x', cond: true}; assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 28651); diff --git a/jstests/aggregation/bugs/server4588.js b/jstests/aggregation/bugs/server4588.js index 49f41283f6d..000cc8f0231 100644 --- a/jstests/aggregation/bugs/server4588.js +++ b/jstests/aggregation/bugs/server4588.js @@ -2,7 +2,7 @@ (function() { "use strict"; - var coll = db.server4588; + const coll = db.server4588; coll.drop(); assert.writeOK(coll.insert({_id: 0})); @@ -12,8 +12,9 @@ assert.writeOK(coll.insert({_id: 4, x: 5})); // Without includeArrayIndex. - var actualResults = coll.aggregate([{$unwind: {path: "$x"}}]).toArray(); - var expectedResults = [ + let actualResults = + coll.aggregate([{$unwind: {path: "$x"}}, {$sort: {_id: 1, x: 1}}]).toArray(); + let expectedResults = [ {_id: 3, x: 1}, {_id: 3, x: 2}, {_id: 3, x: 3}, @@ -22,7 +23,10 @@ assert.eq(expectedResults, actualResults, "Incorrect results for normal $unwind"); // With includeArrayIndex, index inserted into a new field. - actualResults = coll.aggregate([{$unwind: {path: "$x", includeArrayIndex: "index"}}]).toArray(); + actualResults = + coll.aggregate( + [{$unwind: {path: "$x", includeArrayIndex: "index"}}, {$sort: {_id: 1, x: 1}}]) + .toArray(); expectedResults = [ {_id: 3, x: 1, index: NumberLong(0)}, {_id: 3, x: 2, index: NumberLong(1)}, @@ -32,12 +36,14 @@ assert.eq(expectedResults, actualResults, "Incorrect results $unwind with includeArrayIndex"); // With both includeArrayIndex and preserveNullAndEmptyArrays. - // TODO: update this test when SERVER-20168 is resolved. 
actualResults = - coll.aggregate([{ - $unwind: - {path: "$x", includeArrayIndex: "index", preserveNullAndEmptyArrays: true} - }]) + coll.aggregate([ + { + $unwind: + {path: "$x", includeArrayIndex: "index", preserveNullAndEmptyArrays: true} + }, + {$sort: {_id: 1, x: 1}} + ]) .toArray(); expectedResults = [ {_id: 0, index: null}, diff --git a/jstests/aggregation/bugs/server4899.js b/jstests/aggregation/bugs/server4899.js deleted file mode 100644 index b90ed984c2a..00000000000 --- a/jstests/aggregation/bugs/server4899.js +++ /dev/null @@ -1,16 +0,0 @@ -// test $size -load('jstests/aggregation/extras/utils.js'); - -c = db.server4899; -c.drop(); -c.save({arr: []}); -c.save({arr: [1]}); -c.save({arr: ["asdf", "asdfasdf"]}); -c.save({arr: [1, "asdf", 1234, 4.3, {key: 23}]}); -c.save({arr: [3, [31, 31, 13, 13]]}); - -result = c.aggregate({$project: {_id: 0, length: {$size: "$arr"}}}); -assert.eq(result.toArray(), [{length: 0}, {length: 1}, {length: 2}, {length: 5}, {length: 2}]); - -c.save({arr: 231}); -assertErrorCode(c, {$project: {_id: 0, length: {$size: "$arr"}}}, 17124); diff --git a/jstests/aggregation/bugs/server5012.js b/jstests/aggregation/bugs/server5012.js index 64f55369dc4..a9955349490 100644 --- a/jstests/aggregation/bugs/server5012.js +++ b/jstests/aggregation/bugs/server5012.js @@ -1,12 +1,11 @@ -// use aggdb -db = db.getSiblingDB("aggdb"); -var article = db.article; +(function() { + "use strict"; + load('jstests/aggregation/data/articles.js'); -load('jstests/aggregation/data/articles.js'); + const article = db.getSiblingDB("aggdb").getCollection("article"); + const cursor = article.aggregate( + [{$sort: {_id: 1}}, {$project: {author: 1, _id: 0}}, {$project: {Writer: "$author"}}]); + const expected = [{Writer: "bob"}, {Writer: "dave"}, {Writer: "jane"}]; -// original crash from ticket -var r3 = article.aggregate({$project: {author: 1, _id: 0}}, {$project: {Writer: "$author"}}); - -var r3result = [{"Writer": "bob"}, {"Writer": "dave"}, {"Writer": "jane"}]; - -assert.eq(r3.toArray(), r3result, 's5012 failed'); + assert.eq(cursor.toArray(), expected); +}()); diff --git a/jstests/aggregation/bugs/server6127.js b/jstests/aggregation/bugs/server6127.js index f217e9a8d93..26585c87d21 100644 --- a/jstests/aggregation/bugs/server6127.js +++ b/jstests/aggregation/bugs/server6127.js @@ -1,44 +1,27 @@ /* - * SERVER-6127 : $project uasserts if an expected nested field has a non object parent in a document + * SERVER-6127 : $project uasserts if an expected nested field has a non object parent in a + * document. * * This test validates the SERVER-6127 ticket. Return undefined when retrieving a field along a - * path, when the subpath does not exist (this is what happens when a field does not exist and - * there is no path). Previous it would uassert causing the aggregation to end. + * path, when the subpath does not exist (this is what happens when a field does not exist and there + * is no path). Previous it would uassert causing the aggregation to end. 
*/ +(function() { + "use strict"; + db.s6127.drop(); -/* - * 1) Clear and create testing db - * 2) Run an aggregation that simply projects a two fields, one with a sub path one without - * 3) Assert that the result is what we expected - */ - -// Clear db -db.s6127.drop(); - -// Populate db -db.s6127.save({a: 1}); -db.s6127.save({foo: 2}); -db.s6127.save({foo: {bar: 3}}); - -// Aggregate checking the field foo and the path foo.bar -var s6127 = db.s6127.aggregate({$project: {_id: 0, 'foo.bar': 1, field: "$foo", path: "$foo.bar"}}); - -/* - * The first document should contain nothing as neither field exists, the second document should - * contain only field as it has a value in foo, but foo does not have a field bar so it cannot walk - * that path, the third document should have both the field and path as foo is an object which has - * a field bar - */ -var s6127result = [ - {}, - {field: 2}, - { - foo: {bar: 3}, - field: {bar: 3}, - path: 3 + assert.writeOK(db.s6127.insert({_id: 0, a: 1})); + assert.writeOK(db.s6127.insert({_id: 1, foo: 2})); + assert.writeOK(db.s6127.insert({_id: 2, foo: {bar: 3}})); - } -]; + // Aggregate checking the field foo and the path foo.bar. + const cursor = db.s6127.aggregate( + [{$sort: {_id: 1}}, {$project: {_id: 0, "foo.bar": 1, field: "$foo", path: "$foo.bar"}}]); -// Assert -assert.eq(s6127.toArray(), s6127result, 's6127 failed'); + // The first document should contain nothing as neither field exists, the second document should + // contain only field as it has a value in foo, but foo does not have a field bar so it cannot + // walk that path, the third document should have both the field and path as foo is an object + // which has a field bar. + const expected = [{}, {field: 2}, {foo: {bar: 3}, field: {bar: 3}, path: 3}]; + assert.eq(cursor.toArray(), expected); +}()); diff --git a/jstests/aggregation/bugs/server6147.js b/jstests/aggregation/bugs/server6147.js index b376afa3c75..0969b366636 100644 --- a/jstests/aggregation/bugs/server6147.js +++ b/jstests/aggregation/bugs/server6147.js @@ -1,55 +1,49 @@ /* - * SERVER-6147 : aggregation $ne expression applied to constant returns incorrect result + * SERVER-6147 : aggregation $ne expression applied to constant returns incorrect result. * * This test validates the SERVER-6147 ticket. Return true when comparing a constant to a field * containing a different value using $ne. Previously it would return false when comparing a * constant and a field regardless of whether they were equal or not. */ +(function() { + "use strict"; + db.s6147.drop(); -/* - * 1) Clear and create testing db - * 2) Run an aggregation with $ne comparing constants and fields in various configurations - * 3) Assert that the result is what we expected - */ - -// Clear db -db.s6147.drop(); + assert.writeOK(db.s6147.insert({a: 1})); + assert.writeOK(db.s6147.insert({a: 2})); -// Populate db -db.s6147.save({a: 1}); -db.s6147.save({a: 2}); + // Aggregate checking various combinations of the constant and the field. 
+ const cursor = db.s6147.aggregate([ + {$sort: {a: 1}}, + { + $project: { + _id: 0, + constantAndField: {$ne: [1, "$a"]}, + fieldAndConstant: {$ne: ["$a", 1]}, + constantAndConstant: {$ne: [1, 1]}, + fieldAndField: {$ne: ["$a", "$a"]} + } + } + ]); -// Aggregate checking various combinations of the constant and the field -var s6147 = db.s6147.aggregate({ - $project: { - _id: 0, - constantAndField: {$ne: [1, "$a"]}, - fieldAndConstant: {$ne: ["$a", 1]}, - constantAndConstant: {$ne: [1, 1]}, - fieldAndField: {$ne: ["$a", "$a"]} - } -}); - -/* - * In both documents the constantAndConstant and fieldAndField should be false since they compare - * something with itself but the constantAndField and fieldAndConstant should be different as - * document one contains 1 which should return false and document 2 contains something different so - * should return true - */ -var s6147result = [ - { - constantAndField: false, - fieldAndConstant: false, - constantAndConstant: false, - fieldAndField: false - }, - { - constantAndField: true, - fieldAndConstant: true, - constantAndConstant: false, - fieldAndField: false - } -]; + // In both documents, the constantAndConstant and fieldAndField should be false since they + // compare something with itself. However, the constantAndField and fieldAndConstant should be + // different as document one contains 1 which should return false and document 2 contains + // something different so should return true. + const expected = [ + { + constantAndField: false, + fieldAndConstant: false, + constantAndConstant: false, + fieldAndField: false + }, + { + constantAndField: true, + fieldAndConstant: true, + constantAndConstant: false, + fieldAndField: false + } + ]; -// Assert -assert.eq(s6147.toArray(), s6147result, 's6147 failed'); + assert.eq(cursor.toArray(), expected); +}()); diff --git a/jstests/aggregation/bugs/server6185.js b/jstests/aggregation/bugs/server6185.js index e1b19ad2c1f..cf084d4b371 100644 --- a/jstests/aggregation/bugs/server6185.js +++ b/jstests/aggregation/bugs/server6185.js @@ -1,12 +1,17 @@ -// projecting a non-existent subfield should work as it does in a query with projection -c = db.c; -c.drop(); +/** + * Tests that projecting a non-existent subfield behaves identically in both query and aggregation. 
+ */ +(function() { + "use strict"; + const coll = db.c; + coll.drop(); -c.save({a: [1]}); -c.save({a: {c: 1}}); -c.save({a: [{c: 1}, {b: 1, c: 1}, {c: 1}]}); -c.save({a: 1}); -c.save({b: 1}); + assert.writeOK(coll.insert({a: [1]})); + assert.writeOK(coll.insert({a: {c: 1}})); + assert.writeOK(coll.insert({a: [{c: 1}, {b: 1, c: 1}, {c: 1}]})); + assert.writeOK(coll.insert({a: 1})); + assert.writeOK(coll.insert({b: 1})); -// assert the aggregation and the query produce the same thing -assert.eq(c.aggregate({$project: {'a.b': 1}}).toArray(), c.find({}, {'a.b': 1}).toArray()); + assert.eq(coll.aggregate([{$project: {'a.b': 1}}, {$sort: {_id: 1}}]).toArray(), + coll.find({}, {'a.b': 1}).sort({_id: 1}).toArray()); +}()); diff --git a/jstests/aggregation/bugs/server6779.js b/jstests/aggregation/bugs/server6779.js index e3b8aaeca08..44f641ea15d 100644 --- a/jstests/aggregation/bugs/server6779.js +++ b/jstests/aggregation/bugs/server6779.js @@ -1,17 +1,20 @@ // server 6779: serializing ExpressionCoerceToBool // This test only fails in debug mode with the bug since that tests round-tripping -function test(op, val) { - t = db.server6779; - t.drop(); +(function() { + "use strict"; - t.insert({a: true}); - t.insert({a: false}); + function test(op, val) { + const coll = db.server6779; + coll.drop(); + assert.writeOK(coll.insert({a: true})); + assert.writeOK(coll.insert({a: false})); - obj = {}; - obj[op] = ['$a', val]; - result = t.aggregate({$project: {_id: 0, bool: obj}}); + const obj = {}; + obj[op] = ['$a', val]; + const result = coll.aggregate([{$project: {_id: 0, bool: obj}}, {$sort: {bool: -1}}]); - assert.eq(result.toArray(), [{bool: true}, {bool: false}]); -} -test('$and', true); -test('$or', false); + assert.eq(result.toArray(), [{bool: true}, {bool: false}]); + } + test('$and', true); + test('$or', false); +}()); diff --git a/jstests/aggregation/bugs/sort_arrays.js b/jstests/aggregation/bugs/sort_arrays.js index 47c27736b76..9fbb707decb 100644 --- a/jstests/aggregation/bugs/sort_arrays.js +++ b/jstests/aggregation/bugs/sort_arrays.js @@ -2,15 +2,16 @@ // array. 
(function() { "use strict"; + const coll = db.foo; coll.drop(); assert.writeOK(coll.insert([{_id: 2, a: [2, 3]}, {_id: 3, a: [2, 4]}, {_id: 4, a: [2, 1]}])); const expectedOrder = [{_id: 4, a: [2, 1]}, {_id: 2, a: [2, 3]}, {_id: 3, a: [2, 4]}]; - assert.eq(coll.aggregate([{$sort: {a: 1}}]).toArray(), expectedOrder); - assert.eq(coll.find().sort({a: 1}).toArray(), expectedOrder); + assert.eq(coll.aggregate([{$sort: {a: 1, _id: 1}}]).toArray(), expectedOrder); + assert.eq(coll.find().sort({a: 1, _id: 1}).toArray(), expectedOrder); assert.commandWorked(coll.ensureIndex({a: 1})); - assert.eq(coll.aggregate([{$sort: {a: 1}}]).toArray(), expectedOrder); - assert.eq(coll.find().sort({a: 1}).toArray(), expectedOrder); + assert.eq(coll.aggregate([{$sort: {a: 1, _id: 1}}]).toArray(), expectedOrder); + assert.eq(coll.find().sort({a: 1, _id: 1}).toArray(), expectedOrder); }()); diff --git a/jstests/aggregation/expressions/date_to_parts.js b/jstests/aggregation/expressions/date_to_parts.js index b0b20e135d8..c1a41abf0b1 100644 --- a/jstests/aggregation/expressions/date_to_parts.js +++ b/jstests/aggregation/expressions/date_to_parts.js @@ -65,7 +65,8 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode } }, ], - coll.aggregate([{$project: {date: {'$dateToParts': {date: "$date"}}}}]).toArray()); + coll.aggregate([{$project: {date: {'$dateToParts': {date: "$date"}}}}, {$sort: {_id: 1}}]) + .toArray()); assert.eq( [ @@ -118,7 +119,10 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode } }, ], - coll.aggregate([{$project: {date: {'$dateToParts': {date: "$date", "timezone": "$tz"}}}}]) + coll.aggregate([ + {$project: {date: {'$dateToParts': {date: "$date", "timezone": "$tz"}}}}, + {$sort: {_id: 1}} + ]) .toArray()); assert.eq( @@ -172,11 +176,15 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode } }, ], - coll.aggregate([{ - $project: { - date: {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": false}} - } - }]) + coll.aggregate([ + { + $project: { + date: + {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": false}} + } + }, + {$sort: {_id: 1}} + ]) .toArray()); assert.eq( @@ -230,10 +238,14 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode } }, ], - coll.aggregate([{ - $project: - {date: {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": true}}} - }]) + coll.aggregate([ + { + $project: { + date: {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": true}} + } + }, + {$sort: {_id: 1}} + ]) .toArray()); assert.eq( @@ -267,17 +279,17 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode {$match: {iso: {$exists: true}}}, { $project: { - date: { - '$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": "$iso"} - } + date: + {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": "$iso"}} } - } + }, + {$sort: {_id: 1}} ]) .toArray()); /* --------------------------------------------------------------------------------------- */ /* Tests with timestamp */ - coll.drop(); + assert(coll.drop()); assert.writeOK(coll.insert([ { @@ -380,7 +392,7 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode .toArray()); /* --------------------------------------------------------------------------------------- */ - coll.drop(); + assert(coll.drop()); assert.writeOK(coll.insert([ {_id: 0, date: ISODate("2017-06-27T12:00:20Z")}, @@ -394,7 +406,7 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode .toArray()); /* 
--------------------------------------------------------------------------------------- */ - coll.drop(); + assert(coll.drop()); assert.writeOK(coll.insert([ {_id: 0, date: ISODate("2017-06-27T12:00:20Z")}, @@ -408,7 +420,7 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode .toArray()); /* --------------------------------------------------------------------------------------- */ - coll.drop(); + assert(coll.drop()); assert.writeOK(coll.insert([ {_id: 0, tz: "Europe/London"}, diff --git a/jstests/aggregation/expressions/size.js b/jstests/aggregation/expressions/size.js new file mode 100644 index 00000000000..c3ccec34fb3 --- /dev/null +++ b/jstests/aggregation/expressions/size.js @@ -0,0 +1,23 @@ +/** + * Test the $size expression. + */ +(function() { + "use strict"; + load("jstests/aggregation/extras/utils.js"); + + const coll = db.expression_size; + coll.drop(); + + assert.writeOK(coll.insert({_id: 0, arr: []})); + assert.writeOK(coll.insert({_id: 1, arr: [1]})); + assert.writeOK(coll.insert({_id: 2, arr: ["asdf", "asdfasdf"]})); + assert.writeOK(coll.insert({_id: 3, arr: [1, "asdf", 1234, 4.3, {key: 23}]})); + assert.writeOK(coll.insert({_id: 4, arr: [3, [31, 31, 13, 13]]})); + + const result = + coll.aggregate([{$sort: {_id: 1}}, {$project: {_id: 0, length: {$size: "$arr"}}}]); + assert.eq(result.toArray(), [{length: 0}, {length: 1}, {length: 2}, {length: 5}, {length: 2}]); + + assert.writeOK(coll.insert({arr: 231})); + assertErrorCode(coll, {$project: {_id: 0, length: {$size: "$arr"}}}, 17124); +}()); diff --git a/jstests/aggregation/testall.js b/jstests/aggregation/testall.js index 66a4e76de35..33dd09a0463 100644 --- a/jstests/aggregation/testall.js +++ b/jstests/aggregation/testall.js @@ -1,875 +1,938 @@ -/* - Run all the aggregation tests -*/ - -/* load the test documents */ -load('jstests/aggregation/data/articles.js'); - -// make sure we're using the right db; this is the same as "use mydb;" in shell -db = db.getSiblingDB("aggdb"); - -// just passing through fields -var p1 = db.runCommand( - {aggregate: "article", pipeline: [{$project: {tags: 1, pageViews: 1}}], cursor: {}}); - -var p1result = [ - {"_id": 1, "pageViews": 5, "tags": ["fun", "good", "fun"]}, - {"_id": 2, "pageViews": 7, "tags": ["fun", "nasty"]}, - {"_id": 3, "pageViews": 6, "tags": ["nasty", "filthy"]} -]; - -assert.docEq(p1.cursor.firstBatch, p1result, 'p1 failed'); - -// a simple array unwinding -var u1 = db.runCommand({aggregate: "article", pipeline: [{$unwind: "$tags"}], cursor: {}}); - -var u1result = [ - { - "_id": 1, - "title": "this is my title", - "author": "bob", - "posted": ISODate("2004-03-21T18:59:54Z"), - "pageViews": 5, - "tags": "fun", - "comments": - [{"author": "joe", "text": "this is cool"}, {"author": "sam", "text": "this is bad"}], - "other": {"foo": 5} - }, - { - "_id": 1, - "title": "this is my title", - "author": "bob", - "posted": ISODate("2004-03-21T18:59:54Z"), - "pageViews": 5, - "tags": "good", - "comments": - [{"author": "joe", "text": "this is cool"}, {"author": "sam", "text": "this is bad"}], - "other": {"foo": 5} - }, - { - "_id": 1, - "title": "this is my title", - "author": "bob", - "posted": ISODate("2004-03-21T18:59:54Z"), - "pageViews": 5, - "tags": "fun", - "comments": - [{"author": "joe", "text": "this is cool"}, {"author": "sam", "text": "this is bad"}], - "other": {"foo": 5} - }, - { - "_id": 2, - "title": "this is your title", - "author": "dave", - "posted": ISODate("2030-08-08T04:11:10Z"), - "pageViews": 7, - "tags": "fun", - "comments": [ - {"author": 
"barbara", "text": "this is interesting"}, - {"author": "jenny", "text": "i like to play pinball", "votes": 10} - ], - "other": {"bar": 14} - }, - { - "_id": 2, - "title": "this is your title", - "author": "dave", - "posted": ISODate("2030-08-08T04:11:10Z"), - "pageViews": 7, - "tags": "nasty", - "comments": [ - {"author": "barbara", "text": "this is interesting"}, - {"author": "jenny", "text": "i like to play pinball", "votes": 10} - ], - "other": {"bar": 14} - }, - { - "_id": 3, - "title": "this is some other title", - "author": "jane", - "posted": ISODate("2000-12-31T05:17:14Z"), - "pageViews": 6, - "tags": "nasty", - "comments": [ - {"author": "will", "text": "i don't like the color"}, - {"author": "jenny", "text": "can i get that in green?"} - ], - "other": {"bar": 14} - }, - { - "_id": 3, - "title": "this is some other title", - "author": "jane", - "posted": ISODate("2000-12-31T05:17:14Z"), - "pageViews": 6, - "tags": "filthy", - "comments": [ - {"author": "will", "text": "i don't like the color"}, - {"author": "jenny", "text": "can i get that in green?"} - ], - "other": {"bar": 14} - } -]; - -assert.docEq(u1.cursor.firstBatch, u1result, 'u1 failed'); - -// unwind an array at the end of a dotted path -db.ut.drop(); -db.ut.save({_id: 4, a: 1, b: {e: 7, f: [4, 3, 2, 1]}, c: 12, d: 17}); -var u2 = db.runCommand({aggregate: "ut", pipeline: [{$unwind: "$b.f"}], cursor: {}}); - -var u2result = [ - {"_id": 4, "a": 1, "b": {"e": 7, "f": 4}, "c": 12, "d": 17}, - {"_id": 4, "a": 1, "b": {"e": 7, "f": 3}, "c": 12, "d": 17}, - {"_id": 4, "a": 1, "b": {"e": 7, "f": 2}, "c": 12, "d": 17}, - {"_id": 4, "a": 1, "b": {"e": 7, "f": 1}, "c": 12, "d": 17} -]; - -assert.docEq(u2.cursor.firstBatch, u2result, 'u2 failed'); - -// combining a projection with unwinding an array -var p2 = db.runCommand({ - aggregate: "article", - pipeline: [{$project: {author: 1, tags: 1, pageViews: 1}}, {$unwind: "$tags"}], - cursor: {} -}); - -var p2result = [ - {"_id": 1, "author": "bob", "pageViews": 5, "tags": "fun"}, - {"_id": 1, "author": "bob", "pageViews": 5, "tags": "good"}, - {"_id": 1, "author": "bob", "pageViews": 5, "tags": "fun"}, - {"_id": 2, "author": "dave", "pageViews": 7, "tags": "fun"}, - {"_id": 2, "author": "dave", "pageViews": 7, "tags": "nasty"}, - {"_id": 3, "author": "jane", "pageViews": 6, "tags": "nasty"}, - {"_id": 3, "author": "jane", "pageViews": 6, "tags": "filthy"} -]; - -assert.docEq(p2.cursor.firstBatch, p2result, 'p2 failed'); - -// pulling values out of subdocuments -var p3 = db.runCommand({ - aggregate: "article", - pipeline: [{$project: {otherfoo: "$other.foo", otherbar: "$other.bar"}}], - cursor: {} -}); - -var p3result = [{"_id": 1, "otherfoo": 5}, {"_id": 2, "otherbar": 14}, {"_id": 3, "otherbar": 14}]; - -assert.docEq(p3.cursor.firstBatch, p3result, 'p3 failed'); - -// projection includes a computed value -var p4 = db.runCommand({ - aggregate: "article", - pipeline: [{$project: {author: 1, daveWroteIt: {$eq: ["$author", "dave"]}}}], - cursor: {} -}); - -var p4result = [ - {"_id": 1, "author": "bob", "daveWroteIt": false}, - {"_id": 2, "author": "dave", "daveWroteIt": true}, - {"_id": 3, "author": "jane", "daveWroteIt": false} -]; - -assert.docEq(p4.cursor.firstBatch, p4result, 'p4 failed'); - -// projection includes a virtual (fabricated) document -var p5 = db.runCommand({ - aggregate: "article", - pipeline: [ - {$project: {author: 1, pageViews: 1, tags: 1}}, - {$unwind: "$tags"}, - {$project: {author: 1, subDocument: {foo: "$pageViews", bar: "$tags"}}} - ], - cursor: {} -}); - -var 
p5result = [ - {"_id": 1, "author": "bob", "subDocument": {"foo": 5, "bar": "fun"}}, - {"_id": 1, "author": "bob", "subDocument": {"foo": 5, "bar": "good"}}, - {"_id": 1, "author": "bob", "subDocument": {"foo": 5, "bar": "fun"}}, - {"_id": 2, "author": "dave", "subDocument": {"foo": 7, "bar": "fun"}}, - {"_id": 2, "author": "dave", "subDocument": {"foo": 7, "bar": "nasty"}}, - {"_id": 3, "author": "jane", "subDocument": {"foo": 6, "bar": "nasty"}}, - {"_id": 3, "author": "jane", "subDocument": {"foo": 6, "bar": "filthy"}} -]; - -assert.docEq(p5.cursor.firstBatch, p5result, 'p5 failed'); - -// multi-step aggregate -// nested expressions in computed fields -var p6 = db.runCommand({ - aggregate: "article", - pipeline: [ - {$project: {author: 1, tags: 1, pageViews: 1}}, - {$unwind: "$tags"}, +(function() { + "use strict"; + + // Loads data into the namespace 'aggdb.articles'. + load('jstests/aggregation/data/articles.js'); + load('jstests/aggregation/extras/utils.js'); + + const testDB = db.getSiblingDB("aggdb"); + + // just passing through fields + let p1 = testDB.runCommand({ + aggregate: "article", + pipeline: [{$project: {tags: 1, pageViews: 1}}, {$sort: {_id: 1}}], + cursor: {} + }); + + let p1result = [ + {"_id": 1, "pageViews": 5, "tags": ["fun", "good", "fun"]}, + {"_id": 2, "pageViews": 7, "tags": ["fun", "nasty"]}, + {"_id": 3, "pageViews": 6, "tags": ["nasty", "filthy"]} + ]; + + assert.docEq(p1.cursor.firstBatch, p1result, 'p1 failed'); + + // a simple array unwinding + let u1 = testDB.runCommand({aggregate: "article", pipeline: [{$unwind: "$tags"}], cursor: {}}); + + let u1result = [ + { + "_id": 1, + "title": "this is my title", + "author": "bob", + "posted": ISODate("2004-03-21T18:59:54Z"), + "pageViews": 5, + "tags": "fun", + "comments": + [{"author": "joe", "text": "this is cool"}, {"author": "sam", "text": "this is bad"}], + "other": {"foo": 5} + }, + { + "_id": 1, + "title": "this is my title", + "author": "bob", + "posted": ISODate("2004-03-21T18:59:54Z"), + "pageViews": 5, + "tags": "good", + "comments": + [{"author": "joe", "text": "this is cool"}, {"author": "sam", "text": "this is bad"}], + "other": {"foo": 5} + }, + { + "_id": 1, + "title": "this is my title", + "author": "bob", + "posted": ISODate("2004-03-21T18:59:54Z"), + "pageViews": 5, + "tags": "fun", + "comments": + [{"author": "joe", "text": "this is cool"}, {"author": "sam", "text": "this is bad"}], + "other": {"foo": 5} + }, + { + "_id": 2, + "title": "this is your title", + "author": "dave", + "posted": ISODate("2030-08-08T04:11:10Z"), + "pageViews": 7, + "tags": "fun", + "comments": [ + {"author": "barbara", "text": "this is interesting"}, + {"author": "jenny", "text": "i like to play pinball", "votes": 10} + ], + "other": {"bar": 14} + }, + { + "_id": 2, + "title": "this is your title", + "author": "dave", + "posted": ISODate("2030-08-08T04:11:10Z"), + "pageViews": 7, + "tags": "nasty", + "comments": [ + {"author": "barbara", "text": "this is interesting"}, + {"author": "jenny", "text": "i like to play pinball", "votes": 10} + ], + "other": {"bar": 14} + }, { - $project: { - author: 1, - tag: "$tags", - pageViews: 1, - daveWroteIt: {$eq: ["$author", "dave"]}, - weLikeIt: {$or: [{$eq: ["$author", "dave"]}, {$eq: ["$tags", "good"]}]} - } + "_id": 3, + "title": "this is some other title", + "author": "jane", + "posted": ISODate("2000-12-31T05:17:14Z"), + "pageViews": 6, + "tags": "nasty", + "comments": [ + {"author": "will", "text": "i don't like the color"}, + {"author": "jenny", "text": "can i get that in 
green?"} + ], + "other": {"bar": 14} + }, + { + "_id": 3, + "title": "this is some other title", + "author": "jane", + "posted": ISODate("2000-12-31T05:17:14Z"), + "pageViews": 6, + "tags": "filthy", + "comments": [ + {"author": "will", "text": "i don't like the color"}, + {"author": "jenny", "text": "can i get that in green?"} + ], + "other": {"bar": 14} } - ], - cursor: {} -}); - -var p6result = [ - { - "_id": 1, - "author": "bob", - "pageViews": 5, - "tag": "fun", - "daveWroteIt": false, - "weLikeIt": false - }, - { - "_id": 1, - "author": "bob", - "pageViews": 5, - "tag": "good", - "daveWroteIt": false, - "weLikeIt": true - }, - { - "_id": 1, - "author": "bob", - "pageViews": 5, - "tag": "fun", - "daveWroteIt": false, - "weLikeIt": false - }, - { - "_id": 2, - "author": "dave", - "pageViews": 7, - "tag": "fun", - "daveWroteIt": true, - "weLikeIt": true - }, - { - "_id": 2, - "author": "dave", - "pageViews": 7, - "tag": "nasty", - "daveWroteIt": true, - "weLikeIt": true - }, - { - "_id": 3, - "author": "jane", - "pageViews": 6, - "tag": "nasty", - "daveWroteIt": false, - "weLikeIt": false - }, - { - "_id": 3, - "author": "jane", - "pageViews": 6, - "tag": "filthy", - "daveWroteIt": false, - "weLikeIt": false - } -]; - -assert.docEq(p6.cursor.firstBatch, p6result, 'p6 failed'); - -// slightly more complex computed expression; $ifNull -var p7 = db.runCommand({ - aggregate: "article", - pipeline: - [{$project: {theSum: {$add: ["$pageViews", {$ifNull: ["$other.foo", "$other.bar"]}]}}}], - cursor: {} -}); - -var p7result = [{"_id": 1, "theSum": 10}, {"_id": 2, "theSum": 21}, {"_id": 3, "theSum": 20}]; - -assert.docEq(p7.cursor.firstBatch, p7result, 'p7 failed'); - -// dotted path inclusion; _id exclusion -var p8 = db.runCommand({ - aggregate: "article", - pipeline: [{$project: {_id: 0, author: 1, tags: 1, "comments.author": 1}}, {$unwind: "$tags"}], - cursor: {} -}); - -var p8result = [ - {"author": "bob", "tags": "fun", "comments": [{"author": "joe"}, {"author": "sam"}]}, - {"author": "bob", "tags": "good", "comments": [{"author": "joe"}, {"author": "sam"}]}, - {"author": "bob", "tags": "fun", "comments": [{"author": "joe"}, {"author": "sam"}]}, - {"author": "dave", "tags": "fun", "comments": [{"author": "barbara"}, {"author": "jenny"}]}, - {"author": "dave", "tags": "nasty", "comments": [{"author": "barbara"}, {"author": "jenny"}]}, - {"author": "jane", "tags": "nasty", "comments": [{"author": "will"}, {"author": "jenny"}]}, - {"author": "jane", "tags": "filthy", "comments": [{"author": "will"}, {"author": "jenny"}]} -]; - -assert.docEq(p8.cursor.firstBatch, p8result, 'p8 failed'); - -// collapse a dotted path with an intervening array -var p9 = db.runCommand({ - aggregate: "article", - pipeline: [{$project: {_id: 0, author: 1, commentsAuthor: "$comments.author"}}], - cursor: {} -}); - -var p9result = [ - {"author": "bob", "commentsAuthor": ["joe", "sam"]}, - {"author": "dave", "commentsAuthor": ["barbara", "jenny"]}, - {"author": "jane", "commentsAuthor": ["will", "jenny"]} -]; - -assert.docEq(p9.cursor.firstBatch, p9result, 'p9 failed'); - -// simple sort -var p10 = db.runCommand({aggregate: "article", pipeline: [{$sort: {title: 1}}], cursor: {}}); - -var p10result = [ - { - "_id": 1, - "title": "this is my title", - "author": "bob", - "posted": ISODate("2004-03-21T18:59:54Z"), - "pageViews": 5, - "tags": ["fun", "good", "fun"], - "comments": - [{"author": "joe", "text": "this is cool"}, {"author": "sam", "text": "this is bad"}], - "other": {"foo": 5} - }, - { - "_id": 3, - "title": 
"this is some other title", - "author": "jane", - "posted": ISODate("2000-12-31T05:17:14Z"), - "pageViews": 6, - "tags": ["nasty", "filthy"], - "comments": [ - {"author": "will", "text": "i don't like the color"}, - {"author": "jenny", "text": "can i get that in green?"} - ], - "other": {"bar": 14} - }, - { - "_id": 2, - "title": "this is your title", - "author": "dave", - "posted": ISODate("2030-08-08T04:11:10Z"), - "pageViews": 7, - "tags": ["fun", "nasty"], - "comments": [ - {"author": "barbara", "text": "this is interesting"}, - {"author": "jenny", "text": "i like to play pinball", "votes": 10} - ], - "other": {"bar": 14} - } -]; - -assert.docEq(p10.cursor.firstBatch, p10result, 'p10 failed'); - -// unwind on nested array -db.p11.drop(); -db.p11.save({ - _id: 5, - name: 'MongoDB', - items: {authors: ['jay', 'vivek', 'bjornar'], dbg: [17, 42]}, - favorites: ['pickles', 'ice cream', 'kettle chips'] -}); - -var p11 = db.runCommand({ - aggregate: "p11", - pipeline: [ - {$unwind: "$items.authors"}, - {$project: {name: 1, author: "$items.authors"}}, - ], - cursor: {} -}); - -p11result = [ - {"_id": 5, "name": "MongoDB", "author": "jay"}, - {"_id": 5, "name": "MongoDB", "author": "vivek"}, - {"_id": 5, "name": "MongoDB", "author": "bjornar"} -]; - -assert.docEq(p11.cursor.firstBatch, p11result, 'p11 failed'); - -// multiply test -var p12 = db.runCommand({ - aggregate: "article", - pipeline: [{ - $project: - {theProduct: {$multiply: ["$pageViews", {$ifNull: ["$other.foo", "$other.bar"]}]}} - }], - cursor: {} -}); - -var p12result = - [{"_id": 1, "theProduct": 25}, {"_id": 2, "theProduct": 98}, {"_id": 3, "theProduct": 84}]; - -assert.docEq(p12.cursor.firstBatch, p12result, 'p12 failed'); - -// subtraction test -var p13 = db.runCommand({ - aggregate: "article", - pipeline: [{ - $project: - {theDifference: {$subtract: ["$pageViews", {$ifNull: ["$other.foo", "$other.bar"]}]}} - }], - cursor: {} -}); - -var p13result = [ - {"_id": 1, "theDifference": 0}, - {"_id": 2, "theDifference": -7}, - {"_id": 3, "theDifference": -8} -]; - -assert.docEq(p13.cursor.firstBatch, p13result, 'p13 failed'); - -// mod test -var p14 = db.runCommand({ - aggregate: "article", - pipeline: [{ - $project: { - theRemainder: { - $mod: [ - {$ifNull: ["$other.foo", "$other.bar"]}, - "$pageViews", - ] + ]; + + let firstBatch = u1.cursor.firstBatch; + assert(arrayEq(firstBatch, u1result), tojson({got: firstBatch, expected: u1result})); + + // unwind an array at the end of a dotted path + testDB.ut.drop(); + assert.writeOK(testDB.ut.insert({_id: 4, a: 1, b: {e: 7, f: [4, 3, 2, 1]}, c: 12, d: 17})); + let u2 = testDB.runCommand( + {aggregate: "ut", pipeline: [{$unwind: "$b.f"}, {$sort: {"b.f": -1}}], cursor: {}}); + + let u2result = [ + {"_id": 4, "a": 1, "b": {"e": 7, "f": 4}, "c": 12, "d": 17}, + {"_id": 4, "a": 1, "b": {"e": 7, "f": 3}, "c": 12, "d": 17}, + {"_id": 4, "a": 1, "b": {"e": 7, "f": 2}, "c": 12, "d": 17}, + {"_id": 4, "a": 1, "b": {"e": 7, "f": 1}, "c": 12, "d": 17} + ]; + + assert.docEq(u2.cursor.firstBatch, u2result, 'u2 failed'); + + // combining a projection with unwinding an array + let p2 = testDB.runCommand({ + aggregate: "article", + pipeline: [{$project: {author: 1, tags: 1, pageViews: 1}}, {$unwind: "$tags"}], + cursor: {} + }); + + let p2result = [ + {"_id": 1, "author": "bob", "pageViews": 5, "tags": "fun"}, + {"_id": 1, "author": "bob", "pageViews": 5, "tags": "good"}, + {"_id": 1, "author": "bob", "pageViews": 5, "tags": "fun"}, + {"_id": 2, "author": "dave", "pageViews": 7, "tags": "fun"}, + {"_id": 
2, "author": "dave", "pageViews": 7, "tags": "nasty"}, + {"_id": 3, "author": "jane", "pageViews": 6, "tags": "nasty"}, + {"_id": 3, "author": "jane", "pageViews": 6, "tags": "filthy"} + ]; + + firstBatch = p2.cursor.firstBatch; + assert(arrayEq(firstBatch, p2result), tojson({got: firstBatch, expected: p2result})); + + // pulling values out of subdocuments + let p3 = testDB.runCommand({ + aggregate: "article", + pipeline: [{$project: {otherfoo: "$other.foo", otherbar: "$other.bar"}}, {$sort: {_id: 1}}], + cursor: {} + }); + + let p3result = + [{"_id": 1, "otherfoo": 5}, {"_id": 2, "otherbar": 14}, {"_id": 3, "otherbar": 14}]; + + assert.docEq(p3.cursor.firstBatch, p3result, 'p3 failed'); + + // projection includes a computed value + let p4 = testDB.runCommand({ + aggregate: "article", + pipeline: + [{$project: {author: 1, daveWroteIt: {$eq: ["$author", "dave"]}}}, {$sort: {_id: 1}}], + cursor: {} + }); + + let p4result = [ + {"_id": 1, "author": "bob", "daveWroteIt": false}, + {"_id": 2, "author": "dave", "daveWroteIt": true}, + {"_id": 3, "author": "jane", "daveWroteIt": false} + ]; + + assert.docEq(p4.cursor.firstBatch, p4result, 'p4 failed'); + + // projection includes a virtual (fabricated) document + let p5 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + {$project: {author: 1, pageViews: 1, tags: 1}}, + {$unwind: "$tags"}, + {$project: {author: 1, subDocument: {foo: "$pageViews", bar: "$tags"}}} + ], + cursor: {} + }); + + let p5result = [ + {"_id": 1, "author": "bob", "subDocument": {"foo": 5, "bar": "fun"}}, + {"_id": 1, "author": "bob", "subDocument": {"foo": 5, "bar": "good"}}, + {"_id": 1, "author": "bob", "subDocument": {"foo": 5, "bar": "fun"}}, + {"_id": 2, "author": "dave", "subDocument": {"foo": 7, "bar": "fun"}}, + {"_id": 2, "author": "dave", "subDocument": {"foo": 7, "bar": "nasty"}}, + {"_id": 3, "author": "jane", "subDocument": {"foo": 6, "bar": "nasty"}}, + {"_id": 3, "author": "jane", "subDocument": {"foo": 6, "bar": "filthy"}} + ]; + + firstBatch = p5.cursor.firstBatch; + assert(arrayEq(firstBatch, p5result), tojson({got: firstBatch, expected: p5result})); + + // multi-step aggregate + // nested expressions in computed fields + let p6 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + {$project: {author: 1, tags: 1, pageViews: 1}}, + {$unwind: "$tags"}, + { + $project: { + author: 1, + tag: "$tags", + pageViews: 1, + daveWroteIt: {$eq: ["$author", "dave"]}, + weLikeIt: {$or: [{$eq: ["$author", "dave"]}, {$eq: ["$tags", "good"]}]} + } } + ], + cursor: {} + }); + + let p6result = [ + { + "_id": 1, + "author": "bob", + "pageViews": 5, + "tag": "fun", + "daveWroteIt": false, + "weLikeIt": false + }, + { + "_id": 1, + "author": "bob", + "pageViews": 5, + "tag": "good", + "daveWroteIt": false, + "weLikeIt": true + }, + { + "_id": 1, + "author": "bob", + "pageViews": 5, + "tag": "fun", + "daveWroteIt": false, + "weLikeIt": false + }, + { + "_id": 2, + "author": "dave", + "pageViews": 7, + "tag": "fun", + "daveWroteIt": true, + "weLikeIt": true + }, + { + "_id": 2, + "author": "dave", + "pageViews": 7, + "tag": "nasty", + "daveWroteIt": true, + "weLikeIt": true + }, + { + "_id": 3, + "author": "jane", + "pageViews": 6, + "tag": "nasty", + "daveWroteIt": false, + "weLikeIt": false + }, + { + "_id": 3, + "author": "jane", + "pageViews": 6, + "tag": "filthy", + "daveWroteIt": false, + "weLikeIt": false } - }], - cursor: {} -}); - -var p14result = - [{"_id": 1, "theRemainder": 0}, {"_id": 2, "theRemainder": 0}, {"_id": 3, "theRemainder": 2}]; - 
-assert.docEq(p14.cursor.firstBatch, p14result, 'p14 failed'); - -// toUpper test -var p15 = db.runCommand({ - aggregate: "article", - pipeline: [{$project: {author: {$toUpper: "$author"}, pageViews: 1}}], - cursor: {} -}); - -var p15result = [ - {"_id": 1, "author": "BOB", "pageViews": 5}, - {"_id": 2, "author": "DAVE", "pageViews": 7}, - {"_id": 3, "author": "JANE", "pageViews": 6} -]; - -assert.docEq(p15.cursor.firstBatch, p15result, 'p15 failed'); - -// toLower test -var p16 = db.runCommand({ - aggregate: "article", - pipeline: [ - {$project: {author: {$toUpper: "$author"}, pageViews: 1}}, - {$project: {author: {$toLower: "$author"}, pageViews: 1}} - ], - cursor: {} -}); - -var p16result = [ - { - "_id": 1, - "author": "bob", - "pageViews": 5, - }, - { - "_id": 2, - "author": "dave", - "pageViews": 7, - }, - { - "_id": 3, - "author": "jane", - "pageViews": 6, - } -]; - -assert.docEq(p16.cursor.firstBatch, p16result, 'p16 failed'); - -// substr test -var p17 = db.runCommand({ - aggregate: "article", - pipeline: [{ - $project: { - author: {$substrBytes: ["$author", 1, 2]}, - } - }], - cursor: {} -}); - -var p17result = - [{"_id": 1, "author": "ob"}, {"_id": 2, "author": "av"}, {"_id": 3, "author": "an"}]; - -assert.docEq(p17.cursor.firstBatch, p17result, 'p17 failed'); - -// strcasecmp test -var p18 = db.runCommand({ - aggregate: "article", - pipeline: [{ - $project: { - tags: 1, - thisisalametest: {$strcasecmp: ["foo", "bar"]}, - thisisalamepass: {$strcasecmp: ["foo", "foo"]} - } - }], - cursor: {} -}); - -var p18result = [ - {"_id": 1, "tags": ["fun", "good", "fun"], "thisisalametest": 1, "thisisalamepass": 0}, - {"_id": 2, "tags": ["fun", "nasty"], "thisisalametest": 1, "thisisalamepass": 0}, - {"_id": 3, "tags": ["nasty", "filthy"], "thisisalametest": 1, "thisisalamepass": 0} -]; - -assert.docEq(p18.cursor.firstBatch, p18result, 'p18 failed'); - -// date tests -var p19 = db.runCommand({ - aggregate: "article", - pipeline: [{ - $project: { - authors: 1, - posted: 1, - seconds: {$second: "$posted"}, - minutes: {$minute: "$posted"}, - hour: {$hour: "$posted"}, - dayOfYear: {$dayOfYear: "$posted"}, - dayOfMonth: {$dayOfMonth: "$posted"}, - dayOfWeek: {$dayOfWeek: "$posted"}, - month: {$month: "$posted"}, - week: {$week: "$posted"}, - year: {$year: "$posted"} + ]; + + firstBatch = p6.cursor.firstBatch; + assert(arrayEq(firstBatch, p6result), tojson({got: firstBatch, expected: p6result})); + + // slightly more complex computed expression; $ifNull + let p7 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + {$project: {theSum: {$add: ["$pageViews", {$ifNull: ["$other.foo", "$other.bar"]}]}}}, + {$sort: {_id: 1}} + ], + cursor: {} + }); + + let p7result = [{"_id": 1, "theSum": 10}, {"_id": 2, "theSum": 21}, {"_id": 3, "theSum": 20}]; + + assert.docEq(p7.cursor.firstBatch, p7result, 'p7 failed'); + + // dotted path inclusion; _id exclusion + let p8 = testDB.runCommand({ + aggregate: "article", + pipeline: + [{$project: {_id: 0, author: 1, tags: 1, "comments.author": 1}}, {$unwind: "$tags"}], + cursor: {} + }); + + let p8result = [ + {"author": "bob", "tags": "fun", "comments": [{"author": "joe"}, {"author": "sam"}]}, + {"author": "bob", "tags": "good", "comments": [{"author": "joe"}, {"author": "sam"}]}, + {"author": "bob", "tags": "fun", "comments": [{"author": "joe"}, {"author": "sam"}]}, + {"author": "dave", "tags": "fun", "comments": [{"author": "barbara"}, {"author": "jenny"}]}, + { + "author": "dave", + "tags": "nasty", + "comments": [{"author": "barbara"}, {"author": 
"jenny"}] + }, + {"author": "jane", "tags": "nasty", "comments": [{"author": "will"}, {"author": "jenny"}]}, + { + "author": "jane", + "tags": "filthy", + "comments": [{"author": "will"}, {"author": "jenny"}] } - }], - cursor: {} -}); - -var p19result = [ - { - "_id": 1, - "posted": ISODate("2004-03-21T18:59:54Z"), - "seconds": 54, - "minutes": 59, - "hour": 18, - "dayOfYear": 81, - "dayOfMonth": 21, - "dayOfWeek": 1, - "month": 3, - "week": 12, - "year": 2004, - }, - { - "_id": 2, - "posted": ISODate("2030-08-08T04:11:10Z"), - "seconds": 10, - "minutes": 11, - "hour": 4, - "dayOfYear": 220, - "dayOfMonth": 8, - "dayOfWeek": 5, - "month": 8, - "week": 31, - "year": 2030, - }, - { - "_id": 3, - "posted": ISODate("2000-12-31T05:17:14Z"), - "seconds": 14, - "minutes": 17, - "hour": 5, - "dayOfYear": 366, - "dayOfMonth": 31, - "dayOfWeek": 1, - "month": 12, - "week": 53, - "year": 2000, - } -]; - -assert.docEq(p19.cursor.firstBatch, p19result, 'p19 failed'); - -db.vartype.drop(); -db.vartype.save({x: 17, y: "foo"}); - -// ternary conditional operator -var p21 = db.runCommand({ - aggregate: "article", - pipeline: [{ - $project: { - _id: 0, - author: 1, - pageViews: { - $cond: [{$eq: ["$author", "dave"]}, {$add: ["$pageViews", 1000]}, "$pageViews"] - } + ]; + + firstBatch = p8.cursor.firstBatch; + assert(arrayEq(firstBatch, p8result), tojson({got: firstBatch, expected: p8result})); + + // collapse a dotted path with an intervening array + let p9 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + {$project: {_id: 0, author: 1, commentsAuthor: "$comments.author"}}, + {$sort: {author: 1}} + ], + cursor: {} + }); + + let p9result = [ + {"author": "bob", "commentsAuthor": ["joe", "sam"]}, + {"author": "dave", "commentsAuthor": ["barbara", "jenny"]}, + {"author": "jane", "commentsAuthor": ["will", "jenny"]} + ]; + + assert.docEq(p9.cursor.firstBatch, p9result, 'p9 failed'); + + // simple sort + let p10 = + testDB.runCommand({aggregate: "article", pipeline: [{$sort: {title: 1}}], cursor: {}}); + + let p10result = [ + { + "_id": 1, + "title": "this is my title", + "author": "bob", + "posted": ISODate("2004-03-21T18:59:54Z"), + "pageViews": 5, + "tags": ["fun", "good", "fun"], + "comments": + [{"author": "joe", "text": "this is cool"}, {"author": "sam", "text": "this is bad"}], + "other": {"foo": 5} + }, + { + "_id": 3, + "title": "this is some other title", + "author": "jane", + "posted": ISODate("2000-12-31T05:17:14Z"), + "pageViews": 6, + "tags": ["nasty", "filthy"], + "comments": [ + {"author": "will", "text": "i don't like the color"}, + {"author": "jenny", "text": "can i get that in green?"} + ], + "other": {"bar": 14} + }, + { + "_id": 2, + "title": "this is your title", + "author": "dave", + "posted": ISODate("2030-08-08T04:11:10Z"), + "pageViews": 7, + "tags": ["fun", "nasty"], + "comments": [ + {"author": "barbara", "text": "this is interesting"}, + {"author": "jenny", "text": "i like to play pinball", "votes": 10} + ], + "other": {"bar": 14} } - }], - cursor: {} -}); - -var p21result = [ - {"author": "bob", "pageViews": 5}, - {"author": "dave", "pageViews": 1007}, - {"author": "jane", "pageViews": 6} -]; - -assert.docEq(p21.cursor.firstBatch, p21result, 'p21 failed'); - -// simple matching -var m1 = db.runCommand({aggregate: "article", pipeline: [{$match: {author: "dave"}}], cursor: {}}); - -var m1result = [{ - "_id": 2, - "title": "this is your title", - "author": "dave", - "posted": ISODate("2030-08-08T04:11:10Z"), - "pageViews": 7, - "tags": ["fun", "nasty"], - "comments": [ - 
{"author": "barbara", "text": "this is interesting"}, - {"author": "jenny", "text": "i like to play pinball", "votes": 10} - ], - "other": {"bar": 14} -}]; - -assert.docEq(m1.cursor.firstBatch, m1result, 'm1 failed'); - -// combining matching with a projection -var m2 = db.runCommand({ - aggregate: "article", - pipeline: [ - {$project: {title: 1, author: 1, pageViews: 1, tags: 1, comments: 1}}, - {$unwind: "$tags"}, - {$match: {tags: "nasty"}} - ], - cursor: {} -}); - -var m2result = [ - { - "_id": 2, - "title": "this is your title", - "author": "dave", - "pageViews": 7, - "tags": "nasty", - "comments": [ - {"author": "barbara", "text": "this is interesting"}, - {"author": "jenny", "text": "i like to play pinball", "votes": 10} - ] - }, - { - "_id": 3, - "title": "this is some other title", - "author": "jane", - "pageViews": 6, - "tags": "nasty", - "comments": [ - {"author": "will", "text": "i don't like the color"}, - {"author": "jenny", "text": "can i get that in green?"} - ] - } -]; - -assert.docEq(m2.cursor.firstBatch, m2result, 'm2 failed'); - -// group by tag, _id is a field reference -var g1 = db.runCommand({ - aggregate: "article", - pipeline: [ - {$project: {author: 1, tags: 1, pageViews: 1}}, - {$unwind: "$tags"}, - {$group: {_id: "$tags", docsByTag: {$sum: 1}, viewsByTag: {$sum: "$pageViews"}}}, - {$sort: {'_id': 1}} - ], - cursor: {} -}); - -var g1result = [ - {"_id": "filthy", "docsByTag": 1, "viewsByTag": 6}, - {"_id": "fun", "docsByTag": 3, "viewsByTag": 17}, - {"_id": "good", "docsByTag": 1, "viewsByTag": 5}, - {"_id": "nasty", "docsByTag": 2, "viewsByTag": 13}, -]; - -assert.docEq(g1.cursor.firstBatch, g1result, 'g1 failed'); - -// $max, and averaging in a final projection; _id is structured -var g2 = db.runCommand({ - aggregate: "article", - pipeline: [ - {$project: {author: 1, tags: 1, pageViews: 1}}, - {$unwind: "$tags"}, + ]; + + assert.docEq(p10.cursor.firstBatch, p10result, 'p10 failed'); + + // unwind on nested array + testDB.p11.drop(); + testDB.p11.save({ + _id: 5, + name: 'MongoDB', + items: {authors: ['jay', 'vivek', 'bjornar'], dbg: [17, 42]}, + favorites: ['pickles', 'ice cream', 'kettle chips'] + }); + + let p11 = testDB.runCommand({ + aggregate: "p11", + pipeline: [ + {$unwind: "$items.authors"}, + {$project: {name: 1, author: "$items.authors"}}, + {$sort: {author: 1}} + + ], + cursor: {} + }); + + let p11result = [ + {"_id": 5, "name": "MongoDB", "author": "bjornar"}, + {"_id": 5, "name": "MongoDB", "author": "jay"}, + {"_id": 5, "name": "MongoDB", "author": "vivek"}, + ]; + + assert.docEq(p11.cursor.firstBatch, p11result, 'p11 failed'); + + // multiply test + let p12 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + { + $project: { + theProduct: {$multiply: ["$pageViews", {$ifNull: ["$other.foo", "$other.bar"]}]} + }, + }, + {$sort: {_id: 1}} + ], + cursor: {} + }); + + let p12result = + [{"_id": 1, "theProduct": 25}, {"_id": 2, "theProduct": 98}, {"_id": 3, "theProduct": 84}]; + + assert.docEq(p12.cursor.firstBatch, p12result, 'p12 failed'); + + // subtraction test + let p13 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + { + $project: { + theDifference: + {$subtract: ["$pageViews", {$ifNull: ["$other.foo", "$other.bar"]}]} + } + }, + {$sort: {_id: 1}} + ], + cursor: {} + }); + + let p13result = [ + {"_id": 1, "theDifference": 0}, + {"_id": 2, "theDifference": -7}, + {"_id": 3, "theDifference": -8} + ]; + + assert.docEq(p13.cursor.firstBatch, p13result, 'p13 failed'); + + // mod test + let p14 = testDB.runCommand({ + aggregate: 
"article", + pipeline: [ + { + $project: { + theRemainder: { + $mod: [ + {$ifNull: ["$other.foo", "$other.bar"]}, + "$pageViews", + ] + } + } + }, + {$sort: {_id: 1}} + ], + cursor: {} + }); + + let p14result = [ + {"_id": 1, "theRemainder": 0}, + {"_id": 2, "theRemainder": 0}, + {"_id": 3, "theRemainder": 2} + ]; + + assert.docEq(p14.cursor.firstBatch, p14result, 'p14 failed'); + + // toUpper test + let p15 = testDB.runCommand({ + aggregate: "article", + pipeline: [{$project: {author: {$toUpper: "$author"}, pageViews: 1}}, {$sort: {_id: 1}}], + cursor: {} + }); + + let p15result = [ + {"_id": 1, "author": "BOB", "pageViews": 5}, + {"_id": 2, "author": "DAVE", "pageViews": 7}, + {"_id": 3, "author": "JANE", "pageViews": 6} + ]; + + assert.docEq(p15.cursor.firstBatch, p15result, 'p15 failed'); + + // toLower test + let p16 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + {$project: {author: {$toUpper: "$author"}, pageViews: 1}}, + {$project: {author: {$toLower: "$author"}, pageViews: 1}}, + {$sort: {_id: 1}} + ], + cursor: {} + }); + + let p16result = [ { - $group: { - _id: {tags: "$tags"}, - docsByTag: {$sum: 1}, - viewsByTag: {$sum: "$pageViews"}, - mostViewsByTag: {$max: "$pageViews"}, - } + "_id": 1, + "author": "bob", + "pageViews": 5, }, { - $project: { - _id: false, - tag: "$_id.tags", - mostViewsByTag: 1, - docsByTag: 1, - viewsByTag: 1, - avgByTag: {$divide: ["$viewsByTag", "$docsByTag"]} - } + "_id": 2, + "author": "dave", + "pageViews": 7, }, - {$sort: {'docsByTag': 1, 'viewsByTag': 1}} - ], - cursor: {} -}); - -var g2result = [ - {"docsByTag": 1, "viewsByTag": 5, "mostViewsByTag": 5, "tag": "good", "avgByTag": 5}, - {"docsByTag": 1, "viewsByTag": 6, "mostViewsByTag": 6, "tag": "filthy", "avgByTag": 6}, - {"docsByTag": 2, "viewsByTag": 13, "mostViewsByTag": 7, "tag": "nasty", "avgByTag": 6.5}, - { - "docsByTag": 3, - "viewsByTag": 17, - "mostViewsByTag": 7, - "tag": "fun", - "avgByTag": 5.666666666666667 - } -]; - -assert.docEq(g2.cursor.firstBatch, g2result, 'g2 failed'); - -// $push as an accumulator; can pivot data -var g3 = db.runCommand({ - aggregate: "article", - pipeline: [ { - $project: { - author: 1, - tags: 1, - } + "_id": 3, + "author": "jane", + "pageViews": 6, + } + ]; + + assert.docEq(p16.cursor.firstBatch, p16result, 'p16 failed'); + + // substr test + let p17 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + { + $project: { + author: {$substrBytes: ["$author", 1, 2]}, + } + }, + {$sort: {_id: 1}} + ], + cursor: {} + }); + + let p17result = + [{"_id": 1, "author": "ob"}, {"_id": 2, "author": "av"}, {"_id": 3, "author": "an"}]; + + assert.docEq(p17.cursor.firstBatch, p17result, 'p17 failed'); + + // strcasecmp test + let p18 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + { + $project: { + tags: 1, + thisisalametest: {$strcasecmp: ["foo", "bar"]}, + thisisalamepass: {$strcasecmp: ["foo", "foo"]} + } + }, + {$sort: {_id: 1}} + ], + cursor: {} + }); + + let p18result = [ + {"_id": 1, "tags": ["fun", "good", "fun"], "thisisalametest": 1, "thisisalamepass": 0}, + {"_id": 2, "tags": ["fun", "nasty"], "thisisalametest": 1, "thisisalamepass": 0}, + {"_id": 3, "tags": ["nasty", "filthy"], "thisisalametest": 1, "thisisalamepass": 0} + ]; + + assert.docEq(p18.cursor.firstBatch, p18result, 'p18 failed'); + + // date tests + let p19 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + { + $project: { + authors: 1, + posted: 1, + seconds: {$second: "$posted"}, + minutes: {$minute: "$posted"}, + hour: {$hour: "$posted"}, + 
dayOfYear: {$dayOfYear: "$posted"}, + dayOfMonth: {$dayOfMonth: "$posted"}, + dayOfWeek: {$dayOfWeek: "$posted"}, + month: {$month: "$posted"}, + week: {$week: "$posted"}, + year: {$year: "$posted"} + } + }, + {$sort: {_id: 1}} + ], + cursor: {} + }); + + let p19result = [ + { + "_id": 1, + "posted": ISODate("2004-03-21T18:59:54Z"), + "seconds": 54, + "minutes": 59, + "hour": 18, + "dayOfYear": 81, + "dayOfMonth": 21, + "dayOfWeek": 1, + "month": 3, + "week": 12, + "year": 2004, }, - {$unwind: "$tags"}, - {$group: {_id: {tags: "$tags"}, authors: {$push: "$author"}}}, - {$sort: {'_id': 1}} - ], - cursor: {} -}); - -var g3result = [ - {"_id": {"tags": "filthy"}, "authors": ["jane"]}, - {"_id": {"tags": "fun"}, "authors": ["bob", "bob", "dave"]}, - {"_id": {"tags": "good"}, "authors": ["bob"]}, - {"_id": {"tags": "nasty"}, "authors": ["dave", "jane"]} -]; - -assert.docEq(g3.cursor.firstBatch, g3result, 'g3 failed'); - -// $avg, and averaging in a final projection -var g4 = db.runCommand({ - aggregate: "article", - pipeline: [ - {$project: {author: 1, tags: 1, pageViews: 1}}, - {$unwind: "$tags"}, { - $group: { - _id: {tags: "$tags"}, - docsByTag: {$sum: 1}, - viewsByTag: {$sum: "$pageViews"}, - avgByTag: {$avg: "$pageViews"}, - } + "_id": 2, + "posted": ISODate("2030-08-08T04:11:10Z"), + "seconds": 10, + "minutes": 11, + "hour": 4, + "dayOfYear": 220, + "dayOfMonth": 8, + "dayOfWeek": 5, + "month": 8, + "week": 31, + "year": 2030, }, - {$sort: {'_id': 1}} - ], - cursor: {} -}); - -var g4result = [ - {"_id": {"tags": "filthy"}, "docsByTag": 1, "viewsByTag": 6, "avgByTag": 6}, - {"_id": {"tags": "fun"}, "docsByTag": 3, "viewsByTag": 17, "avgByTag": 5.666666666666667}, - {"_id": {"tags": "good"}, "docsByTag": 1, "viewsByTag": 5, "avgByTag": 5}, - {"_id": {"tags": "nasty"}, "docsByTag": 2, "viewsByTag": 13, "avgByTag": 6.5} -]; - -assert.docEq(g4.cursor.firstBatch, g4result, 'g4 failed'); - -// $addToSet as an accumulator; can pivot data -var g5 = db.runCommand({ - aggregate: "article", - pipeline: [ { - $project: { - author: 1, - tags: 1, - } + "_id": 3, + "posted": ISODate("2000-12-31T05:17:14Z"), + "seconds": 14, + "minutes": 17, + "hour": 5, + "dayOfYear": 366, + "dayOfMonth": 31, + "dayOfWeek": 1, + "month": 12, + "week": 53, + "year": 2000, + } + ]; + + assert.docEq(p19.cursor.firstBatch, p19result, 'p19 failed'); + + testDB.lettype.drop(); + testDB.lettype.save({x: 17, y: "foo"}); + + // ternary conditional operator + let p21 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + { + $project: { + _id: 0, + author: 1, + pageViews: { + $cond: [ + {$eq: ["$author", "dave"]}, + {$add: ["$pageViews", 1000]}, + "$pageViews" + ] + } + } + }, + {$sort: {author: 1}} + ], + cursor: {} + }); + + let p21result = [ + {"author": "bob", "pageViews": 5}, + {"author": "dave", "pageViews": 1007}, + {"author": "jane", "pageViews": 6} + ]; + + assert.docEq(p21.cursor.firstBatch, p21result, 'p21 failed'); + + // simple matching + let m1 = testDB.runCommand( + {aggregate: "article", pipeline: [{$match: {author: "dave"}}], cursor: {}}); + + let m1result = [{ + "_id": 2, + "title": "this is your title", + "author": "dave", + "posted": ISODate("2030-08-08T04:11:10Z"), + "pageViews": 7, + "tags": ["fun", "nasty"], + "comments": [ + {"author": "barbara", "text": "this is interesting"}, + {"author": "jenny", "text": "i like to play pinball", "votes": 10} + ], + "other": {"bar": 14} + }]; + + assert.docEq(m1.cursor.firstBatch, m1result, 'm1 failed'); + + // combining matching with a projection + let m2 = 
testDB.runCommand({ + aggregate: "article", + pipeline: [ + {$project: {title: 1, author: 1, pageViews: 1, tags: 1, comments: 1}}, + {$unwind: "$tags"}, + {$match: {tags: "nasty"}}, + {$sort: {_id: 1}} + ], + cursor: {} + }); + + let m2result = [ + { + "_id": 2, + "title": "this is your title", + "author": "dave", + "pageViews": 7, + "tags": "nasty", + "comments": [ + {"author": "barbara", "text": "this is interesting"}, + {"author": "jenny", "text": "i like to play pinball", "votes": 10} + ] }, - {$unwind: "$tags"}, - {$group: {_id: {tags: "$tags"}, authors: {$addToSet: "$author"}}}, - {$sort: {'_id': 1}} - ], - cursor: {} -}); - -// $addToSet doesn't guarantee order so we shouldn't test for it. -g5.cursor.firstBatch.forEach(function(obj) { - obj.authors.sort(); -}); - -var g5result = [ - {"_id": {"tags": "filthy"}, "authors": ["jane"]}, - { - "_id": {"tags": "fun"}, - "authors": [ - "bob", - "dave", - ] - }, - {"_id": {"tags": "good"}, "authors": ["bob"]}, - { - "_id": {"tags": "nasty"}, - "authors": [ - "dave", - "jane", - ] - } -]; - -assert.docEq(g5.cursor.firstBatch, g5result, 'g5 failed'); - -// $first and $last accumulators, constant _id -var g6 = db.runCommand({ - aggregate: "article", - pipeline: [ - {$sort: {author: -1}}, { - $group: { - _id: "authors", /* constant string, *not* a field reference */ - firstAuthor: {$last: "$author"}, /* note reverse sort above */ - lastAuthor: {$first: "$author"}, /* note reverse sort above */ - count: {$sum: 1} - } + "_id": 3, + "title": "this is some other title", + "author": "jane", + "pageViews": 6, + "tags": "nasty", + "comments": [ + {"author": "will", "text": "i don't like the color"}, + {"author": "jenny", "text": "can i get that in green?"} + ] } - ], - cursor: {} -}); - -var g6result = [{"_id": "authors", firstAuthor: "bob", lastAuthor: "jane", count: 3}]; - -// Test unwind on an unused field -var g7 = db.runCommand({ - aggregate: "article", - pipeline: [ - {$unwind: '$tags'}, + ]; + + assert.docEq(m2.cursor.firstBatch, m2result, 'm2 failed'); + + // group by tag, _id is a field reference + let g1 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + {$project: {author: 1, tags: 1, pageViews: 1}}, + {$unwind: "$tags"}, + {$group: {_id: "$tags", docsByTag: {$sum: 1}, viewsByTag: {$sum: "$pageViews"}}}, + {$sort: {'_id': 1}} + ], + cursor: {} + }); + + let g1result = [ + {"_id": "filthy", "docsByTag": 1, "viewsByTag": 6}, + {"_id": "fun", "docsByTag": 3, "viewsByTag": 17}, + {"_id": "good", "docsByTag": 1, "viewsByTag": 5}, + {"_id": "nasty", "docsByTag": 2, "viewsByTag": 13}, + ]; + + assert.docEq(g1.cursor.firstBatch, g1result, 'g1 failed'); + + // $max, and averaging in a final projection; _id is structured + let g2 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + {$project: {author: 1, tags: 1, pageViews: 1}}, + {$unwind: "$tags"}, + { + $group: { + _id: {tags: "$tags"}, + docsByTag: {$sum: 1}, + viewsByTag: {$sum: "$pageViews"}, + mostViewsByTag: {$max: "$pageViews"}, + } + }, + { + $project: { + _id: false, + tag: "$_id.tags", + mostViewsByTag: 1, + docsByTag: 1, + viewsByTag: 1, + avgByTag: {$divide: ["$viewsByTag", "$docsByTag"]} + } + }, + {$sort: {'docsByTag': 1, 'viewsByTag': 1}} + ], + cursor: {} + }); + + let g2result = [ + {"docsByTag": 1, "viewsByTag": 5, "mostViewsByTag": 5, "tag": "good", "avgByTag": 5}, + {"docsByTag": 1, "viewsByTag": 6, "mostViewsByTag": 6, "tag": "filthy", "avgByTag": 6}, + {"docsByTag": 2, "viewsByTag": 13, "mostViewsByTag": 7, "tag": "nasty", "avgByTag": 6.5}, { - $group: { - 
_id: "tag_count", /* constant string, *not* a field reference */ - count: {$sum: 1} - } + "docsByTag": 3, + "viewsByTag": 17, + "mostViewsByTag": 7, + "tag": "fun", + "avgByTag": 5.666666666666667 } - ], - cursor: {} -}); -assert.eq(g7.cursor.firstBatch[0].count, 7); + ]; + + assert.docEq(g2.cursor.firstBatch, g2result, 'g2 failed'); + + // $push as an accumulator; can pivot data + let g3 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + { + $project: { + author: 1, + tags: 1, + } + }, + {$unwind: "$tags"}, + {$sort: {author: 1}}, + {$group: {_id: {tags: "$tags"}, authors: {$push: "$author"}}}, + {$sort: {'_id': 1}} + ], + cursor: {} + }); + + let g3result = [ + {"_id": {"tags": "filthy"}, "authors": ["jane"]}, + {"_id": {"tags": "fun"}, "authors": ["bob", "bob", "dave"]}, + {"_id": {"tags": "good"}, "authors": ["bob"]}, + {"_id": {"tags": "nasty"}, "authors": ["dave", "jane"]} + ]; + + assert.docEq(g3.cursor.firstBatch, g3result, 'g3 failed'); + + // $avg, and averaging in a final projection + let g4 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + {$project: {author: 1, tags: 1, pageViews: 1}}, + {$unwind: "$tags"}, + { + $group: { + _id: {tags: "$tags"}, + docsByTag: {$sum: 1}, + viewsByTag: {$sum: "$pageViews"}, + avgByTag: {$avg: "$pageViews"}, + } + }, + {$sort: {'_id': 1}} + ], + cursor: {} + }); + + let g4result = [ + {"_id": {"tags": "filthy"}, "docsByTag": 1, "viewsByTag": 6, "avgByTag": 6}, + {"_id": {"tags": "fun"}, "docsByTag": 3, "viewsByTag": 17, "avgByTag": 5.666666666666667}, + {"_id": {"tags": "good"}, "docsByTag": 1, "viewsByTag": 5, "avgByTag": 5}, + {"_id": {"tags": "nasty"}, "docsByTag": 2, "viewsByTag": 13, "avgByTag": 6.5} + ]; + + assert.docEq(g4.cursor.firstBatch, g4result, 'g4 failed'); + + // $addToSet as an accumulator; can pivot data + let g5 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + { + $project: { + author: 1, + tags: 1, + } + }, + {$unwind: "$tags"}, + {$group: {_id: {tags: "$tags"}, authors: {$addToSet: "$author"}}}, + {$sort: {'_id': 1}} + ], + cursor: {} + }); + + // $addToSet doesn't guarantee order so we shouldn't test for it. + g5.cursor.firstBatch.forEach(function(obj) { + obj.authors.sort(); + }); + + let g5result = [ + {"_id": {"tags": "filthy"}, "authors": ["jane"]}, + { + "_id": {"tags": "fun"}, + "authors": [ + "bob", + "dave", + ] + }, + {"_id": {"tags": "good"}, "authors": ["bob"]}, + { + "_id": {"tags": "nasty"}, + "authors": [ + "dave", + "jane", + ] + } + ]; + + assert.docEq(g5.cursor.firstBatch, g5result, 'g5 failed'); + + // $first and $last accumulators, constant _id + let g6 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + {$sort: {author: -1}}, + { + $group: { + _id: "authors", /* constant string, *not* a field reference */ + firstAuthor: {$last: "$author"}, /* note reverse sort above */ + lastAuthor: {$first: "$author"}, /* note reverse sort above */ + count: {$sum: 1} + } + } + ], + cursor: {} + }); + + let g6result = [{"_id": "authors", firstAuthor: "bob", lastAuthor: "jane", count: 3}]; + + // Test unwind on an unused field + let g7 = testDB.runCommand({ + aggregate: "article", + pipeline: [ + {$unwind: '$tags'}, + { + $group: { + _id: "tag_count", /* constant string, *not* a field reference */ + count: {$sum: 1} + } + } + ], + cursor: {} + }); + assert.eq(g7.cursor.firstBatch[0].count, 7); +}()); |