author     Mathias Stearn <mathias@10gen.com>  2012-06-21 18:10:49 -0400
committer  Mathias Stearn <mathias@10gen.com>  2012-06-29 13:41:23 -0400
commit     22ec5a0e0fc388b2226d51b41b1657367f5f1438 (patch)
tree       c224622799baf97fb4d417b3e7dead14c27e5912
parent     eb7e375dd3c6331f0cc30a42fb6cb86d557b68ac (diff)
download   mongo-22ec5a0e0fc388b2226d51b41b1657367f5f1438.tar.gz
Convert to unix line endings
Sorry about the huge diff, but it contains only line-ending changes, so you can skip it.
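
The conversion itself is easy to script; the commit does not record how it was done, but as a rough sketch of the idea (a hypothetical Node.js-style helper, not the tool actually used here), each affected file can simply have its CRLF sequences rewritten as LF:

    // crlf_to_lf.js -- hypothetical sketch, not part of this commit
    // Rewrites CRLF line endings as LF in each file passed on the command line.
    var fs = require('fs');
    process.argv.slice(2).forEach(function (path) {
        var text = fs.readFileSync(path, 'utf8');             // read the whole file as UTF-8 text
        fs.writeFileSync(path, text.replace(/\r\n/g, '\n'));  // write it back with unix line endings
    });

Invoked as, for example, node crlf_to_lf.js jstests/aggregation/testall.js, it rewrites each file in place; files that are already LF-only come out byte-identical.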
-rw-r--r--  jstests/aggregation/testall.js  3198
-rwxr-xr-x  src/mongo/db/commands/document_source_cursor.cpp  372
-rwxr-xr-x  src/mongo/db/commands/pipeline.cpp  976
-rwxr-xr-x  src/mongo/db/commands/pipeline_command.cpp  602
-rwxr-xr-x  src/mongo/db/pipeline/accumulator.cpp  158
-rwxr-xr-x  src/mongo/db/pipeline/accumulator.h  516
-rwxr-xr-x  src/mongo/db/pipeline/accumulator_add_to_set.cpp  158
-rwxr-xr-x  src/mongo/db/pipeline/accumulator_avg.cpp  246
-rwxr-xr-x  src/mongo/db/pipeline/accumulator_first.cpp  98
-rwxr-xr-x  src/mongo/db/pipeline/accumulator_last.cpp  96
-rwxr-xr-x  src/mongo/db/pipeline/accumulator_min_max.cpp  134
-rwxr-xr-x  src/mongo/db/pipeline/accumulator_push.cpp  146
-rwxr-xr-x  src/mongo/db/pipeline/accumulator_single_value.cpp  64
-rwxr-xr-x  src/mongo/db/pipeline/accumulator_sum.cpp  148
-rwxr-xr-x  src/mongo/db/pipeline/builder.cpp  234
-rwxr-xr-x  src/mongo/db/pipeline/builder.h  190
-rwxr-xr-x  src/mongo/db/pipeline/doc_mem_monitor.cpp  136
-rwxr-xr-x  src/mongo/db/pipeline/doc_mem_monitor.h  188
-rwxr-xr-x  src/mongo/db/pipeline/document.cpp  454
-rwxr-xr-x  src/mongo/db/pipeline/document_source.cpp  172
-rwxr-xr-x  src/mongo/db/pipeline/document_source.h  2528
-rwxr-xr-x  src/mongo/db/pipeline/document_source_bson_array.cpp  186
-rwxr-xr-x  src/mongo/db/pipeline/document_source_command_futures.cpp  274
-rwxr-xr-x  src/mongo/db/pipeline/document_source_filter.cpp  210
-rwxr-xr-x  src/mongo/db/pipeline/document_source_filter_base.cpp  178
-rwxr-xr-x  src/mongo/db/pipeline/document_source_group.cpp  796
-rwxr-xr-x  src/mongo/db/pipeline/document_source_match.cpp  188
-rwxr-xr-x  src/mongo/db/pipeline/document_source_out.cpp  134
-rwxr-xr-x  src/mongo/db/pipeline/document_source_project.cpp  578
-rwxr-xr-x  src/mongo/db/pipeline/document_source_sort.cpp  452
-rwxr-xr-x  src/mongo/db/pipeline/document_source_unwind.cpp  498
-rwxr-xr-x  src/mongo/db/pipeline/expression.h  2764
-rwxr-xr-x  src/mongo/db/pipeline/expression_context.cpp  96
-rwxr-xr-x  src/mongo/db/pipeline/expression_context.h  156
-rwxr-xr-x  src/mongo/db/pipeline/field_path.cpp  180
-rwxr-xr-x  src/mongo/db/pipeline/field_path.h  218
-rwxr-xr-x  src/mongo/db/pipeline/value.h  938
-rwxr-xr-x  src/mongo/util/intrusive_counter.cpp  60
-rwxr-xr-x  src/mongo/util/intrusive_counter.h  158
-rwxr-xr-x  src/mongo/util/string_writer.h  56
40 files changed, 9467 insertions(+), 9467 deletions(-)
diff --git a/jstests/aggregation/testall.js b/jstests/aggregation/testall.js
index bdf3c72e613..f503e179982 100644
--- a/jstests/aggregation/testall.js
+++ b/jstests/aggregation/testall.js
@@ -1,1599 +1,1599 @@
-/*
- Run all the aggregation tests
-*/
-
-/* load the test documents */
-load('jstests/aggregation/data/articles.js');
-
-/* load the test utilities */
-load('jstests/aggregation/extras/utils.js');
-
-// make sure we're using the right db; this is the same as "use mydb;" in shell
-db = db.getSiblingDB("aggdb");
-
-
-// just passing through fields
-var p1 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- tags : 1,
- pageViews : 1
- }}
-]});
-
-var p1result = [
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
- "pageViews" : 5,
- "tags" : [
- "fun",
- "good",
- "fun"
- ]
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
- "pageViews" : 7,
- "tags" : [
- "fun",
- "nasty"
- ]
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
- "pageViews" : 6,
- "tags" : [
- "nasty",
- "filthy"
- ]
- }
-];
-
-assert(arrayEq(p1.result, p1result), 'p1 failed');
-
-
-// a simple array unwinding
-var u1 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $unwind : "$tags" }
-]});
-
-var u1result = [
- {
- "_id" : ObjectId("4e7bdfa4909a512bf221a8fe"),
- "title" : "this is my title",
- "author" : "bob",
- "posted" : ISODate("2004-03-21T18:59:54Z"),
- "pageViews" : 5,
- "tags" : "fun",
- "comments" : [
- {
- "author" : "joe",
- "text" : "this is cool"
- },
- {
- "author" : "sam",
- "text" : "this is bad"
- }
- ],
- "other" : {
- "foo" : 5
- }
- },
- {
- "_id" : ObjectId("4e7bdfa4909a512bf221a8fe"),
- "title" : "this is my title",
- "author" : "bob",
- "posted" : ISODate("2004-03-21T18:59:54Z"),
- "pageViews" : 5,
- "tags" : "good",
- "comments" : [
- {
- "author" : "joe",
- "text" : "this is cool"
- },
- {
- "author" : "sam",
- "text" : "this is bad"
- }
- ],
- "other" : {
- "foo" : 5
- }
- },
- {
- "_id" : ObjectId("4e7bdfa4909a512bf221a8fe"),
- "title" : "this is my title",
- "author" : "bob",
- "posted" : ISODate("2004-03-21T18:59:54Z"),
- "pageViews" : 5,
- "tags" : "fun",
- "comments" : [
- {
- "author" : "joe",
- "text" : "this is cool"
- },
- {
- "author" : "sam",
- "text" : "this is bad"
- }
- ],
- "other" : {
- "foo" : 5
- }
- },
- {
- "_id" : ObjectId("4e7bdfa4909a512bf221a8ff"),
- "title" : "this is your title",
- "author" : "dave",
- "posted" : ISODate("2100-08-08T04:11:10Z"),
- "pageViews" : 7,
- "tags" : "fun",
- "comments" : [
- {
- "author" : "barbara",
- "text" : "this is interesting"
- },
- {
- "author" : "jenny",
- "text" : "i like to play pinball",
- "votes" : 10
- }
- ],
- "other" : {
- "bar" : 14
- }
- },
- {
- "_id" : ObjectId("4e7bdfa4909a512bf221a8ff"),
- "title" : "this is your title",
- "author" : "dave",
- "posted" : ISODate("2100-08-08T04:11:10Z"),
- "pageViews" : 7,
- "tags" : "nasty",
- "comments" : [
- {
- "author" : "barbara",
- "text" : "this is interesting"
- },
- {
- "author" : "jenny",
- "text" : "i like to play pinball",
- "votes" : 10
- }
- ],
- "other" : {
- "bar" : 14
- }
- },
- {
- "_id" : ObjectId("4e7bdfa4909a512bf221a900"),
- "title" : "this is some other title",
- "author" : "jane",
- "posted" : ISODate("2000-12-31T05:17:14Z"),
- "pageViews" : 6,
- "tags" : "nasty",
- "comments" : [
- {
- "author" : "will",
- "text" : "i don't like the color"
- },
- {
- "author" : "jenny",
- "text" : "can i get that in green?"
- }
- ],
- "other" : {
- "bar" : 14
- }
- },
- {
- "_id" : ObjectId("4e7bdfa4909a512bf221a900"),
- "title" : "this is some other title",
- "author" : "jane",
- "posted" : ISODate("2000-12-31T05:17:14Z"),
- "pageViews" : 6,
- "tags" : "filthy",
- "comments" : [
- {
- "author" : "will",
- "text" : "i don't like the color"
- },
- {
- "author" : "jenny",
- "text" : "can i get that in green?"
- }
- ],
- "other" : {
- "bar" : 14
- }
- }
-];
-
-assert(arrayEq(u1.result, u1result), 'u1 failed');
-
-// unwind an array at the end of a dotted path
-db.ut.drop();
-db.ut.save({a:1, b:{e:7, f:[4, 3, 2, 1]}, c:12, d:17});
-var u2 = db.runCommand(
-{ aggregate : "ut", pipeline : [
- { $unwind : "$b.f" }
-]});
-
-var u2result = [
- {
- "_id" : ObjectId("4e7be21a702bfc656111df9b"),
- "a" : 1,
- "b" : {
- "e" : 7,
- "f" : 4
- },
- "c" : 12,
- "d" : 17
- },
- {
- "_id" : ObjectId("4e7be21a702bfc656111df9b"),
- "a" : 1,
- "b" : {
- "e" : 7,
- "f" : 3
- },
- "c" : 12,
- "d" : 17
- },
- {
- "_id" : ObjectId("4e7be21a702bfc656111df9b"),
- "a" : 1,
- "b" : {
- "e" : 7,
- "f" : 2
- },
- "c" : 12,
- "d" : 17
- },
- {
- "_id" : ObjectId("4e7be21a702bfc656111df9b"),
- "a" : 1,
- "b" : {
- "e" : 7,
- "f" : 1
- },
- "c" : 12,
- "d" : 17
- }
-];
-
-assert(arrayEq(u2.result, u2result), 'u2 failed');
-
-
-// combining a projection with unwinding an array
-var p2 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- tags : 1,
- pageViews : 1
- }},
- { $unwind : "$tags" }
-]});
-
-var p2result = [
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
- "author" : "bob",
- "pageViews" : 5,
- "tags" : "fun"
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
- "author" : "bob",
- "pageViews" : 5,
- "tags" : "good"
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
- "author" : "bob",
- "pageViews" : 5,
- "tags" : "fun"
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
- "author" : "dave",
- "pageViews" : 7,
- "tags" : "fun"
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
- "author" : "dave",
- "pageViews" : 7,
- "tags" : "nasty"
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
- "author" : "jane",
- "pageViews" : 6,
- "tags" : "nasty"
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
- "author" : "jane",
- "pageViews" : 6,
- "tags" : "filthy"
- }
-];
-
-assert(arrayEq(p2.result, p2result), 'p2 failed');
-
-
-// pulling values out of subdocuments
-var p3 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- otherfoo : "$other.foo",
- otherbar : "$other.bar"
- }}
-]});
-
-var p3result = [
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
- "otherfoo" : 5
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
- "otherbar" : 14
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
- "otherbar" : 14
- }
-];
-
-assert(arrayEq(p3.result, p3result), 'p3 failed');
-
-
-// projection includes a computed value
-var p4 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- daveWroteIt : { $eq:["$author", "dave"] }
- }}
-]});
-
-var p4result = [
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
- "author" : "bob",
- "daveWroteIt" : false
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
- "author" : "dave",
- "daveWroteIt" : true
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
- "author" : "jane",
- "daveWroteIt" : false
- }
-];
-
-assert(arrayEq(p4.result, p4result), 'p4 failed');
-
-
-// projection includes a virtual (fabricated) document
-var p5 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- pageViews : 1,
- tags : 1
- }},
- { $unwind : "$tags" },
- { $project : {
- author : 1,
- subDocument : { foo : "$pageViews", bar : "$tags" }
- }}
-]});
-
-var p5result = [
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
- "author" : "bob",
- "subDocument" : {
- "foo" : 5,
- "bar" : "fun"
- }
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
- "author" : "bob",
- "subDocument" : {
- "foo" : 5,
- "bar" : "good"
- }
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
- "author" : "bob",
- "subDocument" : {
- "foo" : 5,
- "bar" : "fun"
- }
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
- "author" : "dave",
- "subDocument" : {
- "foo" : 7,
- "bar" : "fun"
- }
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
- "author" : "dave",
- "subDocument" : {
- "foo" : 7,
- "bar" : "nasty"
- }
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
- "author" : "jane",
- "subDocument" : {
- "foo" : 6,
- "bar" : "nasty"
- }
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
- "author" : "jane",
- "subDocument" : {
- "foo" : 6,
- "bar" : "filthy"
- }
- }
-];
-
-assert(arrayEq(p5.result, p5result), 'p5 failed');
-
-
-// multi-step aggregate
-// nested expressions in computed fields
-var p6 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- tags : 1,
- pageViews : 1
- }},
- { $unwind : "$tags" },
- { $project : {
- author : 1,
- tag : "$tags",
- pageViews : 1,
- daveWroteIt : { $eq:["$author", "dave"] },
- weLikeIt : { $or:[ { $eq:["$author", "dave"] },
- { $eq:["$tags", "good"] } ] }
- }}
-]});
-
-var p6result = [
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
- "author" : "bob",
- "pageViews" : 5,
- "tag" : "fun",
- "daveWroteIt" : false,
- "weLikeIt" : false
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
- "author" : "bob",
- "pageViews" : 5,
- "tag" : "good",
- "daveWroteIt" : false,
- "weLikeIt" : true
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
- "author" : "bob",
- "pageViews" : 5,
- "tag" : "fun",
- "daveWroteIt" : false,
- "weLikeIt" : false
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
- "author" : "dave",
- "pageViews" : 7,
- "tag" : "fun",
- "daveWroteIt" : true,
- "weLikeIt" : true
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
- "author" : "dave",
- "pageViews" : 7,
- "tag" : "nasty",
- "daveWroteIt" : true,
- "weLikeIt" : true
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
- "author" : "jane",
- "pageViews" : 6,
- "tag" : "nasty",
- "daveWroteIt" : false,
- "weLikeIt" : false
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
- "author" : "jane",
- "pageViews" : 6,
- "tag" : "filthy",
- "daveWroteIt" : false,
- "weLikeIt" : false
- }
-];
-
-assert(arrayEq(p6.result, p6result), 'p6 failed');
-
-
-// slightly more complex computed expression; $ifNull
-var p7 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- theSum : { $add:["$pageViews",
- { $ifNull:["$other.foo",
- "$other.bar"] } ] }
- }}
-]});
-
-var p7result = [
- {
- "_id" : ObjectId("4de54958bf1505139918fce6"),
- "theSum" : 10
- },
- {
- "_id" : ObjectId("4de54958bf1505139918fce7"),
- "theSum" : 21
- },
- {
- "_id" : ObjectId("4de54958bf1505139918fce8"),
- "theSum" : 20
- }
-];
-
-assert(arrayEq(p7.result, p7result), 'p7 failed');
-
-
-// dotted path inclusion; _id exclusion
-var p8 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- _id : 0,
- author : 1,
- tags : 1,
- "comments.author" : 1
- }},
- { $unwind : "$tags" }
-]});
-
-var p8result = [
- {
- "author" : "bob",
- "comments" : [
- {
- "author" : "joe"
- },
- {
- "author" : "sam"
- }
- ],
- "tags" : "fun"
- },
- {
- "author" : "bob",
- "comments" : [
- {
- "author" : "joe"
- },
- {
- "author" : "sam"
- }
- ],
- "tags" : "good"
- },
- {
- "author" : "bob",
- "comments" : [
- {
- "author" : "joe"
- },
- {
- "author" : "sam"
- }
- ],
- "tags" : "fun"
- },
- {
- "author" : "dave",
- "comments" : [
- {
- "author" : "barbara"
- },
- {
- "author" : "jenny"
- }
- ],
- "tags" : "fun"
- },
- {
- "author" : "dave",
- "comments" : [
- {
- "author" : "barbara"
- },
- {
- "author" : "jenny"
- }
- ],
- "tags" : "nasty"
- },
- {
- "author" : "jane",
- "comments" : [
- {
- "author" : "will"
- },
- {
- "author" : "jenny"
- }
- ],
- "tags" : "nasty"
- },
- {
- "author" : "jane",
- "comments" : [
- {
- "author" : "will"
- },
- {
- "author" : "jenny"
- }
- ],
- "tags" : "filthy"
- }
-];
-
-assert(arrayEq(p8.result, p8result), 'p8 failed');
-
-
-// collapse a dotted path with an intervening array
-var p9 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- _id : 0,
- author : 1,
- commentsAuthor : "$comments.author"
- }}
-]});
-
-var p9result = [
- {
- "author" : "bob",
- "commentsAuthor" : [
- "joe",
- "sam"
- ]
- },
- {
- "author" : "dave",
- "commentsAuthor" : [
- "barbara",
- "jenny"
- ]
- },
- {
- "author" : "jane",
- "commentsAuthor" : [
- "will",
- "jenny"
- ]
- }
-];
-
-assert(arrayEq(p9.result, p9result), 'p9 failed');
-
-
-// simple sort
-var p10 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $sort : { title : 1 }
- }
-]});
-
-var p10result = [
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
- "title" : "this is my title",
- "author" : "bob",
- "posted" : ISODate("2011-05-03T22:21:33.251Z"),
- "pageViews" : 5,
- "tags" : [
- "fun",
- "good",
- "fun"
- ],
- "comments" : [
- {
- "author" : "joe",
- "text" : "this is cool"
- },
- {
- "author" : "sam",
- "text" : "this is bad"
- }
- ],
- "other" : {
- "foo" : 5
- }
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
- "title" : "this is some other title",
- "author" : "jane",
- "posted" : ISODate("2011-05-03T22:21:33.252Z"),
- "pageViews" : 6,
- "tags" : [
- "nasty",
- "filthy"
- ],
- "comments" : [
- {
- "author" : "will",
- "text" : "i don't like the color"
- },
- {
- "author" : "jenny",
- "text" : "can i get that in green?"
- }
- ],
- "other" : {
- "bar" : 14
- }
- },
- {
- "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
- "title" : "this is your title",
- "author" : "dave",
- "posted" : ISODate("2011-05-03T22:21:33.251Z"),
- "pageViews" : 7,
- "tags" : [
- "fun",
- "nasty"
- ],
- "comments" : [
- {
- "author" : "barbara",
- "text" : "this is interesting"
- },
- {
- "author" : "jenny",
- "text" : "i like to play pinball",
- "votes" : 10
- }
- ],
- "other" : {
- "bar" : 14
- }
- }
-];
-
-assert(orderedArrayEq(p10.result, p10result), 'p10 failed');
-
-
-// unwind on nested array
-db.p11.drop();
-db.p11.save( {
- name : 'MongoDB',
- items : {
- authors : ['jay', 'vivek', 'bjornar'],
- dbg : [17, 42]
- },
- favorites : ['pickles', 'ice cream', 'kettle chips']
-});
-
-var p11 = db.runCommand(
-{ aggregate : "p11", pipeline : [
- { $unwind : "$items.authors" },
- { $project : {
- name : 1,
- author : "$items.authors"
- }},
-]});
-
-p11result = [
- {
- "_id" : ObjectId("4ded2e7d4a0eb8caae28044d"),
- "name" : "MongoDB",
- "author" : "jay"
- },
- {
- "_id" : ObjectId("4ded2e7d4a0eb8caae28044d"),
- "name" : "MongoDB",
- "author" : "vivek"
- },
- {
- "_id" : ObjectId("4ded2e7d4a0eb8caae28044d"),
- "name" : "MongoDB",
- "author" : "bjornar"
- }
-];
-
-assert(arrayEq(p11.result, p11result), 'p11 failed');
-
-
-// multiply test
-var p12 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- theProduct : { $multiply:["$pageViews",
- { $ifNull:["$other.foo",
- "$other.bar"] } ] }
- }}
-]});
-
-var p12result = [
- {
- "_id" : ObjectId("4de54958bf1505139918fce6"),
- "theProduct" : 25
- },
- {
- "_id" : ObjectId("4de54958bf1505139918fce7"),
- "theProduct" : 98
- },
- {
- "_id" : ObjectId("4de54958bf1505139918fce8"),
- "theProduct" : 84
- }
-];
-
-assert(arrayEq(p12.result, p12result), 'p12 failed');
-
-
-// subtraction test
-var p13 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- theDifference : { $subtract:["$pageViews",
- { $ifNull:["$other.foo",
- "$other.bar"] } ] }
- }}
-]});
-
-var p13result = [
- {
- "_id" : ObjectId("4de54958bf1505139918fce6"),
- "theDifference" : 0
- },
- {
- "_id" : ObjectId("4de54958bf1505139918fce7"),
- "theDifference" : -7
- },
- {
- "_id" : ObjectId("4de54958bf1505139918fce8"),
- "theDifference" : -8
- }
-];
-
-assert(arrayEq(p13.result, p13result), 'p13 failed');
-
-
-// mod test
-var p14 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- theRemainder : { $mod:[
- { $ifNull:["$other.foo",
- "$other.bar"] },
- "$pageViews", ] }
- }}
-]});
-
-var p14result = [
- {
- "_id" : ObjectId("4de54958bf1505139918fce6"),
- "theRemainder" : 0
- },
- {
- "_id" : ObjectId("4de54958bf1505139918fce7"),
- "theRemainder" : 0
- },
- {
- "_id" : ObjectId("4de54958bf1505139918fce8"),
- "theRemainder" : 2
- }
-];
-
-assert(arrayEq(p14.result, p14result), 'p14 failed');
-
-
-// toUpper test
-var p15 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : {$toUpper: "$author"},
- pageViews : 1
- }}
-]});
-
-var p15result = [
- {
- "_id" : ObjectId("4e09d403278071aa11bd1560"),
- "pageViews" : 5,
- "author" : "BOB"
- },
- {
- "_id" : ObjectId("4e09d656c705acb9967683c4"),
- "pageViews" : 7,
- "author" : "DAVE"
- },
- {
- "_id" : ObjectId("4e09d656c705acb9967683c5"),
- "pageViews" : 6,
- "author" : "JANE"
- }
-];
-
-assert(arrayEq(p15.result, p15result), 'p15 failed');
-
-
-// toLower test
-var p16 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : {$toUpper: "$author"},
- pageViews : 1
- }},
- { $project : {
- author : {$toLower: "$author"},
- pageViews : 1
- }}
-]});
-
-var p16result = [
- {
- "_id" : ObjectId("4e09d403278071aa11bd1560"),
- "pageViews" : 5,
- "author" : "bob"
- },
- {
- "_id" : ObjectId("4e09d656c705acb9967683c4"),
- "pageViews" : 7,
- "author" : "dave"
- },
- {
- "_id" : ObjectId("4e09d656c705acb9967683c5"),
- "pageViews" : 6,
- "author" : "jane"
- }
-];
-
-assert(arrayEq(p16.result, p16result), 'p16 failed');
-
-
-// substr test
-var p17 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : {$substr: ["$author", 1, 2]},
- }}
-]});
-
-var p17result = [
- {
- "_id" : ObjectId("4e09d403278071aa11bd1560"),
- "author" : "ob"
- },
- {
- "_id" : ObjectId("4e09d656c705acb9967683c4"),
- "author" : "av"
- },
- {
- "_id" : ObjectId("4e09d656c705acb9967683c5"),
- "author" : "an"
- }
-];
-
-assert(arrayEq(p17.result, p17result), 'p17 failed');
-
-
-// strcasecmp test
-var p18 = db.runCommand(
-{aggregate : "article", pipeline : [
- { $project : {
- tags : 1,
- thisisalametest : {$strcasecmp: ["foo","bar"]},
- thisisalamepass : {$strcasecmp: ["foo","foo"]}
- }}
-]});
-
-var p18result = [
- {
- "_id" : ObjectId("4e09ee2d75f2a257194c996e"),
- "tags" : [
- "fun",
- "good",
- "fun"
- ],
- "thisisalametest" : 1,
- "thisisalamepass" : 0
- },
- {
- "_id" : ObjectId("4e09ee2d75f2a257194c996f"),
- "tags" : [
- "fun",
- "nasty"
- ],
- "thisisalametest" : 1,
- "thisisalamepass" : 0
- },
- {
- "_id" : ObjectId("4e09ee2d75f2a257194c9970"),
- "tags" : [
- "nasty",
- "filthy"
- ],
- "thisisalametest" : 1,
- "thisisalamepass" : 0
- }
-];
-
-assert(arrayEq(p18.result, p18result), 'p18 failed');
-
-
-// date tests
-var p19 = db.runCommand({aggregate : "article", pipeline : [
- { $project : {
- authors: 1,
- posted: 1,
- seconds: {$second: "$posted"},
- minutes: {$minute: "$posted"},
- hour: {$hour: "$posted"},
- dayOfYear: {$dayOfYear: "$posted"},
- dayOfMonth: {$dayOfMonth: "$posted"},
- dayOfWeek: {$dayOfWeek: "$posted"},
- month: {$month: "$posted"},
- week: {$week: "$posted"},
- year: {$year: "$posted"}
- }},
- { $project : {
- authors: 1,
- posted: 1,
- seconds: 1,
- minutes: 1,
- hour: 1,
- dayOfYear: 1,
- dayOfMonth: 1,
- dayOfWeek: 1,
- month: 1,
- week: 1,
- year: 1,
- testDate: {$isoDate:{
- year: "$year", month: "$month", dayOfMonth: "$dayOfMonth",
- hour: "$hour", minute: "$minutes", second: "$seconds"}}
- }},
- { $project : {
- authors: 1,
- posted: 1,
- seconds: 1,
- minutes: 1,
- hour: 1,
- dayOfYear: 1,
- dayOfMonth: 1,
- dayOfWeek: 1,
- month: 1,
- week: 1,
- year: 1,
- testDate: 1,
- isEqual: {$eq:["$posted", "$testDate"]}
- }}
-]});
-
-var p19result = [
- {
- "_id" : ObjectId("4f44151eda0a3d90cf03ccb5"),
- "posted" : ISODate("2004-03-21T18:59:54Z"),
- "seconds" : 54,
- "minutes" : 59,
- "hour" : 18,
- "dayOfYear" : 81,
- "dayOfMonth" : 21,
- "dayOfWeek" : 1,
- "month" : 3,
- "week" : 12,
- "year" : 2004,
- "testDate" : ISODate("2004-03-21T18:59:54Z"),
- "isEqual" : true
- },
- {
- "_id" : ObjectId("4f44151eda0a3d90cf03ccb6"),
- "posted" : ISODate("2030-08-08T04:11:10Z"),
- "seconds" : 10,
- "minutes" : 11,
- "hour" : 4,
- "dayOfYear" : 220,
- "dayOfMonth" : 8,
- "dayOfWeek" : 5,
- "month" : 8,
- "week" : 31,
- "year" : 2030,
- "testDate" : ISODate("2030-08-08T04:11:10Z"),
- "isEqual" : true
- },
- {
- "_id" : ObjectId("4f44151eda0a3d90cf03ccb7"),
- "posted" : ISODate("2000-12-31T05:17:14Z"),
- "seconds" : 14,
- "minutes" : 17,
- "hour" : 5,
- "dayOfYear" : 366,
- "dayOfMonth" : 31,
- "dayOfWeek" : 1,
- "month" : 12,
- "week" : 53,
- "year" : 2000,
- "testDate" : ISODate("2000-12-31T05:17:14Z"),
- "isEqual" : true
- }
-];
-
-assert(arrayEq(p19.result, p19result), 'p19 failed');
-
-
-db.vartype.drop();
-db.vartype.save({ x : 17, y : "foo"});
-
-// just passing through fields
-var p20 = db.runCommand(
-{ aggregate : "vartype", pipeline : [
- { $project : {
- all_numbers : { $add:[1, "$x", 2, "$x"] },
- string_fields : { $add:[3, "$y", 4, "$y"] },
- number_fields : { $add:["a", "$x", "b", "$x"] },
- all_strings : { $add:["c", "$y", "d", "$y"] },
- potpourri_1 : { $add:[5, "$y", "e", "$x"] },
- potpourri_2 : { $add:[6, "$x", "f", "$y"] },
- potpourri_3 : { $add:["g", "$y", 7, "$x"] },
- potpourri_4 : { $add:["h", "$x", 8, "$y"] },
- _id: 0
- }}
-]});
-
-var p20result = [
- {
- "all_numbers" : 37,
- "string_fields" : "3foo4foo",
- "number_fields" : "a17b17",
- "all_strings" : "cfoodfoo",
- "potpourri_1" : "5fooe17",
- "potpourri_2" : "617ffoo",
- "potpourri_3" : "gfoo717",
- "potpourri_4" : "h178foo"
- }
-];
-
-assert(arrayEq(p20.result, p20result), 'p20 failed');
-
-// ternary conditional operator
-var p21 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- _id : 0,
- author : 1,
- pageViews : { $cond : [ {$eq:["$author", "dave"]},
- {$add:["$pageViews", 1000]}, "$pageViews" ]
- }
- }}
-]});
-
-var p21result = [
- {
- "author" : "bob",
- "pageViews" : 5
- },
- {
- "author" : "dave",
- "pageViews" : 1007
- },
- {
- "author" : "jane",
- "pageViews" : 6
- }
-];
-
-assert(arrayEq(p21.result, p21result), 'p21 failed');
-
-
-// simple matching
-var m1 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $match : { author : "dave" } }
-]});
-
-var m1result = [
- {
- "_id" : ObjectId("4de54958bf1505139918fce7"),
- "title" : "this is your title",
- "author" : "dave",
- "posted" : ISODate("2011-05-31T20:02:32.256Z"),
- "pageViews" : 7,
- "tags" : [
- "fun",
- "nasty"
- ],
- "comments" : [
- {
- "author" : "barbara",
- "text" : "this is interesting"
- },
- {
- "author" : "jenny",
- "text" : "i like to play pinball",
- "votes" : 10
- }
- ],
- "other" : {
- "bar" : 14
- }
- }
-];
-
-assert(arrayEq(m1.result, m1result), 'm1 failed');
-
-
-// combining matching with a projection
-var m2 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- title : 1,
- author : 1,
- pageViews : 1,
- tags : 1,
- comments : 1
- }},
- { $unwind : "$tags" },
- { $match : { tags : "nasty" } }
-]});
-
-var m2result = [
- {
- "_id" : ObjectId("4de54958bf1505139918fce7"),
- "title" : "this is your title",
- "author" : "dave",
- "pageViews" : 7,
- "comments" : [
- {
- "author" : "barbara",
- "text" : "this is interesting"
- },
- {
- "author" : "jenny",
- "text" : "i like to play pinball",
- "votes" : 10
- }
- ],
- "tags" : "nasty"
- },
- {
- "_id" : ObjectId("4de54958bf1505139918fce8"),
- "title" : "this is some other title",
- "author" : "jane",
- "pageViews" : 6,
- "comments" : [
- {
- "author" : "will",
- "text" : "i don't like the color"
- },
- {
- "author" : "jenny",
- "text" : "can i get that in green?"
- }
- ],
- "tags" : "nasty"
- }
-];
-
-assert(arrayEq(m2.result, m2result), 'm2 failed');
-
-
-// group by tag, _id is a field reference
-var g1 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- tags : 1,
- pageViews : 1
- }},
- { $unwind : "$tags" },
- { $group : {
- _id : "$tags",
- docsByTag : { $sum : 1 },
- viewsByTag : { $sum : "$pageViews" }
- }}
-]});
-
-var g1result = [
- {
- "_id" : "filthy",
- "docsByTag" : 1,
- "viewsByTag" : 6
- },
- {
- "_id" : "fun",
- "docsByTag" : 3,
- "viewsByTag" : 17
- },
- {
- "_id" : "good",
- "docsByTag" : 1,
- "viewsByTag" : 5
- },
- {
- "_id" : "nasty",
- "docsByTag" : 2,
- "viewsByTag" : 13
- }
-];
-
-assert(arrayEq(g1.result, g1result), 'g1 failed');
-
-
-// $max, and averaging in a final projection; _id is structured
-var g2 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- tags : 1,
- pageViews : 1
- }},
- { $unwind : "$tags" },
- { $group : {
- _id: { tags : 1 },
- docsByTag : { $sum : 1 },
- viewsByTag : { $sum : "$pageViews" },
- mostViewsByTag : { $max : "$pageViews" },
- }},
- { $project : {
- _id: false,
- tag : "$_id.tags",
- mostViewsByTag : 1,
- docsByTag : 1,
- viewsByTag : 1,
- avgByTag : { $divide:["$viewsByTag", "$docsByTag"] }
- }}
-]});
-
-var g2result = [
- {
- "docsByTag" : 1,
- "viewsByTag" : 6,
- "mostViewsByTag" : 6,
- "tag" : "filthy",
- "avgByTag" : 6
- },
- {
- "docsByTag" : 3,
- "viewsByTag" : 17,
- "mostViewsByTag" : 7,
- "tag" : "fun",
- "avgByTag" : 5.666666666666667
- },
- {
- "docsByTag" : 1,
- "viewsByTag" : 5,
- "mostViewsByTag" : 5,
- "tag" : "good",
- "avgByTag" : 5
- },
- {
- "docsByTag" : 2,
- "viewsByTag" : 13,
- "mostViewsByTag" : 7,
- "tag" : "nasty",
- "avgByTag" : 6.5
- }
-];
-
-assert(arrayEq(g2.result, g2result), 'g2 failed');
-
-
-// $push as an accumulator; can pivot data
-var g3 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- tags : 1,
- }},
- { $unwind : "$tags" },
- { $group : {
- _id : { tags : 1 },
- authors : { $push : "$author" }
- }}
-]});
-
-var g3result = [
- {
- "_id" : {
- "tags" : "filthy"
- },
- "authors" : [
- "jane"
- ]
- },
- {
- "_id" : {
- "tags" : "fun"
- },
- "authors" : [
- "bob",
- "bob",
- "dave"
- ]
- },
- {
- "_id" : {
- "tags" : "good"
- },
- "authors" : [
- "bob"
- ]
- },
- {
- "_id" : {
- "tags" : "nasty"
- },
- "authors" : [
- "dave",
- "jane"
- ]
- }
-];
-
-assert(arrayEq(g3.result, g3result), 'g3 failed');
-
-
-// $avg, and averaging in a final projection
-var g4 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- tags : 1,
- pageViews : 1
- }},
- { $unwind : "$tags" },
- { $group : {
- _id: { tags : 1 },
- docsByTag : { $sum : 1 },
- viewsByTag : { $sum : "$pageViews" },
- avgByTag : { $avg : "$pageViews" },
- }}
-]});
-
-var g4result = [
- {
- "_id" : {
- "tags" : "filthy"
- },
- "docsByTag" : 1,
- "viewsByTag" : 6,
- "avgByTag" : 6
- },
- {
- "_id" : {
- "tags" : "fun"
- },
- "docsByTag" : 3,
- "viewsByTag" : 17,
- "avgByTag" : 5.666666666666667
- },
- {
- "_id" : {
- "tags" : "good"
- },
- "docsByTag" : 1,
- "viewsByTag" : 5,
- "avgByTag" : 5
- },
- {
- "_id" : {
- "tags" : "nasty"
- },
- "docsByTag" : 2,
- "viewsByTag" : 13,
- "avgByTag" : 6.5
- }
-];
-
-assert(arrayEq(g4.result, g4result), 'g4 failed');
-
-
-// $addToSet as an accumulator; can pivot data
-var g5 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- tags : 1,
- }},
- { $unwind : "$tags" },
- { $group : {
- _id : { tags : 1 },
- authors : { $addToSet : "$author" }
- }}
-]});
-
-var g5result = [
- {
- "_id" : {
- "tags" : "filthy"
- },
- "authors" : [
- "jane"
- ]
- },
- {
- "_id" : {
- "tags" : "fun"
- },
- "authors" : [
- "bob",
- "dave"
- ]
- },
- {
- "_id" : {
- "tags" : "good"
- },
- "authors" : [
- "bob"
- ]
- },
- {
- "_id" : {
- "tags" : "nasty"
- },
- "authors" : [
- "dave",
- "jane"
- ]
- }
-];
-
-assert(arrayEq(g5.result, g5result), 'g5 failed');
-
-
-// $first and $last accumulators, constant _id
-var g6 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $sort : { author : -1 } },
- { $group : {
- _id : "authors", /* constant string, *not* a field reference */
- firstAuthor : { $last : "$author" }, /* note reverse sort above */
- lastAuthor : { $first : "$author" }, /* note reverse sort above */
- count : { $sum : 1 }
- }}
-]});
-
-var g6result = [
- {
- "_id" : "authors",
- firstAuthor : "bob",
- lastAuthor : "jane",
- count : 3
- }
-];
-
-assert(arrayEq(g6.result, g6result), 'g6 failed');
+/*
+ Run all the aggregation tests
+*/
+
+/* load the test documents */
+load('jstests/aggregation/data/articles.js');
+
+/* load the test utilities */
+load('jstests/aggregation/extras/utils.js');
+
+// make sure we're using the right db; this is the same as "use mydb;" in shell
+db = db.getSiblingDB("aggdb");
+
+
+// just passing through fields
+var p1 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ tags : 1,
+ pageViews : 1
+ }}
+]});
+
+var p1result = [
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
+ "pageViews" : 5,
+ "tags" : [
+ "fun",
+ "good",
+ "fun"
+ ]
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
+ "pageViews" : 7,
+ "tags" : [
+ "fun",
+ "nasty"
+ ]
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
+ "pageViews" : 6,
+ "tags" : [
+ "nasty",
+ "filthy"
+ ]
+ }
+];
+
+assert(arrayEq(p1.result, p1result), 'p1 failed');
+
+
+// a simple array unwinding
+var u1 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $unwind : "$tags" }
+]});
+
+var u1result = [
+ {
+ "_id" : ObjectId("4e7bdfa4909a512bf221a8fe"),
+ "title" : "this is my title",
+ "author" : "bob",
+ "posted" : ISODate("2004-03-21T18:59:54Z"),
+ "pageViews" : 5,
+ "tags" : "fun",
+ "comments" : [
+ {
+ "author" : "joe",
+ "text" : "this is cool"
+ },
+ {
+ "author" : "sam",
+ "text" : "this is bad"
+ }
+ ],
+ "other" : {
+ "foo" : 5
+ }
+ },
+ {
+ "_id" : ObjectId("4e7bdfa4909a512bf221a8fe"),
+ "title" : "this is my title",
+ "author" : "bob",
+ "posted" : ISODate("2004-03-21T18:59:54Z"),
+ "pageViews" : 5,
+ "tags" : "good",
+ "comments" : [
+ {
+ "author" : "joe",
+ "text" : "this is cool"
+ },
+ {
+ "author" : "sam",
+ "text" : "this is bad"
+ }
+ ],
+ "other" : {
+ "foo" : 5
+ }
+ },
+ {
+ "_id" : ObjectId("4e7bdfa4909a512bf221a8fe"),
+ "title" : "this is my title",
+ "author" : "bob",
+ "posted" : ISODate("2004-03-21T18:59:54Z"),
+ "pageViews" : 5,
+ "tags" : "fun",
+ "comments" : [
+ {
+ "author" : "joe",
+ "text" : "this is cool"
+ },
+ {
+ "author" : "sam",
+ "text" : "this is bad"
+ }
+ ],
+ "other" : {
+ "foo" : 5
+ }
+ },
+ {
+ "_id" : ObjectId("4e7bdfa4909a512bf221a8ff"),
+ "title" : "this is your title",
+ "author" : "dave",
+ "posted" : ISODate("2100-08-08T04:11:10Z"),
+ "pageViews" : 7,
+ "tags" : "fun",
+ "comments" : [
+ {
+ "author" : "barbara",
+ "text" : "this is interesting"
+ },
+ {
+ "author" : "jenny",
+ "text" : "i like to play pinball",
+ "votes" : 10
+ }
+ ],
+ "other" : {
+ "bar" : 14
+ }
+ },
+ {
+ "_id" : ObjectId("4e7bdfa4909a512bf221a8ff"),
+ "title" : "this is your title",
+ "author" : "dave",
+ "posted" : ISODate("2100-08-08T04:11:10Z"),
+ "pageViews" : 7,
+ "tags" : "nasty",
+ "comments" : [
+ {
+ "author" : "barbara",
+ "text" : "this is interesting"
+ },
+ {
+ "author" : "jenny",
+ "text" : "i like to play pinball",
+ "votes" : 10
+ }
+ ],
+ "other" : {
+ "bar" : 14
+ }
+ },
+ {
+ "_id" : ObjectId("4e7bdfa4909a512bf221a900"),
+ "title" : "this is some other title",
+ "author" : "jane",
+ "posted" : ISODate("2000-12-31T05:17:14Z"),
+ "pageViews" : 6,
+ "tags" : "nasty",
+ "comments" : [
+ {
+ "author" : "will",
+ "text" : "i don't like the color"
+ },
+ {
+ "author" : "jenny",
+ "text" : "can i get that in green?"
+ }
+ ],
+ "other" : {
+ "bar" : 14
+ }
+ },
+ {
+ "_id" : ObjectId("4e7bdfa4909a512bf221a900"),
+ "title" : "this is some other title",
+ "author" : "jane",
+ "posted" : ISODate("2000-12-31T05:17:14Z"),
+ "pageViews" : 6,
+ "tags" : "filthy",
+ "comments" : [
+ {
+ "author" : "will",
+ "text" : "i don't like the color"
+ },
+ {
+ "author" : "jenny",
+ "text" : "can i get that in green?"
+ }
+ ],
+ "other" : {
+ "bar" : 14
+ }
+ }
+];
+
+assert(arrayEq(u1.result, u1result), 'u1 failed');
+
+// unwind an array at the end of a dotted path
+db.ut.drop();
+db.ut.save({a:1, b:{e:7, f:[4, 3, 2, 1]}, c:12, d:17});
+var u2 = db.runCommand(
+{ aggregate : "ut", pipeline : [
+ { $unwind : "$b.f" }
+]});
+
+var u2result = [
+ {
+ "_id" : ObjectId("4e7be21a702bfc656111df9b"),
+ "a" : 1,
+ "b" : {
+ "e" : 7,
+ "f" : 4
+ },
+ "c" : 12,
+ "d" : 17
+ },
+ {
+ "_id" : ObjectId("4e7be21a702bfc656111df9b"),
+ "a" : 1,
+ "b" : {
+ "e" : 7,
+ "f" : 3
+ },
+ "c" : 12,
+ "d" : 17
+ },
+ {
+ "_id" : ObjectId("4e7be21a702bfc656111df9b"),
+ "a" : 1,
+ "b" : {
+ "e" : 7,
+ "f" : 2
+ },
+ "c" : 12,
+ "d" : 17
+ },
+ {
+ "_id" : ObjectId("4e7be21a702bfc656111df9b"),
+ "a" : 1,
+ "b" : {
+ "e" : 7,
+ "f" : 1
+ },
+ "c" : 12,
+ "d" : 17
+ }
+];
+
+assert(arrayEq(u2.result, u2result), 'u2 failed');
+
+
+// combining a projection with unwinding an array
+var p2 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ author : 1,
+ tags : 1,
+ pageViews : 1
+ }},
+ { $unwind : "$tags" }
+]});
+
+var p2result = [
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
+ "author" : "bob",
+ "pageViews" : 5,
+ "tags" : "fun"
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
+ "author" : "bob",
+ "pageViews" : 5,
+ "tags" : "good"
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
+ "author" : "bob",
+ "pageViews" : 5,
+ "tags" : "fun"
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
+ "author" : "dave",
+ "pageViews" : 7,
+ "tags" : "fun"
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
+ "author" : "dave",
+ "pageViews" : 7,
+ "tags" : "nasty"
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
+ "author" : "jane",
+ "pageViews" : 6,
+ "tags" : "nasty"
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
+ "author" : "jane",
+ "pageViews" : 6,
+ "tags" : "filthy"
+ }
+];
+
+assert(arrayEq(p2.result, p2result), 'p2 failed');
+
+
+// pulling values out of subdocuments
+var p3 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ otherfoo : "$other.foo",
+ otherbar : "$other.bar"
+ }}
+]});
+
+var p3result = [
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
+ "otherfoo" : 5
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
+ "otherbar" : 14
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
+ "otherbar" : 14
+ }
+];
+
+assert(arrayEq(p3.result, p3result), 'p3 failed');
+
+
+// projection includes a computed value
+var p4 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ author : 1,
+ daveWroteIt : { $eq:["$author", "dave"] }
+ }}
+]});
+
+var p4result = [
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
+ "author" : "bob",
+ "daveWroteIt" : false
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
+ "author" : "dave",
+ "daveWroteIt" : true
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
+ "author" : "jane",
+ "daveWroteIt" : false
+ }
+];
+
+assert(arrayEq(p4.result, p4result), 'p4 failed');
+
+
+// projection includes a virtual (fabricated) document
+var p5 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ author : 1,
+ pageViews : 1,
+ tags : 1
+ }},
+ { $unwind : "$tags" },
+ { $project : {
+ author : 1,
+ subDocument : { foo : "$pageViews", bar : "$tags" }
+ }}
+]});
+
+var p5result = [
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
+ "author" : "bob",
+ "subDocument" : {
+ "foo" : 5,
+ "bar" : "fun"
+ }
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
+ "author" : "bob",
+ "subDocument" : {
+ "foo" : 5,
+ "bar" : "good"
+ }
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
+ "author" : "bob",
+ "subDocument" : {
+ "foo" : 5,
+ "bar" : "fun"
+ }
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
+ "author" : "dave",
+ "subDocument" : {
+ "foo" : 7,
+ "bar" : "fun"
+ }
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
+ "author" : "dave",
+ "subDocument" : {
+ "foo" : 7,
+ "bar" : "nasty"
+ }
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
+ "author" : "jane",
+ "subDocument" : {
+ "foo" : 6,
+ "bar" : "nasty"
+ }
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
+ "author" : "jane",
+ "subDocument" : {
+ "foo" : 6,
+ "bar" : "filthy"
+ }
+ }
+];
+
+assert(arrayEq(p5.result, p5result), 'p5 failed');
+
+
+// multi-step aggregate
+// nested expressions in computed fields
+var p6 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ author : 1,
+ tags : 1,
+ pageViews : 1
+ }},
+ { $unwind : "$tags" },
+ { $project : {
+ author : 1,
+ tag : "$tags",
+ pageViews : 1,
+ daveWroteIt : { $eq:["$author", "dave"] },
+ weLikeIt : { $or:[ { $eq:["$author", "dave"] },
+ { $eq:["$tags", "good"] } ] }
+ }}
+]});
+
+var p6result = [
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
+ "author" : "bob",
+ "pageViews" : 5,
+ "tag" : "fun",
+ "daveWroteIt" : false,
+ "weLikeIt" : false
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
+ "author" : "bob",
+ "pageViews" : 5,
+ "tag" : "good",
+ "daveWroteIt" : false,
+ "weLikeIt" : true
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
+ "author" : "bob",
+ "pageViews" : 5,
+ "tag" : "fun",
+ "daveWroteIt" : false,
+ "weLikeIt" : false
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
+ "author" : "dave",
+ "pageViews" : 7,
+ "tag" : "fun",
+ "daveWroteIt" : true,
+ "weLikeIt" : true
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
+ "author" : "dave",
+ "pageViews" : 7,
+ "tag" : "nasty",
+ "daveWroteIt" : true,
+ "weLikeIt" : true
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
+ "author" : "jane",
+ "pageViews" : 6,
+ "tag" : "nasty",
+ "daveWroteIt" : false,
+ "weLikeIt" : false
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
+ "author" : "jane",
+ "pageViews" : 6,
+ "tag" : "filthy",
+ "daveWroteIt" : false,
+ "weLikeIt" : false
+ }
+];
+
+assert(arrayEq(p6.result, p6result), 'p6 failed');
+
+
+// slightly more complex computed expression; $ifNull
+var p7 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ theSum : { $add:["$pageViews",
+ { $ifNull:["$other.foo",
+ "$other.bar"] } ] }
+ }}
+]});
+
+var p7result = [
+ {
+ "_id" : ObjectId("4de54958bf1505139918fce6"),
+ "theSum" : 10
+ },
+ {
+ "_id" : ObjectId("4de54958bf1505139918fce7"),
+ "theSum" : 21
+ },
+ {
+ "_id" : ObjectId("4de54958bf1505139918fce8"),
+ "theSum" : 20
+ }
+];
+
+assert(arrayEq(p7.result, p7result), 'p7 failed');
+
+
+// dotted path inclusion; _id exclusion
+var p8 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ _id : 0,
+ author : 1,
+ tags : 1,
+ "comments.author" : 1
+ }},
+ { $unwind : "$tags" }
+]});
+
+var p8result = [
+ {
+ "author" : "bob",
+ "comments" : [
+ {
+ "author" : "joe"
+ },
+ {
+ "author" : "sam"
+ }
+ ],
+ "tags" : "fun"
+ },
+ {
+ "author" : "bob",
+ "comments" : [
+ {
+ "author" : "joe"
+ },
+ {
+ "author" : "sam"
+ }
+ ],
+ "tags" : "good"
+ },
+ {
+ "author" : "bob",
+ "comments" : [
+ {
+ "author" : "joe"
+ },
+ {
+ "author" : "sam"
+ }
+ ],
+ "tags" : "fun"
+ },
+ {
+ "author" : "dave",
+ "comments" : [
+ {
+ "author" : "barbara"
+ },
+ {
+ "author" : "jenny"
+ }
+ ],
+ "tags" : "fun"
+ },
+ {
+ "author" : "dave",
+ "comments" : [
+ {
+ "author" : "barbara"
+ },
+ {
+ "author" : "jenny"
+ }
+ ],
+ "tags" : "nasty"
+ },
+ {
+ "author" : "jane",
+ "comments" : [
+ {
+ "author" : "will"
+ },
+ {
+ "author" : "jenny"
+ }
+ ],
+ "tags" : "nasty"
+ },
+ {
+ "author" : "jane",
+ "comments" : [
+ {
+ "author" : "will"
+ },
+ {
+ "author" : "jenny"
+ }
+ ],
+ "tags" : "filthy"
+ }
+];
+
+assert(arrayEq(p8.result, p8result), 'p8 failed');
+
+
+// collapse a dotted path with an intervening array
+var p9 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ _id : 0,
+ author : 1,
+ commentsAuthor : "$comments.author"
+ }}
+]});
+
+var p9result = [
+ {
+ "author" : "bob",
+ "commentsAuthor" : [
+ "joe",
+ "sam"
+ ]
+ },
+ {
+ "author" : "dave",
+ "commentsAuthor" : [
+ "barbara",
+ "jenny"
+ ]
+ },
+ {
+ "author" : "jane",
+ "commentsAuthor" : [
+ "will",
+ "jenny"
+ ]
+ }
+];
+
+assert(arrayEq(p9.result, p9result), 'p9 failed');
+
+
+// simple sort
+var p10 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $sort : { title : 1 }
+ }
+]});
+
+var p10result = [
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066d"),
+ "title" : "this is my title",
+ "author" : "bob",
+ "posted" : ISODate("2011-05-03T22:21:33.251Z"),
+ "pageViews" : 5,
+ "tags" : [
+ "fun",
+ "good",
+ "fun"
+ ],
+ "comments" : [
+ {
+ "author" : "joe",
+ "text" : "this is cool"
+ },
+ {
+ "author" : "sam",
+ "text" : "this is bad"
+ }
+ ],
+ "other" : {
+ "foo" : 5
+ }
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066f"),
+ "title" : "this is some other title",
+ "author" : "jane",
+ "posted" : ISODate("2011-05-03T22:21:33.252Z"),
+ "pageViews" : 6,
+ "tags" : [
+ "nasty",
+ "filthy"
+ ],
+ "comments" : [
+ {
+ "author" : "will",
+ "text" : "i don't like the color"
+ },
+ {
+ "author" : "jenny",
+ "text" : "can i get that in green?"
+ }
+ ],
+ "other" : {
+ "bar" : 14
+ }
+ },
+ {
+ "_id" : ObjectId("4dc07fedd8420ab8d0d4066e"),
+ "title" : "this is your title",
+ "author" : "dave",
+ "posted" : ISODate("2011-05-03T22:21:33.251Z"),
+ "pageViews" : 7,
+ "tags" : [
+ "fun",
+ "nasty"
+ ],
+ "comments" : [
+ {
+ "author" : "barbara",
+ "text" : "this is interesting"
+ },
+ {
+ "author" : "jenny",
+ "text" : "i like to play pinball",
+ "votes" : 10
+ }
+ ],
+ "other" : {
+ "bar" : 14
+ }
+ }
+];
+
+assert(orderedArrayEq(p10.result, p10result), 'p10 failed');
+
+
+// unwind on nested array
+db.p11.drop();
+db.p11.save( {
+ name : 'MongoDB',
+ items : {
+ authors : ['jay', 'vivek', 'bjornar'],
+ dbg : [17, 42]
+ },
+ favorites : ['pickles', 'ice cream', 'kettle chips']
+});
+
+var p11 = db.runCommand(
+{ aggregate : "p11", pipeline : [
+ { $unwind : "$items.authors" },
+ { $project : {
+ name : 1,
+ author : "$items.authors"
+ }},
+]});
+
+p11result = [
+ {
+ "_id" : ObjectId("4ded2e7d4a0eb8caae28044d"),
+ "name" : "MongoDB",
+ "author" : "jay"
+ },
+ {
+ "_id" : ObjectId("4ded2e7d4a0eb8caae28044d"),
+ "name" : "MongoDB",
+ "author" : "vivek"
+ },
+ {
+ "_id" : ObjectId("4ded2e7d4a0eb8caae28044d"),
+ "name" : "MongoDB",
+ "author" : "bjornar"
+ }
+];
+
+assert(arrayEq(p11.result, p11result), 'p11 failed');
+
+
+// multiply test
+var p12 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ theProduct : { $multiply:["$pageViews",
+ { $ifNull:["$other.foo",
+ "$other.bar"] } ] }
+ }}
+]});
+
+var p12result = [
+ {
+ "_id" : ObjectId("4de54958bf1505139918fce6"),
+ "theProduct" : 25
+ },
+ {
+ "_id" : ObjectId("4de54958bf1505139918fce7"),
+ "theProduct" : 98
+ },
+ {
+ "_id" : ObjectId("4de54958bf1505139918fce8"),
+ "theProduct" : 84
+ }
+];
+
+assert(arrayEq(p12.result, p12result), 'p12 failed');
+
+
+// subtraction test
+var p13 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ theDifference : { $subtract:["$pageViews",
+ { $ifNull:["$other.foo",
+ "$other.bar"] } ] }
+ }}
+]});
+
+var p13result = [
+ {
+ "_id" : ObjectId("4de54958bf1505139918fce6"),
+ "theDifference" : 0
+ },
+ {
+ "_id" : ObjectId("4de54958bf1505139918fce7"),
+ "theDifference" : -7
+ },
+ {
+ "_id" : ObjectId("4de54958bf1505139918fce8"),
+ "theDifference" : -8
+ }
+];
+
+assert(arrayEq(p13.result, p13result), 'p13 failed');
+
+
+// mod test
+var p14 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ theRemainder : { $mod:[
+ { $ifNull:["$other.foo",
+ "$other.bar"] },
+ "$pageViews", ] }
+ }}
+]});
+
+var p14result = [
+ {
+ "_id" : ObjectId("4de54958bf1505139918fce6"),
+ "theRemainder" : 0
+ },
+ {
+ "_id" : ObjectId("4de54958bf1505139918fce7"),
+ "theRemainder" : 0
+ },
+ {
+ "_id" : ObjectId("4de54958bf1505139918fce8"),
+ "theRemainder" : 2
+ }
+];
+
+assert(arrayEq(p14.result, p14result), 'p14 failed');
+
+
+// toUpper test
+var p15 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ author : {$toUpper: "$author"},
+ pageViews : 1
+ }}
+]});
+
+var p15result = [
+ {
+ "_id" : ObjectId("4e09d403278071aa11bd1560"),
+ "pageViews" : 5,
+ "author" : "BOB"
+ },
+ {
+ "_id" : ObjectId("4e09d656c705acb9967683c4"),
+ "pageViews" : 7,
+ "author" : "DAVE"
+ },
+ {
+ "_id" : ObjectId("4e09d656c705acb9967683c5"),
+ "pageViews" : 6,
+ "author" : "JANE"
+ }
+];
+
+assert(arrayEq(p15.result, p15result), 'p15 failed');
+
+
+// toLower test
+var p16 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ author : {$toUpper: "$author"},
+ pageViews : 1
+ }},
+ { $project : {
+ author : {$toLower: "$author"},
+ pageViews : 1
+ }}
+]});
+
+var p16result = [
+ {
+ "_id" : ObjectId("4e09d403278071aa11bd1560"),
+ "pageViews" : 5,
+ "author" : "bob"
+ },
+ {
+ "_id" : ObjectId("4e09d656c705acb9967683c4"),
+ "pageViews" : 7,
+ "author" : "dave"
+ },
+ {
+ "_id" : ObjectId("4e09d656c705acb9967683c5"),
+ "pageViews" : 6,
+ "author" : "jane"
+ }
+];
+
+assert(arrayEq(p16.result, p16result), 'p16 failed');
+
+
+// substr test
+var p17 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ author : {$substr: ["$author", 1, 2]},
+ }}
+]});
+
+var p17result = [
+ {
+ "_id" : ObjectId("4e09d403278071aa11bd1560"),
+ "author" : "ob"
+ },
+ {
+ "_id" : ObjectId("4e09d656c705acb9967683c4"),
+ "author" : "av"
+ },
+ {
+ "_id" : ObjectId("4e09d656c705acb9967683c5"),
+ "author" : "an"
+ }
+];
+
+assert(arrayEq(p17.result, p17result), 'p17 failed');
+
+
+// strcasecmp test
+var p18 = db.runCommand(
+{aggregate : "article", pipeline : [
+ { $project : {
+ tags : 1,
+ thisisalametest : {$strcasecmp: ["foo","bar"]},
+ thisisalamepass : {$strcasecmp: ["foo","foo"]}
+ }}
+]});
+
+var p18result = [
+ {
+ "_id" : ObjectId("4e09ee2d75f2a257194c996e"),
+ "tags" : [
+ "fun",
+ "good",
+ "fun"
+ ],
+ "thisisalametest" : 1,
+ "thisisalamepass" : 0
+ },
+ {
+ "_id" : ObjectId("4e09ee2d75f2a257194c996f"),
+ "tags" : [
+ "fun",
+ "nasty"
+ ],
+ "thisisalametest" : 1,
+ "thisisalamepass" : 0
+ },
+ {
+ "_id" : ObjectId("4e09ee2d75f2a257194c9970"),
+ "tags" : [
+ "nasty",
+ "filthy"
+ ],
+ "thisisalametest" : 1,
+ "thisisalamepass" : 0
+ }
+];
+
+assert(arrayEq(p18.result, p18result), 'p18 failed');
+
+
+// date tests
+var p19 = db.runCommand({aggregate : "article", pipeline : [
+ { $project : {
+ authors: 1,
+ posted: 1,
+ seconds: {$second: "$posted"},
+ minutes: {$minute: "$posted"},
+ hour: {$hour: "$posted"},
+ dayOfYear: {$dayOfYear: "$posted"},
+ dayOfMonth: {$dayOfMonth: "$posted"},
+ dayOfWeek: {$dayOfWeek: "$posted"},
+ month: {$month: "$posted"},
+ week: {$week: "$posted"},
+ year: {$year: "$posted"}
+ }},
+ { $project : {
+ authors: 1,
+ posted: 1,
+ seconds: 1,
+ minutes: 1,
+ hour: 1,
+ dayOfYear: 1,
+ dayOfMonth: 1,
+ dayOfWeek: 1,
+ month: 1,
+ week: 1,
+ year: 1,
+ testDate: {$isoDate:{
+ year: "$year", month: "$month", dayOfMonth: "$dayOfMonth",
+ hour: "$hour", minute: "$minutes", second: "$seconds"}}
+ }},
+ { $project : {
+ authors: 1,
+ posted: 1,
+ seconds: 1,
+ minutes: 1,
+ hour: 1,
+ dayOfYear: 1,
+ dayOfMonth: 1,
+ dayOfWeek: 1,
+ month: 1,
+ week: 1,
+ year: 1,
+ testDate: 1,
+ isEqual: {$eq:["$posted", "$testDate"]}
+ }}
+]});
+
+var p19result = [
+ {
+ "_id" : ObjectId("4f44151eda0a3d90cf03ccb5"),
+ "posted" : ISODate("2004-03-21T18:59:54Z"),
+ "seconds" : 54,
+ "minutes" : 59,
+ "hour" : 18,
+ "dayOfYear" : 81,
+ "dayOfMonth" : 21,
+ "dayOfWeek" : 1,
+ "month" : 3,
+ "week" : 12,
+ "year" : 2004,
+ "testDate" : ISODate("2004-03-21T18:59:54Z"),
+ "isEqual" : true
+ },
+ {
+ "_id" : ObjectId("4f44151eda0a3d90cf03ccb6"),
+ "posted" : ISODate("2030-08-08T04:11:10Z"),
+ "seconds" : 10,
+ "minutes" : 11,
+ "hour" : 4,
+ "dayOfYear" : 220,
+ "dayOfMonth" : 8,
+ "dayOfWeek" : 5,
+ "month" : 8,
+ "week" : 31,
+ "year" : 2030,
+ "testDate" : ISODate("2030-08-08T04:11:10Z"),
+ "isEqual" : true
+ },
+ {
+ "_id" : ObjectId("4f44151eda0a3d90cf03ccb7"),
+ "posted" : ISODate("2000-12-31T05:17:14Z"),
+ "seconds" : 14,
+ "minutes" : 17,
+ "hour" : 5,
+ "dayOfYear" : 366,
+ "dayOfMonth" : 31,
+ "dayOfWeek" : 1,
+ "month" : 12,
+ "week" : 53,
+ "year" : 2000,
+ "testDate" : ISODate("2000-12-31T05:17:14Z"),
+ "isEqual" : true
+ }
+];
+
+assert(arrayEq(p19.result, p19result), 'p19 failed');
+
+
+db.vartype.drop();
+db.vartype.save({ x : 17, y : "foo"});
+
+// just passing through fields
+var p20 = db.runCommand(
+{ aggregate : "vartype", pipeline : [
+ { $project : {
+ all_numbers : { $add:[1, "$x", 2, "$x"] },
+ string_fields : { $add:[3, "$y", 4, "$y"] },
+ number_fields : { $add:["a", "$x", "b", "$x"] },
+ all_strings : { $add:["c", "$y", "d", "$y"] },
+ potpourri_1 : { $add:[5, "$y", "e", "$x"] },
+ potpourri_2 : { $add:[6, "$x", "f", "$y"] },
+ potpourri_3 : { $add:["g", "$y", 7, "$x"] },
+ potpourri_4 : { $add:["h", "$x", 8, "$y"] },
+ _id: 0
+ }}
+]});
+
+var p20result = [
+ {
+ "all_numbers" : 37,
+ "string_fields" : "3foo4foo",
+ "number_fields" : "a17b17",
+ "all_strings" : "cfoodfoo",
+ "potpourri_1" : "5fooe17",
+ "potpourri_2" : "617ffoo",
+ "potpourri_3" : "gfoo717",
+ "potpourri_4" : "h178foo"
+ }
+];
+
+assert(arrayEq(p20.result, p20result), 'p20 failed');
+
+// ternary conditional operator
+var p21 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ _id : 0,
+ author : 1,
+ pageViews : { $cond : [ {$eq:["$author", "dave"]},
+ {$add:["$pageViews", 1000]}, "$pageViews" ]
+ }
+ }}
+]});
+
+var p21result = [
+ {
+ "author" : "bob",
+ "pageViews" : 5
+ },
+ {
+ "author" : "dave",
+ "pageViews" : 1007
+ },
+ {
+ "author" : "jane",
+ "pageViews" : 6
+ }
+];
+
+assert(arrayEq(p21.result, p21result), 'p21 failed');
+
+
+// simple matching
+var m1 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $match : { author : "dave" } }
+]});
+
+var m1result = [
+ {
+ "_id" : ObjectId("4de54958bf1505139918fce7"),
+ "title" : "this is your title",
+ "author" : "dave",
+ "posted" : ISODate("2011-05-31T20:02:32.256Z"),
+ "pageViews" : 7,
+ "tags" : [
+ "fun",
+ "nasty"
+ ],
+ "comments" : [
+ {
+ "author" : "barbara",
+ "text" : "this is interesting"
+ },
+ {
+ "author" : "jenny",
+ "text" : "i like to play pinball",
+ "votes" : 10
+ }
+ ],
+ "other" : {
+ "bar" : 14
+ }
+ }
+];
+
+assert(arrayEq(m1.result, m1result), 'm1 failed');
+
+
+// combining matching with a projection
+var m2 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ title : 1,
+ author : 1,
+ pageViews : 1,
+ tags : 1,
+ comments : 1
+ }},
+ { $unwind : "$tags" },
+ { $match : { tags : "nasty" } }
+]});
+
+var m2result = [
+ {
+ "_id" : ObjectId("4de54958bf1505139918fce7"),
+ "title" : "this is your title",
+ "author" : "dave",
+ "pageViews" : 7,
+ "comments" : [
+ {
+ "author" : "barbara",
+ "text" : "this is interesting"
+ },
+ {
+ "author" : "jenny",
+ "text" : "i like to play pinball",
+ "votes" : 10
+ }
+ ],
+ "tags" : "nasty"
+ },
+ {
+ "_id" : ObjectId("4de54958bf1505139918fce8"),
+ "title" : "this is some other title",
+ "author" : "jane",
+ "pageViews" : 6,
+ "comments" : [
+ {
+ "author" : "will",
+ "text" : "i don't like the color"
+ },
+ {
+ "author" : "jenny",
+ "text" : "can i get that in green?"
+ }
+ ],
+ "tags" : "nasty"
+ }
+];
+
+assert(arrayEq(m2.result, m2result), 'm2 failed');
+
+
+// group by tag, _id is a field reference
+var g1 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ author : 1,
+ tags : 1,
+ pageViews : 1
+ }},
+ { $unwind : "$tags" },
+ { $group : {
+ _id : "$tags",
+ docsByTag : { $sum : 1 },
+ viewsByTag : { $sum : "$pageViews" }
+ }}
+]});
+
+var g1result = [
+ {
+ "_id" : "filthy",
+ "docsByTag" : 1,
+ "viewsByTag" : 6
+ },
+ {
+ "_id" : "fun",
+ "docsByTag" : 3,
+ "viewsByTag" : 17
+ },
+ {
+ "_id" : "good",
+ "docsByTag" : 1,
+ "viewsByTag" : 5
+ },
+ {
+ "_id" : "nasty",
+ "docsByTag" : 2,
+ "viewsByTag" : 13
+ }
+];
+
+assert(arrayEq(g1.result, g1result), 'g1 failed');
+
+
+// $max, and averaging in a final projection; _id is structured
+var g2 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ author : 1,
+ tags : 1,
+ pageViews : 1
+ }},
+ { $unwind : "$tags" },
+ { $group : {
+ _id: { tags : 1 },
+ docsByTag : { $sum : 1 },
+ viewsByTag : { $sum : "$pageViews" },
+ mostViewsByTag : { $max : "$pageViews" },
+ }},
+ { $project : {
+ _id: false,
+ tag : "$_id.tags",
+ mostViewsByTag : 1,
+ docsByTag : 1,
+ viewsByTag : 1,
+ avgByTag : { $divide:["$viewsByTag", "$docsByTag"] }
+ }}
+]});
+
+var g2result = [
+ {
+ "docsByTag" : 1,
+ "viewsByTag" : 6,
+ "mostViewsByTag" : 6,
+ "tag" : "filthy",
+ "avgByTag" : 6
+ },
+ {
+ "docsByTag" : 3,
+ "viewsByTag" : 17,
+ "mostViewsByTag" : 7,
+ "tag" : "fun",
+ "avgByTag" : 5.666666666666667
+ },
+ {
+ "docsByTag" : 1,
+ "viewsByTag" : 5,
+ "mostViewsByTag" : 5,
+ "tag" : "good",
+ "avgByTag" : 5
+ },
+ {
+ "docsByTag" : 2,
+ "viewsByTag" : 13,
+ "mostViewsByTag" : 7,
+ "tag" : "nasty",
+ "avgByTag" : 6.5
+ }
+];
+
+assert(arrayEq(g2.result, g2result), 'g2 failed');
+
+
+// $push as an accumulator; can pivot data
+var g3 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ author : 1,
+ tags : 1,
+ }},
+ { $unwind : "$tags" },
+ { $group : {
+ _id : { tags : 1 },
+ authors : { $push : "$author" }
+ }}
+]});
+
+var g3result = [
+ {
+ "_id" : {
+ "tags" : "filthy"
+ },
+ "authors" : [
+ "jane"
+ ]
+ },
+ {
+ "_id" : {
+ "tags" : "fun"
+ },
+ "authors" : [
+ "bob",
+ "bob",
+ "dave"
+ ]
+ },
+ {
+ "_id" : {
+ "tags" : "good"
+ },
+ "authors" : [
+ "bob"
+ ]
+ },
+ {
+ "_id" : {
+ "tags" : "nasty"
+ },
+ "authors" : [
+ "dave",
+ "jane"
+ ]
+ }
+];
+
+assert(arrayEq(g3.result, g3result), 'g3 failed');
+
+
+// $avg, and averaging in a final projection
+var g4 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ author : 1,
+ tags : 1,
+ pageViews : 1
+ }},
+ { $unwind : "$tags" },
+ { $group : {
+ _id: { tags : 1 },
+ docsByTag : { $sum : 1 },
+ viewsByTag : { $sum : "$pageViews" },
+ avgByTag : { $avg : "$pageViews" },
+ }}
+]});
+
+var g4result = [
+ {
+ "_id" : {
+ "tags" : "filthy"
+ },
+ "docsByTag" : 1,
+ "viewsByTag" : 6,
+ "avgByTag" : 6
+ },
+ {
+ "_id" : {
+ "tags" : "fun"
+ },
+ "docsByTag" : 3,
+ "viewsByTag" : 17,
+ "avgByTag" : 5.666666666666667
+ },
+ {
+ "_id" : {
+ "tags" : "good"
+ },
+ "docsByTag" : 1,
+ "viewsByTag" : 5,
+ "avgByTag" : 5
+ },
+ {
+ "_id" : {
+ "tags" : "nasty"
+ },
+ "docsByTag" : 2,
+ "viewsByTag" : 13,
+ "avgByTag" : 6.5
+ }
+];
+
+assert(arrayEq(g4.result, g4result), 'g4 failed');
+
+
+// $addToSet as an accumulator; can pivot data
+var g5 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $project : {
+ author : 1,
+ tags : 1,
+ }},
+ { $unwind : "$tags" },
+ { $group : {
+ _id : { tags : 1 },
+ authors : { $addToSet : "$author" }
+ }}
+]});
+
+var g5result = [
+ {
+ "_id" : {
+ "tags" : "filthy"
+ },
+ "authors" : [
+ "jane"
+ ]
+ },
+ {
+ "_id" : {
+ "tags" : "fun"
+ },
+ "authors" : [
+ "bob",
+ "dave"
+ ]
+ },
+ {
+ "_id" : {
+ "tags" : "good"
+ },
+ "authors" : [
+ "bob"
+ ]
+ },
+ {
+ "_id" : {
+ "tags" : "nasty"
+ },
+ "authors" : [
+ "dave",
+ "jane"
+ ]
+ }
+];
+
+assert(arrayEq(g5.result, g5result), 'g5 failed');
+
+
+// $first and $last accumulators, constant _id
+var g6 = db.runCommand(
+{ aggregate : "article", pipeline : [
+ { $sort : { author : -1 } },
+ { $group : {
+ _id : "authors", /* constant string, *not* a field reference */
+ firstAuthor : { $last : "$author" }, /* note reverse sort above */
+ lastAuthor : { $first : "$author" }, /* note reverse sort above */
+ count : { $sum : 1 }
+ }}
+]});
+
+var g6result = [
+ {
+ "_id" : "authors",
+ firstAuthor : "bob",
+ lastAuthor : "jane",
+ count : 3
+ }
+];
+
+assert(arrayEq(g6.result, g6result), 'g6 failed');
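+
+
+// illustrative sketch, not one of the original cases: the same g6 result can
+// be produced with an ascending sort, letting $first/$last read naturally
+// (the name g6b is made up for this example)
+var g6b = db.runCommand(
+{ aggregate : "article", pipeline : [
+    { $sort : { author : 1 } },
+    { $group : {
+        _id : "authors", /* constant string again, not a field reference */
+        firstAuthor : { $first : "$author" },
+        lastAuthor : { $last : "$author" },
+        count : { $sum : 1 }
+    }}
+]});
+
+assert(arrayEq(g6b.result, g6result), 'g6b failed');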
diff --git a/src/mongo/db/commands/document_source_cursor.cpp b/src/mongo/db/commands/document_source_cursor.cpp
index 5ac0e460b14..3fc2e4d4228 100755
--- a/src/mongo/db/commands/document_source_cursor.cpp
+++ b/src/mongo/db/commands/document_source_cursor.cpp
@@ -1,186 +1,186 @@
-/**
- * Copyright 2011 (c) 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "mongo/pch.h"
-
-#include "mongo/db/pipeline/document_source.h"
-
-#include "mongo/client/dbclientcursor.h"
-#include "mongo/db/clientcursor.h"
-#include "mongo/db/cursor.h"
-#include "mongo/db/instance.h"
-#include "mongo/db/pipeline/document.h"
-
-namespace mongo {
-
- DocumentSourceCursor::~DocumentSourceCursor() {
- }
-
- void DocumentSourceCursor::releaseCursor() {
- // note the order here; the cursor holder has to go first
- pClientCursor.reset();
- pCursor.reset();
- }
-
- bool DocumentSourceCursor::eof() {
- /* if we haven't gotten the first one yet, do so now */
- if (!pCurrent.get())
- findNext();
-
- return (pCurrent.get() == NULL);
- }
-
- bool DocumentSourceCursor::advance() {
- DocumentSource::advance(); // check for interrupts
-
- /* if we haven't gotten the first one yet, do so now */
- if (!pCurrent.get())
- findNext();
-
- findNext();
- return (pCurrent.get() != NULL);
- }
-
- intrusive_ptr<Document> DocumentSourceCursor::getCurrent() {
- /* if we haven't gotten the first one yet, do so now */
- if (!pCurrent.get())
- findNext();
-
- return pCurrent;
- }
-
- void DocumentSourceCursor::advanceAndYield() {
- pCursor->advance();
- /*
- TODO ask for index key pattern in order to determine which index
- was used for this particular document; that will allow us to
- sometimes use ClientCursor::MaybeCovered.
- See https://jira.mongodb.org/browse/SERVER-5224 .
- */
- bool cursorOk = pClientCursor->yieldSometimes(ClientCursor::WillNeed);
- if (!cursorOk) {
- uassert(16028,
- "collection or database disappeared when cursor yielded",
- false);
- }
- }
-
- void DocumentSourceCursor::findNext() {
- /* standard cursor usage pattern */
- while(pCursor->ok()) {
- CoveredIndexMatcher *pCIM; // save intermediate result
- if ((!(pCIM = pCursor->matcher()) ||
- pCIM->matchesCurrent(pCursor.get())) &&
- !pCursor->getsetdup(pCursor->currLoc())) {
-
- /* grab the matching document */
- BSONObj documentObj(pCursor->current());
- pCurrent = Document::createFromBsonObj(
- &documentObj, NULL /* LATER pDependencies.get()*/);
- advanceAndYield();
- return;
- }
-
- advanceAndYield();
- }
-
- /* if we got here, there aren't any more documents */
- pCurrent.reset();
- }
-
- void DocumentSourceCursor::setSource(DocumentSource *pSource) {
- /* this doesn't take a source */
- verify(false);
- }
-
- void DocumentSourceCursor::sourceToBson(
- BSONObjBuilder *pBuilder, bool explain) const {
-
- /* this has no analog in the BSON world, so only allow it for explain */
- if (explain)
- {
- BSONObj bsonObj;
-
- pBuilder->append("query", *pQuery);
-
- if (pSort.get())
- {
- pBuilder->append("sort", *pSort);
- }
-
- // construct query for explain
- BSONObjBuilder queryBuilder;
- queryBuilder.append("$query", *pQuery);
- if (pSort.get())
- queryBuilder.append("$orderby", *pSort);
- queryBuilder.append("$explain", 1);
- Query query(queryBuilder.obj());
-
- DBDirectClient directClient;
- BSONObj explainResult(directClient.findOne(ns, query));
-
- pBuilder->append("cursor", explainResult);
- }
- }
-
- DocumentSourceCursor::DocumentSourceCursor(
- const shared_ptr<Cursor> &pTheCursor,
- const string &ns,
- const intrusive_ptr<ExpressionContext> &pCtx):
- DocumentSource(pCtx),
- pCurrent(),
- bsonDependencies(),
- pCursor(pTheCursor),
- pClientCursor(),
- pDependencies() {
- pClientCursor.reset(
- new ClientCursor(QueryOption_NoCursorTimeout, pTheCursor, ns));
- }
-
- intrusive_ptr<DocumentSourceCursor> DocumentSourceCursor::create(
- const shared_ptr<Cursor> &pCursor,
- const string &ns,
- const intrusive_ptr<ExpressionContext> &pExpCtx) {
- verify(pCursor.get());
- intrusive_ptr<DocumentSourceCursor> pSource(
- new DocumentSourceCursor(pCursor, ns, pExpCtx));
- return pSource;
- }
-
- void DocumentSourceCursor::setNamespace(const string &n) {
- ns = n;
- }
-
- void DocumentSourceCursor::setQuery(const shared_ptr<BSONObj> &pBsonObj) {
- pQuery = pBsonObj;
- }
-
- void DocumentSourceCursor::setSort(const shared_ptr<BSONObj> &pBsonObj) {
- pSort = pBsonObj;
- }
-
- void DocumentSourceCursor::addBsonDependency(
- const shared_ptr<BSONObj> &pBsonObj) {
- bsonDependencies.push_back(pBsonObj);
- }
-
- void DocumentSourceCursor::manageDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker) {
- /* hang on to the tracker */
- pDependencies = pTracker;
- }
-
-}
+/**
+ * Copyright 2011 (c) 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "mongo/pch.h"
+
+#include "mongo/db/pipeline/document_source.h"
+
+#include "mongo/client/dbclientcursor.h"
+#include "mongo/db/clientcursor.h"
+#include "mongo/db/cursor.h"
+#include "mongo/db/instance.h"
+#include "mongo/db/pipeline/document.h"
+
+namespace mongo {
+
+ DocumentSourceCursor::~DocumentSourceCursor() {
+ }
+
+ void DocumentSourceCursor::releaseCursor() {
+ // note the order here; the cursor holder has to go first
+ pClientCursor.reset();
+ pCursor.reset();
+ }
+
+ bool DocumentSourceCursor::eof() {
+ /* if we haven't gotten the first one yet, do so now */
+ if (!pCurrent.get())
+ findNext();
+
+ return (pCurrent.get() == NULL);
+ }
+
+ bool DocumentSourceCursor::advance() {
+ DocumentSource::advance(); // check for interrupts
+
+ /* if we haven't gotten the first one yet, do so now */
+ if (!pCurrent.get())
+ findNext();
+
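+        /* now move past the current document to the next matching one */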
+ findNext();
+ return (pCurrent.get() != NULL);
+ }
+
+ intrusive_ptr<Document> DocumentSourceCursor::getCurrent() {
+ /* if we haven't gotten the first one yet, do so now */
+ if (!pCurrent.get())
+ findNext();
+
+ return pCurrent;
+ }
+
+ void DocumentSourceCursor::advanceAndYield() {
+ pCursor->advance();
+ /*
+ TODO ask for index key pattern in order to determine which index
+ was used for this particular document; that will allow us to
+ sometimes use ClientCursor::MaybeCovered.
+ See https://jira.mongodb.org/browse/SERVER-5224 .
+ */
+ bool cursorOk = pClientCursor->yieldSometimes(ClientCursor::WillNeed);
+ if (!cursorOk) {
+ uassert(16028,
+ "collection or database disappeared when cursor yielded",
+ false);
+ }
+ }
+
+ void DocumentSourceCursor::findNext() {
+ /* standard cursor usage pattern */
+ while(pCursor->ok()) {
+ CoveredIndexMatcher *pCIM; // save intermediate result
+ if ((!(pCIM = pCursor->matcher()) ||
+ pCIM->matchesCurrent(pCursor.get())) &&
+ !pCursor->getsetdup(pCursor->currLoc())) {
+
+ /* grab the matching document */
+ BSONObj documentObj(pCursor->current());
+ pCurrent = Document::createFromBsonObj(
+ &documentObj, NULL /* LATER pDependencies.get()*/);
+ advanceAndYield();
+ return;
+ }
+
+ advanceAndYield();
+ }
+
+ /* if we got here, there aren't any more documents */
+ pCurrent.reset();
+ }
+
+ void DocumentSourceCursor::setSource(DocumentSource *pSource) {
+ /* this doesn't take a source */
+ verify(false);
+ }
+
+ void DocumentSourceCursor::sourceToBson(
+ BSONObjBuilder *pBuilder, bool explain) const {
+
+ /* this has no analog in the BSON world, so only allow it for explain */
+ if (explain)
+ {
+ BSONObj bsonObj;
+
+ pBuilder->append("query", *pQuery);
+
+ if (pSort.get())
+ {
+ pBuilder->append("sort", *pSort);
+ }
+
+ // construct query for explain
+ BSONObjBuilder queryBuilder;
+ queryBuilder.append("$query", *pQuery);
+ if (pSort.get())
+ queryBuilder.append("$orderby", *pSort);
+ queryBuilder.append("$explain", 1);
+ Query query(queryBuilder.obj());
+
+ DBDirectClient directClient;
+ BSONObj explainResult(directClient.findOne(ns, query));
+
+ pBuilder->append("cursor", explainResult);
+ }
+ }
+
+ DocumentSourceCursor::DocumentSourceCursor(
+ const shared_ptr<Cursor> &pTheCursor,
+ const string &ns,
+ const intrusive_ptr<ExpressionContext> &pCtx):
+ DocumentSource(pCtx),
+ pCurrent(),
+ bsonDependencies(),
+ pCursor(pTheCursor),
+ pClientCursor(),
+ pDependencies() {
+ pClientCursor.reset(
+ new ClientCursor(QueryOption_NoCursorTimeout, pTheCursor, ns));
+ }
+
+ intrusive_ptr<DocumentSourceCursor> DocumentSourceCursor::create(
+ const shared_ptr<Cursor> &pCursor,
+ const string &ns,
+ const intrusive_ptr<ExpressionContext> &pExpCtx) {
+ verify(pCursor.get());
+ intrusive_ptr<DocumentSourceCursor> pSource(
+ new DocumentSourceCursor(pCursor, ns, pExpCtx));
+ return pSource;
+ }
+
+ void DocumentSourceCursor::setNamespace(const string &n) {
+ ns = n;
+ }
+
+ void DocumentSourceCursor::setQuery(const shared_ptr<BSONObj> &pBsonObj) {
+ pQuery = pBsonObj;
+ }
+
+ void DocumentSourceCursor::setSort(const shared_ptr<BSONObj> &pBsonObj) {
+ pSort = pBsonObj;
+ }
+
+ void DocumentSourceCursor::addBsonDependency(
+ const shared_ptr<BSONObj> &pBsonObj) {
+ bsonDependencies.push_back(pBsonObj);
+ }
+
+ void DocumentSourceCursor::manageDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker) {
+ /* hang on to the tracker */
+ pDependencies = pTracker;
+ }
+
+}
diff --git a/src/mongo/db/commands/pipeline.cpp b/src/mongo/db/commands/pipeline.cpp
index 3ee80c42d7a..951c642162d 100755
--- a/src/mongo/db/commands/pipeline.cpp
+++ b/src/mongo/db/commands/pipeline.cpp
@@ -1,500 +1,500 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-#include "db/commands/pipeline.h"
-
-#include "db/cursor.h"
-#include "db/pipeline/accumulator.h"
-#include "db/pipeline/dependency_tracker.h"
-#include "db/pipeline/document.h"
-#include "db/pipeline/document_source.h"
-#include "db/pipeline/expression.h"
-#include "db/pipeline/expression_context.h"
-#include "db/pdfile.h"
-#include "util/mongoutils/str.h"
-
-namespace mongo {
-
- const char Pipeline::commandName[] = "aggregate";
- const char Pipeline::pipelineName[] = "pipeline";
- const char Pipeline::explainName[] = "explain";
- const char Pipeline::fromRouterName[] = "fromRouter";
- const char Pipeline::splitMongodPipelineName[] = "splitMongodPipeline";
- const char Pipeline::serverPipelineName[] = "serverPipeline";
- const char Pipeline::mongosPipelineName[] = "mongosPipeline";
-
- Pipeline::~Pipeline() {
- }
-
- Pipeline::Pipeline(const intrusive_ptr<ExpressionContext> &pTheCtx):
- collectionName(),
- sourceVector(),
- explain(false),
- splitMongodPipeline(false),
- pCtx(pTheCtx) {
- }
-
-
- /* this structure is used to make a lookup table of operators */
- struct StageDesc {
- const char *pName;
- intrusive_ptr<DocumentSource> (*pFactory)(
- BSONElement *, const intrusive_ptr<ExpressionContext> &);
- };
-
- /* this table must be in alphabetical order by name for bsearch() */
- static const StageDesc stageDesc[] = {
-#ifdef NEVER /* disabled for now in favor of $match */
- {DocumentSourceFilter::filterName,
- DocumentSourceFilter::createFromBson},
-#endif
- {DocumentSourceGroup::groupName,
- DocumentSourceGroup::createFromBson},
- {DocumentSourceLimit::limitName,
- DocumentSourceLimit::createFromBson},
- {DocumentSourceMatch::matchName,
- DocumentSourceMatch::createFromBson},
-#ifdef LATER /* https://jira.mongodb.org/browse/SERVER-3253 */
- {DocumentSourceOut::outName,
- DocumentSourceOut::createFromBson},
-#endif
- {DocumentSourceProject::projectName,
- DocumentSourceProject::createFromBson},
- {DocumentSourceSkip::skipName,
- DocumentSourceSkip::createFromBson},
- {DocumentSourceSort::sortName,
- DocumentSourceSort::createFromBson},
- {DocumentSourceUnwind::unwindName,
- DocumentSourceUnwind::createFromBson},
- };
- static const size_t nStageDesc = sizeof(stageDesc) / sizeof(StageDesc);
-
- static int stageDescCmp(const void *pL, const void *pR) {
- return strcmp(((const StageDesc *)pL)->pName,
- ((const StageDesc *)pR)->pName);
- }
-
- intrusive_ptr<Pipeline> Pipeline::parseCommand(
- string &errmsg, BSONObj &cmdObj,
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<Pipeline> pPipeline(new Pipeline(pCtx));
- vector<BSONElement> pipeline;
-
- /* gather the specification for the aggregation */
- for(BSONObj::iterator cmdIterator = cmdObj.begin();
- cmdIterator.more(); ) {
- BSONElement cmdElement(cmdIterator.next());
- const char *pFieldName = cmdElement.fieldName();
-
- /* look for the aggregation command */
- if (!strcmp(pFieldName, commandName)) {
- pPipeline->collectionName = cmdElement.String();
- continue;
- }
-
- /* check for the collection name */
- if (!strcmp(pFieldName, pipelineName)) {
- pipeline = cmdElement.Array();
- continue;
- }
-
- /* check for explain option */
- if (!strcmp(pFieldName, explainName)) {
- pPipeline->explain = cmdElement.Bool();
- continue;
- }
-
- /* if the request came from the router, we're in a shard */
- if (!strcmp(pFieldName, fromRouterName)) {
- pCtx->setInShard(cmdElement.Bool());
- continue;
- }
-
- /* check for debug options */
- if (!strcmp(pFieldName, splitMongodPipelineName)) {
- pPipeline->splitMongodPipeline = true;
- continue;
- }
-
- /* we didn't recognize a field in the command */
- ostringstream sb;
- sb <<
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "db/commands/pipeline.h"
+
+#include "db/cursor.h"
+#include "db/pipeline/accumulator.h"
+#include "db/pipeline/dependency_tracker.h"
+#include "db/pipeline/document.h"
+#include "db/pipeline/document_source.h"
+#include "db/pipeline/expression.h"
+#include "db/pipeline/expression_context.h"
+#include "db/pdfile.h"
+#include "util/mongoutils/str.h"
+
+namespace mongo {
+
+ const char Pipeline::commandName[] = "aggregate";
+ const char Pipeline::pipelineName[] = "pipeline";
+ const char Pipeline::explainName[] = "explain";
+ const char Pipeline::fromRouterName[] = "fromRouter";
+ const char Pipeline::splitMongodPipelineName[] = "splitMongodPipeline";
+ const char Pipeline::serverPipelineName[] = "serverPipeline";
+ const char Pipeline::mongosPipelineName[] = "mongosPipeline";
+
+ Pipeline::~Pipeline() {
+ }
+
+ Pipeline::Pipeline(const intrusive_ptr<ExpressionContext> &pTheCtx):
+ collectionName(),
+ sourceVector(),
+ explain(false),
+ splitMongodPipeline(false),
+ pCtx(pTheCtx) {
+ }
+
+
+ /* this structure is used to make a lookup table of operators */
+ struct StageDesc {
+ const char *pName;
+ intrusive_ptr<DocumentSource> (*pFactory)(
+ BSONElement *, const intrusive_ptr<ExpressionContext> &);
+ };
+
+ /* this table must be in alphabetical order by name for bsearch() */
+ static const StageDesc stageDesc[] = {
+#ifdef NEVER /* disabled for now in favor of $match */
+ {DocumentSourceFilter::filterName,
+ DocumentSourceFilter::createFromBson},
+#endif
+ {DocumentSourceGroup::groupName,
+ DocumentSourceGroup::createFromBson},
+ {DocumentSourceLimit::limitName,
+ DocumentSourceLimit::createFromBson},
+ {DocumentSourceMatch::matchName,
+ DocumentSourceMatch::createFromBson},
+#ifdef LATER /* https://jira.mongodb.org/browse/SERVER-3253 */
+ {DocumentSourceOut::outName,
+ DocumentSourceOut::createFromBson},
+#endif
+ {DocumentSourceProject::projectName,
+ DocumentSourceProject::createFromBson},
+ {DocumentSourceSkip::skipName,
+ DocumentSourceSkip::createFromBson},
+ {DocumentSourceSort::sortName,
+ DocumentSourceSort::createFromBson},
+ {DocumentSourceUnwind::unwindName,
+ DocumentSourceUnwind::createFromBson},
+ };
+ static const size_t nStageDesc = sizeof(stageDesc) / sizeof(StageDesc);
+
+ static int stageDescCmp(const void *pL, const void *pR) {
+ return strcmp(((const StageDesc *)pL)->pName,
+ ((const StageDesc *)pR)->pName);
+ }
+
+ intrusive_ptr<Pipeline> Pipeline::parseCommand(
+ string &errmsg, BSONObj &cmdObj,
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<Pipeline> pPipeline(new Pipeline(pCtx));
+ vector<BSONElement> pipeline;
+
+ /* gather the specification for the aggregation */
+ for(BSONObj::iterator cmdIterator = cmdObj.begin();
+ cmdIterator.more(); ) {
+ BSONElement cmdElement(cmdIterator.next());
+ const char *pFieldName = cmdElement.fieldName();
+
+ /* look for the aggregation command */
+ if (!strcmp(pFieldName, commandName)) {
+ pPipeline->collectionName = cmdElement.String();
+ continue;
+ }
+
+ /* check for the collection name */
+ if (!strcmp(pFieldName, pipelineName)) {
+ pipeline = cmdElement.Array();
+ continue;
+ }
+
+ /* check for explain option */
+ if (!strcmp(pFieldName, explainName)) {
+ pPipeline->explain = cmdElement.Bool();
+ continue;
+ }
+
+ /* if the request came from the router, we're in a shard */
+ if (!strcmp(pFieldName, fromRouterName)) {
+ pCtx->setInShard(cmdElement.Bool());
+ continue;
+ }
+
+ /* check for debug options */
+ if (!strcmp(pFieldName, splitMongodPipelineName)) {
+ pPipeline->splitMongodPipeline = true;
+ continue;
+ }
+
+ /* we didn't recognize a field in the command */
+ ostringstream sb;
+ sb <<
"unrecognized field \"" <<
- cmdElement.fieldName();
- errmsg = sb.str();
- return intrusive_ptr<Pipeline>();
- }
-
- /*
- If we get here, we've harvested the fields we expect for a pipeline.
-
- Set up the specified document source pipeline.
- */
- SourceVector *pSourceVector = &pPipeline->sourceVector; // shorthand
-
- /* iterate over the steps in the pipeline */
- const size_t nSteps = pipeline.size();
- for(size_t iStep = 0; iStep < nSteps; ++iStep) {
- /* pull out the pipeline element as an object */
- BSONElement pipeElement(pipeline[iStep]);
- uassert(15942, str::stream() << "pipeline element " <<
- iStep << " is not an object",
- pipeElement.type() == Object);
- BSONObj bsonObj(pipeElement.Obj());
-
- intrusive_ptr<DocumentSource> pSource;
-
- /* use the object to add a DocumentSource to the processing chain */
- BSONObjIterator bsonIterator(bsonObj);
- while(bsonIterator.more()) {
- BSONElement bsonElement(bsonIterator.next());
- const char *pFieldName = bsonElement.fieldName();
-
- /* select the appropriate operation and instantiate */
- StageDesc key;
- key.pName = pFieldName;
- const StageDesc *pDesc = (const StageDesc *)
- bsearch(&key, stageDesc, nStageDesc, sizeof(StageDesc),
- stageDescCmp);
- if (pDesc) {
- pSource = (*pDesc->pFactory)(&bsonElement, pCtx);
- pSource->setPipelineStep(iStep);
- }
- else {
- ostringstream sb;
- sb <<
- "Pipeline::run(): unrecognized pipeline op \"" <<
- pFieldName;
- errmsg = sb.str();
- return intrusive_ptr<Pipeline>();
- }
- }
-
- pSourceVector->push_back(pSource);
- }
-
- /* if there aren't any pipeline stages, there's nothing more to do */
- if (!pSourceVector->size())
- return pPipeline;
-
- /*
- Move filters up where possible.
-
- CW TODO -- move filter past projections where possible, and noting
- corresponding field renaming.
- */
-
- /*
- Wherever there is a match immediately following a sort, swap them.
- This means we sort fewer items. Neither changes the documents in
- the stream, so this transformation shouldn't affect the result.
-
- We do this first, because then when we coalesce operators below,
- any adjacent matches will be combined.
- */
- for(size_t srcn = pSourceVector->size(), srci = 1;
- srci < srcn; ++srci) {
- intrusive_ptr<DocumentSource> &pSource = pSourceVector->at(srci);
- if (dynamic_cast<DocumentSourceMatch *>(pSource.get())) {
- intrusive_ptr<DocumentSource> &pPrevious =
- pSourceVector->at(srci - 1);
- if (dynamic_cast<DocumentSourceSort *>(pPrevious.get())) {
- /* swap this item with the previous */
- intrusive_ptr<DocumentSource> pTemp(pPrevious);
- pPrevious = pSource;
- pSource = pTemp;
- }
- }
- }
-
- /*
- Coalesce adjacent filters where possible. Two adjacent filters
- are equivalent to one filter whose predicate is the conjunction of
- the two original filters' predicates. For now, capture this by
- giving any DocumentSource the option to absorb it's successor; this
- will also allow adjacent projections to coalesce when possible.
-
- Run through the DocumentSources, and give each one the opportunity
- to coalesce with its successor. If successful, remove the
- successor.
-
- Move all document sources to a temporary list.
- */
- SourceVector tempVector(*pSourceVector);
- pSourceVector->clear();
-
- /* move the first one to the final list */
- pSourceVector->push_back(tempVector[0]);
-
- /* run through the sources, coalescing them or keeping them */
- for(size_t tempn = tempVector.size(), tempi = 1;
- tempi < tempn; ++tempi) {
- /*
- If we can't coalesce the source with the last, then move it
- to the final list, and make it the new last. (If we succeeded,
- then we're still on the same last, and there's no need to move
- or do anything with the source -- the destruction of tempVector
- will take care of the rest.)
- */
- intrusive_ptr<DocumentSource> &pLastSource = pSourceVector->back();
- intrusive_ptr<DocumentSource> &pTemp = tempVector.at(tempi);
+ cmdElement.fieldName();
+ errmsg = sb.str();
+ return intrusive_ptr<Pipeline>();
+ }
+
+ /*
+ If we get here, we've harvested the fields we expect for a pipeline.
+
+ Set up the specified document source pipeline.
+ */
+ SourceVector *pSourceVector = &pPipeline->sourceVector; // shorthand
+
+ /* iterate over the steps in the pipeline */
+ const size_t nSteps = pipeline.size();
+ for(size_t iStep = 0; iStep < nSteps; ++iStep) {
+ /* pull out the pipeline element as an object */
+ BSONElement pipeElement(pipeline[iStep]);
+ uassert(15942, str::stream() << "pipeline element " <<
+ iStep << " is not an object",
+ pipeElement.type() == Object);
+ BSONObj bsonObj(pipeElement.Obj());
+
+ intrusive_ptr<DocumentSource> pSource;
+
+ /* use the object to add a DocumentSource to the processing chain */
+ BSONObjIterator bsonIterator(bsonObj);
+ while(bsonIterator.more()) {
+ BSONElement bsonElement(bsonIterator.next());
+ const char *pFieldName = bsonElement.fieldName();
+
+ /* select the appropriate operation and instantiate */
+ StageDesc key;
+ key.pName = pFieldName;
+ const StageDesc *pDesc = (const StageDesc *)
+ bsearch(&key, stageDesc, nStageDesc, sizeof(StageDesc),
+ stageDescCmp);
+ if (pDesc) {
+ pSource = (*pDesc->pFactory)(&bsonElement, pCtx);
+ pSource->setPipelineStep(iStep);
+ }
+ else {
+ ostringstream sb;
+ sb <<
+ "Pipeline::run(): unrecognized pipeline op \"" <<
+ pFieldName;
+ errmsg = sb.str();
+ return intrusive_ptr<Pipeline>();
+ }
+ }
+
+ pSourceVector->push_back(pSource);
+ }
+
+ /* if there aren't any pipeline stages, there's nothing more to do */
+ if (!pSourceVector->size())
+ return pPipeline;
+
+ /*
+ Move filters up where possible.
+
+ CW TODO -- move filter past projections where possible, and noting
+ corresponding field renaming.
+ */
+
+ /*
+ Wherever there is a match immediately following a sort, swap them.
+ This means we sort fewer items. Neither changes the documents in
+ the stream, so this transformation shouldn't affect the result.
+
+ We do this first, because then when we coalesce operators below,
+ any adjacent matches will be combined.
+ */
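+        /*
+          For example (illustrative pipeline only):
+            [ {$sort: {a: 1}}, {$match: {a: {$gt: 5}}} ]
+          is rewritten as
+            [ {$match: {a: {$gt: 5}}}, {$sort: {a: 1}} ]
+          so that only matching documents reach the sort.
+        */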
+ for(size_t srcn = pSourceVector->size(), srci = 1;
+ srci < srcn; ++srci) {
+ intrusive_ptr<DocumentSource> &pSource = pSourceVector->at(srci);
+ if (dynamic_cast<DocumentSourceMatch *>(pSource.get())) {
+ intrusive_ptr<DocumentSource> &pPrevious =
+ pSourceVector->at(srci - 1);
+ if (dynamic_cast<DocumentSourceSort *>(pPrevious.get())) {
+ /* swap this item with the previous */
+ intrusive_ptr<DocumentSource> pTemp(pPrevious);
+ pPrevious = pSource;
+ pSource = pTemp;
+ }
+ }
+ }
+
+ /*
+ Coalesce adjacent filters where possible. Two adjacent filters
+ are equivalent to one filter whose predicate is the conjunction of
+ the two original filters' predicates. For now, capture this by
+          giving any DocumentSource the option to absorb its successor; this
+ will also allow adjacent projections to coalesce when possible.
+
+ Run through the DocumentSources, and give each one the opportunity
+ to coalesce with its successor. If successful, remove the
+ successor.
+
+ Move all document sources to a temporary list.
+ */
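+        /*
+          For example (illustrative only), the adjacent pair
+            {$match: {a: 1}}, {$match: {b: 2}}
+          is logically equivalent to the single stage
+            {$match: {a: 1, b: 2}},
+          so a stage that knows how to absorb its successor could collapse
+          such a pair.
+        */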
+ SourceVector tempVector(*pSourceVector);
+ pSourceVector->clear();
+
+ /* move the first one to the final list */
+ pSourceVector->push_back(tempVector[0]);
+
+ /* run through the sources, coalescing them or keeping them */
+ for(size_t tempn = tempVector.size(), tempi = 1;
+ tempi < tempn; ++tempi) {
+ /*
+ If we can't coalesce the source with the last, then move it
+ to the final list, and make it the new last. (If we succeeded,
+ then we're still on the same last, and there's no need to move
+ or do anything with the source -- the destruction of tempVector
+ will take care of the rest.)
+ */
+ intrusive_ptr<DocumentSource> &pLastSource = pSourceVector->back();
+ intrusive_ptr<DocumentSource> &pTemp = tempVector.at(tempi);
if (!pTemp || !pLastSource) {
errmsg = "Pipeline received empty document as argument";
return intrusive_ptr<Pipeline>();
}
- if (!pLastSource->coalesce(pTemp))
- pSourceVector->push_back(pTemp);
- }
-
- /* optimize the elements in the pipeline */
- for(SourceVector::iterator iter(pSourceVector->begin()),
+ if (!pLastSource->coalesce(pTemp))
+ pSourceVector->push_back(pTemp);
+ }
+
+ /* optimize the elements in the pipeline */
+ for(SourceVector::iterator iter(pSourceVector->begin()),
listEnd(pSourceVector->end()); iter != listEnd; ++iter) {
if (!*iter) {
errmsg = "Pipeline received empty document as argument";
return intrusive_ptr<Pipeline>();
}
- (*iter)->optimize();
+ (*iter)->optimize();
}
-
- return pPipeline;
- }
-
- intrusive_ptr<Pipeline> Pipeline::splitForSharded() {
- /* create an initialize the shard spec we'll return */
- intrusive_ptr<Pipeline> pShardPipeline(new Pipeline(pCtx));
- pShardPipeline->collectionName = collectionName;
- pShardPipeline->explain = explain;
-
- /* put the source list aside */
- SourceVector tempVector(sourceVector);
- sourceVector.clear();
-
- /*
- Run through the pipeline, looking for points to split it into
- shard pipelines, and the rest.
- */
- while(!tempVector.empty()) {
- intrusive_ptr<DocumentSource> &pSource = tempVector.front();
-
- /* hang on to this in advance, in case it is a group */
- DocumentSourceGroup *pGroup =
- dynamic_cast<DocumentSourceGroup *>(pSource.get());
-
- /* move the source from the tempVector to the shard sourceVector */
- pShardPipeline->sourceVector.push_back(pSource);
- tempVector.erase(tempVector.begin());
-
- /*
- If we found a group, that's a split point.
- */
- if (pGroup) {
- /* start this pipeline with the group merger */
- sourceVector.push_back(pGroup->createMerger());
-
- /* and then add everything that remains and quit */
- for(size_t tempn = tempVector.size(), tempi = 0;
- tempi < tempn; ++tempi)
- sourceVector.push_back(tempVector[tempi]);
- break;
- }
- }
-
- return pShardPipeline;
- }
-
- bool Pipeline::getInitialQuery(BSONObjBuilder *pQueryBuilder) const
- {
- if (!sourceVector.size())
- return false;
-
- /* look for an initial $match */
- const intrusive_ptr<DocumentSource> &pMC = sourceVector.front();
- const DocumentSourceMatch *pMatch =
- dynamic_cast<DocumentSourceMatch *>(pMC.get());
-
- if (!pMatch)
- return false;
-
- /* build the query */
- pMatch->toMatcherBson(pQueryBuilder);
-
- return true;
- }
-
- void Pipeline::toBson(BSONObjBuilder *pBuilder) const {
- /* create an array out of the pipeline operations */
- BSONArrayBuilder arrayBuilder;
- for(SourceVector::const_iterator iter(sourceVector.begin()),
- listEnd(sourceVector.end()); iter != listEnd; ++iter) {
- intrusive_ptr<DocumentSource> pSource(*iter);
- pSource->addToBsonArray(&arrayBuilder);
- }
-
- /* add the top-level items to the command */
- pBuilder->append(commandName, getCollectionName());
- pBuilder->append(pipelineName, arrayBuilder.arr());
-
- if (explain) {
- pBuilder->append(explainName, explain);
- }
-
- bool btemp;
- if ((btemp = getSplitMongodPipeline())) {
- pBuilder->append(splitMongodPipelineName, btemp);
- }
-
- if ((btemp = pCtx->getInRouter())) {
- pBuilder->append(fromRouterName, btemp);
- }
- }
-
- bool Pipeline::run(BSONObjBuilder &result, string &errmsg,
- const intrusive_ptr<DocumentSource> &pInputSource) {
- /*
- Analyze dependency information.
-
- This pushes dependencies from the end of the pipeline back to the
- front of it, and finally passes that to the input source before we
- execute the pipeline.
- */
- intrusive_ptr<DependencyTracker> pTracker(new DependencyTracker());
- for(SourceVector::reverse_iterator iter(sourceVector.rbegin()),
- listBeg(sourceVector.rend()); iter != listBeg; ++iter) {
- intrusive_ptr<DocumentSource> pTemp(*iter);
- pTemp->manageDependencies(pTracker);
- }
-
- pInputSource->manageDependencies(pTracker);
-
- /* chain together the sources we found */
- DocumentSource *pSource = pInputSource.get();
- for(SourceVector::iterator iter(sourceVector.begin()),
- listEnd(sourceVector.end()); iter != listEnd; ++iter) {
- intrusive_ptr<DocumentSource> pTemp(*iter);
- pTemp->setSource(pSource);
- pSource = pTemp.get();
- }
- /* pSource is left pointing at the last source in the chain */
-
- /*
- Iterate through the resulting documents, and add them to the result.
- We do this even if we're doing an explain, in order to capture
- the document counts and other stats. However, we don't capture
- the result documents for explain.
-
- We wrap all the BSONObjBuilder calls with a try/catch in case the
- objects get too large and cause an exception.
- */
- try {
- if (explain) {
- if (!pCtx->getInRouter())
- writeExplainShard(result, pInputSource);
- else {
- writeExplainMongos(result, pInputSource);
- }
- }
- else
- {
- BSONArrayBuilder resultArray; // where we'll stash the results
- for(bool hasDocument = !pSource->eof(); hasDocument;
- hasDocument = pSource->advance()) {
- intrusive_ptr<Document> pDocument(pSource->getCurrent());
-
- /* add the document to the result set */
- BSONObjBuilder documentBuilder;
- pDocument->toBson(&documentBuilder);
- resultArray.append(documentBuilder.done());
- }
-
- result.appendArray("result", resultArray.arr());
- }
- } catch(AssertionException &ae) {
- /*
- If its not the "object too large" error, rethrow.
- At time of writing, that error code comes from
- mongo/src/mongo/bson/util/builder.h
- */
- if (ae.getCode() != 13548)
- throw;
-
- /* throw the nicer human-readable error */
- uassert(16029, str::stream() <<
- "aggregation result exceeds maximum document size limit ("
- << (BSONObjMaxUserSize / (1024 * 1024)) << "MB)",
- false);
- }
-
- return true;
- }
-
- void Pipeline::writeExplainOps(BSONArrayBuilder *pArrayBuilder) const {
- for(SourceVector::const_iterator iter(sourceVector.begin()),
- listEnd(sourceVector.end()); iter != listEnd; ++iter) {
- intrusive_ptr<DocumentSource> pSource(*iter);
-
- pSource->addToBsonArray(pArrayBuilder, true);
- }
- }
-
- void Pipeline::writeExplainShard(
- BSONObjBuilder &result,
- const intrusive_ptr<DocumentSource> &pInputSource) const {
- BSONArrayBuilder opArray; // where we'll put the pipeline ops
-
- // first the cursor, which isn't in the opArray
- pInputSource->addToBsonArray(&opArray, true);
-
- // next, add the pipeline operators
- writeExplainOps(&opArray);
-
- result.appendArray(serverPipelineName, opArray.arr());
- }
-
- void Pipeline::writeExplainMongos(
- BSONObjBuilder &result,
- const intrusive_ptr<DocumentSource> &pInputSource) const {
-
- /*
- For now, this should be a BSON source array.
- In future, we might have a more clever way of getting this, when
- we have more interleaved fetching between shards. The DocumentSource
- interface will have to change to accomodate that.
- */
- DocumentSourceBsonArray *pSourceBsonArray =
- dynamic_cast<DocumentSourceBsonArray *>(pInputSource.get());
- verify(pSourceBsonArray);
-
- BSONArrayBuilder shardOpArray; // where we'll put the pipeline ops
- for(bool hasDocument = !pSourceBsonArray->eof(); hasDocument;
- hasDocument = pSourceBsonArray->advance()) {
- intrusive_ptr<Document> pDocument(
- pSourceBsonArray->getCurrent());
- BSONObjBuilder opBuilder;
- pDocument->toBson(&opBuilder);
- shardOpArray.append(opBuilder.obj());
- }
-
- BSONArrayBuilder mongosOpArray; // where we'll put the pipeline ops
- writeExplainOps(&mongosOpArray);
-
- // now we combine the shard pipelines with the one here
- result.append(serverPipelineName, shardOpArray.arr());
- result.append(mongosPipelineName, mongosOpArray.arr());
- }
-
-} // namespace mongo
+
+ return pPipeline;
+ }
+
+ intrusive_ptr<Pipeline> Pipeline::splitForSharded() {
+        /* create and initialize the shard spec we'll return */
+ intrusive_ptr<Pipeline> pShardPipeline(new Pipeline(pCtx));
+ pShardPipeline->collectionName = collectionName;
+ pShardPipeline->explain = explain;
+
+ /* put the source list aside */
+ SourceVector tempVector(sourceVector);
+ sourceVector.clear();
+
+ /*
+ Run through the pipeline, looking for points to split it into
+ shard pipelines, and the rest.
+ */
+ while(!tempVector.empty()) {
+ intrusive_ptr<DocumentSource> &pSource = tempVector.front();
+
+ /* hang on to this in advance, in case it is a group */
+ DocumentSourceGroup *pGroup =
+ dynamic_cast<DocumentSourceGroup *>(pSource.get());
+
+ /* move the source from the tempVector to the shard sourceVector */
+ pShardPipeline->sourceVector.push_back(pSource);
+ tempVector.erase(tempVector.begin());
+
+ /*
+ If we found a group, that's a split point.
+ */
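+            /*
+              For example (illustrative only), [$match, $group, $sort]
+              splits into a shard pipeline [$match, $group] and a mongos
+              pipeline [group-merger, $sort].
+            */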
+ if (pGroup) {
+ /* start this pipeline with the group merger */
+ sourceVector.push_back(pGroup->createMerger());
+
+ /* and then add everything that remains and quit */
+ for(size_t tempn = tempVector.size(), tempi = 0;
+ tempi < tempn; ++tempi)
+ sourceVector.push_back(tempVector[tempi]);
+ break;
+ }
+ }
+
+ return pShardPipeline;
+ }
+
+ bool Pipeline::getInitialQuery(BSONObjBuilder *pQueryBuilder) const
+ {
+ if (!sourceVector.size())
+ return false;
+
+ /* look for an initial $match */
+ const intrusive_ptr<DocumentSource> &pMC = sourceVector.front();
+ const DocumentSourceMatch *pMatch =
+ dynamic_cast<DocumentSourceMatch *>(pMC.get());
+
+ if (!pMatch)
+ return false;
+
+ /* build the query */
+ pMatch->toMatcherBson(pQueryBuilder);
+
+ return true;
+ }
+
+ void Pipeline::toBson(BSONObjBuilder *pBuilder) const {
+ /* create an array out of the pipeline operations */
+ BSONArrayBuilder arrayBuilder;
+ for(SourceVector::const_iterator iter(sourceVector.begin()),
+ listEnd(sourceVector.end()); iter != listEnd; ++iter) {
+ intrusive_ptr<DocumentSource> pSource(*iter);
+ pSource->addToBsonArray(&arrayBuilder);
+ }
+
+ /* add the top-level items to the command */
+ pBuilder->append(commandName, getCollectionName());
+ pBuilder->append(pipelineName, arrayBuilder.arr());
+
+ if (explain) {
+ pBuilder->append(explainName, explain);
+ }
+
+ bool btemp;
+ if ((btemp = getSplitMongodPipeline())) {
+ pBuilder->append(splitMongodPipelineName, btemp);
+ }
+
+ if ((btemp = pCtx->getInRouter())) {
+ pBuilder->append(fromRouterName, btemp);
+ }
+ }
+
+ bool Pipeline::run(BSONObjBuilder &result, string &errmsg,
+ const intrusive_ptr<DocumentSource> &pInputSource) {
+ /*
+ Analyze dependency information.
+
+ This pushes dependencies from the end of the pipeline back to the
+ front of it, and finally passes that to the input source before we
+ execute the pipeline.
+ */
+ intrusive_ptr<DependencyTracker> pTracker(new DependencyTracker());
+ for(SourceVector::reverse_iterator iter(sourceVector.rbegin()),
+ listBeg(sourceVector.rend()); iter != listBeg; ++iter) {
+ intrusive_ptr<DocumentSource> pTemp(*iter);
+ pTemp->manageDependencies(pTracker);
+ }
+
+ pInputSource->manageDependencies(pTracker);
+
+ /* chain together the sources we found */
+ DocumentSource *pSource = pInputSource.get();
+ for(SourceVector::iterator iter(sourceVector.begin()),
+ listEnd(sourceVector.end()); iter != listEnd; ++iter) {
+ intrusive_ptr<DocumentSource> pTemp(*iter);
+ pTemp->setSource(pSource);
+ pSource = pTemp.get();
+ }
+ /* pSource is left pointing at the last source in the chain */
+
+ /*
+ Iterate through the resulting documents, and add them to the result.
+ We do this even if we're doing an explain, in order to capture
+ the document counts and other stats. However, we don't capture
+ the result documents for explain.
+
+ We wrap all the BSONObjBuilder calls with a try/catch in case the
+ objects get too large and cause an exception.
+ */
+ try {
+ if (explain) {
+ if (!pCtx->getInRouter())
+ writeExplainShard(result, pInputSource);
+ else {
+ writeExplainMongos(result, pInputSource);
+ }
+ }
+ else
+ {
+ BSONArrayBuilder resultArray; // where we'll stash the results
+ for(bool hasDocument = !pSource->eof(); hasDocument;
+ hasDocument = pSource->advance()) {
+ intrusive_ptr<Document> pDocument(pSource->getCurrent());
+
+ /* add the document to the result set */
+ BSONObjBuilder documentBuilder;
+ pDocument->toBson(&documentBuilder);
+ resultArray.append(documentBuilder.done());
+ }
+
+ result.appendArray("result", resultArray.arr());
+ }
+ } catch(AssertionException &ae) {
+ /*
+              If it's not the "object too large" error, rethrow.
+ At time of writing, that error code comes from
+ mongo/src/mongo/bson/util/builder.h
+ */
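+            // (BSONObjMaxUserSize is 16MB at the time of writing, so the
+            // message below reports a 16MB limit.)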
+ if (ae.getCode() != 13548)
+ throw;
+
+ /* throw the nicer human-readable error */
+ uassert(16029, str::stream() <<
+ "aggregation result exceeds maximum document size limit ("
+ << (BSONObjMaxUserSize / (1024 * 1024)) << "MB)",
+ false);
+ }
+
+ return true;
+ }
+
+ void Pipeline::writeExplainOps(BSONArrayBuilder *pArrayBuilder) const {
+ for(SourceVector::const_iterator iter(sourceVector.begin()),
+ listEnd(sourceVector.end()); iter != listEnd; ++iter) {
+ intrusive_ptr<DocumentSource> pSource(*iter);
+
+ pSource->addToBsonArray(pArrayBuilder, true);
+ }
+ }
+
+ void Pipeline::writeExplainShard(
+ BSONObjBuilder &result,
+ const intrusive_ptr<DocumentSource> &pInputSource) const {
+ BSONArrayBuilder opArray; // where we'll put the pipeline ops
+
+ // first the cursor, which isn't in the opArray
+ pInputSource->addToBsonArray(&opArray, true);
+
+ // next, add the pipeline operators
+ writeExplainOps(&opArray);
+
+ result.appendArray(serverPipelineName, opArray.arr());
+ }
+
+ void Pipeline::writeExplainMongos(
+ BSONObjBuilder &result,
+ const intrusive_ptr<DocumentSource> &pInputSource) const {
+
+ /*
+ For now, this should be a BSON source array.
+ In future, we might have a more clever way of getting this, when
+ we have more interleaved fetching between shards. The DocumentSource
+          interface will have to change to accommodate that.
+ */
+ DocumentSourceBsonArray *pSourceBsonArray =
+ dynamic_cast<DocumentSourceBsonArray *>(pInputSource.get());
+ verify(pSourceBsonArray);
+
+ BSONArrayBuilder shardOpArray; // where we'll put the pipeline ops
+ for(bool hasDocument = !pSourceBsonArray->eof(); hasDocument;
+ hasDocument = pSourceBsonArray->advance()) {
+ intrusive_ptr<Document> pDocument(
+ pSourceBsonArray->getCurrent());
+ BSONObjBuilder opBuilder;
+ pDocument->toBson(&opBuilder);
+ shardOpArray.append(opBuilder.obj());
+ }
+
+ BSONArrayBuilder mongosOpArray; // where we'll put the pipeline ops
+ writeExplainOps(&mongosOpArray);
+
+ // now we combine the shard pipelines with the one here
+ result.append(serverPipelineName, shardOpArray.arr());
+ result.append(mongosPipelineName, mongosOpArray.arr());
+ }
+
+} // namespace mongo
diff --git a/src/mongo/db/commands/pipeline_command.cpp b/src/mongo/db/commands/pipeline_command.cpp
index 71449ea53c3..e79bf75fe68 100755
--- a/src/mongo/db/commands/pipeline_command.cpp
+++ b/src/mongo/db/commands/pipeline_command.cpp
@@ -1,301 +1,301 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-
-#include "db/commands/pipeline.h"
-#include "db/commands/pipeline_d.h"
-#include "db/cursor.h"
-#include "db/interrupt_status_mongod.h"
-#include "db/pdfile.h"
-#include "db/pipeline/accumulator.h"
-#include "db/pipeline/document.h"
-#include "db/pipeline/document_source.h"
-#include "db/pipeline/expression.h"
-#include "db/pipeline/expression_context.h"
-
-namespace mongo {
-
- /** mongodb "commands" (sent via db.$cmd.findOne(...))
- subclass to make a command. define a singleton object for it.
- */
- class PipelineCommand :
- public Command {
- public:
- // virtuals from Command
- virtual ~PipelineCommand();
- virtual bool run(const string &db, BSONObj &cmdObj, int options,
- string &errmsg, BSONObjBuilder &result, bool fromRepl);
- virtual LockType locktype() const;
- virtual bool slaveOk() const;
- virtual void help(stringstream &help) const;
-
- PipelineCommand();
-
- private:
- /*
- For the case of explain, we don't want to hold any lock at all,
- because it generates warnings about recursive locks. However,
- the getting the explain information for the underlying cursor uses
- the direct client cursor, and that gets a lock. Therefore, we need
- to take steps to avoid holding a lock while we use that. On the
- other hand, we need to have a READ lock for normal explain execution.
- Therefore, the lock is managed manually, and not through the virtual
- locktype() above.
-
- In order to achieve this, locktype() returns NONE, but the lock that
- would be managed for reading (for executing the pipeline in the
- regular way), will be managed manually here. This code came from
- dbcommands.cpp, where objects are constructed to hold the lock
- and automatically release it on destruction. The use of this
- pattern requires extra functions to hold the lock scope and from
- within which to execute the other steps of the explain.
-
- The arguments for these are all the same, and come from run(), but
- are passed in so that new blocks can be created to hold the
- automatic locking objects.
- */
-
- /*
- Execute the pipeline for the explain. This is common to both the
- locked and unlocked code path. However, the results are different.
- For an explain, with no lock, it really outputs the pipeline
- chain rather than fetching the data.
- */
- bool executePipeline(
- BSONObjBuilder &result, string &errmsg, const string &ns,
- intrusive_ptr<Pipeline> &pPipeline,
- intrusive_ptr<DocumentSourceCursor> &pSource,
- intrusive_ptr<ExpressionContext> &pCtx);
-
- /*
- The explain code path holds a lock while the original cursor is
- parsed; we still need to take that step, because that is how we
- determine whether or not indexes will allow the optimization of
- early $match and/or $sort.
-
- Once the Cursor is identified, it is released, and then the lock
- is released (automatically, via end of a block), and then the
- pipeline is executed.
- */
- bool runExplain(
- BSONObjBuilder &result, string &errmsg,
- const string &ns, const string &db,
- intrusive_ptr<Pipeline> &pPipeline,
- intrusive_ptr<ExpressionContext> &pCtx);
-
- /*
- The execute code path holds a READ lock for its entire duration.
- The Cursor is created, and then documents are pulled out of it until
- they are exhausted (or some other error occurs).
- */
- bool runExecute(
- BSONObjBuilder &result, string &errmsg,
- const string &ns, const string &db,
- intrusive_ptr<Pipeline> &pPipeline,
- intrusive_ptr<ExpressionContext> &pCtx);
- };
-
- // self-registering singleton static instance
- static PipelineCommand pipelineCommand;
-
- PipelineCommand::PipelineCommand():
- Command(Pipeline::commandName) {
- }
-
- Command::LockType PipelineCommand::locktype() const {
- /*
- The locks for this are managed manually. The problem is that the
- explain execution uses the direct client interface, and this
- causes recursive lock warnings if the lock is already held. As
- a result, there are two code paths for this. See the comments in
- the private section of PipelineCommand for more details.
- */
- return NONE;
- }
-
- bool PipelineCommand::slaveOk() const {
- return true;
- }
-
- void PipelineCommand::help(stringstream &help) const {
- help << "{ pipeline : [ { <data-pipe-op>: {...}}, ... ] }";
- }
-
- PipelineCommand::~PipelineCommand() {
- }
-
- bool PipelineCommand::runExplain(
- BSONObjBuilder &result, string &errmsg,
- const string &ns, const string &db,
- intrusive_ptr<Pipeline> &pPipeline,
- intrusive_ptr<ExpressionContext> &pCtx) {
-
- intrusive_ptr<DocumentSourceCursor> pSource;
-
- /*
- For EXPLAIN:
-
- This block is here to contain the scope of the lock. We need the lock
- while we prepare the cursor, but we need to have released it by the
- time the recursive call is made to get the explain information using
- the direct client interface under the execution phase.
- */
- {
- scoped_ptr<Lock::GlobalRead> lk;
- if(lockGlobally())
- lk.reset(new Lock::GlobalRead());
- Client::ReadContext ctx(ns, dbpath, requiresAuth()); // read lock
-
- pSource = PipelineD::prepareCursorSource(pPipeline, db, pCtx);
-
- /* release the Cursor before the lock gets released */
- pSource->releaseCursor();
- }
-
- /*
- For EXPLAIN this just uses the direct client to do an explain on
- what the underlying Cursor was, based on its query and sort
- settings, and then wraps it with JSON from the pipeline definition.
- That does not require the lock or cursor, both of which were
- released above.
- */
- return executePipeline(result, errmsg, ns, pPipeline, pSource, pCtx);
- }
-
- bool PipelineCommand::runExecute(
- BSONObjBuilder &result, string &errmsg,
- const string &ns, const string &db,
- intrusive_ptr<Pipeline> &pPipeline,
- intrusive_ptr<ExpressionContext> &pCtx) {
-
- scoped_ptr<Lock::GlobalRead> lk;
- if(lockGlobally())
- lk.reset(new Lock::GlobalRead());
- Client::ReadContext ctx(ns, dbpath, requiresAuth()); // read lock
-
- intrusive_ptr<DocumentSourceCursor> pSource(
- PipelineD::prepareCursorSource(pPipeline, db, pCtx));
- return executePipeline(result, errmsg, ns, pPipeline, pSource, pCtx);
- }
-
- bool PipelineCommand::executePipeline(
- BSONObjBuilder &result, string &errmsg, const string &ns,
- intrusive_ptr<Pipeline> &pPipeline,
- intrusive_ptr<DocumentSourceCursor> &pSource,
- intrusive_ptr<ExpressionContext> &pCtx) {
-
- /* this is the normal non-debug path */
- if (!pPipeline->getSplitMongodPipeline())
- return pPipeline->run(result, errmsg, pSource);
-
- /* setup as if we're in the router */
- pCtx->setInRouter(true);
-
- /*
- Here, we'll split the pipeline in the same way we would for sharding,
- for testing purposes.
-
- Run the shard pipeline first, then feed the results into the remains
- of the existing pipeline.
-
- Start by splitting the pipeline.
- */
- intrusive_ptr<Pipeline> pShardSplit(
- pPipeline->splitForSharded());
-
- /*
- Write the split pipeline as we would in order to transmit it to
- the shard servers.
- */
- BSONObjBuilder shardBuilder;
- pShardSplit->toBson(&shardBuilder);
- BSONObj shardBson(shardBuilder.done());
-
- DEV (log() << "\n---- shardBson\n" <<
- shardBson.jsonString(Strict, 1) << "\n----\n").flush();
-
- /* for debugging purposes, show what the pipeline now looks like */
- DEV {
- BSONObjBuilder pipelineBuilder;
- pPipeline->toBson(&pipelineBuilder);
- BSONObj pipelineBson(pipelineBuilder.done());
- (log() << "\n---- pipelineBson\n" <<
- pipelineBson.jsonString(Strict, 1) << "\n----\n").flush();
- }
-
- /* on the shard servers, create the local pipeline */
- intrusive_ptr<ExpressionContext> pShardCtx(
- ExpressionContext::create(&InterruptStatusMongod::status));
- intrusive_ptr<Pipeline> pShardPipeline(
- Pipeline::parseCommand(errmsg, shardBson, pShardCtx));
- if (!pShardPipeline.get()) {
- return false;
- }
-
- /* run the shard pipeline */
- BSONObjBuilder shardResultBuilder;
- string shardErrmsg;
- pShardPipeline->run(shardResultBuilder, shardErrmsg, pSource);
- BSONObj shardResult(shardResultBuilder.done());
-
- /* pick out the shard result, and prepare to read it */
- intrusive_ptr<DocumentSourceBsonArray> pShardSource;
- BSONObjIterator shardIter(shardResult);
- while(shardIter.more()) {
- BSONElement shardElement(shardIter.next());
- const char *pFieldName = shardElement.fieldName();
-
- if ((strcmp(pFieldName, "result") == 0) ||
- (strcmp(pFieldName, "serverPipeline") == 0)) {
- pShardSource = DocumentSourceBsonArray::create(
- &shardElement, pCtx);
-
- /*
- Connect the output of the shard pipeline with the mongos
- pipeline that will merge the results.
- */
- return pPipeline->run(result, errmsg, pShardSource);
- }
- }
-
- /* NOTREACHED */
- verify(false);
- return false;
- }
-
- bool PipelineCommand::run(const string &db, BSONObj &cmdObj,
- int options, string &errmsg,
- BSONObjBuilder &result, bool fromRepl) {
-
- intrusive_ptr<ExpressionContext> pCtx(
- ExpressionContext::create(&InterruptStatusMongod::status));
-
- /* try to parse the command; if this fails, then we didn't run */
- intrusive_ptr<Pipeline> pPipeline(
- Pipeline::parseCommand(errmsg, cmdObj, pCtx));
- if (!pPipeline.get())
- return false;
-
- string ns(parseNs(db, cmdObj));
-
- if (pPipeline->isExplain())
- return runExplain(result, errmsg, ns, db, pPipeline, pCtx);
- else
- return runExecute(result, errmsg, ns, db, pPipeline, pCtx);
- }
-
-} // namespace mongo
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+
+#include "db/commands/pipeline.h"
+#include "db/commands/pipeline_d.h"
+#include "db/cursor.h"
+#include "db/interrupt_status_mongod.h"
+#include "db/pdfile.h"
+#include "db/pipeline/accumulator.h"
+#include "db/pipeline/document.h"
+#include "db/pipeline/document_source.h"
+#include "db/pipeline/expression.h"
+#include "db/pipeline/expression_context.h"
+
+namespace mongo {
+
+ /** mongodb "commands" (sent via db.$cmd.findOne(...))
+ subclass to make a command. define a singleton object for it.
+ */
+ class PipelineCommand :
+ public Command {
+ public:
+ // virtuals from Command
+ virtual ~PipelineCommand();
+ virtual bool run(const string &db, BSONObj &cmdObj, int options,
+ string &errmsg, BSONObjBuilder &result, bool fromRepl);
+ virtual LockType locktype() const;
+ virtual bool slaveOk() const;
+ virtual void help(stringstream &help) const;
+
+ PipelineCommand();
+
+ private:
+ /*
+ For the case of explain, we don't want to hold any lock at all,
+ because it generates warnings about recursive locks. However,
+          getting the explain information for the underlying cursor uses
+ the direct client cursor, and that gets a lock. Therefore, we need
+ to take steps to avoid holding a lock while we use that. On the
+ other hand, we need to have a READ lock for normal explain execution.
+ Therefore, the lock is managed manually, and not through the virtual
+ locktype() above.
+
+ In order to achieve this, locktype() returns NONE, but the lock that
+ would be managed for reading (for executing the pipeline in the
+ regular way), will be managed manually here. This code came from
+ dbcommands.cpp, where objects are constructed to hold the lock
+ and automatically release it on destruction. The use of this
+ pattern requires extra functions to hold the lock scope and from
+ within which to execute the other steps of the explain.
+
+ The arguments for these are all the same, and come from run(), but
+ are passed in so that new blocks can be created to hold the
+ automatic locking objects.
+ */
+
+ /*
+ Execute the pipeline for the explain. This is common to both the
+ locked and unlocked code path. However, the results are different.
+ For an explain, with no lock, it really outputs the pipeline
+ chain rather than fetching the data.
+ */
+ bool executePipeline(
+ BSONObjBuilder &result, string &errmsg, const string &ns,
+ intrusive_ptr<Pipeline> &pPipeline,
+ intrusive_ptr<DocumentSourceCursor> &pSource,
+ intrusive_ptr<ExpressionContext> &pCtx);
+
+ /*
+ The explain code path holds a lock while the original cursor is
+ parsed; we still need to take that step, because that is how we
+ determine whether or not indexes will allow the optimization of
+ early $match and/or $sort.
+
+ Once the Cursor is identified, it is released, and then the lock
+ is released (automatically, via end of a block), and then the
+ pipeline is executed.
+ */
+ bool runExplain(
+ BSONObjBuilder &result, string &errmsg,
+ const string &ns, const string &db,
+ intrusive_ptr<Pipeline> &pPipeline,
+ intrusive_ptr<ExpressionContext> &pCtx);
+
+ /*
+ The execute code path holds a READ lock for its entire duration.
+ The Cursor is created, and then documents are pulled out of it until
+ they are exhausted (or some other error occurs).
+ */
+ bool runExecute(
+ BSONObjBuilder &result, string &errmsg,
+ const string &ns, const string &db,
+ intrusive_ptr<Pipeline> &pPipeline,
+ intrusive_ptr<ExpressionContext> &pCtx);
+ };
+
+ // self-registering singleton static instance
+ static PipelineCommand pipelineCommand;
+
+ PipelineCommand::PipelineCommand():
+ Command(Pipeline::commandName) {
+ }
+
+ Command::LockType PipelineCommand::locktype() const {
+ /*
+ The locks for this are managed manually. The problem is that the
+ explain execution uses the direct client interface, and this
+ causes recursive lock warnings if the lock is already held. As
+ a result, there are two code paths for this. See the comments in
+ the private section of PipelineCommand for more details.
+ */
+ return NONE;
+ }
+
+ bool PipelineCommand::slaveOk() const {
+ return true;
+ }
+
+ void PipelineCommand::help(stringstream &help) const {
+ help << "{ pipeline : [ { <data-pipe-op>: {...}}, ... ] }";
+ }
+
+ PipelineCommand::~PipelineCommand() {
+ }
+
+ bool PipelineCommand::runExplain(
+ BSONObjBuilder &result, string &errmsg,
+ const string &ns, const string &db,
+ intrusive_ptr<Pipeline> &pPipeline,
+ intrusive_ptr<ExpressionContext> &pCtx) {
+
+ intrusive_ptr<DocumentSourceCursor> pSource;
+
+ /*
+ For EXPLAIN:
+
+ This block is here to contain the scope of the lock. We need the lock
+ while we prepare the cursor, but we need to have released it by the
+ time the recursive call is made to get the explain information using
+ the direct client interface under the execution phase.
+ */
+ {
+ scoped_ptr<Lock::GlobalRead> lk;
+ if(lockGlobally())
+ lk.reset(new Lock::GlobalRead());
+ Client::ReadContext ctx(ns, dbpath, requiresAuth()); // read lock
+
+ pSource = PipelineD::prepareCursorSource(pPipeline, db, pCtx);
+
+ /* release the Cursor before the lock gets released */
+ pSource->releaseCursor();
+ }
+
+ /*
+ For EXPLAIN this just uses the direct client to do an explain on
+ what the underlying Cursor was, based on its query and sort
+ settings, and then wraps it with JSON from the pipeline definition.
+ That does not require the lock or cursor, both of which were
+ released above.
+ */
+ return executePipeline(result, errmsg, ns, pPipeline, pSource, pCtx);
+ }
+
+ bool PipelineCommand::runExecute(
+ BSONObjBuilder &result, string &errmsg,
+ const string &ns, const string &db,
+ intrusive_ptr<Pipeline> &pPipeline,
+ intrusive_ptr<ExpressionContext> &pCtx) {
+
+ scoped_ptr<Lock::GlobalRead> lk;
+ if(lockGlobally())
+ lk.reset(new Lock::GlobalRead());
+ Client::ReadContext ctx(ns, dbpath, requiresAuth()); // read lock
+
+ intrusive_ptr<DocumentSourceCursor> pSource(
+ PipelineD::prepareCursorSource(pPipeline, db, pCtx));
+ return executePipeline(result, errmsg, ns, pPipeline, pSource, pCtx);
+ }
+
+ bool PipelineCommand::executePipeline(
+ BSONObjBuilder &result, string &errmsg, const string &ns,
+ intrusive_ptr<Pipeline> &pPipeline,
+ intrusive_ptr<DocumentSourceCursor> &pSource,
+ intrusive_ptr<ExpressionContext> &pCtx) {
+
+ /* this is the normal non-debug path */
+ if (!pPipeline->getSplitMongodPipeline())
+ return pPipeline->run(result, errmsg, pSource);
+
+ /* setup as if we're in the router */
+ pCtx->setInRouter(true);
+
+ /*
+ Here, we'll split the pipeline in the same way we would for sharding,
+ for testing purposes.
+
+          Run the shard pipeline first, then feed the results into the
+          remainder of the existing pipeline.
+
+ Start by splitting the pipeline.
+ */
+ intrusive_ptr<Pipeline> pShardSplit(
+ pPipeline->splitForSharded());
+
+ /*
+ Write the split pipeline as we would in order to transmit it to
+ the shard servers.
+ */
+ BSONObjBuilder shardBuilder;
+ pShardSplit->toBson(&shardBuilder);
+ BSONObj shardBson(shardBuilder.done());
+
+ DEV (log() << "\n---- shardBson\n" <<
+ shardBson.jsonString(Strict, 1) << "\n----\n").flush();
+
+ /* for debugging purposes, show what the pipeline now looks like */
+ DEV {
+ BSONObjBuilder pipelineBuilder;
+ pPipeline->toBson(&pipelineBuilder);
+ BSONObj pipelineBson(pipelineBuilder.done());
+ (log() << "\n---- pipelineBson\n" <<
+ pipelineBson.jsonString(Strict, 1) << "\n----\n").flush();
+ }
+
+ /* on the shard servers, create the local pipeline */
+ intrusive_ptr<ExpressionContext> pShardCtx(
+ ExpressionContext::create(&InterruptStatusMongod::status));
+ intrusive_ptr<Pipeline> pShardPipeline(
+ Pipeline::parseCommand(errmsg, shardBson, pShardCtx));
+ if (!pShardPipeline.get()) {
+ return false;
+ }
+
+ /* run the shard pipeline */
+ BSONObjBuilder shardResultBuilder;
+ string shardErrmsg;
+ pShardPipeline->run(shardResultBuilder, shardErrmsg, pSource);
+ BSONObj shardResult(shardResultBuilder.done());
+
+ /* pick out the shard result, and prepare to read it */
+ intrusive_ptr<DocumentSourceBsonArray> pShardSource;
+ BSONObjIterator shardIter(shardResult);
+ while(shardIter.more()) {
+ BSONElement shardElement(shardIter.next());
+ const char *pFieldName = shardElement.fieldName();
+
+ if ((strcmp(pFieldName, "result") == 0) ||
+ (strcmp(pFieldName, "serverPipeline") == 0)) {
+ pShardSource = DocumentSourceBsonArray::create(
+ &shardElement, pCtx);
+
+ /*
+ Connect the output of the shard pipeline with the mongos
+ pipeline that will merge the results.
+ */
+ return pPipeline->run(result, errmsg, pShardSource);
+ }
+ }
+
+ /* NOTREACHED */
+ verify(false);
+ return false;
+ }
+
+ bool PipelineCommand::run(const string &db, BSONObj &cmdObj,
+ int options, string &errmsg,
+ BSONObjBuilder &result, bool fromRepl) {
+
+ intrusive_ptr<ExpressionContext> pCtx(
+ ExpressionContext::create(&InterruptStatusMongod::status));
+
+ /* try to parse the command; if this fails, then we didn't run */
+ intrusive_ptr<Pipeline> pPipeline(
+ Pipeline::parseCommand(errmsg, cmdObj, pCtx));
+ if (!pPipeline.get())
+ return false;
+
+ string ns(parseNs(db, cmdObj));
+
+ if (pPipeline->isExplain())
+ return runExplain(result, errmsg, ns, db, pPipeline, pCtx);
+ else
+ return runExecute(result, errmsg, ns, db, pPipeline, pCtx);
+ }
+
+} // namespace mongo
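
The comment above runExplain() describes holding the read lock only while the cursor source is prepared, then letting the enclosing block release it before the unlocked explain phase runs. A minimal sketch of that scope-limited locking pattern in self-contained standard C++ (std::mutex and the prepareSource() helper below are stand-ins, not MongoDB's Lock or PipelineD APIs):

    #include <iostream>
    #include <mutex>
    #include <string>

    std::mutex readLock;                 // stand-in for the global read lock

    std::string prepareSource() {        // stand-in for preparing the cursor source
        return "cursor-source";
    }

    void runExplainSketch() {
        std::string source;
        {
            // hold the "read lock" only while the source is prepared
            std::lock_guard<std::mutex> guard(readLock);
            source = prepareSource();
        }   // guard destroyed here: the lock is released before the unlocked phase

        // unlocked phase: safe to re-enter code that takes the same lock itself
        std::cout << "explaining " << source << " without holding the lock\n";
    }

    int main() {
        runExplainSketch();
        return 0;
    }
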
diff --git a/src/mongo/db/pipeline/accumulator.cpp b/src/mongo/db/pipeline/accumulator.cpp
index 0f6b2253ffc..da605eda6e3 100755
--- a/src/mongo/db/pipeline/accumulator.cpp
+++ b/src/mongo/db/pipeline/accumulator.cpp
@@ -1,79 +1,79 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-#include "db/pipeline/accumulator.h"
-
-#include "db/jsobj.h"
-#include "util/mongoutils/str.h"
-
-namespace mongo {
- using namespace mongoutils;
-
- void Accumulator::addOperand(
- const intrusive_ptr<Expression> &pExpression) {
- uassert(15943, str::stream() << "group accumulator " <<
- getOpName() << " only accepts one operand",
- vpOperand.size() < 1);
-
- ExpressionNary::addOperand(pExpression);
- }
-
- Accumulator::Accumulator():
- ExpressionNary() {
- }
-
- void Accumulator::opToBson(
- BSONObjBuilder *pBuilder, string opName,
- string fieldName) const {
- verify(vpOperand.size() == 1);
- BSONObjBuilder builder;
- vpOperand[0]->addToBsonObj(&builder, opName, false);
- pBuilder->append(fieldName, builder.done());
- }
-
- void Accumulator::addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName,
- bool requireExpression) const {
- opToBson(pBuilder, getOpName(), fieldName);
- }
-
- void Accumulator::addToBsonArray(BSONArrayBuilder *pBuilder) const {
- verify(false); // these can't appear in arrays
- }
-
- void agg_framework_reservedErrors() {
- uassert(16030, "reserved error", false);
- uassert(16031, "reserved error", false);
- uassert(16032, "reserved error", false);
- uassert(16033, "reserved error", false);
-
- uassert(16036, "reserved error", false);
- uassert(16037, "reserved error", false);
- uassert(16038, "reserved error", false);
- uassert(16039, "reserved error", false);
- uassert(16040, "reserved error", false);
- uassert(16041, "reserved error", false);
- uassert(16042, "reserved error", false);
- uassert(16043, "reserved error", false);
- uassert(16044, "reserved error", false);
- uassert(16045, "reserved error", false);
- uassert(16046, "reserved error", false);
- uassert(16047, "reserved error", false);
- uassert(16048, "reserved error", false);
- uassert(16049, "reserved error", false);
- }
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "db/pipeline/accumulator.h"
+
+#include "db/jsobj.h"
+#include "util/mongoutils/str.h"
+
+namespace mongo {
+ using namespace mongoutils;
+
+ void Accumulator::addOperand(
+ const intrusive_ptr<Expression> &pExpression) {
+ uassert(15943, str::stream() << "group accumulator " <<
+ getOpName() << " only accepts one operand",
+ vpOperand.size() < 1);
+
+ ExpressionNary::addOperand(pExpression);
+ }
+
+ Accumulator::Accumulator():
+ ExpressionNary() {
+ }
+
+ void Accumulator::opToBson(
+ BSONObjBuilder *pBuilder, string opName,
+ string fieldName) const {
+ verify(vpOperand.size() == 1);
+ BSONObjBuilder builder;
+ vpOperand[0]->addToBsonObj(&builder, opName, false);
+ pBuilder->append(fieldName, builder.done());
+ }
+
+ void Accumulator::addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName,
+ bool requireExpression) const {
+ opToBson(pBuilder, getOpName(), fieldName);
+ }
+
+ void Accumulator::addToBsonArray(BSONArrayBuilder *pBuilder) const {
+ verify(false); // these can't appear in arrays
+ }
+
+ void agg_framework_reservedErrors() {
+ uassert(16030, "reserved error", false);
+ uassert(16031, "reserved error", false);
+ uassert(16032, "reserved error", false);
+ uassert(16033, "reserved error", false);
+
+ uassert(16036, "reserved error", false);
+ uassert(16037, "reserved error", false);
+ uassert(16038, "reserved error", false);
+ uassert(16039, "reserved error", false);
+ uassert(16040, "reserved error", false);
+ uassert(16041, "reserved error", false);
+ uassert(16042, "reserved error", false);
+ uassert(16043, "reserved error", false);
+ uassert(16044, "reserved error", false);
+ uassert(16045, "reserved error", false);
+ uassert(16046, "reserved error", false);
+ uassert(16047, "reserved error", false);
+ uassert(16048, "reserved error", false);
+ uassert(16049, "reserved error", false);
+ }
+}
diff --git a/src/mongo/db/pipeline/accumulator.h b/src/mongo/db/pipeline/accumulator.h
index 74dcd403e65..f53682c957c 100755
--- a/src/mongo/db/pipeline/accumulator.h
+++ b/src/mongo/db/pipeline/accumulator.h
@@ -1,258 +1,258 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#pragma once
-
-#include "pch.h"
-
-#include <boost/unordered_set.hpp>
-#include "db/pipeline/value.h"
-#include "db/pipeline/expression.h"
-#include "bson/bsontypes.h"
-
-namespace mongo {
- class ExpressionContext;
-
- class Accumulator :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
- virtual void addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName,
- bool requireExpression) const;
- virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const;
-
- /*
- Get the accumulated value.
-
- @returns the accumulated value
- */
- virtual intrusive_ptr<const Value> getValue() const = 0;
-
- protected:
- Accumulator();
-
- /*
- Convenience method for doing this for accumulators. The pattern
- is always the same, so a common implementation works, but requires
- knowing the operator name.
-
- @param pBuilder the builder to add to
- @param fieldName the projected name
- @param opName the operator name
- */
- void opToBson(
- BSONObjBuilder *pBuilder, string fieldName, string opName) const;
- };
-
-
- class AccumulatorAddToSet :
- public Accumulator {
- public:
- // virtuals from Expression
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual intrusive_ptr<const Value> getValue() const;
- virtual const char *getOpName() const;
-
- /*
- Create an appending accumulator.
-
- @param pCtx the expression context
- @returns the created accumulator
- */
- static intrusive_ptr<Accumulator> create(
- const intrusive_ptr<ExpressionContext> &pCtx);
-
- private:
- AccumulatorAddToSet(const intrusive_ptr<ExpressionContext> &pTheCtx);
- typedef boost::unordered_set<intrusive_ptr<const Value>, Value::Hash > SetType;
- mutable SetType set;
- mutable SetType::iterator itr;
- intrusive_ptr<ExpressionContext> pCtx;
- };
-
-
- /*
- This isn't a finished accumulator, but rather a convenient base class
- for others such as $first, $last, $max, $min, and similar. It just
- provides a holder for a single Value, and the getter for that. The
- holder is protected so derived classes can manipulate it.
- */
- class AccumulatorSingleValue :
- public Accumulator {
- public:
- // virtuals from Expression
- virtual intrusive_ptr<const Value> getValue() const;
-
- protected:
- AccumulatorSingleValue();
-
- mutable intrusive_ptr<const Value> pValue; /* current min/max */
- };
-
-
- class AccumulatorFirst :
- public AccumulatorSingleValue {
- public:
- // virtuals from Expression
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
-
- /*
- Create the accumulator.
-
- @returns the created accumulator
- */
- static intrusive_ptr<Accumulator> create(
- const intrusive_ptr<ExpressionContext> &pCtx);
-
- private:
- AccumulatorFirst();
- };
-
-
- class AccumulatorLast :
- public AccumulatorSingleValue {
- public:
- // virtuals from Expression
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
-
- /*
- Create the accumulator.
-
- @returns the created accumulator
- */
- static intrusive_ptr<Accumulator> create(
- const intrusive_ptr<ExpressionContext> &pCtx);
-
- private:
- AccumulatorLast();
- };
-
-
- class AccumulatorSum :
- public Accumulator {
- public:
- // virtuals from Accumulator
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual intrusive_ptr<const Value> getValue() const;
- virtual const char *getOpName() const;
-
- /*
- Create a summing accumulator.
-
- @param pCtx the expression context
- @returns the created accumulator
- */
- static intrusive_ptr<Accumulator> create(
- const intrusive_ptr<ExpressionContext> &pCtx);
-
- protected: /* reused by AccumulatorAvg */
- AccumulatorSum();
-
- mutable BSONType totalType;
- mutable long long longTotal;
- mutable double doubleTotal;
- };
-
-
- class AccumulatorMinMax :
- public AccumulatorSingleValue {
- public:
- // virtuals from Expression
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
-
- /*
- Create either the max or min accumulator.
-
- @returns the created accumulator
- */
- static intrusive_ptr<Accumulator> createMin(
- const intrusive_ptr<ExpressionContext> &pCtx);
- static intrusive_ptr<Accumulator> createMax(
- const intrusive_ptr<ExpressionContext> &pCtx);
-
- private:
- AccumulatorMinMax(int theSense);
-
- int sense; /* 1 for min, -1 for max; used to "scale" comparison */
- };
-
-
- class AccumulatorPush :
- public Accumulator {
- public:
- // virtuals from Expression
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual intrusive_ptr<const Value> getValue() const;
- virtual const char *getOpName() const;
-
- /*
- Create an appending accumulator.
-
- @param pCtx the expression context
- @returns the created accumulator
- */
- static intrusive_ptr<Accumulator> create(
- const intrusive_ptr<ExpressionContext> &pCtx);
-
- private:
- AccumulatorPush(const intrusive_ptr<ExpressionContext> &pTheCtx);
-
- mutable vector<intrusive_ptr<const Value> > vpValue;
- intrusive_ptr<ExpressionContext> pCtx;
- };
-
-
- class AccumulatorAvg :
- public AccumulatorSum {
- typedef AccumulatorSum Super;
- public:
- // virtuals from Accumulator
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual intrusive_ptr<const Value> getValue() const;
- virtual const char *getOpName() const;
-
- /*
- Create an averaging accumulator.
-
- @param pCtx the expression context
- @returns the created accumulator
- */
- static intrusive_ptr<Accumulator> create(
- const intrusive_ptr<ExpressionContext> &pCtx);
-
- private:
- static const char subTotalName[];
- static const char countName[];
-
- AccumulatorAvg(const intrusive_ptr<ExpressionContext> &pCtx);
-
- mutable long long count;
- intrusive_ptr<ExpressionContext> pCtx;
- };
-
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+
+#include "pch.h"
+
+#include <boost/unordered_set.hpp>
+#include "db/pipeline/value.h"
+#include "db/pipeline/expression.h"
+#include "bson/bsontypes.h"
+
+namespace mongo {
+ class ExpressionContext;
+
+ class Accumulator :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+ virtual void addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName,
+ bool requireExpression) const;
+ virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const;
+
+ /*
+ Get the accumulated value.
+
+ @returns the accumulated value
+ */
+ virtual intrusive_ptr<const Value> getValue() const = 0;
+
+ protected:
+ Accumulator();
+
+ /*
+ Convenience method for doing this for accumulators. The pattern
+ is always the same, so a common implementation works, but requires
+ knowing the operator name.
+
+ @param pBuilder the builder to add to
+ @param fieldName the projected name
+ @param opName the operator name
+ */
+ void opToBson(
+ BSONObjBuilder *pBuilder, string fieldName, string opName) const;
+ };
+
+
+ class AccumulatorAddToSet :
+ public Accumulator {
+ public:
+ // virtuals from Expression
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual intrusive_ptr<const Value> getValue() const;
+ virtual const char *getOpName() const;
+
+ /*
+ Create an appending accumulator.
+
+ @param pCtx the expression context
+ @returns the created accumulator
+ */
+ static intrusive_ptr<Accumulator> create(
+ const intrusive_ptr<ExpressionContext> &pCtx);
+
+ private:
+ AccumulatorAddToSet(const intrusive_ptr<ExpressionContext> &pTheCtx);
+ typedef boost::unordered_set<intrusive_ptr<const Value>, Value::Hash > SetType;
+ mutable SetType set;
+ mutable SetType::iterator itr;
+ intrusive_ptr<ExpressionContext> pCtx;
+ };
+
+
+ /*
+ This isn't a finished accumulator, but rather a convenient base class
+ for others such as $first, $last, $max, $min, and similar. It just
+ provides a holder for a single Value, and the getter for that. The
+ holder is protected so derived classes can manipulate it.
+ */
+ class AccumulatorSingleValue :
+ public Accumulator {
+ public:
+ // virtuals from Expression
+ virtual intrusive_ptr<const Value> getValue() const;
+
+ protected:
+ AccumulatorSingleValue();
+
+ mutable intrusive_ptr<const Value> pValue; /* current min/max */
+ };
+
+
+ class AccumulatorFirst :
+ public AccumulatorSingleValue {
+ public:
+ // virtuals from Expression
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+
+ /*
+ Create the accumulator.
+
+ @returns the created accumulator
+ */
+ static intrusive_ptr<Accumulator> create(
+ const intrusive_ptr<ExpressionContext> &pCtx);
+
+ private:
+ AccumulatorFirst();
+ };
+
+
+ class AccumulatorLast :
+ public AccumulatorSingleValue {
+ public:
+ // virtuals from Expression
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+
+ /*
+ Create the accumulator.
+
+ @returns the created accumulator
+ */
+ static intrusive_ptr<Accumulator> create(
+ const intrusive_ptr<ExpressionContext> &pCtx);
+
+ private:
+ AccumulatorLast();
+ };
+
+
+ class AccumulatorSum :
+ public Accumulator {
+ public:
+ // virtuals from Accumulator
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual intrusive_ptr<const Value> getValue() const;
+ virtual const char *getOpName() const;
+
+ /*
+ Create a summing accumulator.
+
+ @param pCtx the expression context
+ @returns the created accumulator
+ */
+ static intrusive_ptr<Accumulator> create(
+ const intrusive_ptr<ExpressionContext> &pCtx);
+
+ protected: /* reused by AccumulatorAvg */
+ AccumulatorSum();
+
+ mutable BSONType totalType;
+ mutable long long longTotal;
+ mutable double doubleTotal;
+ };
+
+
+ class AccumulatorMinMax :
+ public AccumulatorSingleValue {
+ public:
+ // virtuals from Expression
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+
+ /*
+ Create either the max or min accumulator.
+
+ @returns the created accumulator
+ */
+ static intrusive_ptr<Accumulator> createMin(
+ const intrusive_ptr<ExpressionContext> &pCtx);
+ static intrusive_ptr<Accumulator> createMax(
+ const intrusive_ptr<ExpressionContext> &pCtx);
+
+ private:
+ AccumulatorMinMax(int theSense);
+
+ int sense; /* 1 for min, -1 for max; used to "scale" comparison */
+ };
+
+
+ class AccumulatorPush :
+ public Accumulator {
+ public:
+ // virtuals from Expression
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual intrusive_ptr<const Value> getValue() const;
+ virtual const char *getOpName() const;
+
+ /*
+ Create an appending accumulator.
+
+ @param pCtx the expression context
+ @returns the created accumulator
+ */
+ static intrusive_ptr<Accumulator> create(
+ const intrusive_ptr<ExpressionContext> &pCtx);
+
+ private:
+ AccumulatorPush(const intrusive_ptr<ExpressionContext> &pTheCtx);
+
+ mutable vector<intrusive_ptr<const Value> > vpValue;
+ intrusive_ptr<ExpressionContext> pCtx;
+ };
+
+
+ class AccumulatorAvg :
+ public AccumulatorSum {
+ typedef AccumulatorSum Super;
+ public:
+ // virtuals from Accumulator
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual intrusive_ptr<const Value> getValue() const;
+ virtual const char *getOpName() const;
+
+ /*
+ Create an averaging accumulator.
+
+ @param pCtx the expression context
+ @returns the created accumulator
+ */
+ static intrusive_ptr<Accumulator> create(
+ const intrusive_ptr<ExpressionContext> &pCtx);
+
+ private:
+ static const char subTotalName[];
+ static const char countName[];
+
+ AccumulatorAvg(const intrusive_ptr<ExpressionContext> &pCtx);
+
+ mutable long long count;
+ intrusive_ptr<ExpressionContext> pCtx;
+ };
+
+}
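
The header above defines the accumulator contract used by $group: evaluate() is called once per input document and getValue() returns the accumulated result once the group is finished. A hedged, self-contained sketch of that evaluate/getValue shape in plain C++ (a bare double stands in for the Value and Document machinery):

    #include <iostream>
    #include <vector>

    // Minimal stand-in for the accumulator contract: evaluate() per input,
    // getValue() once the whole group has been consumed.
    class SumSketch {
    public:
        void evaluate(double v) { total += v; }    // one call per document
        double getValue() const { return total; }  // final accumulated value
    private:
        double total = 0.0;
    };

    int main() {
        SumSketch acc;
        for (double v : std::vector<double>{4, 7, 9})
            acc.evaluate(v);
        std::cout << acc.getValue() << "\n";       // prints 20
        return 0;
    }
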
diff --git a/src/mongo/db/pipeline/accumulator_add_to_set.cpp b/src/mongo/db/pipeline/accumulator_add_to_set.cpp
index 61a0ca5a39b..e91726345a1 100755
--- a/src/mongo/db/pipeline/accumulator_add_to_set.cpp
+++ b/src/mongo/db/pipeline/accumulator_add_to_set.cpp
@@ -1,79 +1,79 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-#include "accumulator.h"
-
-#include "db/pipeline/expression_context.h"
-#include "db/pipeline/value.h"
-
-namespace mongo {
- intrusive_ptr<const Value> AccumulatorAddToSet::evaluate(
- const intrusive_ptr<Document> &pDocument) const {
- verify(vpOperand.size() == 1);
- intrusive_ptr<const Value> prhs(vpOperand[0]->evaluate(pDocument));
-
- if (prhs->getType() == Undefined)
- ; /* nothing to add to the array */
- else if (!pCtx->getInRouter())
- set.insert(prhs);
- else {
- /*
- If we're in the router, we need to take apart the arrays we
- receive and put their elements into the array we are collecting.
- If we didn't, then we'd get an array of arrays, with one array
- from each shard that responds.
- */
- verify(prhs->getType() == Array);
-
- intrusive_ptr<ValueIterator> pvi(prhs->getArray());
- while(pvi->more()) {
- intrusive_ptr<const Value> pElement(pvi->next());
- set.insert(pElement);
- }
- }
-
- return Value::getNull();
- }
-
- intrusive_ptr<const Value> AccumulatorAddToSet::getValue() const {
- vector<intrusive_ptr<const Value> > valVec;
-
- for (itr = set.begin(); itr != set.end(); ++itr) {
- valVec.push_back(*itr);
- }
- /* there is no issue of scope since createArray copy constructs */
- return Value::createArray(valVec);
- }
-
- AccumulatorAddToSet::AccumulatorAddToSet(
- const intrusive_ptr<ExpressionContext> &pTheCtx):
- Accumulator(),
- set(),
- pCtx(pTheCtx) {
- }
-
- intrusive_ptr<Accumulator> AccumulatorAddToSet::create(
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<AccumulatorAddToSet> pAccumulator(
- new AccumulatorAddToSet(pCtx));
- return pAccumulator;
- }
-
- const char *AccumulatorAddToSet::getOpName() const {
- return "$addToSet";
- }
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "accumulator.h"
+
+#include "db/pipeline/expression_context.h"
+#include "db/pipeline/value.h"
+
+namespace mongo {
+ intrusive_ptr<const Value> AccumulatorAddToSet::evaluate(
+ const intrusive_ptr<Document> &pDocument) const {
+ verify(vpOperand.size() == 1);
+ intrusive_ptr<const Value> prhs(vpOperand[0]->evaluate(pDocument));
+
+ if (prhs->getType() == Undefined)
+ ; /* nothing to add to the array */
+ else if (!pCtx->getInRouter())
+ set.insert(prhs);
+ else {
+ /*
+ If we're in the router, we need to take apart the arrays we
+ receive and put their elements into the array we are collecting.
+ If we didn't, then we'd get an array of arrays, with one array
+ from each shard that responds.
+ */
+ verify(prhs->getType() == Array);
+
+ intrusive_ptr<ValueIterator> pvi(prhs->getArray());
+ while(pvi->more()) {
+ intrusive_ptr<const Value> pElement(pvi->next());
+ set.insert(pElement);
+ }
+ }
+
+ return Value::getNull();
+ }
+
+ intrusive_ptr<const Value> AccumulatorAddToSet::getValue() const {
+ vector<intrusive_ptr<const Value> > valVec;
+
+ for (itr = set.begin(); itr != set.end(); ++itr) {
+ valVec.push_back(*itr);
+ }
+ /* there is no issue of scope since createArray copy constructs */
+ return Value::createArray(valVec);
+ }
+
+ AccumulatorAddToSet::AccumulatorAddToSet(
+ const intrusive_ptr<ExpressionContext> &pTheCtx):
+ Accumulator(),
+ set(),
+ pCtx(pTheCtx) {
+ }
+
+ intrusive_ptr<Accumulator> AccumulatorAddToSet::create(
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<AccumulatorAddToSet> pAccumulator(
+ new AccumulatorAddToSet(pCtx));
+ return pAccumulator;
+ }
+
+ const char *AccumulatorAddToSet::getOpName() const {
+ return "$addToSet";
+ }
+}
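
The evaluate() comment above notes that on the router each shard contributes a whole array, and inserting those arrays as-is would produce an array of arrays; instead each shard array is unpacked into the merged set. A small self-contained sketch of that flattening merge (std::set over int stands in for the Value set; none of this is the MongoDB API):

    #include <iostream>
    #include <set>
    #include <vector>

    int main() {
        // one array of partial results per responding shard (sample values)
        std::vector<std::vector<int>> shardResults = {{1, 2, 3}, {2, 3, 4}};

        std::set<int> merged;
        for (const std::vector<int>& shardArray : shardResults)
            for (int element : shardArray)   // unpack each shard's array ...
                merged.insert(element);      // ... merging elements, not arrays

        for (int v : merged)
            std::cout << v << ' ';           // prints: 1 2 3 4 (duplicates collapsed)
        std::cout << '\n';
        return 0;
    }
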
diff --git a/src/mongo/db/pipeline/accumulator_avg.cpp b/src/mongo/db/pipeline/accumulator_avg.cpp
index f166c185f9a..7217de163d6 100755
--- a/src/mongo/db/pipeline/accumulator_avg.cpp
+++ b/src/mongo/db/pipeline/accumulator_avg.cpp
@@ -1,123 +1,123 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-#include "accumulator.h"
-
-#include "db/pipeline/document.h"
-#include "db/pipeline/expression_context.h"
-#include "db/pipeline/value.h"
-
-namespace mongo {
-
- const char AccumulatorAvg::subTotalName[] = "subTotal";
- const char AccumulatorAvg::countName[] = "count";
-
- intrusive_ptr<const Value> AccumulatorAvg::evaluate(
- const intrusive_ptr<Document> &pDocument) const {
- if (!pCtx->getInRouter()) {
- Super::evaluate(pDocument);
- ++count;
- }
- else {
- /*
- If we're in the router, we expect an object that contains
- both a subtotal and a count. This is what getValue() produced
- below.
- */
- intrusive_ptr<const Value> prhs(
- vpOperand[0]->evaluate(pDocument));
- verify(prhs->getType() == Object);
- intrusive_ptr<Document> pShardDoc(prhs->getDocument());
-
- intrusive_ptr<const Value> pSubTotal(
- pShardDoc->getValue(subTotalName));
- verify(pSubTotal.get());
- BSONType subTotalType = pSubTotal->getType();
- if ((totalType == NumberLong) || (subTotalType == NumberLong))
- totalType = NumberLong;
- if ((totalType == NumberDouble) || (subTotalType == NumberDouble))
- totalType = NumberDouble;
-
- if (subTotalType == NumberInt) {
- int v = pSubTotal->getInt();
- longTotal += v;
- doubleTotal += v;
- }
- else if (subTotalType == NumberLong) {
- long long v = pSubTotal->getLong();
- longTotal += v;
- doubleTotal += v;
- }
- else {
- double v = pSubTotal->getDouble();
- doubleTotal += v;
- }
-
- intrusive_ptr<const Value> pCount(pShardDoc->getValue(countName));
- count += pCount->getLong();
- }
-
- return Value::getZero();
- }
-
- intrusive_ptr<Accumulator> AccumulatorAvg::create(
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<AccumulatorAvg> pA(new AccumulatorAvg(pCtx));
- return pA;
- }
-
- intrusive_ptr<const Value> AccumulatorAvg::getValue() const {
- if (!pCtx->getInShard()) {
- double avg = 0;
- if (count) {
- if (totalType != NumberDouble)
- avg = static_cast<double>(longTotal / count);
- else
- avg = doubleTotal / count;
- }
-
- return Value::createDouble(avg);
- }
-
- intrusive_ptr<Document> pDocument(Document::create());
-
- intrusive_ptr<const Value> pSubTotal;
- if (totalType == NumberInt)
- pSubTotal = Value::createInt((int)longTotal);
- else if (totalType == NumberLong)
- pSubTotal = Value::createLong(longTotal);
- else
- pSubTotal = Value::createDouble(doubleTotal);
- pDocument->addField(subTotalName, pSubTotal);
-
- intrusive_ptr<const Value> pCount(Value::createLong(count));
- pDocument->addField(countName, pCount);
-
- return Value::createDocument(pDocument);
- }
-
- AccumulatorAvg::AccumulatorAvg(
- const intrusive_ptr<ExpressionContext> &pTheCtx):
- AccumulatorSum(),
- count(0),
- pCtx(pTheCtx) {
- }
-
- const char *AccumulatorAvg::getOpName() const {
- return "$avg";
- }
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "accumulator.h"
+
+#include "db/pipeline/document.h"
+#include "db/pipeline/expression_context.h"
+#include "db/pipeline/value.h"
+
+namespace mongo {
+
+ const char AccumulatorAvg::subTotalName[] = "subTotal";
+ const char AccumulatorAvg::countName[] = "count";
+
+ intrusive_ptr<const Value> AccumulatorAvg::evaluate(
+ const intrusive_ptr<Document> &pDocument) const {
+ if (!pCtx->getInRouter()) {
+ Super::evaluate(pDocument);
+ ++count;
+ }
+ else {
+ /*
+ If we're in the router, we expect an object that contains
+ both a subtotal and a count. This is what getValue() produced
+ below.
+ */
+ intrusive_ptr<const Value> prhs(
+ vpOperand[0]->evaluate(pDocument));
+ verify(prhs->getType() == Object);
+ intrusive_ptr<Document> pShardDoc(prhs->getDocument());
+
+ intrusive_ptr<const Value> pSubTotal(
+ pShardDoc->getValue(subTotalName));
+ verify(pSubTotal.get());
+ BSONType subTotalType = pSubTotal->getType();
+ if ((totalType == NumberLong) || (subTotalType == NumberLong))
+ totalType = NumberLong;
+ if ((totalType == NumberDouble) || (subTotalType == NumberDouble))
+ totalType = NumberDouble;
+
+ if (subTotalType == NumberInt) {
+ int v = pSubTotal->getInt();
+ longTotal += v;
+ doubleTotal += v;
+ }
+ else if (subTotalType == NumberLong) {
+ long long v = pSubTotal->getLong();
+ longTotal += v;
+ doubleTotal += v;
+ }
+ else {
+ double v = pSubTotal->getDouble();
+ doubleTotal += v;
+ }
+
+ intrusive_ptr<const Value> pCount(pShardDoc->getValue(countName));
+ count += pCount->getLong();
+ }
+
+ return Value::getZero();
+ }
+
+ intrusive_ptr<Accumulator> AccumulatorAvg::create(
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<AccumulatorAvg> pA(new AccumulatorAvg(pCtx));
+ return pA;
+ }
+
+ intrusive_ptr<const Value> AccumulatorAvg::getValue() const {
+ if (!pCtx->getInShard()) {
+ double avg = 0;
+ if (count) {
+ if (totalType != NumberDouble)
+ avg = static_cast<double>(longTotal / count);
+ else
+ avg = doubleTotal / count;
+ }
+
+ return Value::createDouble(avg);
+ }
+
+ intrusive_ptr<Document> pDocument(Document::create());
+
+ intrusive_ptr<const Value> pSubTotal;
+ if (totalType == NumberInt)
+ pSubTotal = Value::createInt((int)longTotal);
+ else if (totalType == NumberLong)
+ pSubTotal = Value::createLong(longTotal);
+ else
+ pSubTotal = Value::createDouble(doubleTotal);
+ pDocument->addField(subTotalName, pSubTotal);
+
+ intrusive_ptr<const Value> pCount(Value::createLong(count));
+ pDocument->addField(countName, pCount);
+
+ return Value::createDocument(pDocument);
+ }
+
+ AccumulatorAvg::AccumulatorAvg(
+ const intrusive_ptr<ExpressionContext> &pTheCtx):
+ AccumulatorSum(),
+ count(0),
+ pCtx(pTheCtx) {
+ }
+
+ const char *AccumulatorAvg::getOpName() const {
+ return "$avg";
+ }
+}
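
As the shard-side branch of getValue() above shows, a shard reports a {subTotal, count} document rather than a finished average, and the router's evaluate() folds those pairs together before dividing. A hedged sketch of that two-phase average in plain C++ (the struct and its sample values are made up for illustration):

    #include <iostream>
    #include <vector>

    struct ShardPartial {
        double subTotal;   // sum produced on one shard
        long long count;   // number of values behind that sum
    };

    int main() {
        // partial results as two shards might report them (sample values)
        std::vector<ShardPartial> partials = {{9.0, 3}, {16.0, 4}};

        double total = 0;
        long long count = 0;
        for (const ShardPartial& p : partials) {  // router-side merge
            total += p.subTotal;
            count += p.count;
        }

        double avg = count ? total / count : 0.0; // guard against an empty group
        std::cout << avg << "\n";                 // (9 + 16) / 7 ~= 3.57
        return 0;
    }
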
diff --git a/src/mongo/db/pipeline/accumulator_first.cpp b/src/mongo/db/pipeline/accumulator_first.cpp
index 937b260f136..53d8f9595e9 100755
--- a/src/mongo/db/pipeline/accumulator_first.cpp
+++ b/src/mongo/db/pipeline/accumulator_first.cpp
@@ -1,49 +1,49 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-#include "accumulator.h"
-
-#include "db/pipeline/value.h"
-
-namespace mongo {
-
- intrusive_ptr<const Value> AccumulatorFirst::evaluate(
- const intrusive_ptr<Document> &pDocument) const {
- verify(vpOperand.size() == 1);
-
- /* only remember the first value seen */
- if (!pValue.get())
- pValue = vpOperand[0]->evaluate(pDocument);
-
- return pValue;
- }
-
- AccumulatorFirst::AccumulatorFirst():
- AccumulatorSingleValue() {
- }
-
- intrusive_ptr<Accumulator> AccumulatorFirst::create(
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<AccumulatorFirst> pAccumulator(
- new AccumulatorFirst());
- return pAccumulator;
- }
-
- const char *AccumulatorFirst::getOpName() const {
- return "$first";
- }
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "accumulator.h"
+
+#include "db/pipeline/value.h"
+
+namespace mongo {
+
+ intrusive_ptr<const Value> AccumulatorFirst::evaluate(
+ const intrusive_ptr<Document> &pDocument) const {
+ verify(vpOperand.size() == 1);
+
+ /* only remember the first value seen */
+ if (!pValue.get())
+ pValue = vpOperand[0]->evaluate(pDocument);
+
+ return pValue;
+ }
+
+ AccumulatorFirst::AccumulatorFirst():
+ AccumulatorSingleValue() {
+ }
+
+ intrusive_ptr<Accumulator> AccumulatorFirst::create(
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<AccumulatorFirst> pAccumulator(
+ new AccumulatorFirst());
+ return pAccumulator;
+ }
+
+ const char *AccumulatorFirst::getOpName() const {
+ return "$first";
+ }
+}
diff --git a/src/mongo/db/pipeline/accumulator_last.cpp b/src/mongo/db/pipeline/accumulator_last.cpp
index 820907a1151..d934e64111b 100755
--- a/src/mongo/db/pipeline/accumulator_last.cpp
+++ b/src/mongo/db/pipeline/accumulator_last.cpp
@@ -1,48 +1,48 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-#include "accumulator.h"
-
-#include "db/pipeline/value.h"
-
-namespace mongo {
-
- intrusive_ptr<const Value> AccumulatorLast::evaluate(
- const intrusive_ptr<Document> &pDocument) const {
- verify(vpOperand.size() == 1);
-
- /* always remember the last value seen */
- pValue = vpOperand[0]->evaluate(pDocument);
-
- return pValue;
- }
-
- AccumulatorLast::AccumulatorLast():
- AccumulatorSingleValue() {
- }
-
- intrusive_ptr<Accumulator> AccumulatorLast::create(
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<AccumulatorLast> pAccumulator(
- new AccumulatorLast());
- return pAccumulator;
- }
-
- const char *AccumulatorLast::getOpName() const {
- return "$last";
- }
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "accumulator.h"
+
+#include "db/pipeline/value.h"
+
+namespace mongo {
+
+ intrusive_ptr<const Value> AccumulatorLast::evaluate(
+ const intrusive_ptr<Document> &pDocument) const {
+ verify(vpOperand.size() == 1);
+
+ /* always remember the last value seen */
+ pValue = vpOperand[0]->evaluate(pDocument);
+
+ return pValue;
+ }
+
+ AccumulatorLast::AccumulatorLast():
+ AccumulatorSingleValue() {
+ }
+
+ intrusive_ptr<Accumulator> AccumulatorLast::create(
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<AccumulatorLast> pAccumulator(
+ new AccumulatorLast());
+ return pAccumulator;
+ }
+
+ const char *AccumulatorLast::getOpName() const {
+ return "$last";
+ }
+}
diff --git a/src/mongo/db/pipeline/accumulator_min_max.cpp b/src/mongo/db/pipeline/accumulator_min_max.cpp
index 902f910dcb8..aec461bab02 100755
--- a/src/mongo/db/pipeline/accumulator_min_max.cpp
+++ b/src/mongo/db/pipeline/accumulator_min_max.cpp
@@ -1,67 +1,67 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-#include "accumulator.h"
-
-#include "db/pipeline/value.h"
-
-namespace mongo {
-
- intrusive_ptr<const Value> AccumulatorMinMax::evaluate(
- const intrusive_ptr<Document> &pDocument) const {
- verify(vpOperand.size() == 1);
- intrusive_ptr<const Value> prhs(vpOperand[0]->evaluate(pDocument));
-
- /* if this is the first value, just use it */
- if (!pValue.get())
- pValue = prhs;
- else {
- /* compare with the current value; swap if appropriate */
- int cmp = Value::compare(pValue, prhs) * sense;
- if (cmp > 0)
- pValue = prhs;
- }
-
- return pValue;
- }
-
- AccumulatorMinMax::AccumulatorMinMax(int theSense):
- AccumulatorSingleValue(),
- sense(theSense) {
- verify((sense == 1) || (sense == -1));
- }
-
- intrusive_ptr<Accumulator> AccumulatorMinMax::createMin(
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<AccumulatorMinMax> pAccumulator(
- new AccumulatorMinMax(1));
- return pAccumulator;
- }
-
- intrusive_ptr<Accumulator> AccumulatorMinMax::createMax(
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<AccumulatorMinMax> pAccumulator(
- new AccumulatorMinMax(-1));
- return pAccumulator;
- }
-
- const char *AccumulatorMinMax::getOpName() const {
- if (sense == 1)
- return "$min";
- return "$max";
- }
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "accumulator.h"
+
+#include "db/pipeline/value.h"
+
+namespace mongo {
+
+ intrusive_ptr<const Value> AccumulatorMinMax::evaluate(
+ const intrusive_ptr<Document> &pDocument) const {
+ verify(vpOperand.size() == 1);
+ intrusive_ptr<const Value> prhs(vpOperand[0]->evaluate(pDocument));
+
+ /* if this is the first value, just use it */
+ if (!pValue.get())
+ pValue = prhs;
+ else {
+ /* compare with the current value; swap if appropriate */
+ int cmp = Value::compare(pValue, prhs) * sense;
+ if (cmp > 0)
+ pValue = prhs;
+ }
+
+ return pValue;
+ }
+
+ AccumulatorMinMax::AccumulatorMinMax(int theSense):
+ AccumulatorSingleValue(),
+ sense(theSense) {
+ verify((sense == 1) || (sense == -1));
+ }
+
+ intrusive_ptr<Accumulator> AccumulatorMinMax::createMin(
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<AccumulatorMinMax> pAccumulator(
+ new AccumulatorMinMax(1));
+ return pAccumulator;
+ }
+
+ intrusive_ptr<Accumulator> AccumulatorMinMax::createMax(
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<AccumulatorMinMax> pAccumulator(
+ new AccumulatorMinMax(-1));
+ return pAccumulator;
+ }
+
+ const char *AccumulatorMinMax::getOpName() const {
+ if (sense == 1)
+ return "$min";
+ return "$max";
+ }
+}
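
AccumulatorMinMax above collapses $min and $max into one implementation by scaling the three-way comparison by sense (1 for $min, -1 for $max) and swapping in the new value whenever the scaled result is positive. A self-contained sketch of that trick (an int comparator stands in for Value::compare):

    #include <iostream>
    #include <vector>

    // three-way compare, like Value::compare: negative, zero, or positive
    int compare(int a, int b) { return (a > b) - (a < b); }

    int pick(const std::vector<int>& values, int sense) {  // sense: 1 = min, -1 = max
        int best = values.front();
        for (int v : values) {
            if (compare(best, v) * sense > 0)  // current "best" loses under the scaled compare
                best = v;                      // so swap in the new value
        }
        return best;
    }

    int main() {
        std::vector<int> values = {7, 3, 9, 5};
        std::cout << pick(values, 1) << ' '    // 3  ($min)
                  << pick(values, -1) << '\n'; // 9  ($max)
        return 0;
    }
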
diff --git a/src/mongo/db/pipeline/accumulator_push.cpp b/src/mongo/db/pipeline/accumulator_push.cpp
index 932ca6361cd..b097894cb4e 100755
--- a/src/mongo/db/pipeline/accumulator_push.cpp
+++ b/src/mongo/db/pipeline/accumulator_push.cpp
@@ -1,73 +1,73 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-#include "accumulator.h"
-
-#include "db/pipeline/expression_context.h"
-#include "db/pipeline/value.h"
-
-namespace mongo {
- intrusive_ptr<const Value> AccumulatorPush::evaluate(
- const intrusive_ptr<Document> &pDocument) const {
- verify(vpOperand.size() == 1);
- intrusive_ptr<const Value> prhs(vpOperand[0]->evaluate(pDocument));
-
- if (prhs->getType() == Undefined)
- ; /* nothing to add to the array */
- else if (!pCtx->getInRouter())
- vpValue.push_back(prhs);
- else {
- /*
- If we're in the router, we need to take apart the arrays we
- receive and put their elements into the array we are collecting.
- If we didn't, then we'd get an array of arrays, with one array
- from each shard that responds.
- */
- verify(prhs->getType() == Array);
-
- intrusive_ptr<ValueIterator> pvi(prhs->getArray());
- while(pvi->more()) {
- intrusive_ptr<const Value> pElement(pvi->next());
- vpValue.push_back(pElement);
- }
- }
-
- return Value::getNull();
- }
-
- intrusive_ptr<const Value> AccumulatorPush::getValue() const {
- return Value::createArray(vpValue);
- }
-
- AccumulatorPush::AccumulatorPush(
- const intrusive_ptr<ExpressionContext> &pTheCtx):
- Accumulator(),
- vpValue(),
- pCtx(pTheCtx) {
- }
-
- intrusive_ptr<Accumulator> AccumulatorPush::create(
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<AccumulatorPush> pAccumulator(
- new AccumulatorPush(pCtx));
- return pAccumulator;
- }
-
- const char *AccumulatorPush::getOpName() const {
- return "$push";
- }
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "accumulator.h"
+
+#include "db/pipeline/expression_context.h"
+#include "db/pipeline/value.h"
+
+namespace mongo {
+ intrusive_ptr<const Value> AccumulatorPush::evaluate(
+ const intrusive_ptr<Document> &pDocument) const {
+ verify(vpOperand.size() == 1);
+ intrusive_ptr<const Value> prhs(vpOperand[0]->evaluate(pDocument));
+
+ if (prhs->getType() == Undefined)
+ ; /* nothing to add to the array */
+ else if (!pCtx->getInRouter())
+ vpValue.push_back(prhs);
+ else {
+ /*
+ If we're in the router, we need to take apart the arrays we
+ receive and put their elements into the array we are collecting.
+ If we didn't, then we'd get an array of arrays, with one array
+ from each shard that responds.
+ */
+ verify(prhs->getType() == Array);
+
+ intrusive_ptr<ValueIterator> pvi(prhs->getArray());
+ while(pvi->more()) {
+ intrusive_ptr<const Value> pElement(pvi->next());
+ vpValue.push_back(pElement);
+ }
+ }
+
+ return Value::getNull();
+ }
+
+ intrusive_ptr<const Value> AccumulatorPush::getValue() const {
+ return Value::createArray(vpValue);
+ }
+
+ AccumulatorPush::AccumulatorPush(
+ const intrusive_ptr<ExpressionContext> &pTheCtx):
+ Accumulator(),
+ vpValue(),
+ pCtx(pTheCtx) {
+ }
+
+ intrusive_ptr<Accumulator> AccumulatorPush::create(
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<AccumulatorPush> pAccumulator(
+ new AccumulatorPush(pCtx));
+ return pAccumulator;
+ }
+
+ const char *AccumulatorPush::getOpName() const {
+ return "$push";
+ }
+}
diff --git a/src/mongo/db/pipeline/accumulator_single_value.cpp b/src/mongo/db/pipeline/accumulator_single_value.cpp
index 7e2491d121f..ea12ee333a2 100755
--- a/src/mongo/db/pipeline/accumulator_single_value.cpp
+++ b/src/mongo/db/pipeline/accumulator_single_value.cpp
@@ -1,32 +1,32 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-#include "accumulator.h"
-
-#include "db/pipeline/value.h"
-
-namespace mongo {
-
- intrusive_ptr<const Value> AccumulatorSingleValue::getValue() const {
- return pValue;
- }
-
- AccumulatorSingleValue::AccumulatorSingleValue():
- pValue(intrusive_ptr<const Value>()) {
- }
-
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "accumulator.h"
+
+#include "db/pipeline/value.h"
+
+namespace mongo {
+
+ intrusive_ptr<const Value> AccumulatorSingleValue::getValue() const {
+ return pValue;
+ }
+
+ AccumulatorSingleValue::AccumulatorSingleValue():
+ pValue(intrusive_ptr<const Value>()) {
+ }
+
+}
diff --git a/src/mongo/db/pipeline/accumulator_sum.cpp b/src/mongo/db/pipeline/accumulator_sum.cpp
index 26258c2f19a..f0a3c1a7ba3 100755
--- a/src/mongo/db/pipeline/accumulator_sum.cpp
+++ b/src/mongo/db/pipeline/accumulator_sum.cpp
@@ -1,74 +1,74 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-#include "accumulator.h"
-
-#include "db/pipeline/value.h"
-
-namespace mongo {
-
- intrusive_ptr<const Value> AccumulatorSum::evaluate(
- const intrusive_ptr<Document> &pDocument) const {
- verify(vpOperand.size() == 1);
- intrusive_ptr<const Value> prhs(vpOperand[0]->evaluate(pDocument));
-
- /* upgrade to the widest type required to hold the result */
- totalType = Value::getWidestNumeric(totalType, prhs->getType());
-
- if (totalType == NumberInt) {
- int v = prhs->coerceToInt();
- longTotal += v;
- doubleTotal += v;
- }
- else if (totalType == NumberLong) {
- long long v = prhs->coerceToLong();
- longTotal += v;
- doubleTotal += v;
- }
- else { /* (totalType == NumberDouble) */
- double v = prhs->coerceToDouble();
- doubleTotal += v;
- }
-
- return Value::getZero();
- }
-
- intrusive_ptr<Accumulator> AccumulatorSum::create(
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<AccumulatorSum> pSummer(new AccumulatorSum());
- return pSummer;
- }
-
- intrusive_ptr<const Value> AccumulatorSum::getValue() const {
- if (totalType == NumberInt)
- return Value::createInt((int)longTotal);
- if (totalType == NumberLong)
- return Value::createLong(longTotal);
- return Value::createDouble(doubleTotal);
- }
-
- AccumulatorSum::AccumulatorSum():
- Accumulator(),
- totalType(NumberInt),
- longTotal(0),
- doubleTotal(0) {
- }
-
- const char *AccumulatorSum::getOpName() const {
- return "$sum";
- }
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "accumulator.h"
+
+#include "db/pipeline/value.h"
+
+namespace mongo {
+
+ intrusive_ptr<const Value> AccumulatorSum::evaluate(
+ const intrusive_ptr<Document> &pDocument) const {
+ verify(vpOperand.size() == 1);
+ intrusive_ptr<const Value> prhs(vpOperand[0]->evaluate(pDocument));
+
+ /* upgrade to the widest type required to hold the result */
+ totalType = Value::getWidestNumeric(totalType, prhs->getType());
+
+ if (totalType == NumberInt) {
+ int v = prhs->coerceToInt();
+ longTotal += v;
+ doubleTotal += v;
+ }
+ else if (totalType == NumberLong) {
+ long long v = prhs->coerceToLong();
+ longTotal += v;
+ doubleTotal += v;
+ }
+ else { /* (totalType == NumberDouble) */
+ double v = prhs->coerceToDouble();
+ doubleTotal += v;
+ }
+
+ return Value::getZero();
+ }
+
+ intrusive_ptr<Accumulator> AccumulatorSum::create(
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<AccumulatorSum> pSummer(new AccumulatorSum());
+ return pSummer;
+ }
+
+ intrusive_ptr<const Value> AccumulatorSum::getValue() const {
+ if (totalType == NumberInt)
+ return Value::createInt((int)longTotal);
+ if (totalType == NumberLong)
+ return Value::createLong(longTotal);
+ return Value::createDouble(doubleTotal);
+ }
+
+ AccumulatorSum::AccumulatorSum():
+ Accumulator(),
+ totalType(NumberInt),
+ longTotal(0),
+ doubleTotal(0) {
+ }
+
+ const char *AccumulatorSum::getOpName() const {
+ return "$sum";
+ }
+}
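
As an aside on the $sum logic above: the type-promotion rule in AccumulatorSum::evaluate() (widen the running total to the widest numeric type seen, while keeping both an integer and a double total) can be illustrated with a small standalone sketch. The enum, struct, and function names below are hypothetical stand-ins for the BSONType/Value machinery, not code from this tree.

    #include <iostream>

    // Hypothetical stand-ins for the numeric BSON types; int < long < double.
    enum NumericType { kInt, kLong, kDouble };

    static NumericType widest(NumericType a, NumericType b) {
        return a > b ? a : b;
    }

    // Minimal running sum mirroring AccumulatorSum: integer and double totals
    // are both maintained, and the widest type seen decides which is reported.
    struct SumSketch {
        NumericType totalType = kInt;
        long long   longTotal = 0;
        double      doubleTotal = 0;

        void add(double v, NumericType t) {
            totalType = widest(totalType, t);
            if (totalType == kDouble) {
                doubleTotal += v;
            } else {                      // still kInt or kLong
                longTotal   += static_cast<long long>(v);
                doubleTotal += v;         // kept in case a later value widens to double
            }
        }

        double result() const {
            return totalType == kDouble ? doubleTotal
                                        : static_cast<double>(longTotal);
        }
    };

    int main() {
        SumSketch s;
        s.add(1, kInt);
        s.add(2, kInt);
        s.add(0.5, kDouble);              // widens the running total to double
        std::cout << s.result() << "\n";  // prints 3.5
    }
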
diff --git a/src/mongo/db/pipeline/builder.cpp b/src/mongo/db/pipeline/builder.cpp
index b3f7872ef94..6b7673a20d3 100755
--- a/src/mongo/db/pipeline/builder.cpp
+++ b/src/mongo/db/pipeline/builder.cpp
@@ -1,117 +1,117 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-
-#include "db/jsobj.h"
-#include "db/pipeline/builder.h"
-
-
-namespace mongo {
-
- void BuilderObj::append() {
- pBuilder->appendNull(fieldName);
- }
-
- void BuilderObj::append(bool b) {
- pBuilder->append(fieldName, b);
- }
-
- void BuilderObj::append(int i) {
- pBuilder->append(fieldName, i);
- }
-
- void BuilderObj::append(long long ll) {
- pBuilder->append(fieldName, ll);
- }
-
- void BuilderObj::append(double d) {
- pBuilder->append(fieldName, d);
- }
-
- void BuilderObj::append(string s) {
- pBuilder->append(fieldName, s);
- }
-
- void BuilderObj::append(const OID &o) {
- pBuilder->append(fieldName, o);
- }
-
- void BuilderObj::append(const Date_t &d) {
- pBuilder->append(fieldName, d);
- }
-
- void BuilderObj::append(BSONObjBuilder *pDone) {
- pBuilder->append(fieldName, pDone->done());
- }
-
- void BuilderObj::append(BSONArrayBuilder *pDone) {
- pBuilder->append(fieldName, pDone->arr());
- }
-
- BuilderObj::BuilderObj(
- BSONObjBuilder *pObjBuilder, string theFieldName):
- pBuilder(pObjBuilder),
- fieldName(theFieldName) {
- }
-
-
- void BuilderArray::append() {
- pBuilder->appendNull();
- }
-
- void BuilderArray::append(bool b) {
- pBuilder->append(b);
- }
-
- void BuilderArray::append(int i) {
- pBuilder->append(i);
- }
-
- void BuilderArray::append(long long ll) {
- pBuilder->append(ll);
- }
-
- void BuilderArray::append(double d) {
- pBuilder->append(d);
- }
-
- void BuilderArray::append(string s) {
- pBuilder->append(s);
- }
-
- void BuilderArray::append(const OID &o) {
- pBuilder->append(o);
- }
-
- void BuilderArray::append(const Date_t &d) {
- pBuilder->append(d);
- }
-
- void BuilderArray::append(BSONObjBuilder *pDone) {
- pBuilder->append(pDone->done());
- }
-
- void BuilderArray::append(BSONArrayBuilder *pDone) {
- pBuilder->append(pDone->arr());
- }
-
- BuilderArray::BuilderArray(
- BSONArrayBuilder *pArrayBuilder):
- pBuilder(pArrayBuilder) {
- }
-
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+
+#include "db/jsobj.h"
+#include "db/pipeline/builder.h"
+
+
+namespace mongo {
+
+ void BuilderObj::append() {
+ pBuilder->appendNull(fieldName);
+ }
+
+ void BuilderObj::append(bool b) {
+ pBuilder->append(fieldName, b);
+ }
+
+ void BuilderObj::append(int i) {
+ pBuilder->append(fieldName, i);
+ }
+
+ void BuilderObj::append(long long ll) {
+ pBuilder->append(fieldName, ll);
+ }
+
+ void BuilderObj::append(double d) {
+ pBuilder->append(fieldName, d);
+ }
+
+ void BuilderObj::append(string s) {
+ pBuilder->append(fieldName, s);
+ }
+
+ void BuilderObj::append(const OID &o) {
+ pBuilder->append(fieldName, o);
+ }
+
+ void BuilderObj::append(const Date_t &d) {
+ pBuilder->append(fieldName, d);
+ }
+
+ void BuilderObj::append(BSONObjBuilder *pDone) {
+ pBuilder->append(fieldName, pDone->done());
+ }
+
+ void BuilderObj::append(BSONArrayBuilder *pDone) {
+ pBuilder->append(fieldName, pDone->arr());
+ }
+
+ BuilderObj::BuilderObj(
+ BSONObjBuilder *pObjBuilder, string theFieldName):
+ pBuilder(pObjBuilder),
+ fieldName(theFieldName) {
+ }
+
+
+ void BuilderArray::append() {
+ pBuilder->appendNull();
+ }
+
+ void BuilderArray::append(bool b) {
+ pBuilder->append(b);
+ }
+
+ void BuilderArray::append(int i) {
+ pBuilder->append(i);
+ }
+
+ void BuilderArray::append(long long ll) {
+ pBuilder->append(ll);
+ }
+
+ void BuilderArray::append(double d) {
+ pBuilder->append(d);
+ }
+
+ void BuilderArray::append(string s) {
+ pBuilder->append(s);
+ }
+
+ void BuilderArray::append(const OID &o) {
+ pBuilder->append(o);
+ }
+
+ void BuilderArray::append(const Date_t &d) {
+ pBuilder->append(d);
+ }
+
+ void BuilderArray::append(BSONObjBuilder *pDone) {
+ pBuilder->append(pDone->done());
+ }
+
+ void BuilderArray::append(BSONArrayBuilder *pDone) {
+ pBuilder->append(pDone->arr());
+ }
+
+ BuilderArray::BuilderArray(
+ BSONArrayBuilder *pArrayBuilder):
+ pBuilder(pArrayBuilder) {
+ }
+
+}
diff --git a/src/mongo/db/pipeline/builder.h b/src/mongo/db/pipeline/builder.h
index fc37a72236f..5456a9c5efb 100755
--- a/src/mongo/db/pipeline/builder.h
+++ b/src/mongo/db/pipeline/builder.h
@@ -1,95 +1,95 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#pragma once
-
-#include "pch.h"
-
-namespace mongo {
-
- class BSONArrayBuilder;
- class BSONObjBuilder;
-
- /*
- Generic Builder.
-
- The methods to append items to an object (on BSONObjBuilder) and an array
- (on BSONArrayBuilder) differ only by their inclusion of a field name.
- For more complicated implementations of addToBsonObj() and
- addToBsonArray(), it makes sense to abstract that out and use
- this generic builder that always looks the same, and then implement
- addToBsonObj() and addToBsonArray() by using a common method.
- */
- class Builder :
- boost::noncopyable {
- public:
- virtual ~Builder() {};
-
- virtual void append() = 0; // append a null
- virtual void append(bool b) = 0;
- virtual void append(int i) = 0;
- virtual void append(long long ll) = 0;
- virtual void append(double d) = 0;
- virtual void append(string s) = 0;
- virtual void append(const OID &o) = 0;
- virtual void append(const Date_t &d) = 0;
- virtual void append(BSONObjBuilder *pDone) = 0;
- virtual void append(BSONArrayBuilder *pDone) = 0;
- };
-
- class BuilderObj :
- public Builder {
- public:
- // virtuals from Builder
- virtual void append();
- virtual void append(bool b);
- virtual void append(int i);
- virtual void append(long long ll);
- virtual void append(double d);
- virtual void append(string s);
- virtual void append(const OID &o);
- virtual void append(const Date_t &d);
- virtual void append(BSONObjBuilder *pDone);
- virtual void append(BSONArrayBuilder *pDone);
-
- BuilderObj(BSONObjBuilder *pBuilder, string fieldName);
-
- private:
- BSONObjBuilder *pBuilder;
- string fieldName;
- };
-
- class BuilderArray :
- public Builder {
- public:
- // virtuals from Builder
- virtual void append();
- virtual void append(bool b);
- virtual void append(int i);
- virtual void append(long long ll);
- virtual void append(double d);
- virtual void append(string s);
- virtual void append(const OID &o);
- virtual void append(const Date_t &d);
- virtual void append(BSONObjBuilder *pDone);
- virtual void append(BSONArrayBuilder *pDone);
-
- BuilderArray(BSONArrayBuilder *pBuilder);
-
- private:
- BSONArrayBuilder *pBuilder;
- };
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+
+#include "pch.h"
+
+namespace mongo {
+
+ class BSONArrayBuilder;
+ class BSONObjBuilder;
+
+ /*
+ Generic Builder.
+
+ The methods to append items to an object (on BSONObjBuilder) and an array
+ (on BSONArrayBuilder) differ only by their inclusion of a field name.
+ For more complicated implementations of addToBsonObj() and
+ addToBsonArray(), it makes sense to abstract that out and use
+ this generic builder that always looks the same, and then implement
+ addToBsonObj() and addToBsonArray() by using a common method.
+ */
+ class Builder :
+ boost::noncopyable {
+ public:
+ virtual ~Builder() {};
+
+ virtual void append() = 0; // append a null
+ virtual void append(bool b) = 0;
+ virtual void append(int i) = 0;
+ virtual void append(long long ll) = 0;
+ virtual void append(double d) = 0;
+ virtual void append(string s) = 0;
+ virtual void append(const OID &o) = 0;
+ virtual void append(const Date_t &d) = 0;
+ virtual void append(BSONObjBuilder *pDone) = 0;
+ virtual void append(BSONArrayBuilder *pDone) = 0;
+ };
+
+ class BuilderObj :
+ public Builder {
+ public:
+ // virtuals from Builder
+ virtual void append();
+ virtual void append(bool b);
+ virtual void append(int i);
+ virtual void append(long long ll);
+ virtual void append(double d);
+ virtual void append(string s);
+ virtual void append(const OID &o);
+ virtual void append(const Date_t &d);
+ virtual void append(BSONObjBuilder *pDone);
+ virtual void append(BSONArrayBuilder *pDone);
+
+ BuilderObj(BSONObjBuilder *pBuilder, string fieldName);
+
+ private:
+ BSONObjBuilder *pBuilder;
+ string fieldName;
+ };
+
+ class BuilderArray :
+ public Builder {
+ public:
+ // virtuals from Builder
+ virtual void append();
+ virtual void append(bool b);
+ virtual void append(int i);
+ virtual void append(long long ll);
+ virtual void append(double d);
+ virtual void append(string s);
+ virtual void append(const OID &o);
+ virtual void append(const Date_t &d);
+ virtual void append(BSONObjBuilder *pDone);
+ virtual void append(BSONArrayBuilder *pDone);
+
+ BuilderArray(BSONArrayBuilder *pBuilder);
+
+ private:
+ BSONArrayBuilder *pBuilder;
+ };
+}
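
The comment at the top of builder.h above is the key idea: appending to a BSON object and appending to a BSON array differ only in whether a field name is supplied, so value-serialization code can be written once against the abstract Builder and reused by both addToBsonObj() and addToBsonArray(). A rough standalone analogue of that pattern, with hypothetical Appender/ObjAppender/ArrAppender names and plain ostream output standing in for the BSON builders:

    #include <iostream>
    #include <string>

    // Abstract appender, analogous to Builder: one overload set, no field
    // names at the call site.
    struct Appender {
        virtual ~Appender() {}
        virtual void append(int i) = 0;
        virtual void append(const std::string &s) = 0;
    };

    // Object flavour carries the field name, like BuilderObj.
    struct ObjAppender : Appender {
        ObjAppender(std::ostream &o, const std::string &name) : out(o), field(name) {}
        void append(int i)                { out << field << ": " << i << "\n"; }
        void append(const std::string &s) { out << field << ": " << s << "\n"; }
        std::ostream &out;
        std::string field;
    };

    // Array flavour has no field name, like BuilderArray.
    struct ArrAppender : Appender {
        explicit ArrAppender(std::ostream &o) : out(o) {}
        void append(int i)                { out << i << "\n"; }
        void append(const std::string &s) { out << s << "\n"; }
        std::ostream &out;
    };

    // Shared serialization logic written once against the interface; this is
    // the role the Builder hierarchy plays for Value/Expression output.
    static void addTo(Appender &a) {
        a.append(42);
        a.append(std::string("hello"));
    }

    int main() {
        ObjAppender obj(std::cout, "answer");
        ArrAppender arr(std::cout);
        addTo(obj);   // answer: 42 / answer: hello
        addTo(arr);   // 42 / hello
    }
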
diff --git a/src/mongo/db/pipeline/doc_mem_monitor.cpp b/src/mongo/db/pipeline/doc_mem_monitor.cpp
index e4e8323cf96..3cbe14e8f40 100755
--- a/src/mongo/db/pipeline/doc_mem_monitor.cpp
+++ b/src/mongo/db/pipeline/doc_mem_monitor.cpp
@@ -1,68 +1,68 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-#include "db/pipeline/doc_mem_monitor.h"
-#include "util/systeminfo.h"
-
-namespace mongo {
-
- DocMemMonitor::DocMemMonitor(StringWriter *pW) {
- /*
- Use the default values.
-
- Currently, we warn in log at 5%, and assert at 10%.
- */
- size_t errorRam = SystemInfo::getPhysicalRam() / 10;
- size_t warnRam = errorRam / 2;
-
- init(pW, warnRam, errorRam);
- }
-
- DocMemMonitor::DocMemMonitor(StringWriter *pW,
- size_t warnLimit, size_t errorLimit) {
- init(pW, warnLimit, errorLimit);
- }
-
- void DocMemMonitor::addToTotal(size_t amount) {
- totalUsed += amount;
-
- if (!warned) {
- if (warnLimit && (totalUsed > warnLimit)) {
- stringstream ss;
- ss << "warning, 5% of physical RAM used for ";
- pWriter->writeString(ss);
- ss << endl;
- warning() << ss.str();
- warned = true;
- }
- }
-
- if (errorLimit) {
- uassert(15944, "terminating request: request heap use exceeded 10% of physical RAM", (totalUsed <= errorLimit));
- }
- }
-
- void DocMemMonitor::init(StringWriter *pW,
- size_t warnLimit, size_t errorLimit) {
- this->pWriter = pW;
- this->warnLimit = warnLimit;
- this->errorLimit = errorLimit;
-
- warned = false;
- totalUsed = 0;
- }
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "db/pipeline/doc_mem_monitor.h"
+#include "util/systeminfo.h"
+
+namespace mongo {
+
+ DocMemMonitor::DocMemMonitor(StringWriter *pW) {
+ /*
+ Use the default values.
+
+ Currently, we warn in log at 5%, and assert at 10%.
+ */
+ size_t errorRam = SystemInfo::getPhysicalRam() / 10;
+ size_t warnRam = errorRam / 2;
+
+ init(pW, warnRam, errorRam);
+ }
+
+ DocMemMonitor::DocMemMonitor(StringWriter *pW,
+ size_t warnLimit, size_t errorLimit) {
+ init(pW, warnLimit, errorLimit);
+ }
+
+ void DocMemMonitor::addToTotal(size_t amount) {
+ totalUsed += amount;
+
+ if (!warned) {
+ if (warnLimit && (totalUsed > warnLimit)) {
+ stringstream ss;
+ ss << "warning, 5% of physical RAM used for ";
+ pWriter->writeString(ss);
+ ss << endl;
+ warning() << ss.str();
+ warned = true;
+ }
+ }
+
+ if (errorLimit) {
+ uassert(15944, "terminating request: request heap use exceeded 10% of physical RAM", (totalUsed <= errorLimit));
+ }
+ }
+
+ void DocMemMonitor::init(StringWriter *pW,
+ size_t warnLimit, size_t errorLimit) {
+ this->pWriter = pW;
+ this->warnLimit = warnLimit;
+ this->errorLimit = errorLimit;
+
+ warned = false;
+ totalUsed = 0;
+ }
+}
diff --git a/src/mongo/db/pipeline/doc_mem_monitor.h b/src/mongo/db/pipeline/doc_mem_monitor.h
index ca5ac23e16b..7a3f0062bfb 100755
--- a/src/mongo/db/pipeline/doc_mem_monitor.h
+++ b/src/mongo/db/pipeline/doc_mem_monitor.h
@@ -1,94 +1,94 @@
-/**
- * Copyright 2011 (c) 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#pragma once
-
-#include "pch.h"
-#include "util/string_writer.h"
-
-
-namespace mongo {
-
- /*
- This utility class provides an easy way to total up, monitor, warn, and
- signal an error when the amount of memory used for an operation exceeds
- given thresholds.
-
- Create a local instance of this class, and then inform it of any memory
- that you consume using addToTotal().
-
- Warnings or errors are issued as usage exceeds certain fractions of
- physical memory on the host, as determined by SystemInfo.
-
- This class is not guaranteed to warn or signal errors if the host system
- does not support the ability to report its memory, as per the warnings
- for SystemInfo in systeminfo.h.
- */
- class DocMemMonitor {
- public:
- /*
- Constructor.
-
- Uses default limits for warnings and errors.
-
- The StringWriter parameter must outlive the DocMemMonitor instance.
-
- @param pWriter string writer that provides information about the
- operation being monitored
- */
- DocMemMonitor(StringWriter *pWriter);
-
- /*
- Constructor.
-
- This variant allows explicit selection of the limits. Note that
- limits of zero are treated as infinite.
-
- The StringWriter parameter must outlive the DocMemMonitor instance.
-
- @param pWriter string writer that provides information about the
- operation being monitored
- @param warnLimit the amount of ram to issue (log) a warning for
- @param errorLimit the amount of ram to throw an error for
- */
- DocMemMonitor(StringWriter *pWriter, size_t warnLimit,
- size_t errorLimit);
-
- /*
- Increment the total amount of memory used by the given amount. If
- the warning threshold is exceeded, a warning will be logged. If the
- error threshold is exceeded, an error will be thrown.
-
- @param amount the amount of memory to add to the current total
- */
- void addToTotal(size_t amount);
-
- private:
- /*
- Real constructor body.
-
- Provides common construction for all the variant constructors.
- */
- void init(StringWriter *pW, size_t warnLimit, size_t errorLimit);
-
- bool warned;
- size_t totalUsed;
- size_t warnLimit;
- size_t errorLimit;
- StringWriter *pWriter;
- };
-
-}
+/**
+ * Copyright 2011 (c) 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+
+#include "pch.h"
+#include "util/string_writer.h"
+
+
+namespace mongo {
+
+ /*
+ This utility class provides an easy way to total up, monitor, warn, and
+ signal an error when the amount of memory used for an operation exceeds
+ given thresholds.
+
+ Create a local instance of this class, and then inform it of any memory
+ that you consume using addToTotal().
+
+ Warnings or errors are issued as usage exceeds certain fractions of
+ physical memory on the host, as determined by SystemInfo.
+
+ This class is not guaranteed to warn or signal errors if the host system
+ does not support the ability to report its memory, as per the warnings
+ for SystemInfo in systeminfo.h.
+ */
+ class DocMemMonitor {
+ public:
+ /*
+ Constructor.
+
+ Uses default limits for warnings and errors.
+
+ The StringWriter parameter must outlive the DocMemMonitor instance.
+
+ @param pWriter string writer that provides information about the
+ operation being monitored
+ */
+ DocMemMonitor(StringWriter *pWriter);
+
+ /*
+ Constructor.
+
+ This variant allows explicit selection of the limits. Note that
+ limits of zero are treated as infinite.
+
+ The StringWriter parameter must outlive the DocMemMonitor instance.
+
+ @param pWriter string writer that provides information about the
+ operation being monitored
+ @param warnLimit the amount of ram to issue (log) a warning for
+ @param errorLimit the amount of ram to throw an error for
+ */
+ DocMemMonitor(StringWriter *pWriter, size_t warnLimit,
+ size_t errorLimit);
+
+ /*
+ Increment the total amount of memory used by the given amount. If
+ the warning threshold is exceeded, a warning will be logged. If the
+ error threshold is exceeded, an error will be thrown.
+
+ @param amount the amount of memory to add to the current total
+ */
+ void addToTotal(size_t amount);
+
+ private:
+ /*
+ Real constructor body.
+
+ Provides common construction for all the variant constructors.
+ */
+ void init(StringWriter *pW, size_t warnLimit, size_t errorLimit);
+
+ bool warned;
+ size_t totalUsed;
+ size_t warnLimit;
+ size_t errorLimit;
+ StringWriter *pWriter;
+ };
+
+}
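
To make the DocMemMonitor contract above concrete, here is a minimal standalone sketch with the same shape: a running total, a one-shot warning past one threshold, an exception past a higher one, and a zero limit meaning "no limit". The class name and byte limits below are illustrative assumptions; the real defaults come from SystemInfo::getPhysicalRam() (roughly 5% warn, 10% error) and the real error path goes through uassert().

    #include <cstddef>
    #include <iostream>
    #include <sstream>
    #include <stdexcept>

    class MemMonitorSketch {
    public:
        MemMonitorSketch(std::size_t warnLimit, std::size_t errorLimit)
            : warnLimit_(warnLimit), errorLimit_(errorLimit) {}

        void addToTotal(std::size_t amount) {
            totalUsed_ += amount;

            if (!warned_ && warnLimit_ && totalUsed_ > warnLimit_) {
                std::cerr << "warning: " << totalUsed_
                          << " bytes used, past the warn limit\n";
                warned_ = true;                       // warn only once
            }
            if (errorLimit_ && totalUsed_ > errorLimit_) {
                std::ostringstream ss;
                ss << "terminating: " << totalUsed_ << " bytes exceeds the error limit";
                throw std::runtime_error(ss.str());   // uassert() in the real class
            }
        }

    private:
        bool        warned_    = false;
        std::size_t totalUsed_ = 0;
        std::size_t warnLimit_;
        std::size_t errorLimit_;
    };

    int main() {
        MemMonitorSketch mon(100, 200);   // warn past 100 bytes, throw past 200
        mon.addToTotal(90);               // fine
        mon.addToTotal(50);               // prints the one-shot warning
        try {
            mon.addToTotal(100);          // 240 bytes total: throws
        } catch (const std::exception &e) {
            std::cerr << e.what() << "\n";
        }
    }
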
diff --git a/src/mongo/db/pipeline/document.cpp b/src/mongo/db/pipeline/document.cpp
index a224f56eacb..3242dec4c4a 100755
--- a/src/mongo/db/pipeline/document.cpp
+++ b/src/mongo/db/pipeline/document.cpp
@@ -1,227 +1,227 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-#include <boost/functional/hash.hpp>
-#include "db/jsobj.h"
-#include "db/pipeline/dependency_tracker.h"
-#include "db/pipeline/document.h"
-#include "db/pipeline/value.h"
-#include "util/mongoutils/str.h"
-
-namespace mongo {
- using namespace mongoutils;
-
- string Document::idName("_id");
-
- intrusive_ptr<Document> Document::createFromBsonObj(
- BSONObj *pBsonObj, const DependencyTracker *pDependencies) {
- intrusive_ptr<Document> pDocument(
- new Document(pBsonObj, pDependencies));
- return pDocument;
- }
-
- Document::Document(BSONObj *pBsonObj,
- const DependencyTracker *pDependencies):
- vFieldName(),
- vpValue() {
- BSONObjIterator bsonIterator(pBsonObj->begin());
- while(bsonIterator.more()) {
- BSONElement bsonElement(bsonIterator.next());
- string fieldName(bsonElement.fieldName());
-
- // LATER check pDependencies
- // LATER grovel through structures???
- intrusive_ptr<const Value> pValue(
- Value::createFromBsonElement(&bsonElement));
-
- vFieldName.push_back(fieldName);
- vpValue.push_back(pValue);
- }
- }
-
- void Document::toBson(BSONObjBuilder *pBuilder) {
- const size_t n = vFieldName.size();
- for(size_t i = 0; i < n; ++i)
- vpValue[i]->addToBsonObj(pBuilder, vFieldName[i]);
- }
-
- intrusive_ptr<Document> Document::create(size_t sizeHint) {
- intrusive_ptr<Document> pDocument(new Document(sizeHint));
- return pDocument;
- }
-
- Document::Document(size_t sizeHint):
- vFieldName(),
- vpValue() {
- if (sizeHint) {
- vFieldName.reserve(sizeHint);
- vpValue.reserve(sizeHint);
- }
- }
-
- intrusive_ptr<Document> Document::clone() {
- const size_t n = vFieldName.size();
- intrusive_ptr<Document> pNew(Document::create(n));
- for(size_t i = 0; i < n; ++i)
- pNew->addField(vFieldName[i], vpValue[i]);
-
- return pNew;
- }
-
- Document::~Document() {
- }
-
- FieldIterator *Document::createFieldIterator() {
- return new FieldIterator(intrusive_ptr<Document>(this));
- }
-
- intrusive_ptr<const Value> Document::getValue(const string &fieldName) {
- /*
- For now, assume the number of fields is small enough that iteration
- is ok. Later, if this gets large, we can create a map into the
- vector for these lookups.
-
- Note that because of the schema-less nature of this data, we always
- have to look, and can't assume that the requested field is always
- in a particular place as we would with a statically compilable
- reference.
- */
- const size_t n = vFieldName.size();
- for(size_t i = 0; i < n; ++i) {
- if (fieldName.compare(vFieldName[i]) == 0)
- return vpValue[i];
- }
-
- return(intrusive_ptr<const Value>());
- }
-
- void Document::addField(const string &fieldName,
- const intrusive_ptr<const Value> &pValue) {
- uassert(15945, str::stream() << "cannot add undefined field " <<
- fieldName << " to document", pValue->getType() != Undefined);
-
- vFieldName.push_back(fieldName);
- vpValue.push_back(pValue);
- }
-
- void Document::setField(size_t index,
- const string &fieldName,
- const intrusive_ptr<const Value> &pValue) {
- /* special case: should this field be removed? */
- if (!pValue.get()) {
- vFieldName.erase(vFieldName.begin() + index);
- vpValue.erase(vpValue.begin() + index);
- return;
- }
-
- /* make sure we have a valid value */
- uassert(15968, str::stream() << "cannot set undefined field " <<
- fieldName << " to document", pValue->getType() != Undefined);
-
- /* set the indicated field */
- vFieldName[index] = fieldName;
- vpValue[index] = pValue;
- }
-
- intrusive_ptr<const Value> Document::getField(const string &fieldName) const {
- const size_t n = vFieldName.size();
- for(size_t i = 0; i < n; ++i) {
- if (fieldName.compare(vFieldName[i]) == 0)
- return vpValue[i];
- }
-
- /* if we got here, there's no such field */
- return intrusive_ptr<const Value>();
- }
-
- size_t Document::getApproximateSize() const {
- size_t size = sizeof(Document);
- const size_t n = vpValue.size();
- for(size_t i = 0; i < n; ++i)
- size += vpValue[i]->getApproximateSize();
-
- return size;
- }
-
- size_t Document::getFieldIndex(const string &fieldName) const {
- const size_t n = vFieldName.size();
- size_t i = 0;
- for(; i < n; ++i) {
- if (fieldName.compare(vFieldName[i]) == 0)
- break;
- }
-
- return i;
- }
-
- void Document::hash_combine(size_t &seed) const {
- const size_t n = vFieldName.size();
- for(size_t i = 0; i < n; ++i) {
- boost::hash_combine(seed, vFieldName[i]);
- vpValue[i]->hash_combine(seed);
- }
- }
-
- int Document::compare(const intrusive_ptr<Document> &rL,
- const intrusive_ptr<Document> &rR) {
- const size_t lSize = rL->vFieldName.size();
- const size_t rSize = rR->vFieldName.size();
-
- for(size_t i = 0; true; ++i) {
- if (i >= lSize) {
- if (i >= rSize)
- return 0; // documents are the same length
-
- return -1; // left document is shorter
- }
-
- if (i >= rSize)
- return 1; // right document is shorter
-
- const int nameCmp = rL->vFieldName[i].compare(rR->vFieldName[i]);
- if (nameCmp)
- return nameCmp; // field names are unequal
-
- const int valueCmp = Value::compare(rL->vpValue[i], rR->vpValue[i]);
- if (valueCmp)
- return valueCmp; // fields are unequal
- }
-
- /* NOTREACHED */
- verify(false);
- return 0;
- }
-
- /* ----------------------- FieldIterator ------------------------------- */
-
- FieldIterator::FieldIterator(const intrusive_ptr<Document> &pTheDocument):
- pDocument(pTheDocument),
- index(0) {
- }
-
- bool FieldIterator::more() const {
- return (index < pDocument->vFieldName.size());
- }
-
- pair<string, intrusive_ptr<const Value> > FieldIterator::next() {
- verify(more());
- pair<string, intrusive_ptr<const Value> > result(
- pDocument->vFieldName[index], pDocument->vpValue[index]);
- ++index;
- return result;
- }
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include <boost/functional/hash.hpp>
+#include "db/jsobj.h"
+#include "db/pipeline/dependency_tracker.h"
+#include "db/pipeline/document.h"
+#include "db/pipeline/value.h"
+#include "util/mongoutils/str.h"
+
+namespace mongo {
+ using namespace mongoutils;
+
+ string Document::idName("_id");
+
+ intrusive_ptr<Document> Document::createFromBsonObj(
+ BSONObj *pBsonObj, const DependencyTracker *pDependencies) {
+ intrusive_ptr<Document> pDocument(
+ new Document(pBsonObj, pDependencies));
+ return pDocument;
+ }
+
+ Document::Document(BSONObj *pBsonObj,
+ const DependencyTracker *pDependencies):
+ vFieldName(),
+ vpValue() {
+ BSONObjIterator bsonIterator(pBsonObj->begin());
+ while(bsonIterator.more()) {
+ BSONElement bsonElement(bsonIterator.next());
+ string fieldName(bsonElement.fieldName());
+
+ // LATER check pDependencies
+ // LATER grovel through structures???
+ intrusive_ptr<const Value> pValue(
+ Value::createFromBsonElement(&bsonElement));
+
+ vFieldName.push_back(fieldName);
+ vpValue.push_back(pValue);
+ }
+ }
+
+ void Document::toBson(BSONObjBuilder *pBuilder) {
+ const size_t n = vFieldName.size();
+ for(size_t i = 0; i < n; ++i)
+ vpValue[i]->addToBsonObj(pBuilder, vFieldName[i]);
+ }
+
+ intrusive_ptr<Document> Document::create(size_t sizeHint) {
+ intrusive_ptr<Document> pDocument(new Document(sizeHint));
+ return pDocument;
+ }
+
+ Document::Document(size_t sizeHint):
+ vFieldName(),
+ vpValue() {
+ if (sizeHint) {
+ vFieldName.reserve(sizeHint);
+ vpValue.reserve(sizeHint);
+ }
+ }
+
+ intrusive_ptr<Document> Document::clone() {
+ const size_t n = vFieldName.size();
+ intrusive_ptr<Document> pNew(Document::create(n));
+ for(size_t i = 0; i < n; ++i)
+ pNew->addField(vFieldName[i], vpValue[i]);
+
+ return pNew;
+ }
+
+ Document::~Document() {
+ }
+
+ FieldIterator *Document::createFieldIterator() {
+ return new FieldIterator(intrusive_ptr<Document>(this));
+ }
+
+ intrusive_ptr<const Value> Document::getValue(const string &fieldName) {
+ /*
+ For now, assume the number of fields is small enough that iteration
+ is ok. Later, if this gets large, we can create a map into the
+ vector for these lookups.
+
+ Note that because of the schema-less nature of this data, we always
+ have to look, and can't assume that the requested field is always
+ in a particular place as we would with a statically compilable
+ reference.
+ */
+ const size_t n = vFieldName.size();
+ for(size_t i = 0; i < n; ++i) {
+ if (fieldName.compare(vFieldName[i]) == 0)
+ return vpValue[i];
+ }
+
+ return(intrusive_ptr<const Value>());
+ }
+
+ void Document::addField(const string &fieldName,
+ const intrusive_ptr<const Value> &pValue) {
+ uassert(15945, str::stream() << "cannot add undefined field " <<
+ fieldName << " to document", pValue->getType() != Undefined);
+
+ vFieldName.push_back(fieldName);
+ vpValue.push_back(pValue);
+ }
+
+ void Document::setField(size_t index,
+ const string &fieldName,
+ const intrusive_ptr<const Value> &pValue) {
+ /* special case: should this field be removed? */
+ if (!pValue.get()) {
+ vFieldName.erase(vFieldName.begin() + index);
+ vpValue.erase(vpValue.begin() + index);
+ return;
+ }
+
+ /* make sure we have a valid value */
+ uassert(15968, str::stream() << "cannot set undefined field " <<
+ fieldName << " to document", pValue->getType() != Undefined);
+
+ /* set the indicated field */
+ vFieldName[index] = fieldName;
+ vpValue[index] = pValue;
+ }
+
+ intrusive_ptr<const Value> Document::getField(const string &fieldName) const {
+ const size_t n = vFieldName.size();
+ for(size_t i = 0; i < n; ++i) {
+ if (fieldName.compare(vFieldName[i]) == 0)
+ return vpValue[i];
+ }
+
+ /* if we got here, there's no such field */
+ return intrusive_ptr<const Value>();
+ }
+
+ size_t Document::getApproximateSize() const {
+ size_t size = sizeof(Document);
+ const size_t n = vpValue.size();
+ for(size_t i = 0; i < n; ++i)
+ size += vpValue[i]->getApproximateSize();
+
+ return size;
+ }
+
+ size_t Document::getFieldIndex(const string &fieldName) const {
+ const size_t n = vFieldName.size();
+ size_t i = 0;
+ for(; i < n; ++i) {
+ if (fieldName.compare(vFieldName[i]) == 0)
+ break;
+ }
+
+ return i;
+ }
+
+ void Document::hash_combine(size_t &seed) const {
+ const size_t n = vFieldName.size();
+ for(size_t i = 0; i < n; ++i) {
+ boost::hash_combine(seed, vFieldName[i]);
+ vpValue[i]->hash_combine(seed);
+ }
+ }
+
+ int Document::compare(const intrusive_ptr<Document> &rL,
+ const intrusive_ptr<Document> &rR) {
+ const size_t lSize = rL->vFieldName.size();
+ const size_t rSize = rR->vFieldName.size();
+
+ for(size_t i = 0; true; ++i) {
+ if (i >= lSize) {
+ if (i >= rSize)
+ return 0; // documents are the same length
+
+ return -1; // left document is shorter
+ }
+
+ if (i >= rSize)
+ return 1; // right document is shorter
+
+ const int nameCmp = rL->vFieldName[i].compare(rR->vFieldName[i]);
+ if (nameCmp)
+ return nameCmp; // field names are unequal
+
+ const int valueCmp = Value::compare(rL->vpValue[i], rR->vpValue[i]);
+ if (valueCmp)
+ return valueCmp; // fields are unequal
+ }
+
+ /* NOTREACHED */
+ verify(false);
+ return 0;
+ }
+
+ /* ----------------------- FieldIterator ------------------------------- */
+
+ FieldIterator::FieldIterator(const intrusive_ptr<Document> &pTheDocument):
+ pDocument(pTheDocument),
+ index(0) {
+ }
+
+ bool FieldIterator::more() const {
+ return (index < pDocument->vFieldName.size());
+ }
+
+ pair<string, intrusive_ptr<const Value> > FieldIterator::next() {
+ verify(more());
+ pair<string, intrusive_ptr<const Value> > result(
+ pDocument->vFieldName[index], pDocument->vpValue[index]);
+ ++index;
+ return result;
+ }
+}
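
For orientation: Document above keeps its fields in two parallel vectors (vFieldName and vpValue) in insertion order, and getValue()/getField()/getFieldIndex() are linear scans over the names, as the comment in getValue() explains. A tiny standalone sketch of that storage scheme, with std::string values standing in for intrusive_ptr<const Value> and a hypothetical DocSketch name:

    #include <cstddef>
    #include <iostream>
    #include <string>
    #include <utility>
    #include <vector>

    class DocSketch {
    public:
        // Like Document::addField(): push onto both parallel vectors.
        void addField(const std::string &name, const std::string &value) {
            names_.push_back(name);
            values_.push_back(value);
        }

        // Like Document::getField(): scan until the name matches.
        std::pair<bool, std::string> getField(const std::string &name) const {
            for (std::size_t i = 0; i < names_.size(); ++i)
                if (names_[i] == name)
                    return std::make_pair(true, values_[i]);
            return std::make_pair(false, std::string());
        }

    private:
        std::vector<std::string> names_;   // vFieldName in the real class
        std::vector<std::string> values_;  // vpValue in the real class
    };

    int main() {
        DocSketch doc;
        doc.addField("_id", "abc123");
        doc.addField("pageViews", "5");
        std::pair<bool, std::string> hit = doc.getField("pageViews");
        std::cout << (hit.first ? hit.second : "missing") << "\n";   // prints 5
    }
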
diff --git a/src/mongo/db/pipeline/document_source.cpp b/src/mongo/db/pipeline/document_source.cpp
index 7d5f8355dfa..0f77a90730d 100755
--- a/src/mongo/db/pipeline/document_source.cpp
+++ b/src/mongo/db/pipeline/document_source.cpp
@@ -1,86 +1,86 @@
-/**
-* Copyright (C) 2011 10gen Inc.
-*
-* This program is free software: you can redistribute it and/or modify
-* it under the terms of the GNU Affero General Public License, version 3,
-* as published by the Free Software Foundation.
-*
-* This program is distributed in the hope that it will be useful,
-* but WITHOUT ANY WARRANTY; without even the implied warranty of
-* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-* GNU Affero General Public License for more details.
-*
-* You should have received a copy of the GNU Affero General Public License
-* along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/
-
-#include "pch.h"
-
-#include "db/pipeline/document_source.h"
-#include "db/pipeline/expression_context.h"
-
-namespace mongo {
-
- DocumentSource::DocumentSource(
- const intrusive_ptr<ExpressionContext> &pCtx):
- pSource(NULL),
- step(-1),
- pExpCtx(pCtx),
- nRowsOut(0) {
- }
-
- DocumentSource::~DocumentSource() {
- }
-
- const char *DocumentSource::getSourceName() const {
- static const char unknown[] = "[UNKNOWN]";
- return unknown;
- }
-
- void DocumentSource::setSource(DocumentSource *pTheSource) {
- verify(!pSource);
- pSource = pTheSource;
- }
-
- bool DocumentSource::coalesce(
- const intrusive_ptr<DocumentSource> &pNextSource) {
- return false;
- }
-
- void DocumentSource::optimize() {
- }
-
- void DocumentSource::manageDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker) {
-#ifdef MONGO_LATER_SERVER_4644
- verify(false); // identify any sources that need this but don't have it
-#endif /* MONGO_LATER_SERVER_4644 */
- }
-
- bool DocumentSource::advance() {
- pExpCtx->checkForInterrupt(); // might not return
- return false;
- }
-
- void DocumentSource::addToBsonArray(
- BSONArrayBuilder *pBuilder, bool explain) const {
- BSONObjBuilder insides;
- sourceToBson(&insides, explain);
-
-/* No statistics at this time
- if (explain) {
- insides.append("nOut", nOut);
- }
-*/
-
- pBuilder->append(insides.done());
- }
-
- void DocumentSource::writeString(stringstream &ss) const {
- BSONArrayBuilder bab;
- addToBsonArray(&bab);
- BSONArray ba(bab.arr());
- ss << ba.toString(/* isArray */true);
- // our toString should use standard string types.....
- }
-}
+/**
+* Copyright (C) 2011 10gen Inc.
+*
+* This program is free software: you can redistribute it and/or modify
+* it under the terms of the GNU Affero General Public License, version 3,
+* as published by the Free Software Foundation.
+*
+* This program is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU Affero General Public License for more details.
+*
+* You should have received a copy of the GNU Affero General Public License
+* along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+
+#include "pch.h"
+
+#include "db/pipeline/document_source.h"
+#include "db/pipeline/expression_context.h"
+
+namespace mongo {
+
+ DocumentSource::DocumentSource(
+ const intrusive_ptr<ExpressionContext> &pCtx):
+ pSource(NULL),
+ step(-1),
+ pExpCtx(pCtx),
+ nRowsOut(0) {
+ }
+
+ DocumentSource::~DocumentSource() {
+ }
+
+ const char *DocumentSource::getSourceName() const {
+ static const char unknown[] = "[UNKNOWN]";
+ return unknown;
+ }
+
+ void DocumentSource::setSource(DocumentSource *pTheSource) {
+ verify(!pSource);
+ pSource = pTheSource;
+ }
+
+ bool DocumentSource::coalesce(
+ const intrusive_ptr<DocumentSource> &pNextSource) {
+ return false;
+ }
+
+ void DocumentSource::optimize() {
+ }
+
+ void DocumentSource::manageDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker) {
+#ifdef MONGO_LATER_SERVER_4644
+ verify(false); // identify any sources that need this but don't have it
+#endif /* MONGO_LATER_SERVER_4644 */
+ }
+
+ bool DocumentSource::advance() {
+ pExpCtx->checkForInterrupt(); // might not return
+ return false;
+ }
+
+ void DocumentSource::addToBsonArray(
+ BSONArrayBuilder *pBuilder, bool explain) const {
+ BSONObjBuilder insides;
+ sourceToBson(&insides, explain);
+
+/* No statistics at this time
+ if (explain) {
+ insides.append("nOut", nOut);
+ }
+*/
+
+ pBuilder->append(insides.done());
+ }
+
+ void DocumentSource::writeString(stringstream &ss) const {
+ BSONArrayBuilder bab;
+ addToBsonArray(&bab);
+ BSONArray ba(bab.arr());
+ ss << ba.toString(/* isArray */true);
+ // our toString should use standard string types.....
+ }
+}
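
The DocumentSource base class implemented above (and declared in document_source.h, next) defines a pull-style interface: eof(), advance(), getCurrent(), with each stage reading from the stage handed to it via setSource(). A minimal standalone sketch of that model, using int in place of intrusive_ptr<Document> and hypothetical Stage/VectorStage/EvenStage names, with the filter stage playing the role a DocumentSourceFilter plays in a real pipeline:

    #include <cstddef>
    #include <iostream>
    #include <vector>

    // Pull-model stage interface, analogous to DocumentSource.
    struct Stage {
        virtual ~Stage() {}
        virtual bool eof() = 0;
        virtual bool advance() = 0;
        virtual int  getCurrent() = 0;    // stand-in for intrusive_ptr<Document>
    };

    // Source stage over an in-memory vector, like DocumentSourceBsonArray.
    struct VectorStage : Stage {
        explicit VectorStage(const std::vector<int> &v) : data(v) {}
        bool eof()        { return pos >= data.size(); }
        bool advance()    { ++pos; return !eof(); }
        int  getCurrent() { return data[pos]; }
        std::vector<int> data;
        std::size_t pos = 0;
    };

    // Filtering stage, like DocumentSourceFilter: skips inputs its predicate rejects.
    struct EvenStage : Stage {
        explicit EvenStage(Stage *s) : src(s) { skip(); }
        bool eof()        { return src->eof(); }
        bool advance()    { src->advance(); skip(); return !eof(); }
        int  getCurrent() { return src->getCurrent(); }
        void skip() {
            while (!src->eof() && src->getCurrent() % 2 != 0)
                src->advance();
        }
        Stage *src;
    };

    int main() {
        VectorStage base(std::vector<int>{1, 2, 3, 4, 5, 6});
        EvenStage evens(&base);           // plays the part of setSource(&base)
        for (; !evens.eof(); evens.advance())
            std::cout << evens.getCurrent() << " ";   // prints 2 4 6
        std::cout << "\n";
    }
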
diff --git a/src/mongo/db/pipeline/document_source.h b/src/mongo/db/pipeline/document_source.h
index 24b7c84704b..2d9d0f10d5b 100755
--- a/src/mongo/db/pipeline/document_source.h
+++ b/src/mongo/db/pipeline/document_source.h
@@ -1,1264 +1,1264 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#pragma once
-
-#include "pch.h"
-
-#include <boost/unordered_map.hpp>
-#include "util/intrusive_counter.h"
-#include "client/parallel.h"
-#include "db/clientcursor.h"
-#include "db/jsobj.h"
-#include "db/pipeline/dependency_tracker.h"
-#include "db/pipeline/document.h"
-#include "db/pipeline/expression.h"
-#include "db/pipeline/value.h"
-#include "util/string_writer.h"
-
-namespace mongo {
- class Accumulator;
- class Cursor;
- class DependencyTracker;
- class Document;
- class Expression;
- class ExpressionContext;
- class ExpressionFieldPath;
- class ExpressionObject;
- class Matcher;
-
- class DocumentSource :
- public IntrusiveCounterUnsigned,
- public StringWriter {
- public:
- virtual ~DocumentSource();
-
- // virtuals from StringWriter
- virtual void writeString(stringstream &ss) const;
-
- /**
- Set the step for a user-specified pipeline step.
-
- The step is used for diagnostics.
-
- @param step step number 0 to n.
- */
- void setPipelineStep(int step);
-
- /**
- Get the user-specified pipeline step.
-
- @returns the step number, or -1 if it has never been set
- */
- int getPipelineStep() const;
-
- /**
- Is the source at EOF?
-
- @returns true if the source has no more Documents to return.
- */
- virtual bool eof() = 0;
-
- /**
- Advance the state of the DocumentSource so that it will return the
- next Document.
-
- The default implementation returns false, after checking for
- interrupts. Derived classes can call the default implementation
- in their own implementations in order to check for interrupts.
-
- @returns whether there is another document to fetch, i.e., whether or
- not getCurrent() will succeed. This default implementation always
- returns false.
- */
- virtual bool advance();
-
- /**
-          Advance the source, and return the next Document.
-
- @returns the current Document
-          TODO throws an exception if there are no more documents to return.
- */
- virtual intrusive_ptr<Document> getCurrent() = 0;
-
- /**
- Get the source's name.
-
- @returns the string name of the source as a constant string;
- this is static, and there's no need to worry about adopting it
- */
- virtual const char *getSourceName() const;
-
- /**
- Set the underlying source this source should use to get Documents
- from.
-
- It is an error to set the source more than once. This is to
- prevent changing sources once the original source has been started;
- this could break the state maintained by the DocumentSource.
-
- This pointer is not reference counted because that has led to
- some circular references. As a result, this doesn't keep
- sources alive, and is only intended to be used temporarily for
- the lifetime of a Pipeline::run().
-
- @param pSource the underlying source to use
- */
- virtual void setSource(DocumentSource *pSource);
-
- /**
- Attempt to coalesce this DocumentSource with its successor in the
- document processing pipeline. If successful, the successor
- DocumentSource should be removed from the pipeline and discarded.
-
- If successful, this operation can be applied repeatedly, in an
- attempt to coalesce several sources together.
-
- The default implementation is to do nothing, and return false.
-
- @param pNextSource the next source in the document processing chain.
- @returns whether or not the attempt to coalesce was successful or not;
- if the attempt was not successful, nothing has been changed
- */
- virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource);
-
- /**
- Optimize the pipeline operation, if possible. This is a local
- optimization that only looks within this DocumentSource. For best
- results, first coalesce compatible sources using coalesce().
-
- This is intended for any operations that include expressions, and
- provides a hook for those to optimize those operations.
-
- The default implementation is to do nothing.
- */
- virtual void optimize();
-
- /**
- Adjust dependencies according to the needs of this source.
-
- $$$ MONGO_LATER_SERVER_4644
- @param pTracker the dependency tracker
- */
- virtual void manageDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker);
-
- /**
- Add the DocumentSource to the array builder.
-
- The default implementation calls sourceToBson() in order to
- convert the inner part of the object which will be added to the
- array being built here.
-
- @param pBuilder the array builder to add the operation to.
- @param explain create explain output
- */
- virtual void addToBsonArray(BSONArrayBuilder *pBuilder,
- bool explain = false) const;
-
- protected:
- /**
- Base constructor.
- */
- DocumentSource(const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- /**
- Create an object that represents the document source. The object
- will have a single field whose name is the source's name. This
- will be used by the default implementation of addToBsonArray()
- to add this object to a pipeline being represented in BSON.
-
- @param pBuilder a blank object builder to write to
- @param explain create explain output
- */
- virtual void sourceToBson(BSONObjBuilder *pBuilder,
- bool explain) const = 0;
-
- /*
- Most DocumentSources have an underlying source they get their data
- from. This is a convenience for them.
-
- The default implementation of setSource() sets this; if you don't
- need a source, override that to verify(). The default is to
- verify() if this has already been set.
- */
- DocumentSource *pSource;
-
- /*
- The zero-based user-specified pipeline step. Used for diagnostics.
- Will be set to -1 for artificial pipeline steps that were not part
- of the original user specification.
- */
- int step;
-
- intrusive_ptr<ExpressionContext> pExpCtx;
-
- /*
- for explain: # of rows returned by this source
-
- This is *not* unsigned so it can be passed to BSONObjBuilder.append().
- */
- long long nRowsOut;
- };
-
-
- class DocumentSourceBsonArray :
- public DocumentSource {
- public:
- // virtuals from DocumentSource
- virtual ~DocumentSourceBsonArray();
- virtual bool eof();
- virtual bool advance();
- virtual intrusive_ptr<Document> getCurrent();
- virtual void setSource(DocumentSource *pSource);
-
- /**
- Create a document source based on a BSON array.
-
- This is usually put at the beginning of a chain of document sources
- in order to fetch data from the database.
-
- CAUTION: the BSON is not read until the source is used. Any
- elements that appear after these documents must not be read until
- this source is exhausted.
-
- @param pBsonElement the BSON array to treat as a document source
- @param pExpCtx the expression context for the pipeline
- @returns the newly created document source
- */
- static intrusive_ptr<DocumentSourceBsonArray> create(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
-
- private:
- DocumentSourceBsonArray(BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- BSONObj embeddedObject;
- BSONObjIterator arrayIterator;
- BSONElement currentElement;
- bool haveCurrent;
- };
-
-
- class DocumentSourceCommandFutures :
- public DocumentSource {
- public:
- // virtuals from DocumentSource
- virtual ~DocumentSourceCommandFutures();
- virtual bool eof();
- virtual bool advance();
- virtual intrusive_ptr<Document> getCurrent();
- virtual void setSource(DocumentSource *pSource);
-
- /* convenient shorthand for a commonly used type */
- typedef list<shared_ptr<Future::CommandResult> > FuturesList;
-
- /**
- Create a DocumentSource that wraps a list of Command::Futures.
-
- @param errmsg place to write error messages to; must exist for the
- lifetime of the created DocumentSourceCommandFutures
- @param pList the list of futures
- @param pExpCtx the expression context for the pipeline
- @returns the newly created DocumentSource
- */
- static intrusive_ptr<DocumentSourceCommandFutures> create(
- string &errmsg, FuturesList *pList,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
-
- private:
- DocumentSourceCommandFutures(string &errmsg, FuturesList *pList,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- /**
- Advance to the next document, setting pCurrent appropriately.
-
- Adjusts pCurrent, pBsonSource, and iterator, as needed. On exit,
- pCurrent is the Document to return, or NULL. If NULL, this
- indicates there is nothing more to return.
- */
- void getNextDocument();
-
- bool newSource; // set to true for the first item of a new source
- intrusive_ptr<DocumentSourceBsonArray> pBsonSource;
- intrusive_ptr<Document> pCurrent;
- FuturesList::iterator iterator;
- FuturesList::iterator listEnd;
- string &errmsg;
- };
-
-
- class DocumentSourceCursor :
- public DocumentSource {
- public:
- // virtuals from DocumentSource
- virtual ~DocumentSourceCursor();
- virtual bool eof();
- virtual bool advance();
- virtual intrusive_ptr<Document> getCurrent();
- virtual void setSource(DocumentSource *pSource);
- virtual void manageDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker);
-
- /**
- Create a document source based on a cursor.
-
- This is usually put at the beginning of a chain of document sources
- in order to fetch data from the database.
-
- @param pCursor the cursor to use to fetch data
- @param pExpCtx the expression context for the pipeline
- */
- static intrusive_ptr<DocumentSourceCursor> create(
- const shared_ptr<Cursor> &pCursor,
- const string &ns,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- /*
- Record the namespace. Required for explain.
-
- @param namespace the namespace
- */
- void setNamespace(const string &ns);
-
- /*
- Record the query that was specified for the cursor this wraps, if
- any.
-
- This should be captured after any optimizations are applied to
- the pipeline so that it reflects what is really used.
-
- This gets used for explain output.
-
- @param pBsonObj the query to record
- */
- void setQuery(const shared_ptr<BSONObj> &pBsonObj);
-
- /*
- Record the sort that was specified for the cursor this wraps, if
- any.
-
- This should be captured after any optimizations are applied to
- the pipeline so that it reflects what is really used.
-
- This gets used for explain output.
-
- @param pBsonObj the sort to record
- */
- void setSort(const shared_ptr<BSONObj> &pBsonObj);
-
- /**
- Release the cursor, but without changing the other data. This
- is used for the explain version of pipeline execution.
- */
- void releaseCursor();
-
- protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
-
- private:
- DocumentSourceCursor(
- const shared_ptr<Cursor> &pTheCursor, const string &ns,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- void findNext();
- intrusive_ptr<Document> pCurrent;
-
- string ns; // namespace
-
- /*
- The bsonDependencies must outlive the Cursor wrapped by this
- source. Therefore, bsonDependencies must appear before pCursor
-          in order to cause its destructor to be called *after* pCursor's.
- */
- shared_ptr<BSONObj> pQuery;
- shared_ptr<BSONObj> pSort;
- vector<shared_ptr<BSONObj> > bsonDependencies;
- shared_ptr<Cursor> pCursor;
-
- /*
- In order to yield, we need a ClientCursor.
- */
- ClientCursor::Holder pClientCursor;
-
- /*
- Advance the cursor, and yield sometimes.
-
- If the state of the world changed during the yield such that we
- are unable to continue execution of the query, this will release the
- client cursor, and throw an error.
- */
- void advanceAndYield();
-
- /*
- This document source hangs on to the dependency tracker when it
- gets it so that it can be used for selective reification of
- fields in order to avoid fields that are not required through the
- pipeline.
- */
- intrusive_ptr<DependencyTracker> pDependencies;
-
- /**
- (5/14/12 - moved this to private because it's not used atm)
- Add a BSONObj dependency.
-
- Some Cursor creation functions rely on BSON objects to specify
- their query predicate or sort. These often take a BSONObj
- by reference for these, but do not copy it. As a result, the
- BSONObjs specified must outlive the Cursor. In order to ensure
- that, use this to preserve a pointer to the BSONObj here.
-
- From the outside, you must also make sure the BSONObjBuilder
- creates a lasting copy of the data, otherwise it will go away
- when the builder goes out of scope. Therefore, the typical usage
- pattern for this is
- {
- BSONObjBuilder builder;
- // do stuff to the builder
- shared_ptr<BSONObj> pBsonObj(new BSONObj(builder.obj()));
- pDocumentSourceCursor->addBsonDependency(pBsonObj);
- }
-
- @param pBsonObj pointer to the BSON object to preserve
- */
- void addBsonDependency(const shared_ptr<BSONObj> &pBsonObj);
- };
-
-
- /*
- This contains all the basic mechanics for filtering a stream of
- Documents, except for the actual predicate evaluation itself. This was
- factored out so we could create DocumentSources that use both Matcher
- style predicates as well as full Expressions.
- */
- class DocumentSourceFilterBase :
- public DocumentSource {
- public:
- // virtuals from DocumentSource
- virtual ~DocumentSourceFilterBase();
- virtual bool eof();
- virtual bool advance();
- virtual intrusive_ptr<Document> getCurrent();
-
- /**
- Create a BSONObj suitable for Matcher construction.
-
- This is used after filter analysis has moved as many filters to
- as early a point as possible in the document processing pipeline.
- See db/Matcher.h and the associated wiki documentation for the
- format. This conversion is used to move back to the low-level
- find() Cursor mechanism.
-
- @param pBuilder the builder to write to
- */
- virtual void toMatcherBson(BSONObjBuilder *pBuilder) const = 0;
-
- protected:
- DocumentSourceFilterBase(
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- /**
- Test the given document against the predicate and report if it
- should be accepted or not.
-
- @param pDocument the document to test
- @returns true if the document matches the filter, false otherwise
- */
- virtual bool accept(const intrusive_ptr<Document> &pDocument) const = 0;
-
- private:
-
- void findNext();
-
- bool unstarted;
- bool hasNext;
- intrusive_ptr<Document> pCurrent;
- };
-
-
- class DocumentSourceFilter :
- public DocumentSourceFilterBase {
- public:
- // virtuals from DocumentSource
- virtual ~DocumentSourceFilter();
- virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource);
- virtual void optimize();
- virtual const char *getSourceName() const;
-
- /**
- Create a filter.
-
- @param pBsonElement the raw BSON specification for the filter
- @param pExpCtx the expression context for the pipeline
- @returns the filter
- */
- static intrusive_ptr<DocumentSource> createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- /**
- Create a filter.
-
- @param pFilter the expression to use to filter
- @param pExpCtx the expression context for the pipeline
- @returns the filter
- */
- static intrusive_ptr<DocumentSourceFilter> create(
- const intrusive_ptr<Expression> &pFilter,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- /**
- Create a BSONObj suitable for Matcher construction.
-
- This is used after filter analysis has moved as many filters to
- as early a point as possible in the document processing pipeline.
- See db/Matcher.h and the associated wiki documentation for the
- format. This conversion is used to move back to the low-level
- find() Cursor mechanism.
-
- @param pBuilder the builder to write to
- */
- void toMatcherBson(BSONObjBuilder *pBuilder) const;
-
- static const char filterName[];
-
- protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
-
- // virtuals from DocumentSourceFilterBase
- virtual bool accept(const intrusive_ptr<Document> &pDocument) const;
-
- private:
- DocumentSourceFilter(const intrusive_ptr<Expression> &pFilter,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- intrusive_ptr<Expression> pFilter;
- };
-
-
- class DocumentSourceGroup :
- public DocumentSource {
- public:
- // virtuals from DocumentSource
- virtual ~DocumentSourceGroup();
- virtual bool eof();
- virtual bool advance();
- virtual const char *getSourceName() const;
- virtual intrusive_ptr<Document> getCurrent();
-
- /**
- Create a new grouping DocumentSource.
-
- @param pExpCtx the expression context for the pipeline
- @returns the DocumentSource
- */
- static intrusive_ptr<DocumentSourceGroup> create(
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- /**
- Set the Id Expression.
-
-          Documents that pass through the grouping DocumentSource are grouped
-          according to this key.  This will generate the _id field in the
- result documents.
-
- @param pExpression the group key
- */
- void setIdExpression(const intrusive_ptr<Expression> &pExpression);
-
- /**
- Add an accumulator.
-
- Accumulators become fields in the Documents that result from
-          grouping.  Each unique group document must have its own
- accumulator; the accumulator factory is used to create that.
-
- @param fieldName the name the accumulator result will have in the
- result documents
- @param pAccumulatorFactory used to create the accumulator for the
- group field
- */
- void addAccumulator(string fieldName,
- intrusive_ptr<Accumulator> (*pAccumulatorFactory)(
- const intrusive_ptr<ExpressionContext> &),
- const intrusive_ptr<Expression> &pExpression);
-
- /**
- Create a grouping DocumentSource from BSON.
-
- This is a convenience method that uses the above, and operates on
-          a BSONElement that has been determined to be an Object with an
-          element named $group.
-
-          @param pBsonElement the BSONElement that defines the group
- @param pExpCtx the expression context
- @returns the grouping DocumentSource
- */
- static intrusive_ptr<DocumentSource> createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
-
- /**
- Create a unifying group that can be used to combine group results
- from shards.
-
- @returns the grouping DocumentSource
- */
- intrusive_ptr<DocumentSource> createMerger();
-
- static const char groupName[];
-
- protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
-
- private:
- DocumentSourceGroup(const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- /*
- Before returning anything, this source must fetch everything from
- the underlying source and group it. populate() is used to do that
- on the first call to any method on this source. The populated
- boolean indicates that this has been done.
- */
- void populate();
- bool populated;
-
- intrusive_ptr<Expression> pIdExpression;
-
- typedef boost::unordered_map<intrusive_ptr<const Value>,
- vector<intrusive_ptr<Accumulator> >, Value::Hash> GroupsType;
- GroupsType groups;
-
- /*
- The field names for the result documents and the accumulator
- factories for the result documents. The Expressions are the
- common expressions used by each instance of each accumulator
- in order to find the right-hand side of what gets added to the
- accumulator. Note that each of those is the same for each group,
- so we can share them across all groups by adding them to the
- accumulators after we use the factories to make a new set of
- accumulators for each new group.
-
- These three vectors parallel each other.
- */
- vector<string> vFieldName;
- vector<intrusive_ptr<Accumulator> (*)(
- const intrusive_ptr<ExpressionContext> &)> vpAccumulatorFactory;
- vector<intrusive_ptr<Expression> > vpExpression;
-
-
- intrusive_ptr<Document> makeDocument(
- const GroupsType::iterator &rIter);
-
- GroupsType::iterator groupsIterator;
- intrusive_ptr<Document> pCurrent;
- };
-
-
- class DocumentSourceMatch :
- public DocumentSourceFilterBase {
- public:
- // virtuals from DocumentSource
- virtual ~DocumentSourceMatch();
- virtual const char *getSourceName() const;
- virtual void manageDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker);
-
- /**
- Create a filter.
-
- @param pBsonElement the raw BSON specification for the filter
- @returns the filter
- */
- static intrusive_ptr<DocumentSource> createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx);
-
- /**
- Create a BSONObj suitable for Matcher construction.
-
- This is used after filter analysis has moved as many filters to
- as early a point as possible in the document processing pipeline.
- See db/Matcher.h and the associated wiki documentation for the
- format. This conversion is used to move back to the low-level
- find() Cursor mechanism.
-
- @param pBuilder the builder to write to
- */
- void toMatcherBson(BSONObjBuilder *pBuilder) const;
-
- static const char matchName[];
-
- protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
-
- // virtuals from DocumentSourceFilterBase
- virtual bool accept(const intrusive_ptr<Document> &pDocument) const;
-
- private:
- DocumentSourceMatch(const BSONObj &query,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- Matcher matcher;
- };
-
-
- class DocumentSourceOut :
- public DocumentSource {
- public:
- // virtuals from DocumentSource
- virtual ~DocumentSourceOut();
- virtual bool eof();
- virtual bool advance();
- virtual const char *getSourceName() const;
- virtual intrusive_ptr<Document> getCurrent();
-
- /**
- Create a document source for output and pass-through.
-
- This can be put anywhere in a pipeline and will store content as
- well as pass it on.
-
- @param pBsonElement the raw BSON specification for the source
- @param pExpCtx the expression context for the pipeline
- @returns the newly created document source
- */
- static intrusive_ptr<DocumentSourceOut> createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- static const char outName[];
-
- protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
-
- private:
- DocumentSourceOut(BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
- };
-
-
- class DocumentSourceProject :
- public DocumentSource {
- public:
- // virtuals from DocumentSource
- virtual ~DocumentSourceProject();
- virtual bool eof();
- virtual bool advance();
- virtual const char *getSourceName() const;
- virtual intrusive_ptr<Document> getCurrent();
- virtual void optimize();
- virtual void manageDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker);
-
- /**
- Create a new DocumentSource that can implement projection.
-
- @param pExpCtx the expression context for the pipeline
- @returns the projection DocumentSource
- */
- static intrusive_ptr<DocumentSourceProject> create(
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- /**
- Include a field path in a projection.
-
- @param fieldPath the path of the field to include
- */
- void includePath(const string &fieldPath);
-
- /**
- Exclude a field path from the projection.
-
- @param fieldPath the path of the field to exclude
- */
- void excludePath(const string &fieldPath);
-
- /**
- Add an output Expression in the projection.
-
- BSON document fields are ordered, so the new field will be
- appended to the existing set.
-
- @param fieldName the name of the field as it will appear
- @param pExpression the expression used to compute the field
- */
- void addField(const string &fieldName,
- const intrusive_ptr<Expression> &pExpression);
-
- /**
- Create a new projection DocumentSource from BSON.
-
- This is a convenience for directly handling BSON, and relies on the
- above methods.
-
- @param pBsonElement the BSONElement with an object named $project
- @param pExpCtx the expression context for the pipeline
- @returns the created projection
- */
- static intrusive_ptr<DocumentSource> createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- static const char projectName[];
-
- protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
-
- private:
- DocumentSourceProject(const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- // configuration state
- bool excludeId;
- intrusive_ptr<ExpressionObject> pEO;
-
- /*
- Utility object used by manageDependencies().
-
- Removes dependencies from a DependencyTracker.
- */
- class DependencyRemover :
- public ExpressionObject::PathSink {
- public:
- // virtuals from PathSink
- virtual void path(const string &path, bool include);
-
- /*
- Constructor.
-
- Captures a reference to the smart pointer to the DependencyTracker
- that this will remove dependencies from via
- ExpressionObject::emitPaths().
-
- @param pTracker reference to the smart pointer to the
- DependencyTracker
- */
- DependencyRemover(const intrusive_ptr<DependencyTracker> &pTracker);
-
- private:
- const intrusive_ptr<DependencyTracker> &pTracker;
- };
-
- /*
- Utility object used by manageDependencies().
-
- Checks dependencies to see if they are present. If not, then
- throws a user error.
- */
- class DependencyChecker :
- public ExpressionObject::PathSink {
- public:
- // virtuals from PathSink
- virtual void path(const string &path, bool include);
-
- /*
- Constructor.
-
- Captures a reference to the smart pointer to the DependencyTracker
-            that this will check dependencies from
- ExpressionObject::emitPaths() to see if they are required.
-
- @param pTracker reference to the smart pointer to the
- DependencyTracker
- @param pThis the projection that is making this request
- */
- DependencyChecker(
- const intrusive_ptr<DependencyTracker> &pTracker,
- const DocumentSourceProject *pThis);
-
- private:
- const intrusive_ptr<DependencyTracker> &pTracker;
- const DocumentSourceProject *pThis;
- };
- };
-
-
- class DocumentSourceSort :
- public DocumentSource {
- public:
- // virtuals from DocumentSource
- virtual ~DocumentSourceSort();
- virtual bool eof();
- virtual bool advance();
- virtual const char *getSourceName() const;
- virtual intrusive_ptr<Document> getCurrent();
- virtual void manageDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker);
- /*
- TODO
- Adjacent sorts should reduce to the last sort.
- virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource);
- */
-
- /**
- Create a new sorting DocumentSource.
-
- @param pExpCtx the expression context for the pipeline
- @returns the DocumentSource
- */
- static intrusive_ptr<DocumentSourceSort> create(
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- /**
- Add sort key field.
-
- Adds a sort key field to the key being built up. A concatenated
- key is built up by calling this repeatedly.
-
- @param fieldPath the field path to the key component
- @param ascending if true, use the key for an ascending sort,
- otherwise, use it for descending
- */
- void addKey(const string &fieldPath, bool ascending);
-
- /**
- Write out an object whose contents are the sort key.
-
- @param pBuilder initialized object builder.
-          @param usePrefix specify whether or not to include the field prefix
- */
- void sortKeyToBson(BSONObjBuilder *pBuilder, bool usePrefix) const;
-
- /**
- Create a sorting DocumentSource from BSON.
-
- This is a convenience method that uses the above, and operates on
-          a BSONElement that has been determined to be an Object with an
-          element named $sort.
-
-          @param pBsonElement the BSONElement that defines the sort
-          @param pExpCtx the expression context for the pipeline
-          @returns the sorting DocumentSource
- */
- static intrusive_ptr<DocumentSource> createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
-
- static const char sortName[];
-
- protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
-
- private:
- DocumentSourceSort(const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- /*
- Before returning anything, this source must fetch everything from
-          the underlying source and sort it.  populate() is used to do that
- on the first call to any method on this source. The populated
- boolean indicates that this has been done.
- */
- void populate();
- bool populated;
- long long count;
-
- /* these two parallel each other */
- typedef vector<intrusive_ptr<ExpressionFieldPath> > SortPaths;
- SortPaths vSortKey;
- vector<bool> vAscending;
-
- /*
- Compare two documents according to the specified sort key.
-
-          @param pL reference to the left document
-          @param pR reference to the right document
- @returns a number less than, equal to, or greater than zero,
- indicating pL < pR, pL == pR, or pL > pR, respectively
- */
- int compare(const intrusive_ptr<Document> &pL,
- const intrusive_ptr<Document> &pR);
-
- /*
- This is a utility class just for the STL sort that is done
- inside.
- */
- class Comparator {
- public:
- bool operator()(
- const intrusive_ptr<Document> &pL,
- const intrusive_ptr<Document> &pR) {
- return (pSort->compare(pL, pR) < 0);
- }
-
- inline Comparator(DocumentSourceSort *pS):
- pSort(pS) {
- }
-
- private:
- DocumentSourceSort *pSort;
- };
-
- typedef vector<intrusive_ptr<Document> > VectorType;
- VectorType documents;
-
- VectorType::iterator docIterator;
- intrusive_ptr<Document> pCurrent;
- };
-
-
- class DocumentSourceLimit :
- public DocumentSource {
- public:
- // virtuals from DocumentSource
- virtual ~DocumentSourceLimit();
- virtual bool eof();
- virtual bool advance();
- virtual intrusive_ptr<Document> getCurrent();
- virtual const char *getSourceName() const;
- virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource);
-
- /**
- Create a new limiting DocumentSource.
-
- @param pExpCtx the expression context for the pipeline
- @returns the DocumentSource
- */
- static intrusive_ptr<DocumentSourceLimit> create(
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- /**
- Create a limiting DocumentSource from BSON.
-
- This is a convenience method that uses the above, and operates on
-          a BSONElement that has been determined to be an Object with an
-          element named $limit.
-
-          @param pBsonElement the BSONElement that defines the limit
-          @param pExpCtx the expression context
-          @returns the limiting DocumentSource
- */
- static intrusive_ptr<DocumentSource> createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
-
- static const char limitName[];
-
- protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
-
- private:
- DocumentSourceLimit(
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- long long limit;
- long long count;
- intrusive_ptr<Document> pCurrent;
- };
-
- class DocumentSourceSkip :
- public DocumentSource {
- public:
- // virtuals from DocumentSource
- virtual ~DocumentSourceSkip();
- virtual bool eof();
- virtual bool advance();
- virtual intrusive_ptr<Document> getCurrent();
- virtual const char *getSourceName() const;
- virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource);
-
- /**
- Create a new skipping DocumentSource.
-
- @param pExpCtx the expression context
- @returns the DocumentSource
- */
- static intrusive_ptr<DocumentSourceSkip> create(
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- /**
- Create a skipping DocumentSource from BSON.
-
- This is a convenience method that uses the above, and operates on
-          a BSONElement that has been determined to be an Object with an
-          element named $skip.
-
-          @param pBsonElement the BSONElement that defines the skip
-          @param pExpCtx the expression context
-          @returns the skipping DocumentSource
- */
- static intrusive_ptr<DocumentSource> createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
-
- static const char skipName[];
-
- protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
-
- private:
- DocumentSourceSkip(const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- /*
- Skips initial documents.
- */
- void skipper();
-
- long long skip;
- long long count;
- intrusive_ptr<Document> pCurrent;
- };
-
-
- class DocumentSourceUnwind :
- public DocumentSource {
- public:
- // virtuals from DocumentSource
- virtual ~DocumentSourceUnwind();
- virtual bool eof();
- virtual bool advance();
- virtual const char *getSourceName() const;
- virtual intrusive_ptr<Document> getCurrent();
- virtual void manageDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker);
-
- /**
- Create a new DocumentSource that can implement unwind.
-
- @param pExpCtx the expression context for the pipeline
-          @returns the unwind DocumentSource
- */
- static intrusive_ptr<DocumentSourceUnwind> create(
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- /**
- Specify the field to unwind. There must be exactly one before
- the pipeline begins execution.
-
- @param rFieldPath - path to the field to unwind
- */
- void unwindField(const FieldPath &rFieldPath);
-
- /**
-          Create a new unwind DocumentSource from BSON.
-
-          This is a convenience for directly handling BSON, and relies on the
-          above methods.
-
-          @param pBsonElement the BSONElement with an object named $unwind
-          @param pExpCtx the expression context for the pipeline
-          @returns the created unwind DocumentSource
- */
- static intrusive_ptr<DocumentSource> createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- static const char unwindName[];
-
- protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
-
- private:
- DocumentSourceUnwind(const intrusive_ptr<ExpressionContext> &pExpCtx);
-
- // configuration state
- FieldPath unwindPath;
-
- vector<int> fieldIndex; /* for the current document, the indices
- leading down to the field being unwound */
-
- // iteration state
- intrusive_ptr<Document> pNoUnwindDocument;
- // document to return, pre-unwind
- intrusive_ptr<const Value> pUnwindArray; // field being unwound
- intrusive_ptr<ValueIterator> pUnwinder; // iterator used for unwinding
- intrusive_ptr<const Value> pUnwindValue; // current value
-
- /*
- Clear all the state related to unwinding an array.
- */
- void resetArray();
-
- /*
- Clone the current document being unwound.
-
- This is a partial deep clone. Because we're going to replace the
- value at the end, we have to replace everything along the path
- leading to that in order to not share that change with any other
- clones (or the original) that we've made.
-
- This expects pUnwindValue to have been set by a prior call to
- advance(). However, pUnwindValue may also be NULL, in which case
- the field will be removed -- this is the action for an empty
- array.
-
- @returns a partial deep clone of pNoUnwindDocument
- */
- intrusive_ptr<Document> clonePath() const;
- };
-
-}
-
-
-/* ======================= INLINED IMPLEMENTATIONS ========================== */
-
-namespace mongo {
-
- inline void DocumentSource::setPipelineStep(int s) {
- step = s;
- }
-
- inline int DocumentSource::getPipelineStep() const {
- return step;
- }
-
- inline void DocumentSourceGroup::setIdExpression(
- const intrusive_ptr<Expression> &pExpression) {
- pIdExpression = pExpression;
- }
-
- inline DocumentSourceProject::DependencyRemover::DependencyRemover(
- const intrusive_ptr<DependencyTracker> &pT):
- pTracker(pT) {
- }
-
- inline DocumentSourceProject::DependencyChecker::DependencyChecker(
- const intrusive_ptr<DependencyTracker> &pTrack,
- const DocumentSourceProject *pT):
- pTracker(pTrack),
- pThis(pT) {
- }
-
- inline void DocumentSourceUnwind::resetArray() {
- pNoUnwindDocument.reset();
- pUnwindArray.reset();
- pUnwinder.reset();
- pUnwindValue.reset();
- }
-
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+
+#include "pch.h"
+
+#include <boost/unordered_map.hpp>
+#include "util/intrusive_counter.h"
+#include "client/parallel.h"
+#include "db/clientcursor.h"
+#include "db/jsobj.h"
+#include "db/pipeline/dependency_tracker.h"
+#include "db/pipeline/document.h"
+#include "db/pipeline/expression.h"
+#include "db/pipeline/value.h"
+#include "util/string_writer.h"
+
+namespace mongo {
+ class Accumulator;
+ class Cursor;
+ class DependencyTracker;
+ class Document;
+ class Expression;
+ class ExpressionContext;
+ class ExpressionFieldPath;
+ class ExpressionObject;
+ class Matcher;
+
+ class DocumentSource :
+ public IntrusiveCounterUnsigned,
+ public StringWriter {
+ public:
+ virtual ~DocumentSource();
+
+ // virtuals from StringWriter
+ virtual void writeString(stringstream &ss) const;
+
+ /**
+ Set the step for a user-specified pipeline step.
+
+ The step is used for diagnostics.
+
+ @param step step number 0 to n.
+ */
+ void setPipelineStep(int step);
+
+ /**
+ Get the user-specified pipeline step.
+
+ @returns the step number, or -1 if it has never been set
+ */
+ int getPipelineStep() const;
+
+ /**
+ Is the source at EOF?
+
+ @returns true if the source has no more Documents to return.
+ */
+ virtual bool eof() = 0;
+
+ /**
+ Advance the state of the DocumentSource so that it will return the
+ next Document.
+
+ The default implementation returns false, after checking for
+ interrupts. Derived classes can call the default implementation
+ in their own implementations in order to check for interrupts.
+
+ @returns whether there is another document to fetch, i.e., whether or
+ not getCurrent() will succeed. This default implementation always
+ returns false.
+ */
+ virtual bool advance();
+
+ /**
+          Return the current Document.
+
+          @returns the current Document
+          TODO throws an exception if there are no more documents to return.
+ */
+ virtual intrusive_ptr<Document> getCurrent() = 0;
+
+ /**
+ Get the source's name.
+
+ @returns the string name of the source as a constant string;
+ this is static, and there's no need to worry about adopting it
+ */
+ virtual const char *getSourceName() const;
+
+ /**
+ Set the underlying source this source should use to get Documents
+ from.
+
+ It is an error to set the source more than once. This is to
+ prevent changing sources once the original source has been started;
+ this could break the state maintained by the DocumentSource.
+
+ This pointer is not reference counted because that has led to
+ some circular references. As a result, this doesn't keep
+ sources alive, and is only intended to be used temporarily for
+ the lifetime of a Pipeline::run().
+
+ @param pSource the underlying source to use
+ */
+ virtual void setSource(DocumentSource *pSource);
+
+ /**
+ Attempt to coalesce this DocumentSource with its successor in the
+ document processing pipeline. If successful, the successor
+ DocumentSource should be removed from the pipeline and discarded.
+
+ If successful, this operation can be applied repeatedly, in an
+ attempt to coalesce several sources together.
+
+ The default implementation is to do nothing, and return false.
+
+ @param pNextSource the next source in the document processing chain.
+          @returns whether or not the attempt to coalesce was successful;
+ if the attempt was not successful, nothing has been changed
+ */
+ virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource);
+
+ /**
+ Optimize the pipeline operation, if possible. This is a local
+ optimization that only looks within this DocumentSource. For best
+ results, first coalesce compatible sources using coalesce().
+
+ This is intended for any operations that include expressions, and
+ provides a hook for those to optimize those operations.
+
+ The default implementation is to do nothing.
+ */
+ virtual void optimize();
+
+ /**
+ Adjust dependencies according to the needs of this source.
+
+ $$$ MONGO_LATER_SERVER_4644
+ @param pTracker the dependency tracker
+ */
+ virtual void manageDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker);
+
+ /**
+ Add the DocumentSource to the array builder.
+
+ The default implementation calls sourceToBson() in order to
+ convert the inner part of the object which will be added to the
+ array being built here.
+
+ @param pBuilder the array builder to add the operation to.
+ @param explain create explain output
+ */
+ virtual void addToBsonArray(BSONArrayBuilder *pBuilder,
+ bool explain = false) const;
+
+ protected:
+ /**
+ Base constructor.
+ */
+ DocumentSource(const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ /**
+ Create an object that represents the document source. The object
+ will have a single field whose name is the source's name. This
+ will be used by the default implementation of addToBsonArray()
+ to add this object to a pipeline being represented in BSON.
+
+ @param pBuilder a blank object builder to write to
+ @param explain create explain output
+ */
+ virtual void sourceToBson(BSONObjBuilder *pBuilder,
+ bool explain) const = 0;
+
+ /*
+ Most DocumentSources have an underlying source they get their data
+ from. This is a convenience for them.
+
+          The default implementation of setSource() sets this; if you don't
+          need a source, override setSource() to verify() that it is never
+          called.  The default implementation uses verify() to check that
+          this has not already been set.
+ */
+ DocumentSource *pSource;
+
+ /*
+ The zero-based user-specified pipeline step. Used for diagnostics.
+ Will be set to -1 for artificial pipeline steps that were not part
+ of the original user specification.
+ */
+ int step;
+
+ intrusive_ptr<ExpressionContext> pExpCtx;
+
+ /*
+ for explain: # of rows returned by this source
+
+ This is *not* unsigned so it can be passed to BSONObjBuilder.append().
+ */
+ long long nRowsOut;
+ };
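A minimal consumption sketch for the eof()/advance()/getCurrent() contract declared above, assuming an already-constructed source; the helper name drainSource and the consuming loop are illustrative rather than declarations from this header, and the unqualified intrusive_ptr follows the header's own conventions.

    // Sketch: drain a DocumentSource using the contract described above.
    static void drainSource(const intrusive_ptr<DocumentSource> &pSource) {
        for (bool hasDocument = !pSource->eof(); hasDocument;
             hasDocument = pSource->advance()) {
            intrusive_ptr<Document> pDocument(pSource->getCurrent());
            // ... consume pDocument here ...
        }
    }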
+
+
+ class DocumentSourceBsonArray :
+ public DocumentSource {
+ public:
+ // virtuals from DocumentSource
+ virtual ~DocumentSourceBsonArray();
+ virtual bool eof();
+ virtual bool advance();
+ virtual intrusive_ptr<Document> getCurrent();
+ virtual void setSource(DocumentSource *pSource);
+
+ /**
+ Create a document source based on a BSON array.
+
+ This is usually put at the beginning of a chain of document sources
+ in order to fetch data from the database.
+
+ CAUTION: the BSON is not read until the source is used. Any
+ elements that appear after these documents must not be read until
+ this source is exhausted.
+
+ @param pBsonElement the BSON array to treat as a document source
+ @param pExpCtx the expression context for the pipeline
+ @returns the newly created document source
+ */
+ static intrusive_ptr<DocumentSourceBsonArray> create(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ protected:
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
+
+ private:
+ DocumentSourceBsonArray(BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ BSONObj embeddedObject;
+ BSONObjIterator arrayIterator;
+ BSONElement currentElement;
+ bool haveCurrent;
+ };
+
+
+ class DocumentSourceCommandFutures :
+ public DocumentSource {
+ public:
+ // virtuals from DocumentSource
+ virtual ~DocumentSourceCommandFutures();
+ virtual bool eof();
+ virtual bool advance();
+ virtual intrusive_ptr<Document> getCurrent();
+ virtual void setSource(DocumentSource *pSource);
+
+ /* convenient shorthand for a commonly used type */
+ typedef list<shared_ptr<Future::CommandResult> > FuturesList;
+
+ /**
+ Create a DocumentSource that wraps a list of Command::Futures.
+
+ @param errmsg place to write error messages to; must exist for the
+ lifetime of the created DocumentSourceCommandFutures
+ @param pList the list of futures
+ @param pExpCtx the expression context for the pipeline
+ @returns the newly created DocumentSource
+ */
+ static intrusive_ptr<DocumentSourceCommandFutures> create(
+ string &errmsg, FuturesList *pList,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ protected:
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
+
+ private:
+ DocumentSourceCommandFutures(string &errmsg, FuturesList *pList,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ /**
+ Advance to the next document, setting pCurrent appropriately.
+
+ Adjusts pCurrent, pBsonSource, and iterator, as needed. On exit,
+ pCurrent is the Document to return, or NULL. If NULL, this
+ indicates there is nothing more to return.
+ */
+ void getNextDocument();
+
+ bool newSource; // set to true for the first item of a new source
+ intrusive_ptr<DocumentSourceBsonArray> pBsonSource;
+ intrusive_ptr<Document> pCurrent;
+ FuturesList::iterator iterator;
+ FuturesList::iterator listEnd;
+ string &errmsg;
+ };
+
+
+ class DocumentSourceCursor :
+ public DocumentSource {
+ public:
+ // virtuals from DocumentSource
+ virtual ~DocumentSourceCursor();
+ virtual bool eof();
+ virtual bool advance();
+ virtual intrusive_ptr<Document> getCurrent();
+ virtual void setSource(DocumentSource *pSource);
+ virtual void manageDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker);
+
+ /**
+ Create a document source based on a cursor.
+
+ This is usually put at the beginning of a chain of document sources
+ in order to fetch data from the database.
+
+ @param pCursor the cursor to use to fetch data
+ @param pExpCtx the expression context for the pipeline
+ */
+ static intrusive_ptr<DocumentSourceCursor> create(
+ const shared_ptr<Cursor> &pCursor,
+ const string &ns,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ /*
+ Record the namespace. Required for explain.
+
+          @param ns the namespace
+ */
+ void setNamespace(const string &ns);
+
+ /*
+ Record the query that was specified for the cursor this wraps, if
+ any.
+
+ This should be captured after any optimizations are applied to
+ the pipeline so that it reflects what is really used.
+
+ This gets used for explain output.
+
+ @param pBsonObj the query to record
+ */
+ void setQuery(const shared_ptr<BSONObj> &pBsonObj);
+
+ /*
+ Record the sort that was specified for the cursor this wraps, if
+ any.
+
+ This should be captured after any optimizations are applied to
+ the pipeline so that it reflects what is really used.
+
+ This gets used for explain output.
+
+ @param pBsonObj the sort to record
+ */
+ void setSort(const shared_ptr<BSONObj> &pBsonObj);
+
+ /**
+ Release the cursor, but without changing the other data. This
+ is used for the explain version of pipeline execution.
+ */
+ void releaseCursor();
+
+ protected:
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
+
+ private:
+ DocumentSourceCursor(
+ const shared_ptr<Cursor> &pTheCursor, const string &ns,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ void findNext();
+ intrusive_ptr<Document> pCurrent;
+
+ string ns; // namespace
+
+ /*
+ The bsonDependencies must outlive the Cursor wrapped by this
+ source. Therefore, bsonDependencies must appear before pCursor
+          in order to cause its destructor to be called *after* pCursor's.
+ */
+ shared_ptr<BSONObj> pQuery;
+ shared_ptr<BSONObj> pSort;
+ vector<shared_ptr<BSONObj> > bsonDependencies;
+ shared_ptr<Cursor> pCursor;
+
+ /*
+ In order to yield, we need a ClientCursor.
+ */
+ ClientCursor::Holder pClientCursor;
+
+ /*
+ Advance the cursor, and yield sometimes.
+
+ If the state of the world changed during the yield such that we
+ are unable to continue execution of the query, this will release the
+ client cursor, and throw an error.
+ */
+ void advanceAndYield();
+
+ /*
+ This document source hangs on to the dependency tracker when it
+ gets it so that it can be used for selective reification of
+ fields in order to avoid fields that are not required through the
+ pipeline.
+ */
+ intrusive_ptr<DependencyTracker> pDependencies;
+
+ /**
+ (5/14/12 - moved this to private because it's not used atm)
+ Add a BSONObj dependency.
+
+ Some Cursor creation functions rely on BSON objects to specify
+ their query predicate or sort. These often take a BSONObj
+ by reference for these, but do not copy it. As a result, the
+ BSONObjs specified must outlive the Cursor. In order to ensure
+ that, use this to preserve a pointer to the BSONObj here.
+
+ From the outside, you must also make sure the BSONObjBuilder
+ creates a lasting copy of the data, otherwise it will go away
+ when the builder goes out of scope. Therefore, the typical usage
+ pattern for this is
+ {
+ BSONObjBuilder builder;
+ // do stuff to the builder
+ shared_ptr<BSONObj> pBsonObj(new BSONObj(builder.obj()));
+ pDocumentSourceCursor->addBsonDependency(pBsonObj);
+ }
+
+ @param pBsonObj pointer to the BSON object to preserve
+ */
+ void addBsonDependency(const shared_ptr<BSONObj> &pBsonObj);
+ };
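A hedged sketch of the setQuery()/setSort() recording described above; the shared_ptr copies mirror the lifetime requirement spelled out in the addBsonDependency() comment, and the helper name and its BSONObj arguments are illustrative.

    // Sketch: record the query and sort that shaped the cursor so that
    // explain output can report them (see setQuery()/setSort() above).
    static void recordCursorShape(
        const intrusive_ptr<DocumentSourceCursor> &pCursorSource,
        const BSONObj &query, const BSONObj &sort) {
        shared_ptr<BSONObj> pQuery(new BSONObj(query.getOwned()));
        pCursorSource->setQuery(pQuery);
        shared_ptr<BSONObj> pSort(new BSONObj(sort.getOwned()));
        pCursorSource->setSort(pSort);
    }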
+
+
+ /*
+ This contains all the basic mechanics for filtering a stream of
+ Documents, except for the actual predicate evaluation itself. This was
+ factored out so we could create DocumentSources that use both Matcher
+ style predicates as well as full Expressions.
+ */
+ class DocumentSourceFilterBase :
+ public DocumentSource {
+ public:
+ // virtuals from DocumentSource
+ virtual ~DocumentSourceFilterBase();
+ virtual bool eof();
+ virtual bool advance();
+ virtual intrusive_ptr<Document> getCurrent();
+
+ /**
+ Create a BSONObj suitable for Matcher construction.
+
+ This is used after filter analysis has moved as many filters to
+ as early a point as possible in the document processing pipeline.
+ See db/Matcher.h and the associated wiki documentation for the
+ format. This conversion is used to move back to the low-level
+ find() Cursor mechanism.
+
+ @param pBuilder the builder to write to
+ */
+ virtual void toMatcherBson(BSONObjBuilder *pBuilder) const = 0;
+
+ protected:
+ DocumentSourceFilterBase(
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ /**
+ Test the given document against the predicate and report if it
+ should be accepted or not.
+
+ @param pDocument the document to test
+ @returns true if the document matches the filter, false otherwise
+ */
+ virtual bool accept(const intrusive_ptr<Document> &pDocument) const = 0;
+
+ private:
+
+ void findNext();
+
+ bool unstarted;
+ bool hasNext;
+ intrusive_ptr<Document> pCurrent;
+ };
+
+
+ class DocumentSourceFilter :
+ public DocumentSourceFilterBase {
+ public:
+ // virtuals from DocumentSource
+ virtual ~DocumentSourceFilter();
+ virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource);
+ virtual void optimize();
+ virtual const char *getSourceName() const;
+
+ /**
+ Create a filter.
+
+ @param pBsonElement the raw BSON specification for the filter
+ @param pExpCtx the expression context for the pipeline
+ @returns the filter
+ */
+ static intrusive_ptr<DocumentSource> createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ /**
+ Create a filter.
+
+ @param pFilter the expression to use to filter
+ @param pExpCtx the expression context for the pipeline
+ @returns the filter
+ */
+ static intrusive_ptr<DocumentSourceFilter> create(
+ const intrusive_ptr<Expression> &pFilter,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ /**
+ Create a BSONObj suitable for Matcher construction.
+
+ This is used after filter analysis has moved as many filters to
+ as early a point as possible in the document processing pipeline.
+ See db/Matcher.h and the associated wiki documentation for the
+ format. This conversion is used to move back to the low-level
+ find() Cursor mechanism.
+
+ @param pBuilder the builder to write to
+ */
+ void toMatcherBson(BSONObjBuilder *pBuilder) const;
+
+ static const char filterName[];
+
+ protected:
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
+
+ // virtuals from DocumentSourceFilterBase
+ virtual bool accept(const intrusive_ptr<Document> &pDocument) const;
+
+ private:
+ DocumentSourceFilter(const intrusive_ptr<Expression> &pFilter,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ intrusive_ptr<Expression> pFilter;
+ };
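Assuming a predicate Expression is already in hand, a small sketch of create() followed by the Matcher conversion described above; the helper name is illustrative.

    // Sketch: build a filter stage and emit its Matcher-style BSON.
    static void filterToMatcher(
        const intrusive_ptr<Expression> &pPredicate,
        const intrusive_ptr<ExpressionContext> &pExpCtx,
        BSONObjBuilder *pBuilder) {
        intrusive_ptr<DocumentSourceFilter> pFilter(
            DocumentSourceFilter::create(pPredicate, pExpCtx));
        pFilter->toMatcherBson(pBuilder);
    }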
+
+
+ class DocumentSourceGroup :
+ public DocumentSource {
+ public:
+ // virtuals from DocumentSource
+ virtual ~DocumentSourceGroup();
+ virtual bool eof();
+ virtual bool advance();
+ virtual const char *getSourceName() const;
+ virtual intrusive_ptr<Document> getCurrent();
+
+ /**
+ Create a new grouping DocumentSource.
+
+ @param pExpCtx the expression context for the pipeline
+ @returns the DocumentSource
+ */
+ static intrusive_ptr<DocumentSourceGroup> create(
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ /**
+ Set the Id Expression.
+
+          Documents that pass through the grouping DocumentSource are grouped
+          according to this key.  This will generate the _id field in the
+ result documents.
+
+ @param pExpression the group key
+ */
+ void setIdExpression(const intrusive_ptr<Expression> &pExpression);
+
+ /**
+ Add an accumulator.
+
+ Accumulators become fields in the Documents that result from
+          grouping.  Each unique group document must have its own
+ accumulator; the accumulator factory is used to create that.
+
+ @param fieldName the name the accumulator result will have in the
+ result documents
+ @param pAccumulatorFactory used to create the accumulator for the
+ group field
+ */
+ void addAccumulator(string fieldName,
+ intrusive_ptr<Accumulator> (*pAccumulatorFactory)(
+ const intrusive_ptr<ExpressionContext> &),
+ const intrusive_ptr<Expression> &pExpression);
+
+ /**
+ Create a grouping DocumentSource from BSON.
+
+ This is a convenience method that uses the above, and operates on
+          a BSONElement that has been determined to be an Object with an
+          element named $group.
+
+          @param pBsonElement the BSONElement that defines the group
+ @param pExpCtx the expression context
+ @returns the grouping DocumentSource
+ */
+ static intrusive_ptr<DocumentSource> createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+
+ /**
+ Create a unifying group that can be used to combine group results
+ from shards.
+
+ @returns the grouping DocumentSource
+ */
+ intrusive_ptr<DocumentSource> createMerger();
+
+ static const char groupName[];
+
+ protected:
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
+
+ private:
+ DocumentSourceGroup(const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ /*
+ Before returning anything, this source must fetch everything from
+ the underlying source and group it. populate() is used to do that
+ on the first call to any method on this source. The populated
+ boolean indicates that this has been done.
+ */
+ void populate();
+ bool populated;
+
+ intrusive_ptr<Expression> pIdExpression;
+
+ typedef boost::unordered_map<intrusive_ptr<const Value>,
+ vector<intrusive_ptr<Accumulator> >, Value::Hash> GroupsType;
+ GroupsType groups;
+
+ /*
+ The field names for the result documents and the accumulator
+ factories for the result documents. The Expressions are the
+ common expressions used by each instance of each accumulator
+ in order to find the right-hand side of what gets added to the
+ accumulator. Note that each of those is the same for each group,
+ so we can share them across all groups by adding them to the
+ accumulators after we use the factories to make a new set of
+ accumulators for each new group.
+
+ These three vectors parallel each other.
+ */
+ vector<string> vFieldName;
+ vector<intrusive_ptr<Accumulator> (*)(
+ const intrusive_ptr<ExpressionContext> &)> vpAccumulatorFactory;
+ vector<intrusive_ptr<Expression> > vpExpression;
+
+
+ intrusive_ptr<Document> makeDocument(
+ const GroupsType::iterator &rIter);
+
+ GroupsType::iterator groupsIterator;
+ intrusive_ptr<Document> pCurrent;
+ };
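A sketch of assembling a $group stage by hand with create(), setIdExpression(), and addAccumulator(); the ExpressionFieldPath::create and AccumulatorSum::create factories are assumed from elsewhere in this tree, and the field names are illustrative.

    // Sketch: group by "author" and sum "pageViews" within each group.
    static intrusive_ptr<DocumentSourceGroup> buildViewsByAuthor(
        const intrusive_ptr<ExpressionContext> &pExpCtx) {
        intrusive_ptr<DocumentSourceGroup> pGroup(
            DocumentSourceGroup::create(pExpCtx));
        pGroup->setIdExpression(ExpressionFieldPath::create("author"));
        pGroup->addAccumulator("pageViews", AccumulatorSum::create,
                               ExpressionFieldPath::create("pageViews"));
        return pGroup;
    }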
+
+
+ class DocumentSourceMatch :
+ public DocumentSourceFilterBase {
+ public:
+ // virtuals from DocumentSource
+ virtual ~DocumentSourceMatch();
+ virtual const char *getSourceName() const;
+ virtual void manageDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker);
+
+ /**
+ Create a filter.
+
+ @param pBsonElement the raw BSON specification for the filter
+ @returns the filter
+ */
+ static intrusive_ptr<DocumentSource> createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pCtx);
+
+ /**
+ Create a BSONObj suitable for Matcher construction.
+
+ This is used after filter analysis has moved as many filters to
+ as early a point as possible in the document processing pipeline.
+ See db/Matcher.h and the associated wiki documentation for the
+ format. This conversion is used to move back to the low-level
+ find() Cursor mechanism.
+
+ @param pBuilder the builder to write to
+ */
+ void toMatcherBson(BSONObjBuilder *pBuilder) const;
+
+ static const char matchName[];
+
+ protected:
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
+
+ // virtuals from DocumentSourceFilterBase
+ virtual bool accept(const intrusive_ptr<Document> &pDocument) const;
+
+ private:
+ DocumentSourceMatch(const BSONObj &query,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ Matcher matcher;
+ };
+
+
+ class DocumentSourceOut :
+ public DocumentSource {
+ public:
+ // virtuals from DocumentSource
+ virtual ~DocumentSourceOut();
+ virtual bool eof();
+ virtual bool advance();
+ virtual const char *getSourceName() const;
+ virtual intrusive_ptr<Document> getCurrent();
+
+ /**
+ Create a document source for output and pass-through.
+
+ This can be put anywhere in a pipeline and will store content as
+ well as pass it on.
+
+ @param pBsonElement the raw BSON specification for the source
+ @param pExpCtx the expression context for the pipeline
+ @returns the newly created document source
+ */
+ static intrusive_ptr<DocumentSourceOut> createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ static const char outName[];
+
+ protected:
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
+
+ private:
+ DocumentSourceOut(BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+ };
+
+
+ class DocumentSourceProject :
+ public DocumentSource {
+ public:
+ // virtuals from DocumentSource
+ virtual ~DocumentSourceProject();
+ virtual bool eof();
+ virtual bool advance();
+ virtual const char *getSourceName() const;
+ virtual intrusive_ptr<Document> getCurrent();
+ virtual void optimize();
+ virtual void manageDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker);
+
+ /**
+ Create a new DocumentSource that can implement projection.
+
+ @param pExpCtx the expression context for the pipeline
+ @returns the projection DocumentSource
+ */
+ static intrusive_ptr<DocumentSourceProject> create(
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ /**
+ Include a field path in a projection.
+
+ @param fieldPath the path of the field to include
+ */
+ void includePath(const string &fieldPath);
+
+ /**
+ Exclude a field path from the projection.
+
+ @param fieldPath the path of the field to exclude
+ */
+ void excludePath(const string &fieldPath);
+
+ /**
+ Add an output Expression in the projection.
+
+ BSON document fields are ordered, so the new field will be
+ appended to the existing set.
+
+ @param fieldName the name of the field as it will appear
+ @param pExpression the expression used to compute the field
+ */
+ void addField(const string &fieldName,
+ const intrusive_ptr<Expression> &pExpression);
+
+ /**
+ Create a new projection DocumentSource from BSON.
+
+ This is a convenience for directly handling BSON, and relies on the
+ above methods.
+
+ @param pBsonElement the BSONElement with an object named $project
+ @param pExpCtx the expression context for the pipeline
+ @returns the created projection
+ */
+ static intrusive_ptr<DocumentSource> createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ static const char projectName[];
+
+ protected:
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
+
+ private:
+ DocumentSourceProject(const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ // configuration state
+ bool excludeId;
+ intrusive_ptr<ExpressionObject> pEO;
+
+ /*
+ Utility object used by manageDependencies().
+
+ Removes dependencies from a DependencyTracker.
+ */
+ class DependencyRemover :
+ public ExpressionObject::PathSink {
+ public:
+ // virtuals from PathSink
+ virtual void path(const string &path, bool include);
+
+ /*
+ Constructor.
+
+ Captures a reference to the smart pointer to the DependencyTracker
+ that this will remove dependencies from via
+ ExpressionObject::emitPaths().
+
+ @param pTracker reference to the smart pointer to the
+ DependencyTracker
+ */
+ DependencyRemover(const intrusive_ptr<DependencyTracker> &pTracker);
+
+ private:
+ const intrusive_ptr<DependencyTracker> &pTracker;
+ };
+
+ /*
+ Utility object used by manageDependencies().
+
+ Checks dependencies to see if they are present. If not, then
+ throws a user error.
+ */
+ class DependencyChecker :
+ public ExpressionObject::PathSink {
+ public:
+ // virtuals from PathSink
+ virtual void path(const string &path, bool include);
+
+ /*
+ Constructor.
+
+ Captures a reference to the smart pointer to the DependencyTracker
+            that this will check dependencies from
+ ExpressionObject::emitPaths() to see if they are required.
+
+ @param pTracker reference to the smart pointer to the
+ DependencyTracker
+ @param pThis the projection that is making this request
+ */
+ DependencyChecker(
+ const intrusive_ptr<DependencyTracker> &pTracker,
+ const DocumentSourceProject *pThis);
+
+ private:
+ const intrusive_ptr<DependencyTracker> &pTracker;
+ const DocumentSourceProject *pThis;
+ };
+ };
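A sketch of programmatic projection construction with includePath() and addField(); the computed expression is taken as an argument rather than built here, and the names are illustrative.

    // Sketch: include two stored fields and append one computed field.
    static intrusive_ptr<DocumentSourceProject> buildProjection(
        const intrusive_ptr<Expression> &pComputed,
        const intrusive_ptr<ExpressionContext> &pExpCtx) {
        intrusive_ptr<DocumentSourceProject> pProject(
            DocumentSourceProject::create(pExpCtx));
        pProject->includePath("author");
        pProject->includePath("pageViews");
        pProject->addField("daily", pComputed);
        return pProject;
    }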
+
+
+ class DocumentSourceSort :
+ public DocumentSource {
+ public:
+ // virtuals from DocumentSource
+ virtual ~DocumentSourceSort();
+ virtual bool eof();
+ virtual bool advance();
+ virtual const char *getSourceName() const;
+ virtual intrusive_ptr<Document> getCurrent();
+ virtual void manageDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker);
+ /*
+ TODO
+ Adjacent sorts should reduce to the last sort.
+ virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource);
+ */
+
+ /**
+ Create a new sorting DocumentSource.
+
+ @param pExpCtx the expression context for the pipeline
+ @returns the DocumentSource
+ */
+ static intrusive_ptr<DocumentSourceSort> create(
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ /**
+ Add sort key field.
+
+ Adds a sort key field to the key being built up. A concatenated
+ key is built up by calling this repeatedly.
+
+ @param fieldPath the field path to the key component
+ @param ascending if true, use the key for an ascending sort,
+ otherwise, use it for descending
+ */
+ void addKey(const string &fieldPath, bool ascending);
+
+ /**
+ Write out an object whose contents are the sort key.
+
+ @param pBuilder initialized object builder.
+          @param usePrefix specify whether or not to include the field prefix
+ */
+ void sortKeyToBson(BSONObjBuilder *pBuilder, bool usePrefix) const;
+
+ /**
+ Create a sorting DocumentSource from BSON.
+
+ This is a convenience method that uses the above, and operates on
+          a BSONElement that has been determined to be an Object with an
+          element named $sort.
+
+          @param pBsonElement the BSONElement that defines the sort
+          @param pExpCtx the expression context for the pipeline
+          @returns the sorting DocumentSource
+ */
+ static intrusive_ptr<DocumentSource> createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+
+ static const char sortName[];
+
+ protected:
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
+
+ private:
+ DocumentSourceSort(const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ /*
+ Before returning anything, this source must fetch everything from
+          the underlying source and sort it.  populate() is used to do that
+ on the first call to any method on this source. The populated
+ boolean indicates that this has been done.
+ */
+ void populate();
+ bool populated;
+ long long count;
+
+ /* these two parallel each other */
+ typedef vector<intrusive_ptr<ExpressionFieldPath> > SortPaths;
+ SortPaths vSortKey;
+ vector<bool> vAscending;
+
+ /*
+ Compare two documents according to the specified sort key.
+
+          @param pL reference to the left document
+          @param pR reference to the right document
+ @returns a number less than, equal to, or greater than zero,
+ indicating pL < pR, pL == pR, or pL > pR, respectively
+ */
+ int compare(const intrusive_ptr<Document> &pL,
+ const intrusive_ptr<Document> &pR);
+
+ /*
+ This is a utility class just for the STL sort that is done
+ inside.
+ */
+ class Comparator {
+ public:
+ bool operator()(
+ const intrusive_ptr<Document> &pL,
+ const intrusive_ptr<Document> &pR) {
+ return (pSort->compare(pL, pR) < 0);
+ }
+
+ inline Comparator(DocumentSourceSort *pS):
+ pSort(pS) {
+ }
+
+ private:
+ DocumentSourceSort *pSort;
+ };
+
+ typedef vector<intrusive_ptr<Document> > VectorType;
+ VectorType documents;
+
+ VectorType::iterator docIterator;
+ intrusive_ptr<Document> pCurrent;
+ };
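A sketch of building a compound sort key with repeated addKey() calls, roughly the programmatic equivalent of { pageViews: -1, author: 1 }; the helper name is illustrative.

    // Sketch: sort by descending pageViews, then ascending author.
    static intrusive_ptr<DocumentSourceSort> buildSort(
        const intrusive_ptr<ExpressionContext> &pExpCtx) {
        intrusive_ptr<DocumentSourceSort> pSort(
            DocumentSourceSort::create(pExpCtx));
        pSort->addKey("pageViews", false);  // descending
        pSort->addKey("author", true);      // ascending
        return pSort;
    }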
+
+
+ class DocumentSourceLimit :
+ public DocumentSource {
+ public:
+ // virtuals from DocumentSource
+ virtual ~DocumentSourceLimit();
+ virtual bool eof();
+ virtual bool advance();
+ virtual intrusive_ptr<Document> getCurrent();
+ virtual const char *getSourceName() const;
+ virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource);
+
+ /**
+ Create a new limiting DocumentSource.
+
+ @param pExpCtx the expression context for the pipeline
+ @returns the DocumentSource
+ */
+ static intrusive_ptr<DocumentSourceLimit> create(
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ /**
+ Create a limiting DocumentSource from BSON.
+
+ This is a convenience method that uses the above, and operates on
+          a BSONElement that has been determined to be an Object with an
+          element named $limit.
+
+          @param pBsonElement the BSONElement that defines the limit
+          @param pExpCtx the expression context
+          @returns the limiting DocumentSource
+ */
+ static intrusive_ptr<DocumentSource> createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+
+ static const char limitName[];
+
+ protected:
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
+
+ private:
+ DocumentSourceLimit(
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ long long limit;
+ long long count;
+ intrusive_ptr<Document> pCurrent;
+ };
+
+ class DocumentSourceSkip :
+ public DocumentSource {
+ public:
+ // virtuals from DocumentSource
+ virtual ~DocumentSourceSkip();
+ virtual bool eof();
+ virtual bool advance();
+ virtual intrusive_ptr<Document> getCurrent();
+ virtual const char *getSourceName() const;
+ virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource);
+
+ /**
+ Create a new skipping DocumentSource.
+
+ @param pExpCtx the expression context
+ @returns the DocumentSource
+ */
+ static intrusive_ptr<DocumentSourceSkip> create(
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ /**
+ Create a skipping DocumentSource from BSON.
+
+ This is a convenience method that uses the above, and operates on
+          a BSONElement that has been determined to be an Object with an
+          element named $skip.
+
+          @param pBsonElement the BSONElement that defines the skip
+          @param pExpCtx the expression context
+          @returns the skipping DocumentSource
+ */
+ static intrusive_ptr<DocumentSource> createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+
+ static const char skipName[];
+
+ protected:
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
+
+ private:
+ DocumentSourceSkip(const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ /*
+ Skips initial documents.
+ */
+ void skipper();
+
+ long long skip;
+ long long count;
+ intrusive_ptr<Document> pCurrent;
+ };
+
+
+ class DocumentSourceUnwind :
+ public DocumentSource {
+ public:
+ // virtuals from DocumentSource
+ virtual ~DocumentSourceUnwind();
+ virtual bool eof();
+ virtual bool advance();
+ virtual const char *getSourceName() const;
+ virtual intrusive_ptr<Document> getCurrent();
+ virtual void manageDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker);
+
+ /**
+ Create a new DocumentSource that can implement unwind.
+
+ @param pExpCtx the expression context for the pipeline
+          @returns the unwind DocumentSource
+ */
+ static intrusive_ptr<DocumentSourceUnwind> create(
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ /**
+ Specify the field to unwind. There must be exactly one before
+ the pipeline begins execution.
+
+ @param rFieldPath - path to the field to unwind
+ */
+ void unwindField(const FieldPath &rFieldPath);
+
+ /**
+          Create a new unwind DocumentSource from BSON.
+
+          This is a convenience for directly handling BSON, and relies on the
+          above methods.
+
+          @param pBsonElement the BSONElement with an object named $unwind
+          @param pExpCtx the expression context for the pipeline
+          @returns the created unwind DocumentSource
+ */
+ static intrusive_ptr<DocumentSource> createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ static const char unwindName[];
+
+ protected:
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const;
+
+ private:
+ DocumentSourceUnwind(const intrusive_ptr<ExpressionContext> &pExpCtx);
+
+ // configuration state
+ FieldPath unwindPath;
+
+ vector<int> fieldIndex; /* for the current document, the indices
+ leading down to the field being unwound */
+
+ // iteration state
+ intrusive_ptr<Document> pNoUnwindDocument;
+ // document to return, pre-unwind
+ intrusive_ptr<const Value> pUnwindArray; // field being unwound
+ intrusive_ptr<ValueIterator> pUnwinder; // iterator used for unwinding
+ intrusive_ptr<const Value> pUnwindValue; // current value
+
+ /*
+ Clear all the state related to unwinding an array.
+ */
+ void resetArray();
+
+ /*
+ Clone the current document being unwound.
+
+ This is a partial deep clone. Because we're going to replace the
+ value at the end, we have to replace everything along the path
+ leading to that in order to not share that change with any other
+ clones (or the original) that we've made.
+
+ This expects pUnwindValue to have been set by a prior call to
+ advance(). However, pUnwindValue may also be NULL, in which case
+ the field will be removed -- this is the action for an empty
+ array.
+
+ @returns a partial deep clone of pNoUnwindDocument
+ */
+ intrusive_ptr<Document> clonePath() const;
+ };
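A sketch of create() plus unwindField(); the FieldPath string constructor is assumed from db/pipeline/field_path.h, and the field name is illustrative.

    // Sketch: unwind the "tags" array field of each input document.
    static intrusive_ptr<DocumentSourceUnwind> buildUnwind(
        const intrusive_ptr<ExpressionContext> &pExpCtx) {
        intrusive_ptr<DocumentSourceUnwind> pUnwind(
            DocumentSourceUnwind::create(pExpCtx));
        pUnwind->unwindField(FieldPath("tags"));
        return pUnwind;
    }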
+
+}
+
+
+/* ======================= INLINED IMPLEMENTATIONS ========================== */
+
+namespace mongo {
+
+ inline void DocumentSource::setPipelineStep(int s) {
+ step = s;
+ }
+
+ inline int DocumentSource::getPipelineStep() const {
+ return step;
+ }
+
+ inline void DocumentSourceGroup::setIdExpression(
+ const intrusive_ptr<Expression> &pExpression) {
+ pIdExpression = pExpression;
+ }
+
+ inline DocumentSourceProject::DependencyRemover::DependencyRemover(
+ const intrusive_ptr<DependencyTracker> &pT):
+ pTracker(pT) {
+ }
+
+ inline DocumentSourceProject::DependencyChecker::DependencyChecker(
+ const intrusive_ptr<DependencyTracker> &pTrack,
+ const DocumentSourceProject *pT):
+ pTracker(pTrack),
+ pThis(pT) {
+ }
+
+ inline void DocumentSourceUnwind::resetArray() {
+ pNoUnwindDocument.reset();
+ pUnwindArray.reset();
+ pUnwinder.reset();
+ pUnwindValue.reset();
+ }
+
+}
diff --git a/src/mongo/db/pipeline/document_source_bson_array.cpp b/src/mongo/db/pipeline/document_source_bson_array.cpp
index eee951dc1f9..79bce29220a 100755
--- a/src/mongo/db/pipeline/document_source_bson_array.cpp
+++ b/src/mongo/db/pipeline/document_source_bson_array.cpp
@@ -1,93 +1,93 @@
-/**
- * Copyright 2011 (c) 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-
-#include "db/pipeline/document_source.h"
-#include "db/pipeline/document.h"
-
-
-namespace mongo {
-
- DocumentSourceBsonArray::~DocumentSourceBsonArray() {
- }
-
- bool DocumentSourceBsonArray::eof() {
- return !haveCurrent;
- }
-
- bool DocumentSourceBsonArray::advance() {
- DocumentSource::advance(); // check for interrupts
-
- if (eof())
- return false;
-
- if (!arrayIterator.more()) {
- haveCurrent = false;
- return false;
- }
-
- currentElement = arrayIterator.next();
- return true;
- }
-
- intrusive_ptr<Document> DocumentSourceBsonArray::getCurrent() {
- verify(haveCurrent);
- BSONObj documentObj(currentElement.Obj());
- intrusive_ptr<Document> pDocument(
- Document::createFromBsonObj(&documentObj));
- return pDocument;
- }
-
- void DocumentSourceBsonArray::setSource(DocumentSource *pSource) {
- /* this doesn't take a source */
- verify(false);
- }
-
- DocumentSourceBsonArray::DocumentSourceBsonArray(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx):
- DocumentSource(pExpCtx),
- embeddedObject(pBsonElement->embeddedObject()),
- arrayIterator(embeddedObject),
- haveCurrent(false) {
- if (arrayIterator.more()) {
- currentElement = arrayIterator.next();
- haveCurrent = true;
- }
- }
-
- intrusive_ptr<DocumentSourceBsonArray> DocumentSourceBsonArray::create(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx) {
-
- verify(pBsonElement->type() == Array);
- intrusive_ptr<DocumentSourceBsonArray> pSource(
- new DocumentSourceBsonArray(pBsonElement, pExpCtx));
-
- return pSource;
- }
-
- void DocumentSourceBsonArray::sourceToBson(
- BSONObjBuilder *pBuilder, bool explain) const {
-
- if (explain) {
- BSONObj empty;
-
- pBuilder->append("bsonArray", empty);
- }
- }
-}
+/**
+ * Copyright 2011 (c) 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+
+#include "db/pipeline/document_source.h"
+#include "db/pipeline/document.h"
+
+
+namespace mongo {
+
+ DocumentSourceBsonArray::~DocumentSourceBsonArray() {
+ }
+
+ bool DocumentSourceBsonArray::eof() {
+ return !haveCurrent;
+ }
+
+ bool DocumentSourceBsonArray::advance() {
+ DocumentSource::advance(); // check for interrupts
+
+ if (eof())
+ return false;
+
+ if (!arrayIterator.more()) {
+ haveCurrent = false;
+ return false;
+ }
+
+ currentElement = arrayIterator.next();
+ return true;
+ }
+
+ intrusive_ptr<Document> DocumentSourceBsonArray::getCurrent() {
+ verify(haveCurrent);
+ BSONObj documentObj(currentElement.Obj());
+ intrusive_ptr<Document> pDocument(
+ Document::createFromBsonObj(&documentObj));
+ return pDocument;
+ }
+
+ void DocumentSourceBsonArray::setSource(DocumentSource *pSource) {
+ /* this doesn't take a source */
+ verify(false);
+ }
+
+ DocumentSourceBsonArray::DocumentSourceBsonArray(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx):
+ DocumentSource(pExpCtx),
+ embeddedObject(pBsonElement->embeddedObject()),
+ arrayIterator(embeddedObject),
+ haveCurrent(false) {
+ if (arrayIterator.more()) {
+ currentElement = arrayIterator.next();
+ haveCurrent = true;
+ }
+ }
+
+ intrusive_ptr<DocumentSourceBsonArray> DocumentSourceBsonArray::create(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx) {
+
+ verify(pBsonElement->type() == Array);
+ intrusive_ptr<DocumentSourceBsonArray> pSource(
+ new DocumentSourceBsonArray(pBsonElement, pExpCtx));
+
+ return pSource;
+ }
+
+ void DocumentSourceBsonArray::sourceToBson(
+ BSONObjBuilder *pBuilder, bool explain) const {
+
+ if (explain) {
+ BSONObj empty;
+
+ pBuilder->append("bsonArray", empty);
+ }
+ }
+}
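
DocumentSourceBsonArray pre-loads the first array element in its constructor, so consumers call getCurrent() first and advance() to move on. The following is a hedged sketch of that eof()/advance()/getCurrent() contract against a toy stand-in source; the real interface carries more methods, and only the iteration shape is modeled here.

#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// Just the eof()/advance()/getCurrent() shape; the real DocumentSource
// interface is richer than this.
struct SimpleSource {
    std::vector<std::string> docs;
    std::size_t pos;

    explicit SimpleSource(const std::vector<std::string> &d) : docs(d), pos(0) {}

    bool eof() const { return pos >= docs.size(); }
    bool advance() { if (eof()) return false; ++pos; return !eof(); }
    const std::string &getCurrent() const { return docs[pos]; }
};

int main() {
    std::vector<std::string> in;
    in.push_back("{ \"a\" : 1 }");
    in.push_back("{ \"a\" : 2 }");

    SimpleSource src(in);
    // getCurrent() before advance(): the first document is already loaded,
    // exactly the convention the constructor above establishes.
    for (bool more = !src.eof(); more; more = src.advance())
        std::cout << src.getCurrent() << "\n";
    return 0;
}
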
diff --git a/src/mongo/db/pipeline/document_source_command_futures.cpp b/src/mongo/db/pipeline/document_source_command_futures.cpp
index 283be179de5..d4b92356108 100755
--- a/src/mongo/db/pipeline/document_source_command_futures.cpp
+++ b/src/mongo/db/pipeline/document_source_command_futures.cpp
@@ -1,137 +1,137 @@
-/**
- * Copyright 2011 (c) 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-
-#include "db/pipeline/document_source.h"
-
-namespace mongo {
-
- DocumentSourceCommandFutures::~DocumentSourceCommandFutures() {
- }
-
- bool DocumentSourceCommandFutures::eof() {
- /* if we haven't even started yet, do so */
- if (!pCurrent.get())
- getNextDocument();
-
- return (pCurrent.get() == NULL);
- }
-
- bool DocumentSourceCommandFutures::advance() {
- DocumentSource::advance(); // check for interrupts
-
- if (eof())
- return false;
-
- /* advance */
- getNextDocument();
-
- return (pCurrent.get() != NULL);
- }
-
- intrusive_ptr<Document> DocumentSourceCommandFutures::getCurrent() {
- verify(!eof());
- return pCurrent;
- }
-
- void DocumentSourceCommandFutures::setSource(DocumentSource *pSource) {
- /* this doesn't take a source */
- verify(false);
- }
-
- void DocumentSourceCommandFutures::sourceToBson(
- BSONObjBuilder *pBuilder, bool explain) const {
- /* this has no BSON equivalent */
- verify(false);
- }
-
- DocumentSourceCommandFutures::DocumentSourceCommandFutures(
- string &theErrmsg, FuturesList *pList,
- const intrusive_ptr<ExpressionContext> &pExpCtx):
- DocumentSource(pExpCtx),
- newSource(false),
- pBsonSource(),
- pCurrent(),
- iterator(pList->begin()),
- listEnd(pList->end()),
- errmsg(theErrmsg) {
- }
-
- intrusive_ptr<DocumentSourceCommandFutures>
- DocumentSourceCommandFutures::create(
- string &errmsg, FuturesList *pList,
- const intrusive_ptr<ExpressionContext> &pExpCtx) {
- intrusive_ptr<DocumentSourceCommandFutures> pSource(
- new DocumentSourceCommandFutures(errmsg, pList, pExpCtx));
- return pSource;
- }
-
- void DocumentSourceCommandFutures::getNextDocument() {
- while(true) {
- if (!pBsonSource.get()) {
- /* if there aren't any more futures, we're done */
- if (iterator == listEnd) {
- pCurrent.reset();
- return;
- }
-
- /* grab the next command result */
- shared_ptr<Future::CommandResult> pResult(*iterator);
- ++iterator;
-
- /* try to wait for it */
- if (!pResult->join()) {
- error() << "sharded pipeline failed on shard: " <<
- pResult->getServer() << " error: " <<
- pResult->result() << endl;
- errmsg += "-- mongod pipeline failed: ";
- errmsg += pResult->result().toString();
-
- /* move on to the next command future */
- continue;
- }
-
- /* grab the result array out of the shard server's response */
- BSONObj shardResult(pResult->result());
- BSONObjIterator objIterator(shardResult);
- while(objIterator.more()) {
- BSONElement element(objIterator.next());
- const char *pFieldName = element.fieldName();
-
- /* find the result array and quit this loop */
- if (strcmp(pFieldName, "result") == 0) {
- pBsonSource = DocumentSourceBsonArray::create(
- &element, pExpCtx);
- newSource = true;
- break;
- }
- }
- }
-
- /* if we're done with this shard's results, try the next */
- if (pBsonSource->eof() ||
- (!newSource && !pBsonSource->advance())) {
- pBsonSource.reset();
- continue;
- }
-
- pCurrent = pBsonSource->getCurrent();
- newSource = false;
- return;
- }
- }
-}
+/**
+ * Copyright 2011 (c) 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+
+#include "db/pipeline/document_source.h"
+
+namespace mongo {
+
+ DocumentSourceCommandFutures::~DocumentSourceCommandFutures() {
+ }
+
+ bool DocumentSourceCommandFutures::eof() {
+ /* if we haven't even started yet, do so */
+ if (!pCurrent.get())
+ getNextDocument();
+
+ return (pCurrent.get() == NULL);
+ }
+
+ bool DocumentSourceCommandFutures::advance() {
+ DocumentSource::advance(); // check for interrupts
+
+ if (eof())
+ return false;
+
+ /* advance */
+ getNextDocument();
+
+ return (pCurrent.get() != NULL);
+ }
+
+ intrusive_ptr<Document> DocumentSourceCommandFutures::getCurrent() {
+ verify(!eof());
+ return pCurrent;
+ }
+
+ void DocumentSourceCommandFutures::setSource(DocumentSource *pSource) {
+ /* this doesn't take a source */
+ verify(false);
+ }
+
+ void DocumentSourceCommandFutures::sourceToBson(
+ BSONObjBuilder *pBuilder, bool explain) const {
+ /* this has no BSON equivalent */
+ verify(false);
+ }
+
+ DocumentSourceCommandFutures::DocumentSourceCommandFutures(
+ string &theErrmsg, FuturesList *pList,
+ const intrusive_ptr<ExpressionContext> &pExpCtx):
+ DocumentSource(pExpCtx),
+ newSource(false),
+ pBsonSource(),
+ pCurrent(),
+ iterator(pList->begin()),
+ listEnd(pList->end()),
+ errmsg(theErrmsg) {
+ }
+
+ intrusive_ptr<DocumentSourceCommandFutures>
+ DocumentSourceCommandFutures::create(
+ string &errmsg, FuturesList *pList,
+ const intrusive_ptr<ExpressionContext> &pExpCtx) {
+ intrusive_ptr<DocumentSourceCommandFutures> pSource(
+ new DocumentSourceCommandFutures(errmsg, pList, pExpCtx));
+ return pSource;
+ }
+
+ void DocumentSourceCommandFutures::getNextDocument() {
+ while(true) {
+ if (!pBsonSource.get()) {
+ /* if there aren't any more futures, we're done */
+ if (iterator == listEnd) {
+ pCurrent.reset();
+ return;
+ }
+
+ /* grab the next command result */
+ shared_ptr<Future::CommandResult> pResult(*iterator);
+ ++iterator;
+
+ /* try to wait for it */
+ if (!pResult->join()) {
+ error() << "sharded pipeline failed on shard: " <<
+ pResult->getServer() << " error: " <<
+ pResult->result() << endl;
+ errmsg += "-- mongod pipeline failed: ";
+ errmsg += pResult->result().toString();
+
+ /* move on to the next command future */
+ continue;
+ }
+
+ /* grab the result array out of the shard server's response */
+ BSONObj shardResult(pResult->result());
+ BSONObjIterator objIterator(shardResult);
+ while(objIterator.more()) {
+ BSONElement element(objIterator.next());
+ const char *pFieldName = element.fieldName();
+
+ /* find the result array and quit this loop */
+ if (strcmp(pFieldName, "result") == 0) {
+ pBsonSource = DocumentSourceBsonArray::create(
+ &element, pExpCtx);
+ newSource = true;
+ break;
+ }
+ }
+ }
+
+ /* if we're done with this shard's results, try the next */
+ if (pBsonSource->eof() ||
+ (!newSource && !pBsonSource->advance())) {
+ pBsonSource.reset();
+ continue;
+ }
+
+ pCurrent = pBsonSource->getCurrent();
+ newSource = false;
+ return;
+ }
+ }
+}
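
getNextDocument() above walks a list of shard futures, reports and skips any shard whose join() failed, and then drains the "result" array of each successful response. Here is a standalone sketch of that skip-and-concatenate flow with plain std:: stand-ins for the Future and BSON types; the field names and data are made up for illustration.

#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// Stand-in for one shard's command result: whether join() worked, which
// server it was, and the documents pulled from its "result" array.
struct ShardResult {
    bool ok;
    std::string server;
    std::vector<std::string> docs;
};

int main() {
    std::vector<ShardResult> shards;

    ShardResult a; a.ok = true;  a.server = "shard0"; a.docs.push_back("{ \"x\" : 1 }");
    ShardResult b; b.ok = false; b.server = "shard1";
    shards.push_back(a);
    shards.push_back(b);

    std::string errmsg;
    for (std::size_t i = 0; i < shards.size(); ++i) {
        if (!shards[i].ok) {
            // a failed shard is reported and skipped, as getNextDocument() does
            errmsg += "-- mongod pipeline failed on " + shards[i].server + " ";
            continue;
        }
        for (std::size_t j = 0; j < shards[i].docs.size(); ++j)
            std::cout << shards[i].docs[j] << "\n";
    }

    if (!errmsg.empty())
        std::cerr << errmsg << "\n";
    return 0;
}
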
diff --git a/src/mongo/db/pipeline/document_source_filter.cpp b/src/mongo/db/pipeline/document_source_filter.cpp
index 156772fc335..b9cb0a369d1 100755
--- a/src/mongo/db/pipeline/document_source_filter.cpp
+++ b/src/mongo/db/pipeline/document_source_filter.cpp
@@ -1,105 +1,105 @@
-/**
-* Copyright (C) 2011 10gen Inc.
-*
-* This program is free software: you can redistribute it and/or modify
-* it under the terms of the GNU Affero General Public License, version 3,
-* as published by the Free Software Foundation.
-*
-* This program is distributed in the hope that it will be useful,
-* but WITHOUT ANY WARRANTY; without even the implied warranty of
-* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-* GNU Affero General Public License for more details.
-*
-* You should have received a copy of the GNU Affero General Public License
-* along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/
-
-#include "pch.h"
-
-#include "db/pipeline/document_source.h"
-
-#include "db/jsobj.h"
-#include "db/pipeline/expression.h"
-#include "db/pipeline/value.h"
-
-namespace mongo {
-
- const char DocumentSourceFilter::filterName[] = "$filter";
-
- DocumentSourceFilter::~DocumentSourceFilter() {
- }
-
- const char *DocumentSourceFilter::getSourceName() const {
- return filterName;
- }
-
- bool DocumentSourceFilter::coalesce(
- const intrusive_ptr<DocumentSource> &pNextSource) {
-
- /* we only know how to coalesce other filters */
- DocumentSourceFilter *pDocFilter =
- dynamic_cast<DocumentSourceFilter *>(pNextSource.get());
- if (!pDocFilter)
- return false;
-
- /*
- Two adjacent filters can be combined by creating a conjunction of
- their predicates.
- */
- intrusive_ptr<ExpressionNary> pAnd(ExpressionAnd::create());
- pAnd->addOperand(pFilter);
- pAnd->addOperand(pDocFilter->pFilter);
- pFilter = pAnd;
-
- return true;
- }
-
- void DocumentSourceFilter::optimize() {
- pFilter = pFilter->optimize();
- }
-
- void DocumentSourceFilter::sourceToBson(
- BSONObjBuilder *pBuilder, bool explain) const {
- pFilter->addToBsonObj(pBuilder, filterName, false);
- }
-
- bool DocumentSourceFilter::accept(
- const intrusive_ptr<Document> &pDocument) const {
- intrusive_ptr<const Value> pValue(pFilter->evaluate(pDocument));
- return pValue->coerceToBool();
- }
-
- intrusive_ptr<DocumentSource> DocumentSourceFilter::createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx) {
- uassert(15946, "a document filter expression must be an object",
- pBsonElement->type() == Object);
-
- Expression::ObjectCtx oCtx(0);
- intrusive_ptr<Expression> pExpression(
- Expression::parseObject(pBsonElement, &oCtx));
- intrusive_ptr<DocumentSourceFilter> pFilter(
- DocumentSourceFilter::create(pExpression, pCtx));
-
- return pFilter;
- }
-
- intrusive_ptr<DocumentSourceFilter> DocumentSourceFilter::create(
- const intrusive_ptr<Expression> &pFilter,
- const intrusive_ptr<ExpressionContext> &pExpCtx) {
- intrusive_ptr<DocumentSourceFilter> pSource(
- new DocumentSourceFilter(pFilter, pExpCtx));
- return pSource;
- }
-
- DocumentSourceFilter::DocumentSourceFilter(
- const intrusive_ptr<Expression> &pTheFilter,
- const intrusive_ptr<ExpressionContext> &pExpCtx):
- DocumentSourceFilterBase(pExpCtx),
- pFilter(pTheFilter) {
- }
-
- void DocumentSourceFilter::toMatcherBson(BSONObjBuilder *pBuilder) const {
- pFilter->toMatcherBson(pBuilder);
- }
-}
+/**
+* Copyright (C) 2011 10gen Inc.
+*
+* This program is free software: you can redistribute it and/or modify
+* it under the terms of the GNU Affero General Public License, version 3,
+* as published by the Free Software Foundation.
+*
+* This program is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU Affero General Public License for more details.
+*
+* You should have received a copy of the GNU Affero General Public License
+* along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+
+#include "pch.h"
+
+#include "db/pipeline/document_source.h"
+
+#include "db/jsobj.h"
+#include "db/pipeline/expression.h"
+#include "db/pipeline/value.h"
+
+namespace mongo {
+
+ const char DocumentSourceFilter::filterName[] = "$filter";
+
+ DocumentSourceFilter::~DocumentSourceFilter() {
+ }
+
+ const char *DocumentSourceFilter::getSourceName() const {
+ return filterName;
+ }
+
+ bool DocumentSourceFilter::coalesce(
+ const intrusive_ptr<DocumentSource> &pNextSource) {
+
+ /* we only know how to coalesce other filters */
+ DocumentSourceFilter *pDocFilter =
+ dynamic_cast<DocumentSourceFilter *>(pNextSource.get());
+ if (!pDocFilter)
+ return false;
+
+ /*
+ Two adjacent filters can be combined by creating a conjunction of
+ their predicates.
+ */
+ intrusive_ptr<ExpressionNary> pAnd(ExpressionAnd::create());
+ pAnd->addOperand(pFilter);
+ pAnd->addOperand(pDocFilter->pFilter);
+ pFilter = pAnd;
+
+ return true;
+ }
+
+ void DocumentSourceFilter::optimize() {
+ pFilter = pFilter->optimize();
+ }
+
+ void DocumentSourceFilter::sourceToBson(
+ BSONObjBuilder *pBuilder, bool explain) const {
+ pFilter->addToBsonObj(pBuilder, filterName, false);
+ }
+
+ bool DocumentSourceFilter::accept(
+ const intrusive_ptr<Document> &pDocument) const {
+ intrusive_ptr<const Value> pValue(pFilter->evaluate(pDocument));
+ return pValue->coerceToBool();
+ }
+
+ intrusive_ptr<DocumentSource> DocumentSourceFilter::createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ uassert(15946, "a document filter expression must be an object",
+ pBsonElement->type() == Object);
+
+ Expression::ObjectCtx oCtx(0);
+ intrusive_ptr<Expression> pExpression(
+ Expression::parseObject(pBsonElement, &oCtx));
+ intrusive_ptr<DocumentSourceFilter> pFilter(
+ DocumentSourceFilter::create(pExpression, pCtx));
+
+ return pFilter;
+ }
+
+ intrusive_ptr<DocumentSourceFilter> DocumentSourceFilter::create(
+ const intrusive_ptr<Expression> &pFilter,
+ const intrusive_ptr<ExpressionContext> &pExpCtx) {
+ intrusive_ptr<DocumentSourceFilter> pSource(
+ new DocumentSourceFilter(pFilter, pExpCtx));
+ return pSource;
+ }
+
+ DocumentSourceFilter::DocumentSourceFilter(
+ const intrusive_ptr<Expression> &pTheFilter,
+ const intrusive_ptr<ExpressionContext> &pExpCtx):
+ DocumentSourceFilterBase(pExpCtx),
+ pFilter(pTheFilter) {
+ }
+
+ void DocumentSourceFilter::toMatcherBson(BSONObjBuilder *pBuilder) const {
+ pFilter->toMatcherBson(pBuilder);
+ }
+}
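
coalesce() merges two adjacent filter stages by AND-ing their predicates, so the pipeline only walks the documents once. A small sketch of that idea follows, using std::function predicates instead of the Expression/ExpressionAnd classes; it shows the pattern, not the actual MongoDB types.

#include <functional>
#include <iostream>

typedef std::function<bool(int)> Pred;

// Two adjacent filter stages collapse into one conjunction, the same move
// coalesce() makes with ExpressionAnd.
Pred coalesce(const Pred &a, const Pred &b) {
    return [a, b](int doc) { return a(doc) && b(doc); };
}

int main() {
    Pred gt2  = [](int doc) { return doc > 2; };
    Pred even = [](int doc) { return doc % 2 == 0; };
    Pred both = coalesce(gt2, even);

    const int docs[] = { 1, 2, 3, 4, 5, 6 };
    for (int i = 0; i < 6; ++i)
        if (both(docs[i]))
            std::cout << docs[i] << "\n";   // prints 4 and 6
    return 0;
}
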
diff --git a/src/mongo/db/pipeline/document_source_filter_base.cpp b/src/mongo/db/pipeline/document_source_filter_base.cpp
index c04ff7a9f29..3354b3c6bc2 100755
--- a/src/mongo/db/pipeline/document_source_filter_base.cpp
+++ b/src/mongo/db/pipeline/document_source_filter_base.cpp
@@ -1,89 +1,89 @@
-/**
-* Copyright (C) 2011 10gen Inc.
-*
-* This program is free software: you can redistribute it and/or modify
-* it under the terms of the GNU Affero General Public License, version 3,
-* as published by the Free Software Foundation.
-*
-* This program is distributed in the hope that it will be useful,
-* but WITHOUT ANY WARRANTY; without even the implied warranty of
-* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-* GNU Affero General Public License for more details.
-*
-* You should have received a copy of the GNU Affero General Public License
-* along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/
-
-#include "pch.h"
-
-#include "db/pipeline/document_source.h"
-
-#include "db/jsobj.h"
-#include "db/pipeline/expression.h"
-#include "db/pipeline/value.h"
-
-namespace mongo {
-
- DocumentSourceFilterBase::~DocumentSourceFilterBase() {
- }
-
- void DocumentSourceFilterBase::findNext() {
- /* only do this the first time */
- if (unstarted) {
- hasNext = !pSource->eof();
- unstarted = false;
- }
-
- while(hasNext) {
- boost::intrusive_ptr<Document> pDocument(pSource->getCurrent());
- hasNext = pSource->advance();
-
- if (accept(pDocument)) {
- pCurrent = pDocument;
- return;
- }
- }
-
- pCurrent.reset();
- }
-
- bool DocumentSourceFilterBase::eof() {
- if (unstarted)
- findNext();
-
- return (pCurrent.get() == NULL);
- }
-
- bool DocumentSourceFilterBase::advance() {
- DocumentSource::advance(); // check for interrupts
-
- if (unstarted)
- findNext();
-
- /*
- This looks weird after the above, but is correct. Note that calling
- getCurrent() when first starting already yields the first document
- in the collection. Calling advance() without using getCurrent()
- first will skip over the first item.
- */
- findNext();
-
- return (pCurrent.get() != NULL);
- }
-
- boost::intrusive_ptr<Document> DocumentSourceFilterBase::getCurrent() {
- if (unstarted)
- findNext();
-
- verify(pCurrent.get() != NULL);
- return pCurrent;
- }
-
- DocumentSourceFilterBase::DocumentSourceFilterBase(
- const intrusive_ptr<ExpressionContext> &pExpCtx):
- DocumentSource(pExpCtx),
- unstarted(true),
- hasNext(false),
- pCurrent() {
- }
-}
+/**
+* Copyright (C) 2011 10gen Inc.
+*
+* This program is free software: you can redistribute it and/or modify
+* it under the terms of the GNU Affero General Public License, version 3,
+* as published by the Free Software Foundation.
+*
+* This program is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU Affero General Public License for more details.
+*
+* You should have received a copy of the GNU Affero General Public License
+* along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+
+#include "pch.h"
+
+#include "db/pipeline/document_source.h"
+
+#include "db/jsobj.h"
+#include "db/pipeline/expression.h"
+#include "db/pipeline/value.h"
+
+namespace mongo {
+
+ DocumentSourceFilterBase::~DocumentSourceFilterBase() {
+ }
+
+ void DocumentSourceFilterBase::findNext() {
+ /* only do this the first time */
+ if (unstarted) {
+ hasNext = !pSource->eof();
+ unstarted = false;
+ }
+
+ while(hasNext) {
+ boost::intrusive_ptr<Document> pDocument(pSource->getCurrent());
+ hasNext = pSource->advance();
+
+ if (accept(pDocument)) {
+ pCurrent = pDocument;
+ return;
+ }
+ }
+
+ pCurrent.reset();
+ }
+
+ bool DocumentSourceFilterBase::eof() {
+ if (unstarted)
+ findNext();
+
+ return (pCurrent.get() == NULL);
+ }
+
+ bool DocumentSourceFilterBase::advance() {
+ DocumentSource::advance(); // check for interrupts
+
+ if (unstarted)
+ findNext();
+
+ /*
+ This looks weird after the above, but is correct. Note that calling
+ getCurrent() when first starting already yields the first document
+ in the collection. Calling advance() without using getCurrent()
+ first will skip over the first item.
+ */
+ findNext();
+
+ return (pCurrent.get() != NULL);
+ }
+
+ boost::intrusive_ptr<Document> DocumentSourceFilterBase::getCurrent() {
+ if (unstarted)
+ findNext();
+
+ verify(pCurrent.get() != NULL);
+ return pCurrent;
+ }
+
+ DocumentSourceFilterBase::DocumentSourceFilterBase(
+ const intrusive_ptr<ExpressionContext> &pExpCtx):
+ DocumentSource(pExpCtx),
+ unstarted(true),
+ hasNext(false),
+ pCurrent() {
+ }
+}
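
findNext() above pulls documents from the upstream source, keeps the first one that accept() passes, and records whether more input remains; eof(), advance(), and getCurrent() all lean on it. Below is a standalone sketch of the same pattern over integers; it positions eagerly in the constructor purely to stay short, whereas the real class defers until first use.

#include <cstddef>
#include <iostream>
#include <vector>

class FilteredInts {
public:
    explicit FilteredInts(const std::vector<int> &in)
        : in_(in), pos_(0), current_(0), haveCurrent_(false) { findNext(); }

    bool eof() const { return !haveCurrent_; }
    int getCurrent() const { return current_; }
    bool advance() { findNext(); return haveCurrent_; }

private:
    static bool accept(int v) { return v % 2 == 0; }   // stand-in predicate

    // Pull from the "upstream" vector until something passes accept(),
    // the same job findNext() does against pSource.
    void findNext() {
        haveCurrent_ = false;
        while (pos_ < in_.size()) {
            int candidate = in_[pos_++];
            if (accept(candidate)) {
                current_ = candidate;
                haveCurrent_ = true;
                return;
            }
        }
    }

    std::vector<int> in_;
    std::size_t pos_;
    int current_;
    bool haveCurrent_;
};

int main() {
    std::vector<int> docs;
    for (int i = 1; i <= 7; ++i)
        docs.push_back(i);

    FilteredInts f(docs);
    for (; !f.eof(); f.advance())
        std::cout << f.getCurrent() << "\n";   // 2, 4, 6
    return 0;
}
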
diff --git a/src/mongo/db/pipeline/document_source_group.cpp b/src/mongo/db/pipeline/document_source_group.cpp
index 69f42e20868..f829c3b609b 100755
--- a/src/mongo/db/pipeline/document_source_group.cpp
+++ b/src/mongo/db/pipeline/document_source_group.cpp
@@ -1,398 +1,398 @@
-/**
-* Copyright (C) 2011 10gen Inc.
-*
-* This program is free software: you can redistribute it and/or modify
-* it under the terms of the GNU Affero General Public License, version 3,
-* as published by the Free Software Foundation.
-*
-* This program is distributed in the hope that it will be useful,
-* but WITHOUT ANY WARRANTY; without even the implied warranty of
-* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-* GNU Affero General Public License for more details.
-*
-* You should have received a copy of the GNU Affero General Public License
-* along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/
-
-#include "pch.h"
-
-#include "db/pipeline/document_source.h"
-
-#include "db/jsobj.h"
-#include "db/pipeline/accumulator.h"
-#include "db/pipeline/document.h"
-#include "db/pipeline/expression.h"
-#include "db/pipeline/expression_context.h"
-#include "db/pipeline/value.h"
-
-namespace mongo {
- const char DocumentSourceGroup::groupName[] = "$group";
-
- DocumentSourceGroup::~DocumentSourceGroup() {
- }
-
- const char *DocumentSourceGroup::getSourceName() const {
- return groupName;
- }
-
- bool DocumentSourceGroup::eof() {
- if (!populated)
- populate();
-
- return (groupsIterator == groups.end());
- }
-
- bool DocumentSourceGroup::advance() {
- DocumentSource::advance(); // check for interrupts
-
- if (!populated)
- populate();
-
- verify(groupsIterator != groups.end());
-
- ++groupsIterator;
- if (groupsIterator == groups.end()) {
- pCurrent.reset();
- return false;
- }
-
- pCurrent = makeDocument(groupsIterator);
- return true;
- }
-
- intrusive_ptr<Document> DocumentSourceGroup::getCurrent() {
- if (!populated)
- populate();
-
- return pCurrent;
- }
-
- void DocumentSourceGroup::sourceToBson(
- BSONObjBuilder *pBuilder, bool explain) const {
- BSONObjBuilder insides;
-
- /* add the _id */
- pIdExpression->addToBsonObj(&insides, Document::idName.c_str(), false);
-
- /* add the remaining fields */
- const size_t n = vFieldName.size();
- for(size_t i = 0; i < n; ++i) {
- intrusive_ptr<Accumulator> pA((*vpAccumulatorFactory[i])(pExpCtx));
- pA->addOperand(vpExpression[i]);
- pA->addToBsonObj(&insides, vFieldName[i], false);
- }
-
- pBuilder->append(groupName, insides.done());
- }
-
- intrusive_ptr<DocumentSourceGroup> DocumentSourceGroup::create(
- const intrusive_ptr<ExpressionContext> &pExpCtx) {
- intrusive_ptr<DocumentSourceGroup> pSource(
- new DocumentSourceGroup(pExpCtx));
- return pSource;
- }
-
- DocumentSourceGroup::DocumentSourceGroup(
- const intrusive_ptr<ExpressionContext> &pExpCtx):
- DocumentSource(pExpCtx),
- populated(false),
- pIdExpression(),
- groups(),
- vFieldName(),
- vpAccumulatorFactory(),
- vpExpression() {
- }
-
- void DocumentSourceGroup::addAccumulator(
- string fieldName,
- intrusive_ptr<Accumulator> (*pAccumulatorFactory)(
- const intrusive_ptr<ExpressionContext> &),
- const intrusive_ptr<Expression> &pExpression) {
- vFieldName.push_back(fieldName);
- vpAccumulatorFactory.push_back(pAccumulatorFactory);
- vpExpression.push_back(pExpression);
- }
-
-
- struct GroupOpDesc {
- const char *pName;
- intrusive_ptr<Accumulator> (*pFactory)(
- const intrusive_ptr<ExpressionContext> &);
- };
-
- static int GroupOpDescCmp(const void *pL, const void *pR) {
- return strcmp(((const GroupOpDesc *)pL)->pName,
- ((const GroupOpDesc *)pR)->pName);
- }
-
- /*
- Keep these sorted alphabetically so we can bsearch() them using
- GroupOpDescCmp() above.
- */
- static const GroupOpDesc GroupOpTable[] = {
- {"$addToSet", AccumulatorAddToSet::create},
- {"$avg", AccumulatorAvg::create},
- {"$first", AccumulatorFirst::create},
- {"$last", AccumulatorLast::create},
- {"$max", AccumulatorMinMax::createMax},
- {"$min", AccumulatorMinMax::createMin},
- {"$push", AccumulatorPush::create},
- {"$sum", AccumulatorSum::create},
- };
-
- static const size_t NGroupOp = sizeof(GroupOpTable)/sizeof(GroupOpTable[0]);
-
- intrusive_ptr<DocumentSource> DocumentSourceGroup::createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx) {
- uassert(15947, "a group's fields must be specified in an object",
- pBsonElement->type() == Object);
-
- intrusive_ptr<DocumentSourceGroup> pGroup(
- DocumentSourceGroup::create(pExpCtx));
- bool idSet = false;
-
- BSONObj groupObj(pBsonElement->Obj());
- BSONObjIterator groupIterator(groupObj);
- while(groupIterator.more()) {
- BSONElement groupField(groupIterator.next());
- const char *pFieldName = groupField.fieldName();
-
- if (strcmp(pFieldName, Document::idName.c_str()) == 0) {
- uassert(15948, "a group's _id may only be specified once",
- !idSet);
-
- BSONType groupType = groupField.type();
-
- if (groupType == Object) {
- /*
- Use the projection-like set of field paths to create the
- group-by key.
- */
- Expression::ObjectCtx oCtx(
- Expression::ObjectCtx::DOCUMENT_OK);
- intrusive_ptr<Expression> pId(
- Expression::parseObject(&groupField, &oCtx));
-
- pGroup->setIdExpression(pId);
- idSet = true;
- }
- else if (groupType == String) {
- string groupString(groupField.String());
- const char *pGroupString = groupString.c_str();
- if ((groupString.length() == 0) ||
- (pGroupString[0] != '$'))
- goto StringConstantId;
-
- string pathString(
- Expression::removeFieldPrefix(groupString));
- intrusive_ptr<ExpressionFieldPath> pFieldPath(
- ExpressionFieldPath::create(pathString));
- pGroup->setIdExpression(pFieldPath);
- idSet = true;
- }
- else {
- /* pick out the constant types that are allowed */
- switch(groupType) {
- case NumberDouble:
- case String:
- case Object:
- case Array:
- case jstOID:
- case Bool:
- case Date:
- case NumberInt:
- case Timestamp:
- case NumberLong:
- case jstNULL:
- StringConstantId: // from string case above
- {
- intrusive_ptr<const Value> pValue(
- Value::createFromBsonElement(&groupField));
- intrusive_ptr<ExpressionConstant> pConstant(
- ExpressionConstant::create(pValue));
- pGroup->setIdExpression(pConstant);
- idSet = true;
- break;
- }
-
- default:
- uassert(15949, str::stream() <<
- "a group's _id may not include fields of BSON type " << groupType,
- false);
- }
- }
- }
- else {
- /*
- Treat as a projection field with the additional ability to
- add aggregation operators.
- */
- uassert(15950, str::stream() <<
- "the group aggregate field name \"" <<
- pFieldName << "\" cannot be an operator name",
- *pFieldName != '$');
-
- uassert(15951, str::stream() <<
- "the group aggregate field \"" << pFieldName <<
- "\" must be defined as an expression inside an object",
- groupField.type() == Object);
-
- BSONObj subField(groupField.Obj());
- BSONObjIterator subIterator(subField);
- size_t subCount = 0;
- for(; subIterator.more(); ++subCount) {
- BSONElement subElement(subIterator.next());
-
- /* look for the specified operator */
- GroupOpDesc key;
- key.pName = subElement.fieldName();
- const GroupOpDesc *pOp =
- (const GroupOpDesc *)bsearch(
- &key, GroupOpTable, NGroupOp, sizeof(GroupOpDesc),
- GroupOpDescCmp);
-
- uassert(15952, str::stream() <<
- "unknown group operator \"" <<
- key.pName << "\"",
- pOp);
-
- intrusive_ptr<Expression> pGroupExpr;
-
- BSONType elementType = subElement.type();
- if (elementType == Object) {
- Expression::ObjectCtx oCtx(
- Expression::ObjectCtx::DOCUMENT_OK);
- pGroupExpr = Expression::parseObject(
- &subElement, &oCtx);
- }
- else if (elementType == Array) {
- uassert(15953, str::stream() <<
- "aggregating group operators are unary (" <<
- key.pName << ")", false);
- }
-                    else { /* assume it's an atomic single operand */
- pGroupExpr = Expression::parseOperand(&subElement);
- }
-
- pGroup->addAccumulator(
- pFieldName, pOp->pFactory, pGroupExpr);
- }
-
- uassert(15954, str::stream() <<
- "the computed aggregate \"" <<
- pFieldName << "\" must specify exactly one operator",
- subCount == 1);
- }
- }
-
- uassert(15955, "a group specification must include an _id", idSet);
-
- return pGroup;
- }
-
- void DocumentSourceGroup::populate() {
- for(bool hasNext = !pSource->eof(); hasNext;
- hasNext = pSource->advance()) {
- intrusive_ptr<Document> pDocument(pSource->getCurrent());
-
- /* get the _id document */
- intrusive_ptr<const Value> pId(pIdExpression->evaluate(pDocument));
-
- /* treat Undefined the same as NULL SERVER-4674 */
- if (pId->getType() == Undefined)
- pId = Value::getNull();
-
- /*
- Look for the _id value in the map; if it's not there, add a
- new entry with a blank accumulator.
- */
- vector<intrusive_ptr<Accumulator> > *pGroup;
- GroupsType::iterator it(groups.find(pId));
- if (it != groups.end()) {
- /* point at the existing accumulators */
- pGroup = &it->second;
- }
- else {
- /* insert a new group into the map */
- groups.insert(it,
- pair<intrusive_ptr<const Value>,
- vector<intrusive_ptr<Accumulator> > >(
- pId, vector<intrusive_ptr<Accumulator> >()));
-
- /* find the accumulator vector (the map value) */
- it = groups.find(pId);
- pGroup = &it->second;
-
- /* add the accumulators */
- const size_t n = vpAccumulatorFactory.size();
- pGroup->reserve(n);
- for(size_t i = 0; i < n; ++i) {
- intrusive_ptr<Accumulator> pAccumulator(
- (*vpAccumulatorFactory[i])(pExpCtx));
- pAccumulator->addOperand(vpExpression[i]);
- pGroup->push_back(pAccumulator);
- }
- }
-
- /* point at the existing key */
- // unneeded atm // pId = it.first;
-
- /* tickle all the accumulators for the group we found */
- const size_t n = pGroup->size();
- for(size_t i = 0; i < n; ++i)
- (*pGroup)[i]->evaluate(pDocument);
- }
-
- /* start the group iterator */
- groupsIterator = groups.begin();
- if (groupsIterator != groups.end())
- pCurrent = makeDocument(groupsIterator);
- populated = true;
- }
-
- intrusive_ptr<Document> DocumentSourceGroup::makeDocument(
- const GroupsType::iterator &rIter) {
- vector<intrusive_ptr<Accumulator> > *pGroup = &rIter->second;
- const size_t n = vFieldName.size();
- intrusive_ptr<Document> pResult(Document::create(1 + n));
-
- /* add the _id field */
- pResult->addField(Document::idName, rIter->first);
-
- /* add the rest of the fields */
- for(size_t i = 0; i < n; ++i) {
- intrusive_ptr<const Value> pValue((*pGroup)[i]->getValue());
- if (pValue->getType() != Undefined)
- pResult->addField(vFieldName[i], pValue);
- }
-
- return pResult;
- }
-
- intrusive_ptr<DocumentSource> DocumentSourceGroup::createMerger() {
- intrusive_ptr<DocumentSourceGroup> pMerger(
- DocumentSourceGroup::create(pExpCtx));
-
- /* the merger will use the same grouping key */
- pMerger->setIdExpression(ExpressionFieldPath::create(
- Document::idName.c_str()));
-
- const size_t n = vFieldName.size();
- for(size_t i = 0; i < n; ++i) {
- /*
- The merger's output field names will be the same, as will the
- accumulator factories. However, for some accumulators, the
- expression to be accumulated will be different. The original
- accumulator may be collecting an expression based on a field
- expression or constant. Here, we accumulate the output of the
- same name from the prior group.
- */
- pMerger->addAccumulator(
- vFieldName[i], vpAccumulatorFactory[i],
- ExpressionFieldPath::create(vFieldName[i]));
- }
-
- return pMerger;
- }
-}
+/**
+* Copyright (C) 2011 10gen Inc.
+*
+* This program is free software: you can redistribute it and/or modify
+* it under the terms of the GNU Affero General Public License, version 3,
+* as published by the Free Software Foundation.
+*
+* This program is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU Affero General Public License for more details.
+*
+* You should have received a copy of the GNU Affero General Public License
+* along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+
+#include "pch.h"
+
+#include "db/pipeline/document_source.h"
+
+#include "db/jsobj.h"
+#include "db/pipeline/accumulator.h"
+#include "db/pipeline/document.h"
+#include "db/pipeline/expression.h"
+#include "db/pipeline/expression_context.h"
+#include "db/pipeline/value.h"
+
+namespace mongo {
+ const char DocumentSourceGroup::groupName[] = "$group";
+
+ DocumentSourceGroup::~DocumentSourceGroup() {
+ }
+
+ const char *DocumentSourceGroup::getSourceName() const {
+ return groupName;
+ }
+
+ bool DocumentSourceGroup::eof() {
+ if (!populated)
+ populate();
+
+ return (groupsIterator == groups.end());
+ }
+
+ bool DocumentSourceGroup::advance() {
+ DocumentSource::advance(); // check for interrupts
+
+ if (!populated)
+ populate();
+
+ verify(groupsIterator != groups.end());
+
+ ++groupsIterator;
+ if (groupsIterator == groups.end()) {
+ pCurrent.reset();
+ return false;
+ }
+
+ pCurrent = makeDocument(groupsIterator);
+ return true;
+ }
+
+ intrusive_ptr<Document> DocumentSourceGroup::getCurrent() {
+ if (!populated)
+ populate();
+
+ return pCurrent;
+ }
+
+ void DocumentSourceGroup::sourceToBson(
+ BSONObjBuilder *pBuilder, bool explain) const {
+ BSONObjBuilder insides;
+
+ /* add the _id */
+ pIdExpression->addToBsonObj(&insides, Document::idName.c_str(), false);
+
+ /* add the remaining fields */
+ const size_t n = vFieldName.size();
+ for(size_t i = 0; i < n; ++i) {
+ intrusive_ptr<Accumulator> pA((*vpAccumulatorFactory[i])(pExpCtx));
+ pA->addOperand(vpExpression[i]);
+ pA->addToBsonObj(&insides, vFieldName[i], false);
+ }
+
+ pBuilder->append(groupName, insides.done());
+ }
+
+ intrusive_ptr<DocumentSourceGroup> DocumentSourceGroup::create(
+ const intrusive_ptr<ExpressionContext> &pExpCtx) {
+ intrusive_ptr<DocumentSourceGroup> pSource(
+ new DocumentSourceGroup(pExpCtx));
+ return pSource;
+ }
+
+ DocumentSourceGroup::DocumentSourceGroup(
+ const intrusive_ptr<ExpressionContext> &pExpCtx):
+ DocumentSource(pExpCtx),
+ populated(false),
+ pIdExpression(),
+ groups(),
+ vFieldName(),
+ vpAccumulatorFactory(),
+ vpExpression() {
+ }
+
+ void DocumentSourceGroup::addAccumulator(
+ string fieldName,
+ intrusive_ptr<Accumulator> (*pAccumulatorFactory)(
+ const intrusive_ptr<ExpressionContext> &),
+ const intrusive_ptr<Expression> &pExpression) {
+ vFieldName.push_back(fieldName);
+ vpAccumulatorFactory.push_back(pAccumulatorFactory);
+ vpExpression.push_back(pExpression);
+ }
+
+
+ struct GroupOpDesc {
+ const char *pName;
+ intrusive_ptr<Accumulator> (*pFactory)(
+ const intrusive_ptr<ExpressionContext> &);
+ };
+
+ static int GroupOpDescCmp(const void *pL, const void *pR) {
+ return strcmp(((const GroupOpDesc *)pL)->pName,
+ ((const GroupOpDesc *)pR)->pName);
+ }
+
+ /*
+ Keep these sorted alphabetically so we can bsearch() them using
+ GroupOpDescCmp() above.
+ */
+ static const GroupOpDesc GroupOpTable[] = {
+ {"$addToSet", AccumulatorAddToSet::create},
+ {"$avg", AccumulatorAvg::create},
+ {"$first", AccumulatorFirst::create},
+ {"$last", AccumulatorLast::create},
+ {"$max", AccumulatorMinMax::createMax},
+ {"$min", AccumulatorMinMax::createMin},
+ {"$push", AccumulatorPush::create},
+ {"$sum", AccumulatorSum::create},
+ };
+
+ static const size_t NGroupOp = sizeof(GroupOpTable)/sizeof(GroupOpTable[0]);
+
+ intrusive_ptr<DocumentSource> DocumentSourceGroup::createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx) {
+ uassert(15947, "a group's fields must be specified in an object",
+ pBsonElement->type() == Object);
+
+ intrusive_ptr<DocumentSourceGroup> pGroup(
+ DocumentSourceGroup::create(pExpCtx));
+ bool idSet = false;
+
+ BSONObj groupObj(pBsonElement->Obj());
+ BSONObjIterator groupIterator(groupObj);
+ while(groupIterator.more()) {
+ BSONElement groupField(groupIterator.next());
+ const char *pFieldName = groupField.fieldName();
+
+ if (strcmp(pFieldName, Document::idName.c_str()) == 0) {
+ uassert(15948, "a group's _id may only be specified once",
+ !idSet);
+
+ BSONType groupType = groupField.type();
+
+ if (groupType == Object) {
+ /*
+ Use the projection-like set of field paths to create the
+ group-by key.
+ */
+ Expression::ObjectCtx oCtx(
+ Expression::ObjectCtx::DOCUMENT_OK);
+ intrusive_ptr<Expression> pId(
+ Expression::parseObject(&groupField, &oCtx));
+
+ pGroup->setIdExpression(pId);
+ idSet = true;
+ }
+ else if (groupType == String) {
+ string groupString(groupField.String());
+ const char *pGroupString = groupString.c_str();
+ if ((groupString.length() == 0) ||
+ (pGroupString[0] != '$'))
+ goto StringConstantId;
+
+ string pathString(
+ Expression::removeFieldPrefix(groupString));
+ intrusive_ptr<ExpressionFieldPath> pFieldPath(
+ ExpressionFieldPath::create(pathString));
+ pGroup->setIdExpression(pFieldPath);
+ idSet = true;
+ }
+ else {
+ /* pick out the constant types that are allowed */
+ switch(groupType) {
+ case NumberDouble:
+ case String:
+ case Object:
+ case Array:
+ case jstOID:
+ case Bool:
+ case Date:
+ case NumberInt:
+ case Timestamp:
+ case NumberLong:
+ case jstNULL:
+ StringConstantId: // from string case above
+ {
+ intrusive_ptr<const Value> pValue(
+ Value::createFromBsonElement(&groupField));
+ intrusive_ptr<ExpressionConstant> pConstant(
+ ExpressionConstant::create(pValue));
+ pGroup->setIdExpression(pConstant);
+ idSet = true;
+ break;
+ }
+
+ default:
+ uassert(15949, str::stream() <<
+ "a group's _id may not include fields of BSON type " << groupType,
+ false);
+ }
+ }
+ }
+ else {
+ /*
+ Treat as a projection field with the additional ability to
+ add aggregation operators.
+ */
+ uassert(15950, str::stream() <<
+ "the group aggregate field name \"" <<
+ pFieldName << "\" cannot be an operator name",
+ *pFieldName != '$');
+
+ uassert(15951, str::stream() <<
+ "the group aggregate field \"" << pFieldName <<
+ "\" must be defined as an expression inside an object",
+ groupField.type() == Object);
+
+ BSONObj subField(groupField.Obj());
+ BSONObjIterator subIterator(subField);
+ size_t subCount = 0;
+ for(; subIterator.more(); ++subCount) {
+ BSONElement subElement(subIterator.next());
+
+ /* look for the specified operator */
+ GroupOpDesc key;
+ key.pName = subElement.fieldName();
+ const GroupOpDesc *pOp =
+ (const GroupOpDesc *)bsearch(
+ &key, GroupOpTable, NGroupOp, sizeof(GroupOpDesc),
+ GroupOpDescCmp);
+
+ uassert(15952, str::stream() <<
+ "unknown group operator \"" <<
+ key.pName << "\"",
+ pOp);
+
+ intrusive_ptr<Expression> pGroupExpr;
+
+ BSONType elementType = subElement.type();
+ if (elementType == Object) {
+ Expression::ObjectCtx oCtx(
+ Expression::ObjectCtx::DOCUMENT_OK);
+ pGroupExpr = Expression::parseObject(
+ &subElement, &oCtx);
+ }
+ else if (elementType == Array) {
+ uassert(15953, str::stream() <<
+ "aggregating group operators are unary (" <<
+ key.pName << ")", false);
+ }
+                    else { /* assume it's an atomic single operand */
+ pGroupExpr = Expression::parseOperand(&subElement);
+ }
+
+ pGroup->addAccumulator(
+ pFieldName, pOp->pFactory, pGroupExpr);
+ }
+
+ uassert(15954, str::stream() <<
+ "the computed aggregate \"" <<
+ pFieldName << "\" must specify exactly one operator",
+ subCount == 1);
+ }
+ }
+
+ uassert(15955, "a group specification must include an _id", idSet);
+
+ return pGroup;
+ }
+
+ void DocumentSourceGroup::populate() {
+ for(bool hasNext = !pSource->eof(); hasNext;
+ hasNext = pSource->advance()) {
+ intrusive_ptr<Document> pDocument(pSource->getCurrent());
+
+ /* get the _id document */
+ intrusive_ptr<const Value> pId(pIdExpression->evaluate(pDocument));
+
+ /* treat Undefined the same as NULL SERVER-4674 */
+ if (pId->getType() == Undefined)
+ pId = Value::getNull();
+
+ /*
+ Look for the _id value in the map; if it's not there, add a
+ new entry with a blank accumulator.
+ */
+ vector<intrusive_ptr<Accumulator> > *pGroup;
+ GroupsType::iterator it(groups.find(pId));
+ if (it != groups.end()) {
+ /* point at the existing accumulators */
+ pGroup = &it->second;
+ }
+ else {
+ /* insert a new group into the map */
+ groups.insert(it,
+ pair<intrusive_ptr<const Value>,
+ vector<intrusive_ptr<Accumulator> > >(
+ pId, vector<intrusive_ptr<Accumulator> >()));
+
+ /* find the accumulator vector (the map value) */
+ it = groups.find(pId);
+ pGroup = &it->second;
+
+ /* add the accumulators */
+ const size_t n = vpAccumulatorFactory.size();
+ pGroup->reserve(n);
+ for(size_t i = 0; i < n; ++i) {
+ intrusive_ptr<Accumulator> pAccumulator(
+ (*vpAccumulatorFactory[i])(pExpCtx));
+ pAccumulator->addOperand(vpExpression[i]);
+ pGroup->push_back(pAccumulator);
+ }
+ }
+
+ /* point at the existing key */
+ // unneeded atm // pId = it.first;
+
+ /* tickle all the accumulators for the group we found */
+ const size_t n = pGroup->size();
+ for(size_t i = 0; i < n; ++i)
+ (*pGroup)[i]->evaluate(pDocument);
+ }
+
+ /* start the group iterator */
+ groupsIterator = groups.begin();
+ if (groupsIterator != groups.end())
+ pCurrent = makeDocument(groupsIterator);
+ populated = true;
+ }
+
+ intrusive_ptr<Document> DocumentSourceGroup::makeDocument(
+ const GroupsType::iterator &rIter) {
+ vector<intrusive_ptr<Accumulator> > *pGroup = &rIter->second;
+ const size_t n = vFieldName.size();
+ intrusive_ptr<Document> pResult(Document::create(1 + n));
+
+ /* add the _id field */
+ pResult->addField(Document::idName, rIter->first);
+
+ /* add the rest of the fields */
+ for(size_t i = 0; i < n; ++i) {
+ intrusive_ptr<const Value> pValue((*pGroup)[i]->getValue());
+ if (pValue->getType() != Undefined)
+ pResult->addField(vFieldName[i], pValue);
+ }
+
+ return pResult;
+ }
+
+ intrusive_ptr<DocumentSource> DocumentSourceGroup::createMerger() {
+ intrusive_ptr<DocumentSourceGroup> pMerger(
+ DocumentSourceGroup::create(pExpCtx));
+
+ /* the merger will use the same grouping key */
+ pMerger->setIdExpression(ExpressionFieldPath::create(
+ Document::idName.c_str()));
+
+ const size_t n = vFieldName.size();
+ for(size_t i = 0; i < n; ++i) {
+ /*
+ The merger's output field names will be the same, as will the
+ accumulator factories. However, for some accumulators, the
+ expression to be accumulated will be different. The original
+ accumulator may be collecting an expression based on a field
+ expression or constant. Here, we accumulate the output of the
+ same name from the prior group.
+ */
+ pMerger->addAccumulator(
+ vFieldName[i], vpAccumulatorFactory[i],
+ ExpressionFieldPath::create(vFieldName[i]));
+ }
+
+ return pMerger;
+ }
+}
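
The populate() loop is the heart of $group: evaluate the _id expression per input document, find or create that key's accumulators, and let each accumulator see the document; makeDocument() later turns every map entry into one output document. Here is a compact sketch of that flow with a hard-coded pair of $sum-style accumulators; it uses std::map stand-ins rather than the real Value/Accumulator types, and the fields are hypothetical.

#include <cstddef>
#include <iostream>
#include <map>
#include <string>
#include <utility>
#include <vector>

struct Doc { std::string author; int pageViews; };

int main() {
    std::vector<Doc> docs;
    Doc d1 = { "bob", 5 }, d2 = { "dave", 7 }, d3 = { "bob", 6 };
    docs.push_back(d1);
    docs.push_back(d2);
    docs.push_back(d3);

    // _id -> (docsPerAuthor as a $sum of 1, viewsPerAuthor as a $sum of pageViews)
    std::map<std::string, std::pair<long, long> > groups;
    for (std::size_t i = 0; i < docs.size(); ++i) {
        // operator[] inserts a zeroed entry the first time a key is seen,
        // playing the role of the "insert a new group into the map" branch.
        std::pair<long, long> &acc = groups[docs[i].author];
        acc.first  += 1;
        acc.second += docs[i].pageViews;
    }

    // the makeDocument() step: one output per map entry
    std::map<std::string, std::pair<long, long> >::const_iterator it;
    for (it = groups.begin(); it != groups.end(); ++it)
        std::cout << it->first << " docs=" << it->second.first
                  << " views=" << it->second.second << "\n";
    return 0;
}
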
diff --git a/src/mongo/db/pipeline/document_source_match.cpp b/src/mongo/db/pipeline/document_source_match.cpp
index 0141e5fc7b4..3ae327d6102 100755
--- a/src/mongo/db/pipeline/document_source_match.cpp
+++ b/src/mongo/db/pipeline/document_source_match.cpp
@@ -1,94 +1,94 @@
-/**
-* Copyright (C) 2011 10gen Inc.
-*
-* This program is free software: you can redistribute it and/or modify
-* it under the terms of the GNU Affero General Public License, version 3,
-* as published by the Free Software Foundation.
-*
-* This program is distributed in the hope that it will be useful,
-* but WITHOUT ANY WARRANTY; without even the implied warranty of
-* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-* GNU Affero General Public License for more details.
-*
-* You should have received a copy of the GNU Affero General Public License
-* along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/
-
-#include "pch.h"
-
-#include "db/pipeline/document_source.h"
-
-#include "db/jsobj.h"
-#include "db/matcher.h"
-#include "db/pipeline/document.h"
-#include "db/pipeline/expression.h"
-
-namespace mongo {
-
- const char DocumentSourceMatch::matchName[] = "$match";
-
- DocumentSourceMatch::~DocumentSourceMatch() {
- }
-
- const char *DocumentSourceMatch::getSourceName() const {
- return matchName;
- }
-
- void DocumentSourceMatch::sourceToBson(
- BSONObjBuilder *pBuilder, bool explain) const {
- const BSONObj *pQuery = matcher.getQuery();
- pBuilder->append(matchName, *pQuery);
- }
-
- bool DocumentSourceMatch::accept(
- const intrusive_ptr<Document> &pDocument) const {
-
- /*
- The matcher only takes BSON documents, so we have to make one.
-
- LATER
- We could optimize this by making a document with only the
- fields referenced by the Matcher. We could do this by looking inside
- the Matcher's BSON before it is created, and recording those. The
- easiest implementation might be to hold onto an ExpressionDocument
- in here, and give that pDocument to create the created subset of
- fields, and then convert that instead.
- */
- BSONObjBuilder objBuilder;
- pDocument->toBson(&objBuilder);
- BSONObj obj(objBuilder.done());
-
- return matcher.matches(obj);
- }
-
- intrusive_ptr<DocumentSource> DocumentSourceMatch::createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx) {
- uassert(15959, "the match filter must be an expression in an object",
- pBsonElement->type() == Object);
-
- intrusive_ptr<DocumentSourceMatch> pMatcher(
- new DocumentSourceMatch(pBsonElement->Obj(), pExpCtx));
-
- return pMatcher;
- }
-
- void DocumentSourceMatch::toMatcherBson(BSONObjBuilder *pBuilder) const {
- const BSONObj *pQuery = matcher.getQuery();
- pBuilder->appendElements(*pQuery);
- }
-
- DocumentSourceMatch::DocumentSourceMatch(
- const BSONObj &query,
- const intrusive_ptr<ExpressionContext> &pExpCtx):
- DocumentSourceFilterBase(pExpCtx),
- matcher(query) {
- }
-
- void DocumentSourceMatch::manageDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker) {
-#ifdef MONGO_LATER_SERVER_4644
- verify(false); // $$$ implement dependencies on Matcher
-#endif /* MONGO_LATER_SERVER_4644 */
- }
-}
+/**
+* Copyright (C) 2011 10gen Inc.
+*
+* This program is free software: you can redistribute it and/or modify
+* it under the terms of the GNU Affero General Public License, version 3,
+* as published by the Free Software Foundation.
+*
+* This program is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU Affero General Public License for more details.
+*
+* You should have received a copy of the GNU Affero General Public License
+* along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+
+#include "pch.h"
+
+#include "db/pipeline/document_source.h"
+
+#include "db/jsobj.h"
+#include "db/matcher.h"
+#include "db/pipeline/document.h"
+#include "db/pipeline/expression.h"
+
+namespace mongo {
+
+ const char DocumentSourceMatch::matchName[] = "$match";
+
+ DocumentSourceMatch::~DocumentSourceMatch() {
+ }
+
+ const char *DocumentSourceMatch::getSourceName() const {
+ return matchName;
+ }
+
+ void DocumentSourceMatch::sourceToBson(
+ BSONObjBuilder *pBuilder, bool explain) const {
+ const BSONObj *pQuery = matcher.getQuery();
+ pBuilder->append(matchName, *pQuery);
+ }
+
+ bool DocumentSourceMatch::accept(
+ const intrusive_ptr<Document> &pDocument) const {
+
+ /*
+ The matcher only takes BSON documents, so we have to make one.
+
+ LATER
+ We could optimize this by making a document with only the
+ fields referenced by the Matcher. We could do this by looking inside
+ the Matcher's BSON before it is created, and recording those. The
+ easiest implementation might be to hold onto an ExpressionDocument
+ in here, and give that pDocument to create the created subset of
+ fields, and then convert that instead.
+ */
+ BSONObjBuilder objBuilder;
+ pDocument->toBson(&objBuilder);
+ BSONObj obj(objBuilder.done());
+
+ return matcher.matches(obj);
+ }
+
+ intrusive_ptr<DocumentSource> DocumentSourceMatch::createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx) {
+ uassert(15959, "the match filter must be an expression in an object",
+ pBsonElement->type() == Object);
+
+ intrusive_ptr<DocumentSourceMatch> pMatcher(
+ new DocumentSourceMatch(pBsonElement->Obj(), pExpCtx));
+
+ return pMatcher;
+ }
+
+ void DocumentSourceMatch::toMatcherBson(BSONObjBuilder *pBuilder) const {
+ const BSONObj *pQuery = matcher.getQuery();
+ pBuilder->appendElements(*pQuery);
+ }
+
+ DocumentSourceMatch::DocumentSourceMatch(
+ const BSONObj &query,
+ const intrusive_ptr<ExpressionContext> &pExpCtx):
+ DocumentSourceFilterBase(pExpCtx),
+ matcher(query) {
+ }
+
+ void DocumentSourceMatch::manageDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker) {
+#ifdef MONGO_LATER_SERVER_4644
+ verify(false); // $$$ implement dependencies on Matcher
+#endif /* MONGO_LATER_SERVER_4644 */
+ }
+}
diff --git a/src/mongo/db/pipeline/document_source_out.cpp b/src/mongo/db/pipeline/document_source_out.cpp
index d62b830cab0..c19e066efe6 100755
--- a/src/mongo/db/pipeline/document_source_out.cpp
+++ b/src/mongo/db/pipeline/document_source_out.cpp
@@ -1,67 +1,67 @@
-/**
- * Copyright 2011 (c) 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-
-#include "db/pipeline/document_source.h"
-
-
-namespace mongo {
-
- const char DocumentSourceOut::outName[] = "$out";
-
- DocumentSourceOut::~DocumentSourceOut() {
- }
-
- const char *DocumentSourceOut::getSourceName() const {
- return outName;
- }
-
- bool DocumentSourceOut::eof() {
- return pSource->eof();
- }
-
- bool DocumentSourceOut::advance() {
- DocumentSource::advance(); // check for interrupts
-
- return pSource->advance();
- }
-
- boost::intrusive_ptr<Document> DocumentSourceOut::getCurrent() {
- return pSource->getCurrent();
- }
-
- DocumentSourceOut::DocumentSourceOut(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx):
- DocumentSource(pExpCtx) {
- verify(false && "unimplemented");
- }
-
- intrusive_ptr<DocumentSourceOut> DocumentSourceOut::createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx) {
- intrusive_ptr<DocumentSourceOut> pSource(
- new DocumentSourceOut(pBsonElement, pExpCtx));
-
- return pSource;
- }
-
- void DocumentSourceOut::sourceToBson(
- BSONObjBuilder *pBuilder, bool explain) const {
- verify(false); // CW TODO
- }
-}
+/**
+ * Copyright 2011 (c) 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+
+#include "db/pipeline/document_source.h"
+
+
+namespace mongo {
+
+ const char DocumentSourceOut::outName[] = "$out";
+
+ DocumentSourceOut::~DocumentSourceOut() {
+ }
+
+ const char *DocumentSourceOut::getSourceName() const {
+ return outName;
+ }
+
+ bool DocumentSourceOut::eof() {
+ return pSource->eof();
+ }
+
+ bool DocumentSourceOut::advance() {
+ DocumentSource::advance(); // check for interrupts
+
+ return pSource->advance();
+ }
+
+ boost::intrusive_ptr<Document> DocumentSourceOut::getCurrent() {
+ return pSource->getCurrent();
+ }
+
+ DocumentSourceOut::DocumentSourceOut(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx):
+ DocumentSource(pExpCtx) {
+ verify(false && "unimplemented");
+ }
+
+ intrusive_ptr<DocumentSourceOut> DocumentSourceOut::createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx) {
+ intrusive_ptr<DocumentSourceOut> pSource(
+ new DocumentSourceOut(pBsonElement, pExpCtx));
+
+ return pSource;
+ }
+
+ void DocumentSourceOut::sourceToBson(
+ BSONObjBuilder *pBuilder, bool explain) const {
+ verify(false); // CW TODO
+ }
+}
diff --git a/src/mongo/db/pipeline/document_source_project.cpp b/src/mongo/db/pipeline/document_source_project.cpp
index dc841931dee..c42c3eec683 100755
--- a/src/mongo/db/pipeline/document_source_project.cpp
+++ b/src/mongo/db/pipeline/document_source_project.cpp
@@ -1,289 +1,289 @@
-/**
- * Copyright 2011 (c) 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-#include "db/pipeline/document_source.h"
-
-#include "db/jsobj.h"
-#include "db/pipeline/document.h"
-#include "db/pipeline/expression.h"
-#include "db/pipeline/value.h"
-
-namespace mongo {
-
- const char DocumentSourceProject::projectName[] = "$project";
-
- DocumentSourceProject::~DocumentSourceProject() {
- }
-
- DocumentSourceProject::DocumentSourceProject(
- const intrusive_ptr<ExpressionContext> &pExpCtx):
- DocumentSource(pExpCtx),
- excludeId(false),
- pEO(ExpressionObject::create()) {
- }
-
- const char *DocumentSourceProject::getSourceName() const {
- return projectName;
- }
-
- bool DocumentSourceProject::eof() {
- return pSource->eof();
- }
-
- bool DocumentSourceProject::advance() {
- DocumentSource::advance(); // check for interrupts
-
- return pSource->advance();
- }
-
- intrusive_ptr<Document> DocumentSourceProject::getCurrent() {
- intrusive_ptr<Document> pInDocument(pSource->getCurrent());
-
- /* create the result document */
- const size_t sizeHint =
- pEO->getSizeHint(pInDocument) + (excludeId ? 0 : 1);
- intrusive_ptr<Document> pResultDocument(Document::create(sizeHint));
-
- if (!excludeId) {
- intrusive_ptr<const Value> pId(
- pInDocument->getField(Document::idName));
-
- /*
-              Previous projections could have removed _id (or declined to
-              generate it), so it might not exist. Only attempt to add
- if we found it.
- */
- if (pId.get())
- pResultDocument->addField(Document::idName, pId);
- }
-
- /*
- Use the ExpressionObject to create the base result.
-
- If we're excluding fields at the top level, leave out the _id if
- it is found, because we took care of it above.
- */
- pEO->addToDocument(pResultDocument, pInDocument, true);
-
- return pResultDocument;
- }
-
- void DocumentSourceProject::optimize() {
- intrusive_ptr<Expression> pE(pEO->optimize());
- pEO = dynamic_pointer_cast<ExpressionObject>(pE);
- }
-
- void DocumentSourceProject::sourceToBson(
- BSONObjBuilder *pBuilder, bool explain) const {
- BSONObjBuilder insides;
- if (excludeId)
- insides.append(Document::idName, false);
- pEO->documentToBson(&insides, true);
- pBuilder->append(projectName, insides.done());
- }
-
- intrusive_ptr<DocumentSourceProject> DocumentSourceProject::create(
- const intrusive_ptr<ExpressionContext> &pExpCtx) {
- intrusive_ptr<DocumentSourceProject> pSource(
- new DocumentSourceProject(pExpCtx));
- return pSource;
- }
-
- void DocumentSourceProject::addField(
- const string &fieldName, const intrusive_ptr<Expression> &pExpression) {
- uassert(15960,
- "projection fields must be defined by non-empty expressions",
- pExpression);
-
- pEO->addField(fieldName, pExpression);
- }
-
- void DocumentSourceProject::includePath(const string &fieldPath) {
- if (Document::idName.compare(fieldPath) == 0) {
- uassert(15961, str::stream() << projectName <<
- ": _id cannot be included once it has been excluded",
- !excludeId);
-
- return;
- }
-
- pEO->includePath(fieldPath);
- }
-
- void DocumentSourceProject::excludePath(const string &fieldPath) {
- if (Document::idName.compare(fieldPath) == 0) {
- excludeId = true;
- return;
- }
-
- pEO->excludePath(fieldPath);
- }
-
- intrusive_ptr<DocumentSource> DocumentSourceProject::createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx) {
- /* validate */
- uassert(15969, str::stream() << projectName <<
- " specification must be an object",
- pBsonElement->type() == Object);
-
- /* chain the projection onto the original source */
- intrusive_ptr<DocumentSourceProject> pProject(
- DocumentSourceProject::create(pExpCtx));
-
- /*
- Pull out the $project object. This should just be a list of
- field inclusion or exclusion specifications. Note you can't do
- both, except for the case of _id.
- */
- BSONObj projectObj(pBsonElement->Obj());
- BSONObjIterator fieldIterator(projectObj);
- Expression::ObjectCtx objectCtx(
- Expression::ObjectCtx::DOCUMENT_OK);
- while(fieldIterator.more()) {
- BSONElement outFieldElement(fieldIterator.next());
- string outFieldPath(outFieldElement.fieldName());
- string inFieldName(outFieldPath);
- BSONType specType = outFieldElement.type();
- int fieldInclusion = -1;
-
- switch(specType) {
- case NumberDouble: {
- double inclusion = outFieldElement.numberDouble();
- fieldInclusion = static_cast<int>(inclusion);
- goto IncludeExclude;
- }
-
- case NumberLong: {
- long long inclusion = outFieldElement.numberLong();
- fieldInclusion = static_cast<int>(inclusion);
- goto IncludeExclude;
- }
-
- case NumberInt:
- /* just a plain integer include/exclude specification */
- fieldInclusion = outFieldElement.numberInt();
-
-IncludeExclude:
- uassert(15970, str::stream() <<
- "field inclusion or exclusion specification for \"" <<
- outFieldPath <<
- "\" must be true, 1, false, or zero",
- ((fieldInclusion == 0) || (fieldInclusion == 1)));
-
- if (fieldInclusion == 0)
- pProject->excludePath(outFieldPath);
- else
- pProject->includePath(outFieldPath);
- break;
-
- case Bool:
- /* just a plain boolean include/exclude specification */
- fieldInclusion = (outFieldElement.Bool() ? 1 : 0);
- goto IncludeExclude;
-
- case String:
- /* include a field, with rename */
- fieldInclusion = 1;
- inFieldName = outFieldElement.String();
- pProject->addField(
- outFieldPath,
- ExpressionFieldPath::create(
- Expression::removeFieldPrefix(inFieldName)));
- break;
-
- case Object: {
- intrusive_ptr<Expression> pDocument(
- Expression::parseObject(&outFieldElement, &objectCtx));
-
-                /* add the document expression to the projection */
- pProject->addField(outFieldPath, pDocument);
- break;
- }
-
- default:
- uassert(15971, str::stream() <<
- "invalid BSON type (" << specType <<
- ") for " << projectName <<
- " field " << outFieldPath, false);
- }
-
- }
-
- return pProject;
- }
-
- void DocumentSourceProject::DependencyRemover::path(
- const string &path, bool include) {
- if (include)
- pTracker->removeDependency(path);
- }
-
- void DocumentSourceProject::DependencyChecker::path(
- const string &path, bool include) {
- /* if the specified path is included, there's nothing to check */
- if (include)
- return;
-
- /* if the specified path is excluded, see if it is required */
- intrusive_ptr<const DocumentSource> pSource;
- if (pTracker->getDependency(&pSource, path)) {
- uassert(15984, str::stream() <<
- "unable to satisfy dependency on " <<
- FieldPath::getPrefix() <<
- path << " in pipeline step " <<
- pSource->getPipelineStep() <<
- " (" << pSource->getSourceName() << "), because step " <<
- pThis->getPipelineStep() << " ("
- << pThis->getSourceName() << ") excludes it",
- false); // printf() is way easier to read than this crap
- }
- }
-
- void DocumentSourceProject::manageDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker) {
- /*
- Look at all the products (inclusions and computed fields) of this
- projection. For each one that is a dependency, remove it from the
- list of dependencies, because this product will satisfy that
- dependency.
- */
- DependencyRemover dependencyRemover(pTracker);
- pEO->emitPaths(&dependencyRemover);
-
- /*
- Look at the exclusions of this projection. If any of them are
-          dependencies, inform the user (error/uassert) that the dependency
- can't be satisfied.
-
- Note we need to do this after the product examination above because
- it is possible for there to be an exclusion field name that matches
- a new computed product field name. The latter would satisfy the
- dependency.
- */
- DependencyChecker dependencyChecker(pTracker, this);
- pEO->emitPaths(&dependencyChecker);
-
- /*
- Look at the products of this projection. For inclusions, add the
- field names to the list of dependencies. For computed expressions,
- add their dependencies to the list of dependencies.
- */
- pEO->addDependencies(pTracker, this);
- }
-
-}
+/**
+ * Copyright 2011 (c) 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "db/pipeline/document_source.h"
+
+#include "db/jsobj.h"
+#include "db/pipeline/document.h"
+#include "db/pipeline/expression.h"
+#include "db/pipeline/value.h"
+
+namespace mongo {
+
+ const char DocumentSourceProject::projectName[] = "$project";
+
+ DocumentSourceProject::~DocumentSourceProject() {
+ }
+
+ DocumentSourceProject::DocumentSourceProject(
+ const intrusive_ptr<ExpressionContext> &pExpCtx):
+ DocumentSource(pExpCtx),
+ excludeId(false),
+ pEO(ExpressionObject::create()) {
+ }
+
+ const char *DocumentSourceProject::getSourceName() const {
+ return projectName;
+ }
+
+ bool DocumentSourceProject::eof() {
+ return pSource->eof();
+ }
+
+ bool DocumentSourceProject::advance() {
+ DocumentSource::advance(); // check for interrupts
+
+ return pSource->advance();
+ }
+
+ intrusive_ptr<Document> DocumentSourceProject::getCurrent() {
+ intrusive_ptr<Document> pInDocument(pSource->getCurrent());
+
+ /* create the result document */
+ const size_t sizeHint =
+ pEO->getSizeHint(pInDocument) + (excludeId ? 0 : 1);
+ intrusive_ptr<Document> pResultDocument(Document::create(sizeHint));
+
+ if (!excludeId) {
+ intrusive_ptr<const Value> pId(
+ pInDocument->getField(Document::idName));
+
+ /*
+              Previous projections could have removed _id (or declined to
+              generate it), so it might not exist. Only attempt to add
+ if we found it.
+ */
+ if (pId.get())
+ pResultDocument->addField(Document::idName, pId);
+ }
+
+ /*
+ Use the ExpressionObject to create the base result.
+
+ If we're excluding fields at the top level, leave out the _id if
+ it is found, because we took care of it above.
+ */
+ pEO->addToDocument(pResultDocument, pInDocument, true);
+
+ return pResultDocument;
+ }
+
+ void DocumentSourceProject::optimize() {
+ intrusive_ptr<Expression> pE(pEO->optimize());
+ pEO = dynamic_pointer_cast<ExpressionObject>(pE);
+ }
+
+ void DocumentSourceProject::sourceToBson(
+ BSONObjBuilder *pBuilder, bool explain) const {
+ BSONObjBuilder insides;
+ if (excludeId)
+ insides.append(Document::idName, false);
+ pEO->documentToBson(&insides, true);
+ pBuilder->append(projectName, insides.done());
+ }
+
+ intrusive_ptr<DocumentSourceProject> DocumentSourceProject::create(
+ const intrusive_ptr<ExpressionContext> &pExpCtx) {
+ intrusive_ptr<DocumentSourceProject> pSource(
+ new DocumentSourceProject(pExpCtx));
+ return pSource;
+ }
+
+ void DocumentSourceProject::addField(
+ const string &fieldName, const intrusive_ptr<Expression> &pExpression) {
+ uassert(15960,
+ "projection fields must be defined by non-empty expressions",
+ pExpression);
+
+ pEO->addField(fieldName, pExpression);
+ }
+
+ void DocumentSourceProject::includePath(const string &fieldPath) {
+ if (Document::idName.compare(fieldPath) == 0) {
+ uassert(15961, str::stream() << projectName <<
+ ": _id cannot be included once it has been excluded",
+ !excludeId);
+
+ return;
+ }
+
+ pEO->includePath(fieldPath);
+ }
+
+ void DocumentSourceProject::excludePath(const string &fieldPath) {
+ if (Document::idName.compare(fieldPath) == 0) {
+ excludeId = true;
+ return;
+ }
+
+ pEO->excludePath(fieldPath);
+ }
+
+ intrusive_ptr<DocumentSource> DocumentSourceProject::createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx) {
+ /* validate */
+ uassert(15969, str::stream() << projectName <<
+ " specification must be an object",
+ pBsonElement->type() == Object);
+
+ /* chain the projection onto the original source */
+ intrusive_ptr<DocumentSourceProject> pProject(
+ DocumentSourceProject::create(pExpCtx));
+
+ /*
+ Pull out the $project object. This should just be a list of
+ field inclusion or exclusion specifications. Note you can't do
+ both, except for the case of _id.
+ */
+ BSONObj projectObj(pBsonElement->Obj());
+ BSONObjIterator fieldIterator(projectObj);
+ Expression::ObjectCtx objectCtx(
+ Expression::ObjectCtx::DOCUMENT_OK);
+ while(fieldIterator.more()) {
+ BSONElement outFieldElement(fieldIterator.next());
+ string outFieldPath(outFieldElement.fieldName());
+ string inFieldName(outFieldPath);
+ BSONType specType = outFieldElement.type();
+ int fieldInclusion = -1;
+
+ switch(specType) {
+ case NumberDouble: {
+ double inclusion = outFieldElement.numberDouble();
+ fieldInclusion = static_cast<int>(inclusion);
+ goto IncludeExclude;
+ }
+
+ case NumberLong: {
+ long long inclusion = outFieldElement.numberLong();
+ fieldInclusion = static_cast<int>(inclusion);
+ goto IncludeExclude;
+ }
+
+ case NumberInt:
+ /* just a plain integer include/exclude specification */
+ fieldInclusion = outFieldElement.numberInt();
+
+IncludeExclude:
+ uassert(15970, str::stream() <<
+ "field inclusion or exclusion specification for \"" <<
+ outFieldPath <<
+ "\" must be true, 1, false, or zero",
+ ((fieldInclusion == 0) || (fieldInclusion == 1)));
+
+ if (fieldInclusion == 0)
+ pProject->excludePath(outFieldPath);
+ else
+ pProject->includePath(outFieldPath);
+ break;
+
+ case Bool:
+ /* just a plain boolean include/exclude specification */
+ fieldInclusion = (outFieldElement.Bool() ? 1 : 0);
+ goto IncludeExclude;
+
+ case String:
+ /* include a field, with rename */
+ fieldInclusion = 1;
+ inFieldName = outFieldElement.String();
+ pProject->addField(
+ outFieldPath,
+ ExpressionFieldPath::create(
+ Expression::removeFieldPrefix(inFieldName)));
+ break;
+
+ case Object: {
+ intrusive_ptr<Expression> pDocument(
+ Expression::parseObject(&outFieldElement, &objectCtx));
+
+                /* add the document expression to the projection */
+ pProject->addField(outFieldPath, pDocument);
+ break;
+ }
+
+ default:
+ uassert(15971, str::stream() <<
+ "invalid BSON type (" << specType <<
+ ") for " << projectName <<
+ " field " << outFieldPath, false);
+ }
+
+ }
+
+ return pProject;
+ }
+
+ void DocumentSourceProject::DependencyRemover::path(
+ const string &path, bool include) {
+ if (include)
+ pTracker->removeDependency(path);
+ }
+
+ void DocumentSourceProject::DependencyChecker::path(
+ const string &path, bool include) {
+ /* if the specified path is included, there's nothing to check */
+ if (include)
+ return;
+
+ /* if the specified path is excluded, see if it is required */
+ intrusive_ptr<const DocumentSource> pSource;
+ if (pTracker->getDependency(&pSource, path)) {
+ uassert(15984, str::stream() <<
+ "unable to satisfy dependency on " <<
+ FieldPath::getPrefix() <<
+ path << " in pipeline step " <<
+ pSource->getPipelineStep() <<
+ " (" << pSource->getSourceName() << "), because step " <<
+ pThis->getPipelineStep() << " ("
+ << pThis->getSourceName() << ") excludes it",
+ false); // printf() is way easier to read than this crap
+ }
+ }
+
+ void DocumentSourceProject::manageDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker) {
+ /*
+ Look at all the products (inclusions and computed fields) of this
+ projection. For each one that is a dependency, remove it from the
+ list of dependencies, because this product will satisfy that
+ dependency.
+ */
+ DependencyRemover dependencyRemover(pTracker);
+ pEO->emitPaths(&dependencyRemover);
+
+ /*
+ Look at the exclusions of this projection. If any of them are
+          dependencies, inform the user (error/uassert) that the dependency
+ can't be satisfied.
+
+ Note we need to do this after the product examination above because
+ it is possible for there to be an exclusion field name that matches
+ a new computed product field name. The latter would satisfy the
+ dependency.
+ */
+ DependencyChecker dependencyChecker(pTracker, this);
+ pEO->emitPaths(&dependencyChecker);
+
+ /*
+ Look at the products of this projection. For inclusions, add the
+ field names to the list of dependencies. For computed expressions,
+ add their dependencies to the list of dependencies.
+ */
+ pEO->addDependencies(pTracker, this);
+ }
+
+}
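
The createFromBson() switch above funnels every numeric or boolean spec through
the IncludeExclude label and accepts only 0/false or 1/true; strings become
renames and objects become computed expressions. A self-contained sketch of the
include/exclude normalization, using a flat std::map in place of BSON (the
helper and field names here are illustrative only):

    #include <iostream>
    #include <map>
    #include <stdexcept>
    #include <string>
    #include <vector>

    // Accept only 0/false or 1/true, as the IncludeExclude branch does.
    bool parseInclusion(const std::string &field, double spec) {
        int flag = static_cast<int>(spec);
        if (flag != 0 && flag != 1)
            throw std::invalid_argument("spec for \"" + field +
                                        "\" must be true, 1, false, or 0");
        return flag == 1;
    }

    int main() {
        std::map<std::string, double> spec;      // e.g. {_id: 0, a: 1, b: 1}
        spec["_id"] = 0;
        spec["a"] = 1;
        spec["b"] = 1;

        std::vector<std::string> included, excluded;
        for (std::map<std::string, double>::const_iterator it = spec.begin();
             it != spec.end(); ++it)
            (parseInclusion(it->first, it->second) ? included : excluded)
                .push_back(it->first);

        std::cout << included.size() << " included, "
                  << excluded.size() << " excluded\n";   // 2 included, 1 excluded
        return 0;
    }
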
diff --git a/src/mongo/db/pipeline/document_source_sort.cpp b/src/mongo/db/pipeline/document_source_sort.cpp
index ebb39dbc1a2..77b9a15c181 100755
--- a/src/mongo/db/pipeline/document_source_sort.cpp
+++ b/src/mongo/db/pipeline/document_source_sort.cpp
@@ -1,226 +1,226 @@
-/**
-* Copyright (C) 2011 10gen Inc.
-*
-* This program is free software: you can redistribute it and/or modify
-* it under the terms of the GNU Affero General Public License, version 3,
-* as published by the Free Software Foundation.
-*
-* This program is distributed in the hope that it will be useful,
-* but WITHOUT ANY WARRANTY; without even the implied warranty of
-* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-* GNU Affero General Public License for more details.
-*
-* You should have received a copy of the GNU Affero General Public License
-* along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/
-
-#include "pch.h"
-
-#include "db/pipeline/document_source.h"
-
-#include "db/jsobj.h"
-#include "db/pipeline/dependency_tracker.h"
-#include "db/pipeline/doc_mem_monitor.h"
-#include "db/pipeline/document.h"
-#include "db/pipeline/expression.h"
-#include "db/pipeline/expression_context.h"
-#include "db/pipeline/value.h"
-
-
-namespace mongo {
- const char DocumentSourceSort::sortName[] = "$sort";
-
- DocumentSourceSort::~DocumentSourceSort() {
- }
-
- const char *DocumentSourceSort::getSourceName() const {
- return sortName;
- }
-
- bool DocumentSourceSort::eof() {
- if (!populated)
- populate();
-
- return (docIterator == documents.end());
- }
-
- bool DocumentSourceSort::advance() {
- DocumentSource::advance(); // check for interrupts
-
- if (!populated)
- populate();
-
- verify(docIterator != documents.end());
-
- ++docIterator;
- if (docIterator == documents.end()) {
- pCurrent.reset();
- count = 0;
- return false;
- }
- pCurrent = *docIterator;
-
- return true;
- }
-
- intrusive_ptr<Document> DocumentSourceSort::getCurrent() {
- if (!populated)
- populate();
-
- return pCurrent;
- }
-
- void DocumentSourceSort::sourceToBson(
- BSONObjBuilder *pBuilder, bool explain) const {
- BSONObjBuilder insides;
- sortKeyToBson(&insides, false);
- pBuilder->append(sortName, insides.done());
- }
-
- intrusive_ptr<DocumentSourceSort> DocumentSourceSort::create(
- const intrusive_ptr<ExpressionContext> &pExpCtx) {
- intrusive_ptr<DocumentSourceSort> pSource(
- new DocumentSourceSort(pExpCtx));
- return pSource;
- }
-
- DocumentSourceSort::DocumentSourceSort(
- const intrusive_ptr<ExpressionContext> &pExpCtx):
- DocumentSource(pExpCtx),
- populated(false) {
- }
-
- void DocumentSourceSort::addKey(const string &fieldPath, bool ascending) {
- intrusive_ptr<ExpressionFieldPath> pE(
- ExpressionFieldPath::create(fieldPath));
- vSortKey.push_back(pE);
- vAscending.push_back(ascending);
- }
-
- void DocumentSourceSort::sortKeyToBson(
- BSONObjBuilder *pBuilder, bool usePrefix) const {
- /* add the key fields */
- const size_t n = vSortKey.size();
- for(size_t i = 0; i < n; ++i) {
- /* create the "field name" */
- stringstream ss;
- vSortKey[i]->writeFieldPath(ss, usePrefix);
-
- /* append a named integer based on the sort order */
- pBuilder->append(ss.str(), (vAscending[i] ? 1 : -1));
- }
- }
-
- intrusive_ptr<DocumentSource> DocumentSourceSort::createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx) {
- uassert(15973, str::stream() << " the " <<
- sortName << " key specification must be an object",
- pBsonElement->type() == Object);
-
- intrusive_ptr<DocumentSourceSort> pSort(
- DocumentSourceSort::create(pExpCtx));
-
-        /* check for, then iterate over, the sort object */
- size_t sortKeys = 0;
- for(BSONObjIterator keyIterator(pBsonElement->Obj().begin());
- keyIterator.more();) {
- BSONElement keyField(keyIterator.next());
- const char *pKeyFieldName = keyField.fieldName();
- int sortOrder = 0;
-
- uassert(15974, str::stream() << sortName <<
- " key ordering must be specified using a number",
- keyField.isNumber());
- sortOrder = (int)keyField.numberInt();
-
- uassert(15975, str::stream() << sortName <<
-                " key ordering must be 1 (for ascending) or -1 (for descending)",
- ((sortOrder == 1) || (sortOrder == -1)));
-
- pSort->addKey(pKeyFieldName, (sortOrder > 0));
- ++sortKeys;
- }
-
- uassert(15976, str::stream() << sortName <<
- " must have at least one sort key", (sortKeys > 0));
-
- return pSort;
- }
-
- void DocumentSourceSort::populate() {
- /* make sure we've got a sort key */
- verify(vSortKey.size());
-
- /* track and warn about how much physical memory has been used */
- DocMemMonitor dmm(this);
-
- /* pull everything from the underlying source */
- for(bool hasNext = !pSource->eof(); hasNext;
- hasNext = pSource->advance()) {
- intrusive_ptr<Document> pDocument(pSource->getCurrent());
- documents.push_back(pDocument);
-
- dmm.addToTotal(pDocument->getApproximateSize());
- }
-
- /* sort the list */
- Comparator comparator(this);
- sort(documents.begin(), documents.end(), comparator);
-
- /* start the sort iterator */
- docIterator = documents.begin();
-
- if (docIterator != documents.end())
- pCurrent = *docIterator;
- populated = true;
- }
-
- int DocumentSourceSort::compare(
- const intrusive_ptr<Document> &pL, const intrusive_ptr<Document> &pR) {
-
- /*
- populate() already checked that there is a non-empty sort key,
- so we shouldn't have to worry about that here.
-
-          However, the tricky part is what to do if none of the sort keys are
- present. In this case, consider the document less.
- */
- const size_t n = vSortKey.size();
- for(size_t i = 0; i < n; ++i) {
- /* evaluate the sort keys */
- ExpressionFieldPath *pE = vSortKey[i].get();
- intrusive_ptr<const Value> pLeft(pE->evaluate(pL));
- intrusive_ptr<const Value> pRight(pE->evaluate(pR));
-
- /*
- Compare the two values; if they differ, return. If they are
- the same, move on to the next key.
- */
- int cmp = Value::compare(pLeft, pRight);
- if (cmp) {
- /* if necessary, adjust the return value by the key ordering */
- if (!vAscending[i])
- cmp = -cmp;
-
- return cmp;
- }
- }
-
- /*
- If we got here, everything matched (or didn't exist), so we'll
- consider the documents equal for purposes of this sort.
- */
- return 0;
- }
-
- void DocumentSourceSort::manageDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker) {
- /* get the dependencies out of the matcher */
- for(SortPaths::iterator i(vSortKey.begin()); i != vSortKey.end(); ++i) {
- string fieldPath((*i)->getFieldPath(false));
- pTracker->addDependency(fieldPath, this);
- }
- }
-
-}
+/**
+* Copyright (C) 2011 10gen Inc.
+*
+* This program is free software: you can redistribute it and/or modify
+* it under the terms of the GNU Affero General Public License, version 3,
+* as published by the Free Software Foundation.
+*
+* This program is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU Affero General Public License for more details.
+*
+* You should have received a copy of the GNU Affero General Public License
+* along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+
+#include "pch.h"
+
+#include "db/pipeline/document_source.h"
+
+#include "db/jsobj.h"
+#include "db/pipeline/dependency_tracker.h"
+#include "db/pipeline/doc_mem_monitor.h"
+#include "db/pipeline/document.h"
+#include "db/pipeline/expression.h"
+#include "db/pipeline/expression_context.h"
+#include "db/pipeline/value.h"
+
+
+namespace mongo {
+ const char DocumentSourceSort::sortName[] = "$sort";
+
+ DocumentSourceSort::~DocumentSourceSort() {
+ }
+
+ const char *DocumentSourceSort::getSourceName() const {
+ return sortName;
+ }
+
+ bool DocumentSourceSort::eof() {
+ if (!populated)
+ populate();
+
+ return (docIterator == documents.end());
+ }
+
+ bool DocumentSourceSort::advance() {
+ DocumentSource::advance(); // check for interrupts
+
+ if (!populated)
+ populate();
+
+ verify(docIterator != documents.end());
+
+ ++docIterator;
+ if (docIterator == documents.end()) {
+ pCurrent.reset();
+ count = 0;
+ return false;
+ }
+ pCurrent = *docIterator;
+
+ return true;
+ }
+
+ intrusive_ptr<Document> DocumentSourceSort::getCurrent() {
+ if (!populated)
+ populate();
+
+ return pCurrent;
+ }
+
+ void DocumentSourceSort::sourceToBson(
+ BSONObjBuilder *pBuilder, bool explain) const {
+ BSONObjBuilder insides;
+ sortKeyToBson(&insides, false);
+ pBuilder->append(sortName, insides.done());
+ }
+
+ intrusive_ptr<DocumentSourceSort> DocumentSourceSort::create(
+ const intrusive_ptr<ExpressionContext> &pExpCtx) {
+ intrusive_ptr<DocumentSourceSort> pSource(
+ new DocumentSourceSort(pExpCtx));
+ return pSource;
+ }
+
+ DocumentSourceSort::DocumentSourceSort(
+ const intrusive_ptr<ExpressionContext> &pExpCtx):
+ DocumentSource(pExpCtx),
+ populated(false) {
+ }
+
+ void DocumentSourceSort::addKey(const string &fieldPath, bool ascending) {
+ intrusive_ptr<ExpressionFieldPath> pE(
+ ExpressionFieldPath::create(fieldPath));
+ vSortKey.push_back(pE);
+ vAscending.push_back(ascending);
+ }
+
+ void DocumentSourceSort::sortKeyToBson(
+ BSONObjBuilder *pBuilder, bool usePrefix) const {
+ /* add the key fields */
+ const size_t n = vSortKey.size();
+ for(size_t i = 0; i < n; ++i) {
+ /* create the "field name" */
+ stringstream ss;
+ vSortKey[i]->writeFieldPath(ss, usePrefix);
+
+ /* append a named integer based on the sort order */
+ pBuilder->append(ss.str(), (vAscending[i] ? 1 : -1));
+ }
+ }
+
+ intrusive_ptr<DocumentSource> DocumentSourceSort::createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx) {
+ uassert(15973, str::stream() << " the " <<
+ sortName << " key specification must be an object",
+ pBsonElement->type() == Object);
+
+ intrusive_ptr<DocumentSourceSort> pSort(
+ DocumentSourceSort::create(pExpCtx));
+
+        /* check for, then iterate over, the sort object */
+ size_t sortKeys = 0;
+ for(BSONObjIterator keyIterator(pBsonElement->Obj().begin());
+ keyIterator.more();) {
+ BSONElement keyField(keyIterator.next());
+ const char *pKeyFieldName = keyField.fieldName();
+ int sortOrder = 0;
+
+ uassert(15974, str::stream() << sortName <<
+ " key ordering must be specified using a number",
+ keyField.isNumber());
+ sortOrder = (int)keyField.numberInt();
+
+ uassert(15975, str::stream() << sortName <<
+                " key ordering must be 1 (for ascending) or -1 (for descending)",
+ ((sortOrder == 1) || (sortOrder == -1)));
+
+ pSort->addKey(pKeyFieldName, (sortOrder > 0));
+ ++sortKeys;
+ }
+
+ uassert(15976, str::stream() << sortName <<
+ " must have at least one sort key", (sortKeys > 0));
+
+ return pSort;
+ }
+
+ void DocumentSourceSort::populate() {
+ /* make sure we've got a sort key */
+ verify(vSortKey.size());
+
+ /* track and warn about how much physical memory has been used */
+ DocMemMonitor dmm(this);
+
+ /* pull everything from the underlying source */
+ for(bool hasNext = !pSource->eof(); hasNext;
+ hasNext = pSource->advance()) {
+ intrusive_ptr<Document> pDocument(pSource->getCurrent());
+ documents.push_back(pDocument);
+
+ dmm.addToTotal(pDocument->getApproximateSize());
+ }
+
+ /* sort the list */
+ Comparator comparator(this);
+ sort(documents.begin(), documents.end(), comparator);
+
+ /* start the sort iterator */
+ docIterator = documents.begin();
+
+ if (docIterator != documents.end())
+ pCurrent = *docIterator;
+ populated = true;
+ }
+
+ int DocumentSourceSort::compare(
+ const intrusive_ptr<Document> &pL, const intrusive_ptr<Document> &pR) {
+
+ /*
+ populate() already checked that there is a non-empty sort key,
+ so we shouldn't have to worry about that here.
+
+          However, the tricky part is what to do if none of the sort keys are
+ present. In this case, consider the document less.
+ */
+ const size_t n = vSortKey.size();
+ for(size_t i = 0; i < n; ++i) {
+ /* evaluate the sort keys */
+ ExpressionFieldPath *pE = vSortKey[i].get();
+ intrusive_ptr<const Value> pLeft(pE->evaluate(pL));
+ intrusive_ptr<const Value> pRight(pE->evaluate(pR));
+
+ /*
+ Compare the two values; if they differ, return. If they are
+ the same, move on to the next key.
+ */
+ int cmp = Value::compare(pLeft, pRight);
+ if (cmp) {
+ /* if necessary, adjust the return value by the key ordering */
+ if (!vAscending[i])
+ cmp = -cmp;
+
+ return cmp;
+ }
+ }
+
+ /*
+ If we got here, everything matched (or didn't exist), so we'll
+ consider the documents equal for purposes of this sort.
+ */
+ return 0;
+ }
+
+ void DocumentSourceSort::manageDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker) {
+ /* get the dependencies out of the matcher */
+ for(SortPaths::iterator i(vSortKey.begin()); i != vSortKey.end(); ++i) {
+ string fieldPath((*i)->getFieldPath(false));
+ pTracker->addDependency(fieldPath, this);
+ }
+ }
+
+}
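
populate() buffers every document from the upstream source, then sorts the
buffer with a comparator that walks the sort keys in order, flips the sign for
descending keys, and stops at the first key that differs. The same comparison
in a self-contained form, with documents reduced to string-to-int maps (a
simplified sketch; the real code compares through Value::compare):

    #include <algorithm>
    #include <map>
    #include <string>
    #include <vector>

    typedef std::map<std::string, int> Doc;      // simplified document

    struct SortKey { std::string field; bool ascending; };

    struct MultiKeyLess {
        std::vector<SortKey> keys;
        explicit MultiKeyLess(const std::vector<SortKey> &k) : keys(k) {}

        bool operator()(const Doc &a, const Doc &b) const {
            for (size_t i = 0; i < keys.size(); ++i) {
                // a missing field compares as 0 here, unlike the real code
                int l = a.count(keys[i].field) ? a.find(keys[i].field)->second : 0;
                int r = b.count(keys[i].field) ? b.find(keys[i].field)->second : 0;
                int cmp = (l < r) ? -1 : (l > r) ? 1 : 0;
                if (!keys[i].ascending)
                    cmp = -cmp;                  // descending key inverts the order
                if (cmp)
                    return cmp < 0;              // first differing key decides
            }
            return false;                        // all keys equal
        }
    };

    // usage: std::sort(docs.begin(), docs.end(), MultiKeyLess(keys));

Because everything is buffered before the first result is returned, memory use
grows with the input, which is why populate() runs under a DocMemMonitor.
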
diff --git a/src/mongo/db/pipeline/document_source_unwind.cpp b/src/mongo/db/pipeline/document_source_unwind.cpp
index 58999bb6942..ec5d2236800 100755
--- a/src/mongo/db/pipeline/document_source_unwind.cpp
+++ b/src/mongo/db/pipeline/document_source_unwind.cpp
@@ -1,249 +1,249 @@
-/**
- * Copyright 2011 (c) 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-#include "db/pipeline/document_source.h"
-
-#include "db/jsobj.h"
-#include "db/pipeline/document.h"
-#include "db/pipeline/expression.h"
-#include "db/pipeline/value.h"
-
-namespace mongo {
-
- const char DocumentSourceUnwind::unwindName[] = "$unwind";
-
- DocumentSourceUnwind::~DocumentSourceUnwind() {
- }
-
- DocumentSourceUnwind::DocumentSourceUnwind(
- const intrusive_ptr<ExpressionContext> &pExpCtx):
- DocumentSource(pExpCtx),
- unwindPath(),
- pNoUnwindDocument(),
- pUnwindArray(),
- pUnwinder(),
- pUnwindValue() {
- }
-
- const char *DocumentSourceUnwind::getSourceName() const {
- return unwindName;
- }
-
- bool DocumentSourceUnwind::eof() {
- /*
- If we're unwinding an array, and there are more elements, then we
- can return more documents.
- */
- if (pUnwinder.get() && pUnwinder->more())
- return false;
-
- return pSource->eof();
- }
-
- bool DocumentSourceUnwind::advance() {
- DocumentSource::advance(); // check for interrupts
-
- if (pUnwinder.get() && pUnwinder->more()) {
- pUnwindValue = pUnwinder->next();
- return true;
- }
-
- /* release the last document and advance */
- resetArray();
- return pSource->advance();
- }
-
- intrusive_ptr<Document> DocumentSourceUnwind::getCurrent() {
- if (!pNoUnwindDocument.get()) {
- intrusive_ptr<Document> pInDocument(pSource->getCurrent());
-
- /* create the result document */
- pNoUnwindDocument = pInDocument;
- fieldIndex.clear();
-
- /*
- First we'll look to see if the path is there. If it isn't,
- we'll pass this document through. If it is, we record the
- indexes of the fields down the field path so that we can
- quickly replace them as we clone the documents along the
- field path.
-
- We have to clone all the documents along the field path so
- that we don't share the end value across documents that have
- come out of this pipeline operator.
- */
- intrusive_ptr<Document> pCurrent(pInDocument);
- const size_t pathLength = unwindPath.getPathLength();
- for(size_t i = 0; i < pathLength; ++i) {
- size_t idx = pCurrent->getFieldIndex(
- unwindPath.getFieldName(i));
- if (idx == pCurrent->getFieldCount() ) {
- /* this document doesn't contain the target field */
- resetArray();
- return pInDocument;
- break;
- }
-
- fieldIndex.push_back(idx);
- Document::FieldPair fp(pCurrent->getField(idx));
- intrusive_ptr<const Value> pPathValue(fp.second);
- if (i < pathLength - 1) {
- if (pPathValue->getType() != Object) {
- /* can't walk down the field path */
- resetArray();
- uassert(15977, str::stream() << unwindName <<
- ": cannot traverse field path past scalar value for \"" <<
- fp.first << "\"", false);
- break;
- }
-
- /* move down the object tree */
- pCurrent = pPathValue->getDocument();
- }
- else /* (i == pathLength - 1) */ {
- if (pPathValue->getType() != Array) {
- /* last item on path must be an array to unwind */
- resetArray();
- uassert(15978, str::stream() << unwindName <<
- ": value at end of field path must be an array",
- false);
- break;
- }
-
- /* keep track of the array we're unwinding */
- pUnwindArray = pPathValue;
- if (pUnwindArray->getArrayLength() == 0) {
- /*
- The $unwind of an empty array is a NULL value. If we
- encounter this, use the non-unwind path, but replace
- pOutField with a null.
-
- Make sure unwind value is clear so the array is
- removed.
- */
- pUnwindValue.reset();
- intrusive_ptr<Document> pClone(clonePath());
- resetArray();
- return pClone;
- }
-
- /* get the iterator we'll use to unwind the array */
- pUnwinder = pUnwindArray->getArray();
- verify(pUnwinder->more()); // we just checked above...
- pUnwindValue = pUnwinder->next();
- }
- }
- }
-
- /*
- If we're unwinding a field, create an alternate document. In the
- alternate (clone), replace the unwound array field with the element
- at the appropriate index.
- */
- if (pUnwindArray.get()) {
- /* clone the document with an array we're unwinding */
- intrusive_ptr<Document> pUnwindDocument(clonePath());
-
- return pUnwindDocument;
- }
-
- return pNoUnwindDocument;
- }
-
- intrusive_ptr<Document> DocumentSourceUnwind::clonePath() const {
- /*
- For this to be valid, we must already have pNoUnwindDocument set,
- and have set up the vector of indices for that document in fieldIndex.
- */
- verify(pNoUnwindDocument.get());
-
- intrusive_ptr<Document> pClone(pNoUnwindDocument->clone());
- intrusive_ptr<Document> pCurrent(pClone);
- const size_t n = fieldIndex.size();
- verify(n);
- for(size_t i = 0; i < n; ++i) {
- const size_t fi = fieldIndex[i];
- Document::FieldPair fp(pCurrent->getField(fi));
- if (i + 1 < n) {
- /*
- For every object in the path but the last, clone it and
- continue on down.
- */
- intrusive_ptr<Document> pNext(
- fp.second->getDocument()->clone());
- pCurrent->setField(fi, fp.first, Value::createDocument(pNext));
- pCurrent = pNext;
- }
- else {
-                /* for the last, substitute the next unwound value */
- pCurrent->setField(fi, fp.first, pUnwindValue);
- }
- }
-
- return pClone;
- }
-
- void DocumentSourceUnwind::sourceToBson(
- BSONObjBuilder *pBuilder, bool explain) const {
- pBuilder->append(unwindName, unwindPath.getPath(true));
- }
-
- intrusive_ptr<DocumentSourceUnwind> DocumentSourceUnwind::create(
- const intrusive_ptr<ExpressionContext> &pExpCtx) {
- intrusive_ptr<DocumentSourceUnwind> pSource(
- new DocumentSourceUnwind(pExpCtx));
- return pSource;
- }
-
- void DocumentSourceUnwind::unwindField(const FieldPath &rFieldPath) {
- /* can't set more than one unwind field */
- uassert(15979, str::stream() << unwindName <<
-                " can't unwind more than one path at once",
- !unwindPath.getPathLength());
-
-        uassert(15980, "the path of the field to unwind cannot be empty",
-                rFieldPath.getPathLength());
-
- /* record the field path */
- unwindPath = rFieldPath;
- }
-
- intrusive_ptr<DocumentSource> DocumentSourceUnwind::createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pExpCtx) {
- /*
- The value of $unwind should just be a field path.
- */
- uassert(15981, str::stream() << "the " << unwindName <<
- " field path must be specified as a string",
- pBsonElement->type() == String);
-
- string prefixedPathString(pBsonElement->String());
- string pathString(Expression::removeFieldPrefix(prefixedPathString));
- intrusive_ptr<DocumentSourceUnwind> pUnwind(
- DocumentSourceUnwind::create(pExpCtx));
- pUnwind->unwindPath = FieldPath(pathString);
-
- return pUnwind;
- }
-
- void DocumentSourceUnwind::manageDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker) {
- pTracker->addDependency(unwindPath.getPath(false), this);
- }
-
-}
+/**
+ * Copyright 2011 (c) 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "db/pipeline/document_source.h"
+
+#include "db/jsobj.h"
+#include "db/pipeline/document.h"
+#include "db/pipeline/expression.h"
+#include "db/pipeline/value.h"
+
+namespace mongo {
+
+ const char DocumentSourceUnwind::unwindName[] = "$unwind";
+
+ DocumentSourceUnwind::~DocumentSourceUnwind() {
+ }
+
+ DocumentSourceUnwind::DocumentSourceUnwind(
+ const intrusive_ptr<ExpressionContext> &pExpCtx):
+ DocumentSource(pExpCtx),
+ unwindPath(),
+ pNoUnwindDocument(),
+ pUnwindArray(),
+ pUnwinder(),
+ pUnwindValue() {
+ }
+
+ const char *DocumentSourceUnwind::getSourceName() const {
+ return unwindName;
+ }
+
+ bool DocumentSourceUnwind::eof() {
+ /*
+ If we're unwinding an array, and there are more elements, then we
+ can return more documents.
+ */
+ if (pUnwinder.get() && pUnwinder->more())
+ return false;
+
+ return pSource->eof();
+ }
+
+ bool DocumentSourceUnwind::advance() {
+ DocumentSource::advance(); // check for interrupts
+
+ if (pUnwinder.get() && pUnwinder->more()) {
+ pUnwindValue = pUnwinder->next();
+ return true;
+ }
+
+ /* release the last document and advance */
+ resetArray();
+ return pSource->advance();
+ }
+
+ intrusive_ptr<Document> DocumentSourceUnwind::getCurrent() {
+ if (!pNoUnwindDocument.get()) {
+ intrusive_ptr<Document> pInDocument(pSource->getCurrent());
+
+ /* create the result document */
+ pNoUnwindDocument = pInDocument;
+ fieldIndex.clear();
+
+ /*
+ First we'll look to see if the path is there. If it isn't,
+ we'll pass this document through. If it is, we record the
+ indexes of the fields down the field path so that we can
+ quickly replace them as we clone the documents along the
+ field path.
+
+ We have to clone all the documents along the field path so
+ that we don't share the end value across documents that have
+ come out of this pipeline operator.
+ */
+ intrusive_ptr<Document> pCurrent(pInDocument);
+ const size_t pathLength = unwindPath.getPathLength();
+ for(size_t i = 0; i < pathLength; ++i) {
+ size_t idx = pCurrent->getFieldIndex(
+ unwindPath.getFieldName(i));
+ if (idx == pCurrent->getFieldCount() ) {
+ /* this document doesn't contain the target field */
+ resetArray();
+ return pInDocument;
+ break;
+ }
+
+ fieldIndex.push_back(idx);
+ Document::FieldPair fp(pCurrent->getField(idx));
+ intrusive_ptr<const Value> pPathValue(fp.second);
+ if (i < pathLength - 1) {
+ if (pPathValue->getType() != Object) {
+ /* can't walk down the field path */
+ resetArray();
+ uassert(15977, str::stream() << unwindName <<
+ ": cannot traverse field path past scalar value for \"" <<
+ fp.first << "\"", false);
+ break;
+ }
+
+ /* move down the object tree */
+ pCurrent = pPathValue->getDocument();
+ }
+ else /* (i == pathLength - 1) */ {
+ if (pPathValue->getType() != Array) {
+ /* last item on path must be an array to unwind */
+ resetArray();
+ uassert(15978, str::stream() << unwindName <<
+ ": value at end of field path must be an array",
+ false);
+ break;
+ }
+
+ /* keep track of the array we're unwinding */
+ pUnwindArray = pPathValue;
+ if (pUnwindArray->getArrayLength() == 0) {
+ /*
+ The $unwind of an empty array is a NULL value. If we
+ encounter this, use the non-unwind path, but replace
+ pOutField with a null.
+
+ Make sure unwind value is clear so the array is
+ removed.
+ */
+ pUnwindValue.reset();
+ intrusive_ptr<Document> pClone(clonePath());
+ resetArray();
+ return pClone;
+ }
+
+ /* get the iterator we'll use to unwind the array */
+ pUnwinder = pUnwindArray->getArray();
+ verify(pUnwinder->more()); // we just checked above...
+ pUnwindValue = pUnwinder->next();
+ }
+ }
+ }
+
+ /*
+ If we're unwinding a field, create an alternate document. In the
+ alternate (clone), replace the unwound array field with the element
+ at the appropriate index.
+ */
+ if (pUnwindArray.get()) {
+ /* clone the document with an array we're unwinding */
+ intrusive_ptr<Document> pUnwindDocument(clonePath());
+
+ return pUnwindDocument;
+ }
+
+ return pNoUnwindDocument;
+ }
+
+ intrusive_ptr<Document> DocumentSourceUnwind::clonePath() const {
+ /*
+ For this to be valid, we must already have pNoUnwindDocument set,
+ and have set up the vector of indices for that document in fieldIndex.
+ */
+ verify(pNoUnwindDocument.get());
+
+ intrusive_ptr<Document> pClone(pNoUnwindDocument->clone());
+ intrusive_ptr<Document> pCurrent(pClone);
+ const size_t n = fieldIndex.size();
+ verify(n);
+ for(size_t i = 0; i < n; ++i) {
+ const size_t fi = fieldIndex[i];
+ Document::FieldPair fp(pCurrent->getField(fi));
+ if (i + 1 < n) {
+ /*
+ For every object in the path but the last, clone it and
+ continue on down.
+ */
+ intrusive_ptr<Document> pNext(
+ fp.second->getDocument()->clone());
+ pCurrent->setField(fi, fp.first, Value::createDocument(pNext));
+ pCurrent = pNext;
+ }
+ else {
+                /* for the last, substitute the next unwound value */
+ pCurrent->setField(fi, fp.first, pUnwindValue);
+ }
+ }
+
+ return pClone;
+ }
+
+ void DocumentSourceUnwind::sourceToBson(
+ BSONObjBuilder *pBuilder, bool explain) const {
+ pBuilder->append(unwindName, unwindPath.getPath(true));
+ }
+
+ intrusive_ptr<DocumentSourceUnwind> DocumentSourceUnwind::create(
+ const intrusive_ptr<ExpressionContext> &pExpCtx) {
+ intrusive_ptr<DocumentSourceUnwind> pSource(
+ new DocumentSourceUnwind(pExpCtx));
+ return pSource;
+ }
+
+ void DocumentSourceUnwind::unwindField(const FieldPath &rFieldPath) {
+ /* can't set more than one unwind field */
+ uassert(15979, str::stream() << unwindName <<
+                " can't unwind more than one path at once",
+ !unwindPath.getPathLength());
+
+        uassert(15980, "the path of the field to unwind cannot be empty",
+                rFieldPath.getPathLength());
+
+ /* record the field path */
+ unwindPath = rFieldPath;
+ }
+
+ intrusive_ptr<DocumentSource> DocumentSourceUnwind::createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pExpCtx) {
+ /*
+ The value of $unwind should just be a field path.
+ */
+ uassert(15981, str::stream() << "the " << unwindName <<
+ " field path must be specified as a string",
+ pBsonElement->type() == String);
+
+ string prefixedPathString(pBsonElement->String());
+ string pathString(Expression::removeFieldPrefix(prefixedPathString));
+ intrusive_ptr<DocumentSourceUnwind> pUnwind(
+ DocumentSourceUnwind::create(pExpCtx));
+ pUnwind->unwindPath = FieldPath(pathString);
+
+ return pUnwind;
+ }
+
+ void DocumentSourceUnwind::manageDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker) {
+ pTracker->addDependency(unwindPath.getPath(false), this);
+ }
+
+}
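
Together, getCurrent() and advance() turn one input document into a run of
outputs, one per element of the unwound array, while documents that lack the
field pass through unchanged and an empty array yields a null in place of the
field. Stripped of the field-path walking and cloning, the core effect looks
like this (a simplified sketch with illustrative types):

    #include <string>
    #include <utility>
    #include <vector>

    struct Doc {
        int x;                                   // some scalar field
        std::vector<std::string> tags;           // the array being unwound
    };

    // Emit one (x, tag) pair per array element -- the heart of $unwind.
    std::vector<std::pair<int, std::string> > unwind(const std::vector<Doc> &in) {
        std::vector<std::pair<int, std::string> > out;
        for (size_t i = 0; i < in.size(); ++i)
            for (size_t j = 0; j < in[i].tags.size(); ++j)
                out.push_back(std::make_pair(in[i].x, in[i].tags[j]));
        return out;
    }
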
diff --git a/src/mongo/db/pipeline/expression.h b/src/mongo/db/pipeline/expression.h
index e2d2abbd672..eda7cdb7805 100755
--- a/src/mongo/db/pipeline/expression.h
+++ b/src/mongo/db/pipeline/expression.h
@@ -1,830 +1,830 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#pragma once
-
-#include "pch.h"
-
-#include "db/pipeline/field_path.h"
-#include "util/intrusive_counter.h"
-#include "util/iterator.h"
-
-
-namespace mongo {
-
- class BSONArrayBuilder;
- class BSONElement;
- class BSONObjBuilder;
- class Builder;
- class DependencyTracker;
- class Document;
- class DocumentSource;
- class ExpressionContext;
- class Value;
-
-
- class Expression :
- public IntrusiveCounterUnsigned {
- public:
- virtual ~Expression() {};
-
- /*
- Optimize the Expression.
-
- This provides an opportunity to do constant folding, or to
- collapse nested operators that have the same precedence, such as
- $add, $and, or $or.
-
- The Expression should be replaced with the return value, which may
- or may not be the same object. In the case of constant folding,
- a computed expression may be replaced by a constant.
-
- @returns the optimized Expression
- */
- virtual intrusive_ptr<Expression> optimize() = 0;
-
- /**
- Add this expression's field dependencies to the dependency tracker.
-
- Expressions are trees, so this is often recursive.
-
-           @param pTracker the tracker to add the dependencies to
- */
- virtual void addDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker,
- const DocumentSource *pSource) const = 0;
-
- /*
- Evaluate the Expression using the given document as input.
-
- @returns the computed value
- */
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const = 0;
-
- /*
- Add the Expression (and any descendant Expressions) into a BSON
- object that is under construction.
-
- Unevaluated Expressions always materialize as objects. Evaluation
- may produce a scalar or another object, either of which will be
- substituted inline.
-
- @param pBuilder the builder to add the expression to
- @param fieldName the name the object should be given
- @param requireExpression specify true if the value must appear
- as an expression; this is used by DocumentSources like
- $project which distinguish between field inclusion and virtual
- field specification; See ExpressionConstant.
- */
- virtual void addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName,
- bool requireExpression) const = 0;
-
- /*
- Add the Expression (and any descendant Expressions) into a BSON
- array that is under construction.
-
- Unevaluated Expressions always materialize as objects. Evaluation
- may produce a scalar or another object, either of which will be
- substituted inline.
-
- @param pBuilder the builder to add the expression to
- */
- virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const = 0;
-
- /*
- Convert the expression into a BSONObj that corresponds to the
- db.collection.find() predicate language. This is intended for
- use by DocumentSourceFilter.
-
- This is more limited than the full expression language supported
- by all available expressions in a DocumentSource processing
- pipeline, and will fail with an assertion if an attempt is made
- to go outside the bounds of the recognized patterns, which don't
- include full computed expressions. There are other methods available
- on DocumentSourceFilter which can be used to analyze a filter
- predicate and break it up into appropriate expressions which can
- be translated within these constraints. As a result, the default
- implementation is to fail with an assertion; only a subset of
- operators will be able to fulfill this request.
-
- @param pBuilder the builder to add the expression to.
- */
- virtual void toMatcherBson(BSONObjBuilder *pBuilder) const;
-
- /*
- Utility class for parseObject() below.
-
- Only one array can be unwound in a processing pipeline. If the
- UNWIND_OK option is used, unwindOk() will return true, and a field
- can be declared as unwound using unwind(), after which unwindUsed()
- will return true. Only specify UNWIND_OK if it is OK to unwind an
- array in the current context.
-
- DOCUMENT_OK indicates that it is OK to use a Document in the current
- context.
- */
- class ObjectCtx {
- public:
- ObjectCtx(int options);
- static const int UNWIND_OK = 0x0001;
- static const int DOCUMENT_OK = 0x0002;
-
- bool unwindOk() const;
- bool unwindUsed() const;
- void unwind(string fieldName);
-
- bool documentOk() const;
-
- private:
- int options;
- string unwindField;
- };
-
- /*
- Parse a BSONElement Object. The object could represent a functional
- expression or a Document expression.
-
- @param pBsonElement the element representing the object
-          @param pCtx an ObjectCtx representing the options above
- @returns the parsed Expression
- */
- static intrusive_ptr<Expression> parseObject(
- BSONElement *pBsonElement, ObjectCtx *pCtx);
-
- static const char unwindName[];
-
- /*
- Parse a BSONElement Object which has already been determined to be
-          a functional expression.
-
- @param pOpName the name of the (prefix) operator
- @param pBsonElement the BSONElement to parse
- @returns the parsed Expression
- */
- static intrusive_ptr<Expression> parseExpression(
- const char *pOpName, BSONElement *pBsonElement);
-
-
- /*
- Parse a BSONElement which is an operand in an Expression.
-
- @param pBsonElement the expected operand's BSONElement
- @returns the parsed operand, as an Expression
- */
- static intrusive_ptr<Expression> parseOperand(
- BSONElement *pBsonElement);
-
- /*
- Produce a field path string with the field prefix removed.
-
- Throws an error if the field prefix is not present.
-
- @param prefixedField the prefixed field
- @returns the field path with the prefix removed
- */
- static string removeFieldPrefix(const string &prefixedField);
-
- /*
- Enumeration of comparison operators. These are shared between a
- few expression implementations, so they are factored out here.
-
- Any changes to these values require adjustment of the lookup
- table in the implementation.
- */
- enum CmpOp {
- EQ = 0, // return true for a == b, false otherwise
- NE = 1, // return true for a != b, false otherwise
- GT = 2, // return true for a > b, false otherwise
- GTE = 3, // return true for a >= b, false otherwise
- LT = 4, // return true for a < b, false otherwise
- LTE = 5, // return true for a <= b, false otherwise
- CMP = 6, // return -1, 0, 1 for a < b, a == b, a > b
- };
-
- static int signum(int i);
-
- protected:
- typedef vector<intrusive_ptr<Expression> > ExpressionVector;
-
- };
-
-
- class ExpressionNary :
- public Expression {
- public:
- // virtuals from Expression
- virtual intrusive_ptr<Expression> optimize();
- virtual void addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName,
- bool requireExpression) const;
- virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const;
- virtual void addDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker,
- const DocumentSource *pSource) const;
-
- /*
- Add an operand to the n-ary expression.
-
- @param pExpression the expression to add
- */
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- /*
- Return a factory function that will make Expression nodes of
- the same type as this. This will be used to create constant
- expressions for constant folding for optimize(). Only return
- a factory function if this operator is both associative and
- commutative. The default implementation returns NULL; optimize()
- will recognize that and stop.
-
- Note that ExpressionNary::optimize() promises that if it uses this
- to fold constants, then if optimize() returns an ExpressionNary,
- any remaining constant will be the last one in vpOperand. Derived
- classes may take advantage of this to do further optimizations in
- their optimize().
-
- @returns pointer to a factory function or NULL
- */
- virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
-
- /*
- Get the name of the operator.
-
- @returns the name of the operator; this string belongs to the class
- implementation, and should not be deleted
- */
- virtual const char *getOpName() const = 0;
-
- protected:
- ExpressionNary();
-
- ExpressionVector vpOperand;
-
- /*
- Add the expression to the builder.
-
- If there is only one operand (a unary operator), then the operand
- is added directly, without an array. For more than one operand,
- a named array is created. In both cases, the result is an object.
-
- @param pBuilder the (blank) builder to add the expression to
- @param pOpName the name of the operator
- */
- virtual void toBson(BSONObjBuilder *pBuilder,
- const char *pOpName) const;
-
- /*
-          Checks the current size of vpOperand; if the size is equal to or
- greater than maxArgs, fires a user assertion indicating that this
- operator cannot have this many arguments.
-
- The equal is there because this is intended to be used in
- addOperand() to check for the limit *before* adding the requested
- argument.
-
- @param maxArgs the maximum number of arguments the operator accepts
- */
- void checkArgLimit(unsigned maxArgs) const;
-
- /*
- Checks the current size of vpOperand; if the size is not equal to
- reqArgs, fires a user assertion indicating that this must have
- exactly reqArgs arguments.
-
- This is meant to be used in evaluate(), *before* the evaluation
- takes place.
-
- @param reqArgs the number of arguments this operator requires
- */
- void checkArgCount(unsigned reqArgs) const;
- };
-
-
- class ExpressionAdd :
- public ExpressionNary {
- public:
- // virtuals from Expression
- virtual ~ExpressionAdd();
- virtual intrusive_ptr<Expression> optimize();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
-
- // virtuals from ExpressionNary
- virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
-
- /*
- Create an expression that finds the sum of n operands.
-
- @returns addition expression
- */
- static intrusive_ptr<ExpressionNary> create();
-
- protected:
- // virtuals from ExpressionNary
- virtual void toBson(BSONObjBuilder *pBuilder,
- const char *pOpName) const;
-
- private:
- ExpressionAdd();
-
- /*
- If the operator can be optimized, we save the original here.
-
- This is necessary because addition must follow its original operand
-          ordering strictly if a string is detected; otherwise string
- concatenation may appear to have re-ordered the operands.
- */
- intrusive_ptr<ExpressionAdd> pAdd;
- mutable bool useOriginal;
- };
-
-
- class ExpressionAnd :
- public ExpressionNary {
- public:
- // virtuals from Expression
- virtual ~ExpressionAnd();
- virtual intrusive_ptr<Expression> optimize();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void toMatcherBson(BSONObjBuilder *pBuilder) const;
-
- // virtuals from ExpressionNary
- virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
-
- /*
- Create an expression that finds the conjunction of n operands.
- The conjunction uses short-circuit logic; the expressions are
- evaluated in the order they were added to the conjunction, and
- the evaluation stops and returns false on the first operand that
- evaluates to false.
-
- @returns conjunction expression
- */
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionAnd();
- };
-
-
- class ExpressionCoerceToBool :
- public Expression {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionCoerceToBool();
- virtual intrusive_ptr<Expression> optimize();
- virtual void addDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker,
- const DocumentSource *pSource) const;
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual void addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName,
- bool requireExpression) const;
- virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const;
-
- static intrusive_ptr<ExpressionCoerceToBool> create(
- const intrusive_ptr<Expression> &pExpression);
-
- private:
- ExpressionCoerceToBool(const intrusive_ptr<Expression> &pExpression);
-
- intrusive_ptr<Expression> pExpression;
- };
-
-
- class ExpressionCompare :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionCompare();
- virtual intrusive_ptr<Expression> optimize();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- /*
-          Shorthands for creating various comparison expressions.
- Provide for conformance with the uniform function pointer signature
- required for parsing.
-
-          These create a particular comparison operator, without any
- operands. Those must be added via ExpressionNary::addOperand().
- */
- static intrusive_ptr<ExpressionNary> createCmp();
- static intrusive_ptr<ExpressionNary> createEq();
- static intrusive_ptr<ExpressionNary> createNe();
- static intrusive_ptr<ExpressionNary> createGt();
- static intrusive_ptr<ExpressionNary> createGte();
- static intrusive_ptr<ExpressionNary> createLt();
- static intrusive_ptr<ExpressionNary> createLte();
-
- private:
- friend class ExpressionFieldRange;
- ExpressionCompare(CmpOp cmpOp);
-
- CmpOp cmpOp;
- };
-
-
- class ExpressionCond :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionCond();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionCond();
- };
-
-
- class ExpressionConstant :
- public Expression {
- public:
- // virtuals from Expression
- virtual ~ExpressionConstant();
- virtual intrusive_ptr<Expression> optimize();
- virtual void addDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker,
- const DocumentSource *pSource) const;
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName,
- bool requireExpression) const;
- virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const;
-
- static intrusive_ptr<ExpressionConstant> createFromBsonElement(
- BSONElement *pBsonElement);
- static intrusive_ptr<ExpressionConstant> create(
- const intrusive_ptr<const Value> &pValue);
-
- /*
- Get the constant value represented by this Expression.
-
- @returns the value
- */
- intrusive_ptr<const Value> getValue() const;
-
- private:
- ExpressionConstant(BSONElement *pBsonElement);
- ExpressionConstant(const intrusive_ptr<const Value> &pValue);
-
- intrusive_ptr<const Value> pValue;
- };
-
-
- class ExpressionDayOfMonth :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionDayOfMonth();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionDayOfMonth();
- };
-
-
- class ExpressionDayOfWeek :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionDayOfWeek();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionDayOfWeek();
- };
-
-
- class ExpressionDayOfYear :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionDayOfYear();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionDayOfYear();
- };
-
-
- class ExpressionDivide :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionDivide();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionDivide();
- };
-
-
- class ExpressionFieldPath :
- public Expression {
- public:
- // virtuals from Expression
- virtual ~ExpressionFieldPath();
- virtual intrusive_ptr<Expression> optimize();
- virtual void addDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker,
- const DocumentSource *pSource) const;
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual void addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName,
- bool requireExpression) const;
- virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const;
-
- /*
- Create a field path expression.
-
- Evaluation will extract the value associated with the given field
- path from the source document.
-
- @param fieldPath the field path string, without any leading document
- indicator
- @returns the newly created field path expression
- */
- static intrusive_ptr<ExpressionFieldPath> create(
- const string &fieldPath);
-
- /*
- Return a string representation of the field path.
-
- @param fieldPrefix whether or not to include the document field
- indicator prefix
- @returns the dot-delimited field path
- */
- string getFieldPath(bool fieldPrefix) const;
-
- /*
- Write a string representation of the field path to a stream.
-
-          @param outStream the stream to write to
- @param fieldPrefix whether or not to include the document field
- indicator prefix
- */
- void writeFieldPath(ostream &outStream, bool fieldPrefix) const;
-
- private:
- ExpressionFieldPath(const string &fieldPath);
-
- /*
- Internal implementation of evaluate(), used recursively.
-
- The internal implementation doesn't just use a loop because of
- the possibility that we need to skip over an array. If the path
- is "a.b.c", and a is an array, then we fan out from there, and
- traverse "b.c" for each element of a:[...]. This requires that
- a be an array of objects in order to navigate more deeply.
-
- @param index current path field index to extract
- @param pathLength maximum number of fields on field path
- @param pDocument current document traversed to (not the top-level one)
- @returns the field found; could be an array
- */
- intrusive_ptr<const Value> evaluatePath(
- size_t index, const size_t pathLength,
- intrusive_ptr<Document> pDocument) const;
-
- FieldPath fieldPath;
- };
-
-
- class ExpressionFieldRange :
- public Expression {
- public:
- // virtuals from expression
- virtual ~ExpressionFieldRange();
- virtual intrusive_ptr<Expression> optimize();
- virtual void addDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker,
- const DocumentSource *pSource) const;
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual void addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName,
- bool requireExpression) const;
- virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const;
- virtual void toMatcherBson(BSONObjBuilder *pBuilder) const;
-
- /*
- Create a field range expression.
-
- Field ranges are meant to match up with classic Matcher semantics,
- and therefore are conjunctions. For example, these appear in
- mongo shell predicates in one of these forms:
- { a : C } -> (a == C) // degenerate "point" range
- { a : { $lt : C } } -> (a < C) // open range
- { a : { $gt : C1, $lte : C2 } } -> ((a > C1) && (a <= C2)) // closed
-
- When initially created, a field range only includes one end of
- the range. Additional points may be added via intersect().
-
- Note that NE and CMP are not supported.
-
- @param pFieldPath the field path for extracting the field value
- @param cmpOp the comparison operator
- @param pValue the value to compare against
- @returns the newly created field range expression
- */
- static intrusive_ptr<ExpressionFieldRange> create(
- const intrusive_ptr<ExpressionFieldPath> &pFieldPath,
- CmpOp cmpOp, const intrusive_ptr<const Value> &pValue);
-
- /*
- Add an intersecting range.
-
- This can be done any number of times after creation. The
- range is internally optimized for each new addition. If the new
- intersection extends or reduces the values within the range, the
- internal representation is adjusted to reflect that.
-
- Note that NE and CMP are not supported.
-
- @param cmpOp the comparison operator
- @param pValue the value to compare against
- */
- void intersect(CmpOp cmpOp, const intrusive_ptr<const Value> &pValue);
-
- private:
- ExpressionFieldRange(const intrusive_ptr<ExpressionFieldPath> &pFieldPath,
- CmpOp cmpOp,
- const intrusive_ptr<const Value> &pValue);
-
- intrusive_ptr<ExpressionFieldPath> pFieldPath;
-
- class Range {
- public:
- Range(CmpOp cmpOp, const intrusive_ptr<const Value> &pValue);
- Range(const Range &rRange);
-
- Range *intersect(const Range *pRange) const;
- bool contains(const intrusive_ptr<const Value> &pValue) const;
-
- Range(const intrusive_ptr<const Value> &pBottom, bool bottomOpen,
- const intrusive_ptr<const Value> &pTop, bool topOpen);
-
- bool bottomOpen;
- bool topOpen;
- intrusive_ptr<const Value> pBottom;
- intrusive_ptr<const Value> pTop;
- };
-
- scoped_ptr<Range> pRange;
-
- /*
- Add to a generic Builder.
-
- The methods to append items to an object and an array differ by
- their inclusion of a field name. For more complicated objects,
- it makes sense to abstract that out and use a generic builder that
- always looks the same, and then implement addToBsonObj() and
- addToBsonArray() by using the common method.
- */
- void addToBson(Builder *pBuilder) const;
- };
-
-
- class ExpressionHour :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionHour();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionHour();
- };
-
-
- class ExpressionIfNull :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionIfNull();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionIfNull();
- };
-
-
- class ExpressionIsoDate :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionIsoDate();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionIsoDate();
-
- static const char argYear[];
- static const char argMonth[];
- static const char argDayOfMonth[];
- static const char argHour[];
- static const char argMinute[];
- static const char argSecond[];
-
- static const unsigned flagYear;
- static const unsigned flagMonth;
- static const unsigned flagDayOfMonth;
- static const unsigned flagHour;
- static const unsigned flagMinute;
- static const unsigned flagSecond;
- unsigned flag;
-
- /**
- Get a named long argument out of the given document.
-
- @param pArgs the evaluated document with the named arguments in it
- @param pName the name of the argument
- @param defaultValue the value to return if the argument isn't found
- @returns the value if found, otherwise zero
- @throws uassert for non-whole numbers or non-numbers
- */
- int getIntArg(
- const intrusive_ptr<Document> &pArgs,
- const char *pName, int defaultValue) const;
-
- /**
- Check that the named argument fits in an integer.
-
-          @param pName the name of the argument
-          @param value the long value of the argument
- @returns the integer value
- @throws uassert if the value is out of range
- */
- int checkIntRange(const char *pName, long long value) const;
- };
-
-
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+
+#include "pch.h"
+
+#include "db/pipeline/field_path.h"
+#include "util/intrusive_counter.h"
+#include "util/iterator.h"
+
+
+namespace mongo {
+
+ class BSONArrayBuilder;
+ class BSONElement;
+ class BSONObjBuilder;
+ class Builder;
+ class DependencyTracker;
+ class Document;
+ class DocumentSource;
+ class ExpressionContext;
+ class Value;
+
+
+ class Expression :
+ public IntrusiveCounterUnsigned {
+ public:
+ virtual ~Expression() {};
+
+ /*
+ Optimize the Expression.
+
+ This provides an opportunity to do constant folding, or to
+ collapse nested operators that have the same precedence, such as
+ $add, $and, or $or.
+
+ The Expression should be replaced with the return value, which may
+ or may not be the same object. In the case of constant folding,
+ a computed expression may be replaced by a constant.
+
+ @returns the optimized Expression
+ */
+ virtual intrusive_ptr<Expression> optimize() = 0;
+
+ /**
+ Add this expression's field dependencies to the dependency tracker.
+
+ Expressions are trees, so this is often recursive.
+
+          @param pTracker the tracker to add the dependencies to
+ */
+ virtual void addDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker,
+ const DocumentSource *pSource) const = 0;
+
+ /*
+ Evaluate the Expression using the given document as input.
+
+ @returns the computed value
+ */
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const = 0;
+
+ /*
+ Add the Expression (and any descendant Expressions) into a BSON
+ object that is under construction.
+
+ Unevaluated Expressions always materialize as objects. Evaluation
+ may produce a scalar or another object, either of which will be
+ substituted inline.
+
+ @param pBuilder the builder to add the expression to
+ @param fieldName the name the object should be given
+ @param requireExpression specify true if the value must appear
+ as an expression; this is used by DocumentSources like
+ $project which distinguish between field inclusion and virtual
+ field specification; See ExpressionConstant.
+ */
+ virtual void addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName,
+ bool requireExpression) const = 0;
+
+ /*
+ Add the Expression (and any descendant Expressions) into a BSON
+ array that is under construction.
+
+ Unevaluated Expressions always materialize as objects. Evaluation
+ may produce a scalar or another object, either of which will be
+ substituted inline.
+
+ @param pBuilder the builder to add the expression to
+ */
+ virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const = 0;
+
+ /*
+ Convert the expression into a BSONObj that corresponds to the
+ db.collection.find() predicate language. This is intended for
+ use by DocumentSourceFilter.
+
+ This is more limited than the full expression language supported
+ by all available expressions in a DocumentSource processing
+ pipeline, and will fail with an assertion if an attempt is made
+ to go outside the bounds of the recognized patterns, which don't
+ include full computed expressions. There are other methods available
+ on DocumentSourceFilter which can be used to analyze a filter
+ predicate and break it up into appropriate expressions which can
+ be translated within these constraints. As a result, the default
+ implementation is to fail with an assertion; only a subset of
+ operators will be able to fulfill this request.
+
+ @param pBuilder the builder to add the expression to.
+ */
+ virtual void toMatcherBson(BSONObjBuilder *pBuilder) const;
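+
+        /*
+          Editorial sketch, not part of the original header: only expression
+          types that mirror find() predicates implement this. For example, a
+          field range built from { a : { $gt : 5 } } could be written back
+          out roughly as follows (pFieldRange is assumed to be an
+          ExpressionFieldRange created elsewhere):
+
+              BSONObjBuilder matcherBuilder;
+              pFieldRange->toMatcherBson(&matcherBuilder);
+              // matcherBuilder.obj() is approximately { a : { $gt : 5 } }
+        */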
+
+ /*
+ Utility class for parseObject() below.
+
+ Only one array can be unwound in a processing pipeline. If the
+ UNWIND_OK option is used, unwindOk() will return true, and a field
+ can be declared as unwound using unwind(), after which unwindUsed()
+ will return true. Only specify UNWIND_OK if it is OK to unwind an
+ array in the current context.
+
+ DOCUMENT_OK indicates that it is OK to use a Document in the current
+ context.
+ */
+ class ObjectCtx {
+ public:
+ ObjectCtx(int options);
+ static const int UNWIND_OK = 0x0001;
+ static const int DOCUMENT_OK = 0x0002;
+
+ bool unwindOk() const;
+ bool unwindUsed() const;
+ void unwind(string fieldName);
+
+ bool documentOk() const;
+
+ private:
+ int options;
+ string unwindField;
+ };
+
+ /*
+ Parse a BSONElement Object. The object could represent a functional
+ expression or a Document expression.
+
+ @param pBsonElement the element representing the object
+          @param pCtx an ObjectCtx representing the options above
+ @returns the parsed Expression
+ */
+ static intrusive_ptr<Expression> parseObject(
+ BSONElement *pBsonElement, ObjectCtx *pCtx);
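+
+        /*
+          Editorial sketch, not part of the original header: a caller that has
+          isolated a $project-style specification could parse it as a document
+          expression like this (specElement is assumed to be a BSONElement
+          supplied by the caller):
+
+              Expression::ObjectCtx ctx(Expression::ObjectCtx::DOCUMENT_OK);
+              intrusive_ptr<Expression> pExpr(
+                  Expression::parseObject(&specElement, &ctx));
+        */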
+
+ static const char unwindName[];
+
+ /*
+ Parse a BSONElement Object which has already been determined to be
+          a functional expression.
+
+ @param pOpName the name of the (prefix) operator
+ @param pBsonElement the BSONElement to parse
+ @returns the parsed Expression
+ */
+ static intrusive_ptr<Expression> parseExpression(
+ const char *pOpName, BSONElement *pBsonElement);
+
+
+ /*
+ Parse a BSONElement which is an operand in an Expression.
+
+ @param pBsonElement the expected operand's BSONElement
+ @returns the parsed operand, as an Expression
+ */
+ static intrusive_ptr<Expression> parseOperand(
+ BSONElement *pBsonElement);
+
+ /*
+ Produce a field path string with the field prefix removed.
+
+ Throws an error if the field prefix is not present.
+
+ @param prefixedField the prefixed field
+ @returns the field path with the prefix removed
+ */
+ static string removeFieldPrefix(const string &prefixedField);
+
+ /*
+ Enumeration of comparison operators. These are shared between a
+ few expression implementations, so they are factored out here.
+
+ Any changes to these values require adjustment of the lookup
+ table in the implementation.
+ */
+ enum CmpOp {
+ EQ = 0, // return true for a == b, false otherwise
+ NE = 1, // return true for a != b, false otherwise
+ GT = 2, // return true for a > b, false otherwise
+ GTE = 3, // return true for a >= b, false otherwise
+ LT = 4, // return true for a < b, false otherwise
+ LTE = 5, // return true for a <= b, false otherwise
+ CMP = 6, // return -1, 0, 1 for a < b, a == b, a > b
+ };
+
+ static int signum(int i);
+
+ protected:
+ typedef vector<intrusive_ptr<Expression> > ExpressionVector;
+
+ };
+
+
+ class ExpressionNary :
+ public Expression {
+ public:
+ // virtuals from Expression
+ virtual intrusive_ptr<Expression> optimize();
+ virtual void addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName,
+ bool requireExpression) const;
+ virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const;
+ virtual void addDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker,
+ const DocumentSource *pSource) const;
+
+ /*
+ Add an operand to the n-ary expression.
+
+ @param pExpression the expression to add
+ */
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ /*
+ Return a factory function that will make Expression nodes of
+ the same type as this. This will be used to create constant
+ expressions for constant folding for optimize(). Only return
+ a factory function if this operator is both associative and
+ commutative. The default implementation returns NULL; optimize()
+ will recognize that and stop.
+
+ Note that ExpressionNary::optimize() promises that if it uses this
+ to fold constants, then if optimize() returns an ExpressionNary,
+ any remaining constant will be the last one in vpOperand. Derived
+ classes may take advantage of this to do further optimizations in
+ their optimize().
+
+ @returns pointer to a factory function or NULL
+ */
+ virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
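+
+        /*
+          Editorial sketch, not part of the original header: an associative and
+          commutative operator such as $add could simply hand back its own
+          create() function from the implementation file, e.g.
+
+              intrusive_ptr<ExpressionNary> (*ExpressionAdd::getFactory() const)() {
+                  return ExpressionAdd::create;
+              }
+
+          Operators that are not foldable keep the default NULL-returning
+          behavior.
+        */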
+
+ /*
+ Get the name of the operator.
+
+ @returns the name of the operator; this string belongs to the class
+          implementation, and should not be deleted.
+ */
+ virtual const char *getOpName() const = 0;
+
+ protected:
+ ExpressionNary();
+
+ ExpressionVector vpOperand;
+
+ /*
+ Add the expression to the builder.
+
+ If there is only one operand (a unary operator), then the operand
+ is added directly, without an array. For more than one operand,
+ a named array is created. In both cases, the result is an object.
+
+ @param pBuilder the (blank) builder to add the expression to
+ @param pOpName the name of the operator
+ */
+ virtual void toBson(BSONObjBuilder *pBuilder,
+ const char *pOpName) const;
+
+ /*
+          Checks the current size of vpOperand; if the size is equal to or
+ greater than maxArgs, fires a user assertion indicating that this
+ operator cannot have this many arguments.
+
+ The equal is there because this is intended to be used in
+ addOperand() to check for the limit *before* adding the requested
+ argument.
+
+ @param maxArgs the maximum number of arguments the operator accepts
+ */
+ void checkArgLimit(unsigned maxArgs) const;
+
+ /*
+ Checks the current size of vpOperand; if the size is not equal to
+ reqArgs, fires a user assertion indicating that this must have
+ exactly reqArgs arguments.
+
+ This is meant to be used in evaluate(), *before* the evaluation
+ takes place.
+
+ @param reqArgs the number of arguments this operator requires
+ */
+ void checkArgCount(unsigned reqArgs) const;
+ };
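+
+    /*
+      Editorial sketch, not part of the original header: a fixed-arity operator
+      typically combines these guards; a two-operand operator such as $divide
+      might use
+
+          void ExpressionDivide::addOperand(
+              const intrusive_ptr<Expression> &pExpression) {
+              checkArgLimit(2);                   // refuse a third operand
+              ExpressionNary::addOperand(pExpression);
+          }
+
+      and call checkArgCount(2) at the top of its evaluate().
+    */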
+
+
+ class ExpressionAdd :
+ public ExpressionNary {
+ public:
+ // virtuals from Expression
+ virtual ~ExpressionAdd();
+ virtual intrusive_ptr<Expression> optimize();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+
+ // virtuals from ExpressionNary
+ virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
+
+ /*
+ Create an expression that finds the sum of n operands.
+
+ @returns addition expression
+ */
+ static intrusive_ptr<ExpressionNary> create();
+
+ protected:
+ // virtuals from ExpressionNary
+ virtual void toBson(BSONObjBuilder *pBuilder,
+ const char *pOpName) const;
+
+ private:
+ ExpressionAdd();
+
+ /*
+ If the operator can be optimized, we save the original here.
+
+ This is necessary because addition must follow its original operand
+ ordering strictly if a string is detected, otherwise string
+ concatenation may appear to have re-ordered the operands.
+ */
+ intrusive_ptr<ExpressionAdd> pAdd;
+ mutable bool useOriginal;
+ };
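+
+    /*
+      Editorial sketch, not part of the original header: assembling and
+      evaluating an $add expression; pDoc is assumed to be an existing
+      intrusive_ptr<Document>, and "a" and "b" are hypothetical field names.
+
+          intrusive_ptr<ExpressionNary> pSum(ExpressionAdd::create());
+          pSum->addOperand(ExpressionFieldPath::create("a"));
+          pSum->addOperand(ExpressionFieldPath::create("b"));
+          intrusive_ptr<const Value> pTotal(pSum->evaluate(pDoc));
+    */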
+
+
+ class ExpressionAnd :
+ public ExpressionNary {
+ public:
+ // virtuals from Expression
+ virtual ~ExpressionAnd();
+ virtual intrusive_ptr<Expression> optimize();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void toMatcherBson(BSONObjBuilder *pBuilder) const;
+
+ // virtuals from ExpressionNary
+ virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
+
+ /*
+ Create an expression that finds the conjunction of n operands.
+ The conjunction uses short-circuit logic; the expressions are
+ evaluated in the order they were added to the conjunction, and
+ the evaluation stops and returns false on the first operand that
+ evaluates to false.
+
+ @returns conjunction expression
+ */
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionAnd();
+ };
+
+
+ class ExpressionCoerceToBool :
+ public Expression {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionCoerceToBool();
+ virtual intrusive_ptr<Expression> optimize();
+ virtual void addDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker,
+ const DocumentSource *pSource) const;
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual void addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName,
+ bool requireExpression) const;
+ virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const;
+
+ static intrusive_ptr<ExpressionCoerceToBool> create(
+ const intrusive_ptr<Expression> &pExpression);
+
+ private:
+ ExpressionCoerceToBool(const intrusive_ptr<Expression> &pExpression);
+
+ intrusive_ptr<Expression> pExpression;
+ };
+
+
+ class ExpressionCompare :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionCompare();
+ virtual intrusive_ptr<Expression> optimize();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ /*
+          Shorthands for creating various comparison expressions.
+ Provide for conformance with the uniform function pointer signature
+ required for parsing.
+
+          These create a particular comparison operator, without any
+ operands. Those must be added via ExpressionNary::addOperand().
+ */
+ static intrusive_ptr<ExpressionNary> createCmp();
+ static intrusive_ptr<ExpressionNary> createEq();
+ static intrusive_ptr<ExpressionNary> createNe();
+ static intrusive_ptr<ExpressionNary> createGt();
+ static intrusive_ptr<ExpressionNary> createGte();
+ static intrusive_ptr<ExpressionNary> createLt();
+ static intrusive_ptr<ExpressionNary> createLte();
+
+ private:
+ friend class ExpressionFieldRange;
+ ExpressionCompare(CmpOp cmpOp);
+
+ CmpOp cmpOp;
+ };
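+
+    /*
+      Editorial sketch, not part of the original header: building the
+      equivalent of { $gt : [ "$a", <limit> ] }; pLimit is assumed to be an
+      intrusive_ptr<const Value> holding the constant to compare against.
+
+          intrusive_ptr<ExpressionNary> pGt(ExpressionCompare::createGt());
+          pGt->addOperand(ExpressionFieldPath::create("a"));
+          pGt->addOperand(ExpressionConstant::create(pLimit));
+    */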
+
+
+ class ExpressionCond :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionCond();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionCond();
+ };
+
+
+ class ExpressionConstant :
+ public Expression {
+ public:
+ // virtuals from Expression
+ virtual ~ExpressionConstant();
+ virtual intrusive_ptr<Expression> optimize();
+ virtual void addDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker,
+ const DocumentSource *pSource) const;
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName,
+ bool requireExpression) const;
+ virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const;
+
+ static intrusive_ptr<ExpressionConstant> createFromBsonElement(
+ BSONElement *pBsonElement);
+ static intrusive_ptr<ExpressionConstant> create(
+ const intrusive_ptr<const Value> &pValue);
+
+ /*
+ Get the constant value represented by this Expression.
+
+ @returns the value
+ */
+ intrusive_ptr<const Value> getValue() const;
+
+ private:
+ ExpressionConstant(BSONElement *pBsonElement);
+ ExpressionConstant(const intrusive_ptr<const Value> &pValue);
+
+ intrusive_ptr<const Value> pValue;
+ };
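+
+    /*
+      Editorial sketch, not part of the original header: constants usually
+      enter the expression tree while parsing operands; a scalar BSONElement
+      can be wrapped directly (elem is assumed to be a BSONElement already in
+      hand):
+
+          intrusive_ptr<ExpressionConstant> pConst(
+              ExpressionConstant::createFromBsonElement(&elem));
+          intrusive_ptr<const Value> pVal(pConst->getValue());
+    */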
+
+
+ class ExpressionDayOfMonth :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionDayOfMonth();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionDayOfMonth();
+ };
+
+
+ class ExpressionDayOfWeek :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionDayOfWeek();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionDayOfWeek();
+ };
+
+
+ class ExpressionDayOfYear :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionDayOfYear();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionDayOfYear();
+ };
+
+
+ class ExpressionDivide :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionDivide();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionDivide();
+ };
+
+
+ class ExpressionFieldPath :
+ public Expression {
+ public:
+ // virtuals from Expression
+ virtual ~ExpressionFieldPath();
+ virtual intrusive_ptr<Expression> optimize();
+ virtual void addDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker,
+ const DocumentSource *pSource) const;
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual void addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName,
+ bool requireExpression) const;
+ virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const;
+
+ /*
+ Create a field path expression.
+
+ Evaluation will extract the value associated with the given field
+ path from the source document.
+
+ @param fieldPath the field path string, without any leading document
+ indicator
+ @returns the newly created field path expression
+ */
+ static intrusive_ptr<ExpressionFieldPath> create(
+ const string &fieldPath);
+
+ /*
+ Return a string representation of the field path.
+
+ @param fieldPrefix whether or not to include the document field
+ indicator prefix
+ @returns the dot-delimited field path
+ */
+ string getFieldPath(bool fieldPrefix) const;
+
+ /*
+ Write a string representation of the field path to a stream.
+
+          @param outStream the stream to write to
+ @param fieldPrefix whether or not to include the document field
+ indicator prefix
+ */
+ void writeFieldPath(ostream &outStream, bool fieldPrefix) const;
+
+ private:
+ ExpressionFieldPath(const string &fieldPath);
+
+ /*
+ Internal implementation of evaluate(), used recursively.
+
+ The internal implementation doesn't just use a loop because of
+ the possibility that we need to skip over an array. If the path
+ is "a.b.c", and a is an array, then we fan out from there, and
+ traverse "b.c" for each element of a:[...]. This requires that
+ a be an array of objects in order to navigate more deeply.
+
+ @param index current path field index to extract
+ @param pathLength maximum number of fields on field path
+ @param pDocument current document traversed to (not the top-level one)
+ @returns the field found; could be an array
+ */
+ intrusive_ptr<const Value> evaluatePath(
+ size_t index, const size_t pathLength,
+ intrusive_ptr<Document> pDocument) const;
+
+ FieldPath fieldPath;
+ };
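+
+    /*
+      Editorial sketch, not part of the original header: extracting a nested
+      value; pDoc is assumed to be an existing intrusive_ptr<Document>.
+
+          intrusive_ptr<ExpressionFieldPath> pPath(
+              ExpressionFieldPath::create("a.b.c"));
+          intrusive_ptr<const Value> pFound(pPath->evaluate(pDoc));
+
+      Per evaluatePath() above, if "a" is an array of objects the traversal
+      fans out and the result may itself be an array.
+    */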
+
+
+ class ExpressionFieldRange :
+ public Expression {
+ public:
+ // virtuals from expression
+ virtual ~ExpressionFieldRange();
+ virtual intrusive_ptr<Expression> optimize();
+ virtual void addDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker,
+ const DocumentSource *pSource) const;
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual void addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName,
+ bool requireExpression) const;
+ virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const;
+ virtual void toMatcherBson(BSONObjBuilder *pBuilder) const;
+
+ /*
+ Create a field range expression.
+
+ Field ranges are meant to match up with classic Matcher semantics,
+ and therefore are conjunctions. For example, these appear in
+ mongo shell predicates in one of these forms:
+ { a : C } -> (a == C) // degenerate "point" range
+ { a : { $lt : C } } -> (a < C) // open range
+ { a : { $gt : C1, $lte : C2 } } -> ((a > C1) && (a <= C2)) // closed
+
+ When initially created, a field range only includes one end of
+ the range. Additional points may be added via intersect().
+
+ Note that NE and CMP are not supported.
+
+ @param pFieldPath the field path for extracting the field value
+ @param cmpOp the comparison operator
+ @param pValue the value to compare against
+ @returns the newly created field range expression
+ */
+ static intrusive_ptr<ExpressionFieldRange> create(
+ const intrusive_ptr<ExpressionFieldPath> &pFieldPath,
+ CmpOp cmpOp, const intrusive_ptr<const Value> &pValue);
+
+ /*
+ Add an intersecting range.
+
+ This can be done any number of times after creation. The
+ range is internally optimized for each new addition. If the new
+ intersection extends or reduces the values within the range, the
+ internal representation is adjusted to reflect that.
+
+ Note that NE and CMP are not supported.
+
+ @param cmpOp the comparison operator
+ @param pValue the value to compare against
+ */
+ void intersect(CmpOp cmpOp, const intrusive_ptr<const Value> &pValue);
+
+ private:
+ ExpressionFieldRange(const intrusive_ptr<ExpressionFieldPath> &pFieldPath,
+ CmpOp cmpOp,
+ const intrusive_ptr<const Value> &pValue);
+
+ intrusive_ptr<ExpressionFieldPath> pFieldPath;
+
+ class Range {
+ public:
+ Range(CmpOp cmpOp, const intrusive_ptr<const Value> &pValue);
+ Range(const Range &rRange);
+
+ Range *intersect(const Range *pRange) const;
+ bool contains(const intrusive_ptr<const Value> &pValue) const;
+
+ Range(const intrusive_ptr<const Value> &pBottom, bool bottomOpen,
+ const intrusive_ptr<const Value> &pTop, bool topOpen);
+
+ bool bottomOpen;
+ bool topOpen;
+ intrusive_ptr<const Value> pBottom;
+ intrusive_ptr<const Value> pTop;
+ };
+
+ scoped_ptr<Range> pRange;
+
+ /*
+ Add to a generic Builder.
+
+ The methods to append items to an object and an array differ by
+ their inclusion of a field name. For more complicated objects,
+ it makes sense to abstract that out and use a generic builder that
+ always looks the same, and then implement addToBsonObj() and
+ addToBsonArray() by using the common method.
+ */
+ void addToBson(Builder *pBuilder) const;
+ };
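+
+    /*
+      Editorial sketch, not part of the original header: the closed range
+      { a : { $gt : C1, $lte : C2 } } is built in two steps; pC1 and pC2 are
+      assumed to be intrusive_ptr<const Value> constants.
+
+          intrusive_ptr<ExpressionFieldRange> pRange(
+              ExpressionFieldRange::create(
+                  ExpressionFieldPath::create("a"), Expression::GT, pC1));
+          pRange->intersect(Expression::LTE, pC2);
+    */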
+
+
+ class ExpressionHour :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionHour();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionHour();
+ };
+
+
+ class ExpressionIfNull :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionIfNull();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionIfNull();
+ };
+
+
+ class ExpressionIsoDate :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionIsoDate();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionIsoDate();
+
+ static const char argYear[];
+ static const char argMonth[];
+ static const char argDayOfMonth[];
+ static const char argHour[];
+ static const char argMinute[];
+ static const char argSecond[];
+
+ static const unsigned flagYear;
+ static const unsigned flagMonth;
+ static const unsigned flagDayOfMonth;
+ static const unsigned flagHour;
+ static const unsigned flagMinute;
+ static const unsigned flagSecond;
+ unsigned flag;
+
+ /**
+ Get a named long argument out of the given document.
+
+ @param pArgs the evaluated document with the named arguments in it
+ @param pName the name of the argument
+ @param defaultValue the value to return if the argument isn't found
+ @returns the value if found, otherwise zero
+ @throws uassert for non-whole numbers or non-numbers
+ */
+ int getIntArg(
+ const intrusive_ptr<Document> &pArgs,
+ const char *pName, int defaultValue) const;
+
+ /**
+ Check that the named argument fits in an integer.
+
+          @param pName the name of the argument
+          @param value the long value of the argument
+ @returns the integer value
+ @throws uassert if the value is out of range
+ */
+ int checkIntRange(const char *pName, long long value) const;
+ };
+
+
class ExpressionLiteral :
public ExpressionNary {
public:
@@ -842,558 +842,558 @@ namespace mongo {
};
- class ExpressionMinute :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionMinute();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionMinute();
- };
-
-
- class ExpressionMod :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionMod();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionMod();
- };
-
-
- class ExpressionMultiply :
- public ExpressionNary {
- public:
- // virtuals from Expression
- virtual ~ExpressionMultiply();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
-
- // virtuals from ExpressionNary
- virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
-
- /*
- Create an expression that finds the product of n operands.
-
- @returns multiplication expression
- */
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionMultiply();
- };
-
-
- class ExpressionMonth :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionMonth();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionMonth();
- };
-
-
- class ExpressionNoOp :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionNoOp();
- virtual intrusive_ptr<Expression> optimize();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionNoOp();
- };
-
-
- class ExpressionNot :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionNot();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionNot();
- };
-
-
- class ExpressionObject :
- public Expression {
- public:
- // virtuals from Expression
- virtual ~ExpressionObject();
- virtual intrusive_ptr<Expression> optimize();
- virtual void addDependencies(
- const intrusive_ptr<DependencyTracker> &pTracker,
- const DocumentSource *pSource) const;
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual void addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName,
- bool requireExpression) const;
- virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const;
-
- /*
- evaluate(), but return a Document instead of a Value-wrapped
- Document.
-
- @param pDocument the input Document
- @returns the result document
- */
- intrusive_ptr<Document> evaluateDocument(
- const intrusive_ptr<Document> &pDocument) const;
-
- /*
- evaluate(), but add the evaluated fields to a given document
- instead of creating a new one.
-
- @param pResult the Document to add the evaluated expressions to
- @param pDocument the input Document
- @param excludeId for exclusions, exclude the _id, if present
- */
- void addToDocument(const intrusive_ptr<Document> &pResult,
- const intrusive_ptr<Document> &pDocument,
- bool excludeId = false) const;
-
- /*
- Estimate the number of fields that will result from evaluating
- this over pDocument. Does not include _id. This is an estimate
- (really an upper bound) because we can't account for undefined
- fields without actually doing the evaluation. But this is still
- useful as an argument to Document::create(), if you plan to use
- addToDocument().
-
- @param pDocument the input document
- @returns estimated number of fields that will result
- */
- size_t getSizeHint(const intrusive_ptr<Document> &pDocument) const;
-
- /*
- Create an empty expression. Until fields are added, this
- will evaluate to an empty document (object).
- */
- static intrusive_ptr<ExpressionObject> create();
-
- /*
- Add a field to the document expression.
-
- @param fieldPath the path the evaluated expression will have in the
- result Document
-          @param pExpression the expression to evaluate to obtain this field's
- Value in the result Document
- */
- void addField(const string &fieldPath,
- const intrusive_ptr<Expression> &pExpression);
-
- /*
- Add a field path to the set of those to be included.
-
- Note that including a nested field implies including everything on
- the path leading down to it.
-
- @param fieldPath the name of the field to be included
- */
- void includePath(const string &fieldPath);
-
- /*
- Add a field path to the set of those to be excluded.
-
- Note that excluding a nested field implies including everything on
- the path leading down to it (because you're stating you want to see
- all the other fields that aren't being excluded).
-
-          @param fieldPath the path of the field to be excluded
- */
- void excludePath(const string &fieldPath);
-
- /**
- Get an iterator that can be used to iterate over all the result
- field names in this ExpressionObject.
-
- @returns the (intrusive_ptr'ed) iterator
- */
- Iterator<string> *getFieldIterator() const;
-
- /*
- Return the expression for a field.
-
- @param fieldName the field name for the expression to return
- @returns the expression used to compute the field, if it is present,
- otherwise NULL.
- */
- intrusive_ptr<Expression> getField(const string &fieldName) const;
-
- /*
- Get a count of the added fields.
-
- @returns how many fields have been added
- */
- size_t getFieldCount() const;
-
- /*
- Get a count of the exclusions.
-
- @returns how many fields have been excluded.
- */
- size_t getExclusionCount() const;
-
- /*
- Specialized BSON conversion that allows for writing out a
- $project specification. This creates a standalone object, which must
- be added to a containing object with a name
-
- @param pBuilder where to write the object to
- @param requireExpression see Expression::addToBsonObj
- */
- void documentToBson(BSONObjBuilder *pBuilder,
- bool requireExpression) const;
-
- /*
- Visitor abstraction used by emitPaths(). Each path is recorded by
- calling path().
- */
- class PathSink {
- public:
- virtual ~PathSink() {};
-
- /**
- Record a path.
-
- @param path the dotted path string
- @param include if true, the path is included; if false, the path
- is excluded
- */
- virtual void path(const string &path, bool include) = 0;
- };
-
- /**
- Emit the field paths that have been included or excluded. "Included"
- includes paths that are referenced in expressions for computed
- fields.
-
-          @param pPathSink where to write the paths to
- */
- void emitPaths(PathSink *pPathSink) const;
-
- private:
- ExpressionObject();
-
- void includePath(
- const FieldPath *pPath, size_t pathi, size_t pathn,
- bool excludeLast);
-
- bool excludePaths;
- set<string> path;
-
- /* these two vectors are maintained in parallel */
- vector<string> vFieldName;
- vector<intrusive_ptr<Expression> > vpExpression;
-
-
- /*
- Utility function used by documentToBson(). Emits inclusion
- and exclusion paths by recursively walking down the nested
- ExpressionObject trees these have created.
-
- @param pSink where to write the paths to
- @param pvPath pointer to a vector of strings describing the path on
- descent; the top-level call should pass an empty vector
- */
- void emitPaths(PathSink *pPathSink, vector<string> *pvPath) const;
-
- /*
- Utility object for collecting emitPaths() results in a BSON
- object.
- */
- class BuilderPathSink :
- public PathSink {
- public:
- // virtuals from PathSink
- virtual void path(const string &path, bool include);
-
- /*
- Create a PathSink that writes paths to a BSONObjBuilder,
- to create an object in the form of { path:is_included,...}
-
- This object uses a builder pointer that won't guarantee the
- lifetime of the builder, so make sure it outlasts the use of
- this for an emitPaths() call.
-
-              @param pBuilder the builder to write paths to
- */
- BuilderPathSink(BSONObjBuilder *pBuilder);
-
- private:
- BSONObjBuilder *pBuilder;
- };
-
- /* utility class used by emitPaths() */
- class PathPusher :
- boost::noncopyable {
- public:
- PathPusher(vector<string> *pvPath, const string &s);
- ~PathPusher();
-
- private:
- vector<string> *pvPath;
- };
- };
-
-
- class ExpressionOr :
- public ExpressionNary {
- public:
- // virtuals from Expression
- virtual ~ExpressionOr();
- virtual intrusive_ptr<Expression> optimize();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void toMatcherBson(BSONObjBuilder *pBuilder) const;
-
- // virtuals from ExpressionNary
- virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
-
- /*
-          Create an expression that finds the disjunction of n operands.
-          The disjunction uses short-circuit logic; the expressions are
-          evaluated in the order they were added to the disjunction, and
-          the evaluation stops and returns true on the first operand that
-          evaluates to true.
-
-          @returns disjunction expression
- */
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionOr();
- };
-
-
- class ExpressionSecond :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionSecond();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionSecond();
- };
-
-
- class ExpressionStrcasecmp :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionStrcasecmp();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionStrcasecmp();
- };
-
-
- class ExpressionSubstr :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionSubstr();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionSubstr();
- };
-
-
- class ExpressionSubtract :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionSubtract();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionSubtract();
- };
-
-
- class ExpressionToLower :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionToLower();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionToLower();
- };
-
-
- class ExpressionToUpper :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionToUpper();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionToUpper();
- };
-
-
- class ExpressionWeek :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionWeek();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionWeek();
- };
-
-
- class ExpressionYear :
- public ExpressionNary {
- public:
- // virtuals from ExpressionNary
- virtual ~ExpressionYear();
- virtual intrusive_ptr<const Value> evaluate(
- const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
-
- static intrusive_ptr<ExpressionNary> create();
-
- private:
- ExpressionYear();
- };
-}
-
-
-/* ======================= INLINED IMPLEMENTATIONS ========================== */
-
-namespace mongo {
-
- inline bool Expression::ObjectCtx::unwindOk() const {
- return ((options & UNWIND_OK) != 0);
- }
-
- inline bool Expression::ObjectCtx::unwindUsed() const {
- return (unwindField.size() != 0);
- }
-
- inline int Expression::signum(int i) {
- if (i < 0)
- return -1;
- if (i > 0)
- return 1;
- return 0;
- }
-
- inline intrusive_ptr<const Value> ExpressionConstant::getValue() const {
- return pValue;
- }
-
- inline string ExpressionFieldPath::getFieldPath(bool fieldPrefix) const {
- return fieldPath.getPath(fieldPrefix);
- }
-
- inline void ExpressionFieldPath::writeFieldPath(
- ostream &outStream, bool fieldPrefix) const {
- return fieldPath.writePath(outStream, fieldPrefix);
- }
-
- inline size_t ExpressionObject::getFieldCount() const {
- return vFieldName.size();
- }
-
- inline ExpressionObject::BuilderPathSink::BuilderPathSink(
- BSONObjBuilder *pB):
- pBuilder(pB) {
- }
-
- inline ExpressionObject::PathPusher::PathPusher(
- vector<string> *pTheVPath, const string &s):
- pvPath(pTheVPath) {
- pvPath->push_back(s);
- }
-
- inline ExpressionObject::PathPusher::~PathPusher() {
- pvPath->pop_back();
- }
-
-}
+ class ExpressionMinute :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionMinute();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionMinute();
+ };
+
+
+ class ExpressionMod :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionMod();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionMod();
+ };
+
+
+ class ExpressionMultiply :
+ public ExpressionNary {
+ public:
+ // virtuals from Expression
+ virtual ~ExpressionMultiply();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+
+ // virtuals from ExpressionNary
+ virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
+
+ /*
+ Create an expression that finds the product of n operands.
+
+ @returns multiplication expression
+ */
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionMultiply();
+ };
+
+
+ class ExpressionMonth :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionMonth();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionMonth();
+ };
+
+
+ class ExpressionNoOp :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionNoOp();
+ virtual intrusive_ptr<Expression> optimize();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionNoOp();
+ };
+
+
+ class ExpressionNot :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionNot();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionNot();
+ };
+
+
+ class ExpressionObject :
+ public Expression {
+ public:
+ // virtuals from Expression
+ virtual ~ExpressionObject();
+ virtual intrusive_ptr<Expression> optimize();
+ virtual void addDependencies(
+ const intrusive_ptr<DependencyTracker> &pTracker,
+ const DocumentSource *pSource) const;
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual void addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName,
+ bool requireExpression) const;
+ virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const;
+
+ /*
+ evaluate(), but return a Document instead of a Value-wrapped
+ Document.
+
+ @param pDocument the input Document
+ @returns the result document
+ */
+ intrusive_ptr<Document> evaluateDocument(
+ const intrusive_ptr<Document> &pDocument) const;
+
+ /*
+ evaluate(), but add the evaluated fields to a given document
+ instead of creating a new one.
+
+ @param pResult the Document to add the evaluated expressions to
+ @param pDocument the input Document
+ @param excludeId for exclusions, exclude the _id, if present
+ */
+ void addToDocument(const intrusive_ptr<Document> &pResult,
+ const intrusive_ptr<Document> &pDocument,
+ bool excludeId = false) const;
+
+ /*
+ Estimate the number of fields that will result from evaluating
+ this over pDocument. Does not include _id. This is an estimate
+ (really an upper bound) because we can't account for undefined
+ fields without actually doing the evaluation. But this is still
+ useful as an argument to Document::create(), if you plan to use
+ addToDocument().
+
+ @param pDocument the input document
+ @returns estimated number of fields that will result
+ */
+ size_t getSizeHint(const intrusive_ptr<Document> &pDocument) const;
+
+ /*
+ Create an empty expression. Until fields are added, this
+ will evaluate to an empty document (object).
+ */
+ static intrusive_ptr<ExpressionObject> create();
+
+ /*
+ Add a field to the document expression.
+
+ @param fieldPath the path the evaluated expression will have in the
+ result Document
+          @param pExpression the expression to evaluate to obtain this
+            field's Value in the result Document
+ */
+ void addField(const string &fieldPath,
+ const intrusive_ptr<Expression> &pExpression);
+
+ /*
+ Add a field path to the set of those to be included.
+
+ Note that including a nested field implies including everything on
+ the path leading down to it.
+
+ @param fieldPath the name of the field to be included
+ */
+ void includePath(const string &fieldPath);
+
+ /*
+ Add a field path to the set of those to be excluded.
+
+ Note that excluding a nested field implies including everything on
+ the path leading down to it (because you're stating you want to see
+ all the other fields that aren't being excluded).
+
+          @param fieldPath the path of the field to be excluded
+ */
+ void excludePath(const string &fieldPath);
+
+ /**
+ Get an iterator that can be used to iterate over all the result
+ field names in this ExpressionObject.
+
+          @returns the iterator
+ */
+ Iterator<string> *getFieldIterator() const;
+
+ /*
+ Return the expression for a field.
+
+ @param fieldName the field name for the expression to return
+ @returns the expression used to compute the field, if it is present,
+ otherwise NULL.
+ */
+ intrusive_ptr<Expression> getField(const string &fieldName) const;
+
+ /*
+ Get a count of the added fields.
+
+ @returns how many fields have been added
+ */
+ size_t getFieldCount() const;
+
+ /*
+ Get a count of the exclusions.
+
+ @returns how many fields have been excluded.
+ */
+ size_t getExclusionCount() const;
+
+ /*
+ Specialized BSON conversion that allows for writing out a
+ $project specification. This creates a standalone object, which must
+ be added to a containing object with a name
+
+ @param pBuilder where to write the object to
+ @param requireExpression see Expression::addToBsonObj
+ */
+ void documentToBson(BSONObjBuilder *pBuilder,
+ bool requireExpression) const;
+
+ /*
+ Visitor abstraction used by emitPaths(). Each path is recorded by
+ calling path().
+ */
+ class PathSink {
+ public:
+ virtual ~PathSink() {};
+
+ /**
+ Record a path.
+
+ @param path the dotted path string
+ @param include if true, the path is included; if false, the path
+ is excluded
+ */
+ virtual void path(const string &path, bool include) = 0;
+ };
+
+ /**
+ Emit the field paths that have been included or excluded. "Included"
+ includes paths that are referenced in expressions for computed
+ fields.
+
+          @param pPathSink where to write the paths to
+ */
+ void emitPaths(PathSink *pPathSink) const;
+
+ private:
+ ExpressionObject();
+
+ void includePath(
+ const FieldPath *pPath, size_t pathi, size_t pathn,
+ bool excludeLast);
+
+ bool excludePaths;
+ set<string> path;
+
+ /* these two vectors are maintained in parallel */
+ vector<string> vFieldName;
+ vector<intrusive_ptr<Expression> > vpExpression;
+
+
+ /*
+ Utility function used by documentToBson(). Emits inclusion
+ and exclusion paths by recursively walking down the nested
+ ExpressionObject trees these have created.
+
+          @param pPathSink where to write the paths to
+ @param pvPath pointer to a vector of strings describing the path on
+ descent; the top-level call should pass an empty vector
+ */
+ void emitPaths(PathSink *pPathSink, vector<string> *pvPath) const;
+
+ /*
+ Utility object for collecting emitPaths() results in a BSON
+ object.
+ */
+ class BuilderPathSink :
+ public PathSink {
+ public:
+ // virtuals from PathSink
+ virtual void path(const string &path, bool include);
+
+ /*
+ Create a PathSink that writes paths to a BSONObjBuilder,
+ to create an object in the form of { path:is_included,...}
+
+          This object holds a raw pointer to the builder and does not
+            manage its lifetime, so make sure the builder outlasts any
+            emitPaths() call that uses this sink.
+
+          @param pBuilder pointer to the builder to write paths to
+ */
+ BuilderPathSink(BSONObjBuilder *pBuilder);
+
+ private:
+ BSONObjBuilder *pBuilder;
+ };
+
+ /* utility class used by emitPaths() */
+ class PathPusher :
+ boost::noncopyable {
+ public:
+ PathPusher(vector<string> *pvPath, const string &s);
+ ~PathPusher();
+
+ private:
+ vector<string> *pvPath;
+ };
+ };
+
+
+ class ExpressionOr :
+ public ExpressionNary {
+ public:
+ // virtuals from Expression
+ virtual ~ExpressionOr();
+ virtual intrusive_ptr<Expression> optimize();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void toMatcherBson(BSONObjBuilder *pBuilder) const;
+
+ // virtuals from ExpressionNary
+ virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
+
+ /*
+          Create an expression that computes the disjunction of n operands.
+          The disjunction uses short-circuit logic; the expressions are
+          evaluated in the order they were added to the disjunction, and
+          the evaluation stops and returns true on the first operand that
+          evaluates to true.
+
+          @returns disjunction expression
+          */
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionOr();
+ };
+
+
+ class ExpressionSecond :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionSecond();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionSecond();
+ };
+
+
+ class ExpressionStrcasecmp :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionStrcasecmp();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionStrcasecmp();
+ };
+
+
+ class ExpressionSubstr :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionSubstr();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionSubstr();
+ };
+
+
+ class ExpressionSubtract :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionSubtract();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionSubtract();
+ };
+
+
+ class ExpressionToLower :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionToLower();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionToLower();
+ };
+
+
+ class ExpressionToUpper :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionToUpper();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionToUpper();
+ };
+
+
+ class ExpressionWeek :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionWeek();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionWeek();
+ };
+
+
+ class ExpressionYear :
+ public ExpressionNary {
+ public:
+ // virtuals from ExpressionNary
+ virtual ~ExpressionYear();
+ virtual intrusive_ptr<const Value> evaluate(
+ const intrusive_ptr<Document> &pDocument) const;
+ virtual const char *getOpName() const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+
+ static intrusive_ptr<ExpressionNary> create();
+
+ private:
+ ExpressionYear();
+ };
+}
+
+
+/* ======================= INLINED IMPLEMENTATIONS ========================== */
+
+namespace mongo {
+
+ inline bool Expression::ObjectCtx::unwindOk() const {
+ return ((options & UNWIND_OK) != 0);
+ }
+
+ inline bool Expression::ObjectCtx::unwindUsed() const {
+ return (unwindField.size() != 0);
+ }
+
+ inline int Expression::signum(int i) {
+ if (i < 0)
+ return -1;
+ if (i > 0)
+ return 1;
+ return 0;
+ }
+
+ inline intrusive_ptr<const Value> ExpressionConstant::getValue() const {
+ return pValue;
+ }
+
+ inline string ExpressionFieldPath::getFieldPath(bool fieldPrefix) const {
+ return fieldPath.getPath(fieldPrefix);
+ }
+
+ inline void ExpressionFieldPath::writeFieldPath(
+ ostream &outStream, bool fieldPrefix) const {
+ return fieldPath.writePath(outStream, fieldPrefix);
+ }
+
+ inline size_t ExpressionObject::getFieldCount() const {
+ return vFieldName.size();
+ }
+
+ inline ExpressionObject::BuilderPathSink::BuilderPathSink(
+ BSONObjBuilder *pB):
+ pBuilder(pB) {
+ }
+
+ inline ExpressionObject::PathPusher::PathPusher(
+ vector<string> *pTheVPath, const string &s):
+ pvPath(pTheVPath) {
+ pvPath->push_back(s);
+ }
+
+ inline ExpressionObject::PathPusher::~PathPusher() {
+ pvPath->pop_back();
+ }
+
+}
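
For reference, the short-circuit behaviour documented for the n-ary boolean expressions above can be reduced to a small standalone sketch; the evaluateDisjunction() helper and its operand type below are illustrative only and are not part of this header.

#include <cstddef>
#include <vector>

// Short-circuit disjunction over n operands, mirroring the ExpressionOr
// documentation: evaluation stops and returns true on the first operand
// that evaluates to true; an empty disjunction is false.
typedef bool (*Operand)();

static bool evaluateDisjunction(const std::vector<Operand> &operands) {
    for (size_t i = 0; i < operands.size(); ++i) {
        if (operands[i]())
            return true;    // remaining operands are never evaluated
    }
    return false;
}
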
diff --git a/src/mongo/db/pipeline/expression_context.cpp b/src/mongo/db/pipeline/expression_context.cpp
index dcc8680d9ce..c2229486b15 100755
--- a/src/mongo/db/pipeline/expression_context.cpp
+++ b/src/mongo/db/pipeline/expression_context.cpp
@@ -1,48 +1,48 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-
-#include "db/interrupt_status.h"
-#include "db/pipeline/expression_context.h"
-
-namespace mongo {
-
- ExpressionContext::~ExpressionContext() {
- }
-
- inline ExpressionContext::ExpressionContext(InterruptStatus *pS):
- inShard(false),
- inRouter(false),
- intCheckCounter(1),
- pStatus(pS) {
- }
-
- void ExpressionContext::checkForInterrupt() {
- /*
- Only really check periodically; the check gets a mutex, and could
- be expensive, at least in relative terms.
- */
- if ((++intCheckCounter % 128) == 0) {
- pStatus->checkForInterrupt();
- }
- }
-
- ExpressionContext *ExpressionContext::create(InterruptStatus *pStatus) {
- return new ExpressionContext(pStatus);
- }
-
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+
+#include "db/interrupt_status.h"
+#include "db/pipeline/expression_context.h"
+
+namespace mongo {
+
+ ExpressionContext::~ExpressionContext() {
+ }
+
+ inline ExpressionContext::ExpressionContext(InterruptStatus *pS):
+ inShard(false),
+ inRouter(false),
+ intCheckCounter(1),
+ pStatus(pS) {
+ }
+
+ void ExpressionContext::checkForInterrupt() {
+ /*
+ Only really check periodically; the check gets a mutex, and could
+ be expensive, at least in relative terms.
+ */
+ if ((++intCheckCounter % 128) == 0) {
+ pStatus->checkForInterrupt();
+ }
+ }
+
+ ExpressionContext *ExpressionContext::create(InterruptStatus *pStatus) {
+ return new ExpressionContext(pStatus);
+ }
+
+}
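
The comment in checkForInterrupt() above explains why the interrupt status is only consulted periodically. The same amortization pattern, reduced to a standalone sketch (the PeriodicChecker class and its checkFn callback are hypothetical):

// Pay for the expensive check (which may take a mutex) only once every
// 128 calls, exactly as ExpressionContext::checkForInterrupt() does.
class PeriodicChecker {
public:
    explicit PeriodicChecker(void (*checkFn)()) : counter(1), checkFn(checkFn) {}

    void maybeCheck() {
        if ((++counter % 128) == 0)
            checkFn();    // e.g. throws if the operation was killed
    }

private:
    unsigned counter;
    void (*checkFn)();
};
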
diff --git a/src/mongo/db/pipeline/expression_context.h b/src/mongo/db/pipeline/expression_context.h
index 893826af222..054d2038cc0 100755
--- a/src/mongo/db/pipeline/expression_context.h
+++ b/src/mongo/db/pipeline/expression_context.h
@@ -1,78 +1,78 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#pragma once
-
-#include "pch.h"
-
-#include "util/intrusive_counter.h"
-
-namespace mongo {
-
- class InterruptStatus;
-
- class ExpressionContext :
- public IntrusiveCounterUnsigned {
- public:
- virtual ~ExpressionContext();
-
- void setInShard(bool b);
- void setInRouter(bool b);
-
- bool getInShard() const;
- bool getInRouter() const;
-
- /**
- Used by a pipeline to check for interrupts so that killOp() works.
-
- @throws if the operation has been interrupted
- */
- void checkForInterrupt();
-
- static ExpressionContext *create(InterruptStatus *pStatus);
-
- private:
- ExpressionContext(InterruptStatus *pStatus);
-
- bool inShard;
- bool inRouter;
- unsigned intCheckCounter; // interrupt check counter
- InterruptStatus *const pStatus;
- };
-}
-
-
-/* ======================= INLINED IMPLEMENTATIONS ========================== */
-
-namespace mongo {
-
- inline void ExpressionContext::setInShard(bool b) {
- inShard = b;
- }
-
- inline void ExpressionContext::setInRouter(bool b) {
- inRouter = b;
- }
-
- inline bool ExpressionContext::getInShard() const {
- return inShard;
- }
-
- inline bool ExpressionContext::getInRouter() const {
- return inRouter;
- }
-
-};
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+
+#include "pch.h"
+
+#include "util/intrusive_counter.h"
+
+namespace mongo {
+
+ class InterruptStatus;
+
+ class ExpressionContext :
+ public IntrusiveCounterUnsigned {
+ public:
+ virtual ~ExpressionContext();
+
+ void setInShard(bool b);
+ void setInRouter(bool b);
+
+ bool getInShard() const;
+ bool getInRouter() const;
+
+ /**
+ Used by a pipeline to check for interrupts so that killOp() works.
+
+ @throws if the operation has been interrupted
+ */
+ void checkForInterrupt();
+
+ static ExpressionContext *create(InterruptStatus *pStatus);
+
+ private:
+ ExpressionContext(InterruptStatus *pStatus);
+
+ bool inShard;
+ bool inRouter;
+ unsigned intCheckCounter; // interrupt check counter
+ InterruptStatus *const pStatus;
+ };
+}
+
+
+/* ======================= INLINED IMPLEMENTATIONS ========================== */
+
+namespace mongo {
+
+ inline void ExpressionContext::setInShard(bool b) {
+ inShard = b;
+ }
+
+ inline void ExpressionContext::setInRouter(bool b) {
+ inRouter = b;
+ }
+
+ inline bool ExpressionContext::getInShard() const {
+ return inShard;
+ }
+
+ inline bool ExpressionContext::getInRouter() const {
+ return inRouter;
+ }
+
+};
diff --git a/src/mongo/db/pipeline/field_path.cpp b/src/mongo/db/pipeline/field_path.cpp
index e79d2b1e940..459c4b69e68 100755
--- a/src/mongo/db/pipeline/field_path.cpp
+++ b/src/mongo/db/pipeline/field_path.cpp
@@ -1,90 +1,90 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-#include "db/pipeline/field_path.h"
-#include "util/mongoutils/str.h"
-
-namespace mongo {
-
- using namespace mongoutils;
-
- const char FieldPath::prefix[] = "$";
-
- FieldPath::~FieldPath() {
- }
-
- FieldPath::FieldPath():
- vFieldName() {
- }
-
- FieldPath::FieldPath(const string &fieldPath):
- vFieldName() {
- /*
- The field path could be using dot notation.
- Break the field path up by peeling off successive pieces.
- */
- size_t startpos = 0;
- while(true) {
- /* find the next dot */
- const size_t dotpos = fieldPath.find('.', startpos);
-
- /* if there are no more dots, use the remainder of the string */
- if (dotpos == fieldPath.npos) {
- vFieldName.push_back(fieldPath.substr(startpos, dotpos));
- break;
- }
-
- /* use the string up to the dot */
- const size_t length = dotpos - startpos;
- uassert(15998, str::stream() <<
- "field names cannot be zero length (in path \"" <<
- fieldPath << "\")",
- length > 0);
-
- vFieldName.push_back(fieldPath.substr(startpos, length));
-
- /* next time, search starting one spot after that */
- startpos = dotpos + 1;
- }
- }
-
- string FieldPath::getPath(bool fieldPrefix) const {
- stringstream ss;
- writePath(ss, fieldPrefix);
- return ss.str();
- }
-
- void FieldPath::writePath(ostream &outStream, bool fieldPrefix) const {
- if (fieldPrefix)
- outStream << prefix;
-
- outStream << vFieldName[0];
-
- const size_t n = vFieldName.size();
- for(size_t i = 1; i < n; ++i)
- outStream << "." << vFieldName[i];
- }
-
- FieldPath &FieldPath::operator=(const FieldPath &rRHS) {
- if (this != &rRHS) {
- vFieldName = rRHS.vFieldName;
- }
-
- return *this;
- }
-
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "db/pipeline/field_path.h"
+#include "util/mongoutils/str.h"
+
+namespace mongo {
+
+ using namespace mongoutils;
+
+ const char FieldPath::prefix[] = "$";
+
+ FieldPath::~FieldPath() {
+ }
+
+ FieldPath::FieldPath():
+ vFieldName() {
+ }
+
+ FieldPath::FieldPath(const string &fieldPath):
+ vFieldName() {
+ /*
+ The field path could be using dot notation.
+ Break the field path up by peeling off successive pieces.
+ */
+ size_t startpos = 0;
+ while(true) {
+ /* find the next dot */
+ const size_t dotpos = fieldPath.find('.', startpos);
+
+ /* if there are no more dots, use the remainder of the string */
+ if (dotpos == fieldPath.npos) {
+ vFieldName.push_back(fieldPath.substr(startpos, dotpos));
+ break;
+ }
+
+ /* use the string up to the dot */
+ const size_t length = dotpos - startpos;
+ uassert(15998, str::stream() <<
+ "field names cannot be zero length (in path \"" <<
+ fieldPath << "\")",
+ length > 0);
+
+ vFieldName.push_back(fieldPath.substr(startpos, length));
+
+ /* next time, search starting one spot after that */
+ startpos = dotpos + 1;
+ }
+ }
+
+ string FieldPath::getPath(bool fieldPrefix) const {
+ stringstream ss;
+ writePath(ss, fieldPrefix);
+ return ss.str();
+ }
+
+ void FieldPath::writePath(ostream &outStream, bool fieldPrefix) const {
+ if (fieldPrefix)
+ outStream << prefix;
+
+ outStream << vFieldName[0];
+
+ const size_t n = vFieldName.size();
+ for(size_t i = 1; i < n; ++i)
+ outStream << "." << vFieldName[i];
+ }
+
+ FieldPath &FieldPath::operator=(const FieldPath &rRHS) {
+ if (this != &rRHS) {
+ vFieldName = rRHS.vFieldName;
+ }
+
+ return *this;
+ }
+
+}
diff --git a/src/mongo/db/pipeline/field_path.h b/src/mongo/db/pipeline/field_path.h
index 618eb283e3e..f6f59ad3295 100755
--- a/src/mongo/db/pipeline/field_path.h
+++ b/src/mongo/db/pipeline/field_path.h
@@ -1,109 +1,109 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#pragma once
-
-#include "pch.h"
-
-namespace mongo {
-
- class FieldPath {
- public:
- virtual ~FieldPath();
-
- /**
- Constructor.
-
- @param fieldPath the dotted field path string
- */
- FieldPath(const string &fieldPath);
-
- /**
- Constructor.
- */
- FieldPath();
-
- /**
- Get the number of path elements in the field path.
-
- @returns the number of path elements
- */
- size_t getPathLength() const;
-
- /**
- Get a particular path element from the path.
-
- @param i the index of the path element
- @returns the path element
- */
- string getFieldName(size_t i) const;
-
- /**
- Get the full path.
-
- @param fieldPrefix whether or not to include the field prefix
- @returns the complete field path
- */
- string getPath(bool fieldPrefix) const;
-
- /**
- Write the full path.
-
- @param outStream where to write the path to
- @param fieldPrefix whether or not to include the field prefix
- */
- void writePath(ostream &outStream, bool fieldPrefix) const;
-
- /**
- Assignment operator.
-
- @param rRHS right hand side of the assignment
- */
- FieldPath &operator=(const FieldPath &rRHS);
-
- /**
- Get the prefix string.
-
- @returns the prefix string
- */
- static const char *getPrefix();
-
- static const char prefix[];
-
- private:
- vector<string> vFieldName;
- };
-}
-
-
-/* ======================= INLINED IMPLEMENTATIONS ========================== */
-
-namespace mongo {
-
- inline size_t FieldPath::getPathLength() const {
- return vFieldName.size();
- }
-
- inline string FieldPath::getFieldName(size_t i) const {
- return vFieldName[i];
- }
-
- inline const char *FieldPath::getPrefix() {
- return prefix;
- }
-
-}
-
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+
+#include "pch.h"
+
+namespace mongo {
+
+ class FieldPath {
+ public:
+ virtual ~FieldPath();
+
+ /**
+ Constructor.
+
+ @param fieldPath the dotted field path string
+ */
+ FieldPath(const string &fieldPath);
+
+ /**
+ Constructor.
+ */
+ FieldPath();
+
+ /**
+ Get the number of path elements in the field path.
+
+ @returns the number of path elements
+ */
+ size_t getPathLength() const;
+
+ /**
+ Get a particular path element from the path.
+
+ @param i the index of the path element
+ @returns the path element
+ */
+ string getFieldName(size_t i) const;
+
+ /**
+ Get the full path.
+
+ @param fieldPrefix whether or not to include the field prefix
+ @returns the complete field path
+ */
+ string getPath(bool fieldPrefix) const;
+
+ /**
+ Write the full path.
+
+ @param outStream where to write the path to
+ @param fieldPrefix whether or not to include the field prefix
+ */
+ void writePath(ostream &outStream, bool fieldPrefix) const;
+
+ /**
+ Assignment operator.
+
+ @param rRHS right hand side of the assignment
+ */
+ FieldPath &operator=(const FieldPath &rRHS);
+
+ /**
+ Get the prefix string.
+
+ @returns the prefix string
+ */
+ static const char *getPrefix();
+
+ static const char prefix[];
+
+ private:
+ vector<string> vFieldName;
+ };
+}
+
+
+/* ======================= INLINED IMPLEMENTATIONS ========================== */
+
+namespace mongo {
+
+ inline size_t FieldPath::getPathLength() const {
+ return vFieldName.size();
+ }
+
+ inline string FieldPath::getFieldName(size_t i) const {
+ return vFieldName[i];
+ }
+
+ inline const char *FieldPath::getPrefix() {
+ return prefix;
+ }
+
+}
+
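
A brief usage sketch of the FieldPath interface declared above; the fieldPathExample() function is hypothetical, and verify() is the server's assertion macro.

#include "db/pipeline/field_path.h"

// "a.b.c" is split on the dots into three path elements; getPath(true)
// re-assembles the dotted path with the "$" prefix.
void fieldPathExample() {
    mongo::FieldPath fp("a.b.c");
    verify(fp.getPathLength() == 3);
    verify(fp.getFieldName(1) == "b");
    verify(fp.getPath(false) == "a.b.c");
    verify(fp.getPath(true) == "$a.b.c");
}
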
diff --git a/src/mongo/db/pipeline/value.h b/src/mongo/db/pipeline/value.h
index ddfcade02d4..f3eada1cbce 100755
--- a/src/mongo/db/pipeline/value.h
+++ b/src/mongo/db/pipeline/value.h
@@ -1,469 +1,469 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#pragma once
-
-#include "pch.h"
-#include "bson/bsontypes.h"
-#include "bson/oid.h"
-#include "util/intrusive_counter.h"
-
-namespace mongo {
- class BSONElement;
- class Builder;
- class Document;
- class Value;
-
- class ValueIterator :
- public IntrusiveCounterUnsigned {
- public:
- virtual ~ValueIterator();
-
- /*
- Ask if there are more fields to return.
-
- @returns true if there are more fields, false otherwise
- */
- virtual bool more() const = 0;
-
- /*
- Move the iterator to point to the next field and return it.
-
- @returns the next field's <name, Value>
- */
- virtual intrusive_ptr<const Value> next() = 0;
- };
-
-
- /*
- Values are immutable, so these are passed around as
- intrusive_ptr<const Value>.
- */
- class Value :
- public IntrusiveCounterUnsigned {
- public:
- ~Value();
-
- /*
- Construct a Value from a BSONElement.
-
- This ignores the name of the element, and only uses the value,
- whatever type it is.
-
- @returns a new Value initialized from the bsonElement
- */
- static intrusive_ptr<const Value> createFromBsonElement(
- BSONElement *pBsonElement);
-
- /*
- Construct an integer-valued Value.
-
- For commonly used values, consider using one of the singleton
- instances defined below.
-
- @param value the value
- @returns a Value with the given value
- */
- static intrusive_ptr<const Value> createInt(int value);
-
- /*
-          Construct a long long-valued Value.
-
- For commonly used values, consider using one of the singleton
- instances defined below.
-
- @param value the value
- @returns a Value with the given value
- */
- static intrusive_ptr<const Value> createLong(long long value);
-
- /*
- Construct a double-valued Value.
-
- @param value the value
- @returns a Value with the given value
- */
- static intrusive_ptr<const Value> createDouble(double value);
-
- /*
- Construct a string-valued Value.
-
- @param value the value
- @returns a Value with the given value
- */
- static intrusive_ptr<const Value> createString(const string &value);
-
- /*
- Construct a date-valued Value.
-
- @param value the value
- @returns a Value with the given value
- */
- static intrusive_ptr<const Value> createDate(const Date_t &value);
-
- /*
- Construct a document-valued Value.
-
- @param value the value
- @returns a Value with the given value
- */
- static intrusive_ptr<const Value> createDocument(
- const intrusive_ptr<Document> &pDocument);
-
- /*
- Construct an array-valued Value.
-
- @param value the value
- @returns a Value with the given value
- */
- static intrusive_ptr<const Value> createArray(
- const vector<intrusive_ptr<const Value> > &vpValue);
-
- /*
- Get the BSON type of the field.
-
- If the type is jstNULL, no value getter will work.
-
- @return the BSON type of the field.
- */
- BSONType getType() const;
-
- /*
- Getters.
-
- @returns the Value's value; asserts if the requested value type is
- incorrect.
- */
- double getDouble() const;
- string getString() const;
- intrusive_ptr<Document> getDocument() const;
- intrusive_ptr<ValueIterator> getArray() const;
- OID getOid() const;
- bool getBool() const;
- Date_t getDate() const;
- string getRegex() const;
- string getSymbol() const;
- int getInt() const;
- unsigned long long getTimestamp() const;
- long long getLong() const;
-
- /*
- Get the length of an array value.
-
- @returns the length of the array, if this is array-valued; otherwise
- throws an error
- */
- size_t getArrayLength() const;
-
- /*
- Add this value to the BSON object under construction.
- */
- void addToBsonObj(BSONObjBuilder *pBuilder, string fieldName) const;
-
- /*
- Add this field to the BSON array under construction.
-
- As part of an array, the Value's name will be ignored.
- */
- void addToBsonArray(BSONArrayBuilder *pBuilder) const;
-
- /*
- Get references to singleton instances of commonly used field values.
- */
- static intrusive_ptr<const Value> getUndefined();
- static intrusive_ptr<const Value> getNull();
- static intrusive_ptr<const Value> getTrue();
- static intrusive_ptr<const Value> getFalse();
- static intrusive_ptr<const Value> getMinusOne();
- static intrusive_ptr<const Value> getZero();
- static intrusive_ptr<const Value> getOne();
-
- /*
- Coerce (cast) a value to a native bool, using JSON rules.
-
- @returns the bool value
- */
- bool coerceToBool() const;
-
- /*
- Coerce (cast) a value to a Boolean Value, using JSON rules.
-
- @returns the Boolean Value value
- */
- intrusive_ptr<const Value> coerceToBoolean() const;
-
- /*
- Coerce (cast) a value to an int, using JSON rules.
-
- @returns the int value
- */
- int coerceToInt() const;
-
- /*
- Coerce (cast) a value to a long long, using JSON rules.
-
- @returns the long value
- */
- long long coerceToLong() const;
-
- /*
- Coerce (cast) a value to a double, using JSON rules.
-
- @returns the double value
- */
- double coerceToDouble() const;
-
- /*
- Coerce (cast) a value to a date, using JSON rules.
-
- @returns the date value
- */
- Date_t coerceToDate() const;
-
- /*
- Coerce (cast) a value to a string, using JSON rules.
-
-          @returns the string value
- */
- string coerceToString() const;
-
- /*
- Compare two Values.
-
- @param rL left value
- @param rR right value
- @returns an integer less than zero, zero, or an integer greater than
- zero, depending on whether rL < rR, rL == rR, or rL > rR
- */
- static int compare(const intrusive_ptr<const Value> &rL,
- const intrusive_ptr<const Value> &rR);
-
-
- /*
- Figure out what the widest of two numeric types is.
-
- Widest can be thought of as "most capable," or "able to hold the
- largest or most precise value." The progression is Int, Long, Double.
-
-          @param lType the left operand's type
-          @param rType the right operand's type
- @returns a BSONType of NumberInt, NumberLong, or NumberDouble
- */
- static BSONType getWidestNumeric(BSONType lType, BSONType rType);
-
- /*
- Get the approximate storage size of the value, in bytes.
-
- @returns approximate storage size of the value.
- */
- size_t getApproximateSize() const;
-
- /*
- Calculate a hash value.
-
- Meant to be used to create composite hashes suitable for
- boost classes such as unordered_map<>.
-
- @param seed value to augment with this' hash
- */
- void hash_combine(size_t &seed) const;
-
- /*
- struct Hash is defined to enable the use of Values as
- keys in boost::unordered_map<>.
-
- Values are always referenced as immutables in the form
- intrusive_ptr<const Value>, so these operate on that construction.
- */
- struct Hash :
- unary_function<intrusive_ptr<const Value>, size_t> {
- size_t operator()(const intrusive_ptr<const Value> &rV) const;
- };
-
- protected:
- Value(); // creates null value
-        Value(BSONType type); // creates an empty (uninitialized) value of type
- // mostly useful for Undefined
- Value(bool boolValue);
- Value(int intValue);
-
- private:
- Value(BSONElement *pBsonElement);
-
- Value(long long longValue);
- Value(double doubleValue);
- Value(const Date_t &dateValue);
- Value(const string &stringValue);
- Value(const intrusive_ptr<Document> &pDocument);
- Value(const vector<intrusive_ptr<const Value> > &vpValue);
-
- void addToBson(Builder *pBuilder) const;
-
- BSONType type;
-
- /* store value in one of these */
- union {
- double doubleValue;
- bool boolValue;
- int intValue;
- unsigned long long timestampValue;
- long long longValue;
-
- } simple; // values that don't need a ctor/dtor
- OID oidValue;
- Date_t dateValue;
- string stringValue; // String, Regex, Symbol
- intrusive_ptr<Document> pDocumentValue;
- vector<intrusive_ptr<const Value> > vpValue; // for arrays
-
-
- /*
- These are often used as the result of boolean or comparison
- expressions.
-
- These are obtained via public static getters defined above.
- */
- static const intrusive_ptr<const Value> pFieldUndefined;
- static const intrusive_ptr<const Value> pFieldNull;
- static const intrusive_ptr<const Value> pFieldTrue;
- static const intrusive_ptr<const Value> pFieldFalse;
- static const intrusive_ptr<const Value> pFieldMinusOne;
- static const intrusive_ptr<const Value> pFieldZero;
- static const intrusive_ptr<const Value> pFieldOne;
-
- /* this implementation is used for getArray() */
- class vi :
- public ValueIterator {
- public:
- // virtuals from ValueIterator
- virtual ~vi();
- virtual bool more() const;
- virtual intrusive_ptr<const Value> next();
-
- private:
- friend class Value;
- vi(const intrusive_ptr<const Value> &pSource,
- const vector<intrusive_ptr<const Value> > *pvpValue);
-
- size_t size;
- size_t nextIndex;
- const vector<intrusive_ptr<const Value> > *pvpValue;
- }; /* class vi */
-
- };
-
- /*
- Equality operator for values.
-
- Useful for unordered_map<>, etc.
- */
- inline bool operator==(const intrusive_ptr<const Value> &v1,
- const intrusive_ptr<const Value> &v2) {
- return (Value::compare(v1, v2) == 0);
- }
-
- /*
- For performance reasons, there are various sharable static values
- defined in class Value, obtainable by methods such as getUndefined(),
- getTrue(), getOne(), etc. We don't want these to go away as they are
- used by a multitude of threads evaluating pipelines. In order to avoid
- having to use atomic integers in the intrusive reference counter, this
- class overrides the reference counting methods to do nothing, making it
- safe to use for static Values.
-
- At this point, only the constructors necessary for the static Values in
- common use have been defined. The remainder can be defined if necessary.
- */
- class ValueStatic :
- public Value {
- public:
- // virtuals from IntrusiveCounterUnsigned
- virtual void addRef() const;
- virtual void release() const;
-
- // constructors
- ValueStatic();
- ValueStatic(BSONType type);
- ValueStatic(bool boolValue);
- ValueStatic(int intValue);
- };
-}
-
-/* ======================= INLINED IMPLEMENTATIONS ========================== */
-
-namespace mongo {
-
- inline BSONType Value::getType() const {
- return type;
- }
-
- inline size_t Value::getArrayLength() const {
- verify(getType() == Array);
- return vpValue.size();
- }
-
- inline intrusive_ptr<const Value> Value::getUndefined() {
- return pFieldUndefined;
- }
-
- inline intrusive_ptr<const Value> Value::getNull() {
- return pFieldNull;
- }
-
- inline intrusive_ptr<const Value> Value::getTrue() {
- return pFieldTrue;
- }
-
- inline intrusive_ptr<const Value> Value::getFalse() {
- return pFieldFalse;
- }
-
- inline intrusive_ptr<const Value> Value::getMinusOne() {
- return pFieldMinusOne;
- }
-
- inline intrusive_ptr<const Value> Value::getZero() {
- return pFieldZero;
- }
-
- inline intrusive_ptr<const Value> Value::getOne() {
- return pFieldOne;
- }
-
- inline size_t Value::Hash::operator()(
- const intrusive_ptr<const Value> &rV) const {
- size_t seed = 0xf0afbeef;
- rV->hash_combine(seed);
- return seed;
- }
-
- inline ValueStatic::ValueStatic():
- Value() {
- }
-
- inline ValueStatic::ValueStatic(BSONType type):
- Value(type) {
- }
-
- inline ValueStatic::ValueStatic(bool boolValue):
- Value(boolValue) {
- }
-
- inline ValueStatic::ValueStatic(int intValue):
- Value(intValue) {
- }
-
-};
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+
+#include "pch.h"
+#include "bson/bsontypes.h"
+#include "bson/oid.h"
+#include "util/intrusive_counter.h"
+
+namespace mongo {
+ class BSONElement;
+ class Builder;
+ class Document;
+ class Value;
+
+ class ValueIterator :
+ public IntrusiveCounterUnsigned {
+ public:
+ virtual ~ValueIterator();
+
+ /*
+ Ask if there are more fields to return.
+
+ @returns true if there are more fields, false otherwise
+ */
+ virtual bool more() const = 0;
+
+ /*
+ Move the iterator to point to the next field and return it.
+
+ @returns the next field's <name, Value>
+ */
+ virtual intrusive_ptr<const Value> next() = 0;
+ };
+
+
+ /*
+ Values are immutable, so these are passed around as
+ intrusive_ptr<const Value>.
+ */
+ class Value :
+ public IntrusiveCounterUnsigned {
+ public:
+ ~Value();
+
+ /*
+ Construct a Value from a BSONElement.
+
+ This ignores the name of the element, and only uses the value,
+ whatever type it is.
+
+ @returns a new Value initialized from the bsonElement
+ */
+ static intrusive_ptr<const Value> createFromBsonElement(
+ BSONElement *pBsonElement);
+
+ /*
+ Construct an integer-valued Value.
+
+ For commonly used values, consider using one of the singleton
+ instances defined below.
+
+ @param value the value
+ @returns a Value with the given value
+ */
+ static intrusive_ptr<const Value> createInt(int value);
+
+ /*
+          Construct a long long-valued Value.
+
+ For commonly used values, consider using one of the singleton
+ instances defined below.
+
+ @param value the value
+ @returns a Value with the given value
+ */
+ static intrusive_ptr<const Value> createLong(long long value);
+
+ /*
+ Construct a double-valued Value.
+
+ @param value the value
+ @returns a Value with the given value
+ */
+ static intrusive_ptr<const Value> createDouble(double value);
+
+ /*
+ Construct a string-valued Value.
+
+ @param value the value
+ @returns a Value with the given value
+ */
+ static intrusive_ptr<const Value> createString(const string &value);
+
+ /*
+ Construct a date-valued Value.
+
+ @param value the value
+ @returns a Value with the given value
+ */
+ static intrusive_ptr<const Value> createDate(const Date_t &value);
+
+ /*
+ Construct a document-valued Value.
+
+ @param value the value
+ @returns a Value with the given value
+ */
+ static intrusive_ptr<const Value> createDocument(
+ const intrusive_ptr<Document> &pDocument);
+
+ /*
+ Construct an array-valued Value.
+
+ @param value the value
+ @returns a Value with the given value
+ */
+ static intrusive_ptr<const Value> createArray(
+ const vector<intrusive_ptr<const Value> > &vpValue);
+
+ /*
+ Get the BSON type of the field.
+
+ If the type is jstNULL, no value getter will work.
+
+ @return the BSON type of the field.
+ */
+ BSONType getType() const;
+
+ /*
+ Getters.
+
+ @returns the Value's value; asserts if the requested value type is
+ incorrect.
+ */
+ double getDouble() const;
+ string getString() const;
+ intrusive_ptr<Document> getDocument() const;
+ intrusive_ptr<ValueIterator> getArray() const;
+ OID getOid() const;
+ bool getBool() const;
+ Date_t getDate() const;
+ string getRegex() const;
+ string getSymbol() const;
+ int getInt() const;
+ unsigned long long getTimestamp() const;
+ long long getLong() const;
+
+ /*
+ Get the length of an array value.
+
+ @returns the length of the array, if this is array-valued; otherwise
+ throws an error
+ */
+ size_t getArrayLength() const;
+
+ /*
+ Add this value to the BSON object under construction.
+ */
+ void addToBsonObj(BSONObjBuilder *pBuilder, string fieldName) const;
+
+ /*
+ Add this field to the BSON array under construction.
+
+ As part of an array, the Value's name will be ignored.
+ */
+ void addToBsonArray(BSONArrayBuilder *pBuilder) const;
+
+ /*
+ Get references to singleton instances of commonly used field values.
+ */
+ static intrusive_ptr<const Value> getUndefined();
+ static intrusive_ptr<const Value> getNull();
+ static intrusive_ptr<const Value> getTrue();
+ static intrusive_ptr<const Value> getFalse();
+ static intrusive_ptr<const Value> getMinusOne();
+ static intrusive_ptr<const Value> getZero();
+ static intrusive_ptr<const Value> getOne();
+
+ /*
+ Coerce (cast) a value to a native bool, using JSON rules.
+
+ @returns the bool value
+ */
+ bool coerceToBool() const;
+
+ /*
+ Coerce (cast) a value to a Boolean Value, using JSON rules.
+
+ @returns the Boolean Value value
+ */
+ intrusive_ptr<const Value> coerceToBoolean() const;
+
+ /*
+ Coerce (cast) a value to an int, using JSON rules.
+
+ @returns the int value
+ */
+ int coerceToInt() const;
+
+ /*
+ Coerce (cast) a value to a long long, using JSON rules.
+
+ @returns the long value
+ */
+ long long coerceToLong() const;
+
+ /*
+ Coerce (cast) a value to a double, using JSON rules.
+
+ @returns the double value
+ */
+ double coerceToDouble() const;
+
+ /*
+ Coerce (cast) a value to a date, using JSON rules.
+
+ @returns the date value
+ */
+ Date_t coerceToDate() const;
+
+ /*
+ Coerce (cast) a value to a string, using JSON rules.
+
+          @returns the string value
+ */
+ string coerceToString() const;
+
+ /*
+ Compare two Values.
+
+ @param rL left value
+ @param rR right value
+ @returns an integer less than zero, zero, or an integer greater than
+ zero, depending on whether rL < rR, rL == rR, or rL > rR
+ */
+ static int compare(const intrusive_ptr<const Value> &rL,
+ const intrusive_ptr<const Value> &rR);
+
+
+ /*
+ Figure out what the widest of two numeric types is.
+
+ Widest can be thought of as "most capable," or "able to hold the
+ largest or most precise value." The progression is Int, Long, Double.
+
+          @param lType the left operand's type
+          @param rType the right operand's type
+ @returns a BSONType of NumberInt, NumberLong, or NumberDouble
+ */
+ static BSONType getWidestNumeric(BSONType lType, BSONType rType);
+
+ /*
+ Get the approximate storage size of the value, in bytes.
+
+ @returns approximate storage size of the value.
+ */
+ size_t getApproximateSize() const;
+
+ /*
+ Calculate a hash value.
+
+ Meant to be used to create composite hashes suitable for
+ boost classes such as unordered_map<>.
+
+ @param seed value to augment with this' hash
+ */
+ void hash_combine(size_t &seed) const;
+
+ /*
+ struct Hash is defined to enable the use of Values as
+ keys in boost::unordered_map<>.
+
+ Values are always referenced as immutables in the form
+ intrusive_ptr<const Value>, so these operate on that construction.
+ */
+ struct Hash :
+ unary_function<intrusive_ptr<const Value>, size_t> {
+ size_t operator()(const intrusive_ptr<const Value> &rV) const;
+ };
+
+ protected:
+ Value(); // creates null value
+        Value(BSONType type); // creates an empty (uninitialized) value of type
+ // mostly useful for Undefined
+ Value(bool boolValue);
+ Value(int intValue);
+
+ private:
+ Value(BSONElement *pBsonElement);
+
+ Value(long long longValue);
+ Value(double doubleValue);
+ Value(const Date_t &dateValue);
+ Value(const string &stringValue);
+ Value(const intrusive_ptr<Document> &pDocument);
+ Value(const vector<intrusive_ptr<const Value> > &vpValue);
+
+ void addToBson(Builder *pBuilder) const;
+
+ BSONType type;
+
+ /* store value in one of these */
+ union {
+ double doubleValue;
+ bool boolValue;
+ int intValue;
+ unsigned long long timestampValue;
+ long long longValue;
+
+ } simple; // values that don't need a ctor/dtor
+ OID oidValue;
+ Date_t dateValue;
+ string stringValue; // String, Regex, Symbol
+ intrusive_ptr<Document> pDocumentValue;
+ vector<intrusive_ptr<const Value> > vpValue; // for arrays
+
+
+ /*
+ These are often used as the result of boolean or comparison
+ expressions.
+
+ These are obtained via public static getters defined above.
+ */
+ static const intrusive_ptr<const Value> pFieldUndefined;
+ static const intrusive_ptr<const Value> pFieldNull;
+ static const intrusive_ptr<const Value> pFieldTrue;
+ static const intrusive_ptr<const Value> pFieldFalse;
+ static const intrusive_ptr<const Value> pFieldMinusOne;
+ static const intrusive_ptr<const Value> pFieldZero;
+ static const intrusive_ptr<const Value> pFieldOne;
+
+ /* this implementation is used for getArray() */
+ class vi :
+ public ValueIterator {
+ public:
+ // virtuals from ValueIterator
+ virtual ~vi();
+ virtual bool more() const;
+ virtual intrusive_ptr<const Value> next();
+
+ private:
+ friend class Value;
+ vi(const intrusive_ptr<const Value> &pSource,
+ const vector<intrusive_ptr<const Value> > *pvpValue);
+
+ size_t size;
+ size_t nextIndex;
+ const vector<intrusive_ptr<const Value> > *pvpValue;
+ }; /* class vi */
+
+ };
+
+ /*
+ Equality operator for values.
+
+ Useful for unordered_map<>, etc.
+ */
+ inline bool operator==(const intrusive_ptr<const Value> &v1,
+ const intrusive_ptr<const Value> &v2) {
+ return (Value::compare(v1, v2) == 0);
+ }
+
+ /*
+ For performance reasons, there are various sharable static values
+ defined in class Value, obtainable by methods such as getUndefined(),
+ getTrue(), getOne(), etc. We don't want these to go away as they are
+ used by a multitude of threads evaluating pipelines. In order to avoid
+ having to use atomic integers in the intrusive reference counter, this
+ class overrides the reference counting methods to do nothing, making it
+ safe to use for static Values.
+
+ At this point, only the constructors necessary for the static Values in
+ common use have been defined. The remainder can be defined if necessary.
+ */
+ class ValueStatic :
+ public Value {
+ public:
+ // virtuals from IntrusiveCounterUnsigned
+ virtual void addRef() const;
+ virtual void release() const;
+
+ // constructors
+ ValueStatic();
+ ValueStatic(BSONType type);
+ ValueStatic(bool boolValue);
+ ValueStatic(int intValue);
+ };
+}
+
+/* ======================= INLINED IMPLEMENTATIONS ========================== */
+
+namespace mongo {
+
+ inline BSONType Value::getType() const {
+ return type;
+ }
+
+ inline size_t Value::getArrayLength() const {
+ verify(getType() == Array);
+ return vpValue.size();
+ }
+
+ inline intrusive_ptr<const Value> Value::getUndefined() {
+ return pFieldUndefined;
+ }
+
+ inline intrusive_ptr<const Value> Value::getNull() {
+ return pFieldNull;
+ }
+
+ inline intrusive_ptr<const Value> Value::getTrue() {
+ return pFieldTrue;
+ }
+
+ inline intrusive_ptr<const Value> Value::getFalse() {
+ return pFieldFalse;
+ }
+
+ inline intrusive_ptr<const Value> Value::getMinusOne() {
+ return pFieldMinusOne;
+ }
+
+ inline intrusive_ptr<const Value> Value::getZero() {
+ return pFieldZero;
+ }
+
+ inline intrusive_ptr<const Value> Value::getOne() {
+ return pFieldOne;
+ }
+
+ inline size_t Value::Hash::operator()(
+ const intrusive_ptr<const Value> &rV) const {
+ size_t seed = 0xf0afbeef;
+ rV->hash_combine(seed);
+ return seed;
+ }
+
+ inline ValueStatic::ValueStatic():
+ Value() {
+ }
+
+ inline ValueStatic::ValueStatic(BSONType type):
+ Value(type) {
+ }
+
+ inline ValueStatic::ValueStatic(bool boolValue):
+ Value(boolValue) {
+ }
+
+ inline ValueStatic::ValueStatic(int intValue):
+ Value(intValue) {
+ }
+
+};
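
A short usage sketch of the Value interface above; the valueExample() function is hypothetical, and verify() is the server's assertion macro.

#include "db/pipeline/value.h"

void valueExample() {
    using namespace mongo;

    // Shared singletons avoid allocating the most common constants.
    boost::intrusive_ptr<const Value> pOne(Value::getOne());
    boost::intrusive_ptr<const Value> pZero(Value::getZero());

    // Coercion follows JSON-style rules; compare() orders values.
    verify(pOne->coerceToBool());
    verify(!pZero->coerceToBool());
    verify(Value::compare(pOne, pZero) > 0);

    // The "widest" of int and double is double.
    verify(Value::getWidestNumeric(NumberInt, NumberDouble) == NumberDouble);
}
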
diff --git a/src/mongo/util/intrusive_counter.cpp b/src/mongo/util/intrusive_counter.cpp
index fc01f40b41a..edce3fb39a7 100755
--- a/src/mongo/util/intrusive_counter.cpp
+++ b/src/mongo/util/intrusive_counter.cpp
@@ -1,30 +1,30 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "util/intrusive_counter.h"
-
-namespace mongo {
-
- void IntrusiveCounterUnsigned::addRef() const {
- ++counter;
- }
-
- void IntrusiveCounterUnsigned::release() const {
- if (!--counter)
- delete this;
- }
-
-}
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "util/intrusive_counter.h"
+
+namespace mongo {
+
+ void IntrusiveCounterUnsigned::addRef() const {
+ ++counter;
+ }
+
+ void IntrusiveCounterUnsigned::release() const {
+ if (!--counter)
+ delete this;
+ }
+
+}
diff --git a/src/mongo/util/intrusive_counter.h b/src/mongo/util/intrusive_counter.h
index bcebb6288cf..547f5bfd969 100755
--- a/src/mongo/util/intrusive_counter.h
+++ b/src/mongo/util/intrusive_counter.h
@@ -1,79 +1,79 @@
-/**
- * Copyright (c) 2011 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#pragma once
-
-#include <boost/intrusive_ptr.hpp>
-#include <boost/noncopyable.hpp>
-
-namespace mongo {
-
-/*
- IntrusiveCounter is a sharable implementation of a reference counter that
- objects can use to be compatible with boost::intrusive_ptr<>.
-
- Some objects that use IntrusiveCounter are immutable, and only have
- const methods. This may require their pointers to be declared as
- intrusive_ptr<const ClassName> . In order to be able to share pointers to
- these immutables, the methods associated with IntrusiveCounter are declared
- as const, and the counter itself is marked as mutable.
-
- IntrusiveCounter itself is abstract, allowing for multiple implementations.
- For example, IntrusiveCounterUnsigned uses ordinary unsigned integers for
- the reference count, and is good for situations where thread safety is not
-    required. Where it is, implementations using atomic integers should
- be used. For static objects, the implementations of addRef() and release()
- can be overridden to do nothing.
- */
- class IntrusiveCounter :
- boost::noncopyable {
- public:
- virtual ~IntrusiveCounter() {};
-
- // these are here for the boost intrusive_ptr<> class
- friend inline void intrusive_ptr_add_ref(const IntrusiveCounter *pIC) {
- pIC->addRef(); };
- friend inline void intrusive_ptr_release(const IntrusiveCounter *pIC) {
- pIC->release(); };
-
- virtual void addRef() const = 0;
- virtual void release() const = 0;
- };
-
- class IntrusiveCounterUnsigned :
- public IntrusiveCounter {
- public:
- // virtuals from IntrusiveCounter
- virtual void addRef() const;
- virtual void release() const;
-
- IntrusiveCounterUnsigned();
-
- private:
- mutable unsigned counter;
- };
-
-};
-
-/* ======================= INLINED IMPLEMENTATIONS ========================== */
-
-namespace mongo {
-
- inline IntrusiveCounterUnsigned::IntrusiveCounterUnsigned():
- counter(0) {
- }
-
-};
+/**
+ * Copyright (c) 2011 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+
+#include <boost/intrusive_ptr.hpp>
+#include <boost/noncopyable.hpp>
+
+namespace mongo {
+
+/*
+ IntrusiveCounter is a sharable implementation of a reference counter that
+ objects can use to be compatible with boost::intrusive_ptr<>.
+
+ Some objects that use IntrusiveCounter are immutable, and only have
+ const methods. This may require their pointers to be declared as
+ intrusive_ptr<const ClassName> . In order to be able to share pointers to
+ these immutables, the methods associated with IntrusiveCounter are declared
+ as const, and the counter itself is marked as mutable.
+
+ IntrusiveCounter itself is abstract, allowing for multiple implementations.
+ For example, IntrusiveCounterUnsigned uses ordinary unsigned integers for
+ the reference count, and is good for situations where thread safety is not
+    required. Where it is, implementations using atomic integers should
+ be used. For static objects, the implementations of addRef() and release()
+ can be overridden to do nothing.
+ */
+ class IntrusiveCounter :
+ boost::noncopyable {
+ public:
+ virtual ~IntrusiveCounter() {};
+
+ // these are here for the boost intrusive_ptr<> class
+ friend inline void intrusive_ptr_add_ref(const IntrusiveCounter *pIC) {
+ pIC->addRef(); };
+ friend inline void intrusive_ptr_release(const IntrusiveCounter *pIC) {
+ pIC->release(); };
+
+ virtual void addRef() const = 0;
+ virtual void release() const = 0;
+ };
+
+ class IntrusiveCounterUnsigned :
+ public IntrusiveCounter {
+ public:
+ // virtuals from IntrusiveCounter
+ virtual void addRef() const;
+ virtual void release() const;
+
+ IntrusiveCounterUnsigned();
+
+ private:
+ mutable unsigned counter;
+ };
+
+};
+
+/* ======================= INLINED IMPLEMENTATIONS ========================== */
+
+namespace mongo {
+
+ inline IntrusiveCounterUnsigned::IntrusiveCounterUnsigned():
+ counter(0) {
+ }
+
+};
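
To illustrate how the counter above cooperates with boost::intrusive_ptr<>, a minimal sketch; the Thing class and thingExample() are hypothetical.

#include "util/intrusive_counter.h"

// A class derived from IntrusiveCounterUnsigned can be held by
// boost::intrusive_ptr<>; the friend functions above forward
// intrusive_ptr_add_ref()/intrusive_ptr_release() to addRef()/release().
class Thing : public mongo::IntrusiveCounterUnsigned {
public:
    static boost::intrusive_ptr<Thing> create() { return new Thing(); }

private:
    Thing() {}
};

void thingExample() {
    boost::intrusive_ptr<Thing> p(Thing::create());  // counter becomes 1
    boost::intrusive_ptr<Thing> q(p);                // counter becomes 2
}   // both pointers are destroyed; release() deletes the object
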
diff --git a/src/mongo/util/string_writer.h b/src/mongo/util/string_writer.h
index e83881bf6f6..ee1b61c95f4 100755
--- a/src/mongo/util/string_writer.h
+++ b/src/mongo/util/string_writer.h
@@ -1,28 +1,28 @@
-/**
- * Copyright 2011 (c) 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#pragma once
-
-#include "pch.h"
-
-namespace mongo {
-
- class StringWriter {
- public:
- virtual ~StringWriter() {};
- virtual void writeString(stringstream &ss) const = 0;
- };
-}
+/**
+ * Copyright 2011 (c) 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+
+#include "pch.h"
+
+namespace mongo {
+
+ class StringWriter {
+ public:
+ virtual ~StringWriter() {};
+ virtual void writeString(stringstream &ss) const = 0;
+ };
+}
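
A minimal implementer of the StringWriter interface above might look like the following; the Greeting class is hypothetical.

#include <sstream>
#include "util/string_writer.h"

class Greeting : public mongo::StringWriter {
public:
    // virtual from StringWriter: append this object's text form to ss
    virtual void writeString(std::stringstream &ss) const {
        ss << "hello, aggregation";
    }
};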