summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rwxr-xr-xsrc/mongo/db/commands/aggregate.js184
-rwxr-xr-xsrc/mongo/db/commands/document_source_cursor.cpp90
-rwxr-xr-xsrc/mongo/db/commands/pipeline.cpp414
-rwxr-xr-xsrc/mongo/db/commands/pipeline.h206
-rwxr-xr-xsrc/mongo/db/commands/pipeline_command.cpp178
-rwxr-xr-xsrc/mongo/db/commands/pipeline_d.cpp234
-rwxr-xr-xsrc/mongo/db/commands/pipeline_d.h40
-rwxr-xr-xsrc/mongo/db/pipeline/accumulator.cpp86
-rwxr-xr-xsrc/mongo/db/pipeline/accumulator.h100
-rwxr-xr-xsrc/mongo/db/pipeline/accumulator_add_to_set.cpp50
-rwxr-xr-xsrc/mongo/db/pipeline/accumulator_avg.cpp150
-rwxr-xr-xsrc/mongo/db/pipeline/accumulator_first.cpp16
-rwxr-xr-xsrc/mongo/db/pipeline/accumulator_last.cpp14
-rwxr-xr-xsrc/mongo/db/pipeline/accumulator_min_max.cpp22
-rwxr-xr-xsrc/mongo/db/pipeline/accumulator_push.cpp50
-rwxr-xr-xsrc/mongo/db/pipeline/accumulator_single_value.cpp4
-rwxr-xr-xsrc/mongo/db/pipeline/accumulator_sum.cpp10
-rwxr-xr-xsrc/mongo/db/pipeline/builder.cpp44
-rwxr-xr-xsrc/mongo/db/pipeline/builder.h74
-rwxr-xr-xsrc/mongo/db/pipeline/doc_mem_monitor.cpp60
-rwxr-xr-xsrc/mongo/db/pipeline/doc_mem_monitor.h78
-rwxr-xr-xsrc/mongo/db/pipeline/document.cpp90
-rwxr-xr-xsrc/mongo/db/pipeline/document.h112
-rwxr-xr-xsrc/mongo/db/pipeline/document_source.cpp24
-rwxr-xr-xsrc/mongo/db/pipeline/document_source.h912
-rwxr-xr-xsrc/mongo/db/pipeline/document_source_bson_array.cpp48
-rwxr-xr-xsrc/mongo/db/pipeline/document_source_command_futures.cpp144
-rwxr-xr-xsrc/mongo/db/pipeline/document_source_filter.cpp62
-rwxr-xr-xsrc/mongo/db/pipeline/document_source_group.cpp276
-rw-r--r--src/mongo/db/pipeline/document_source_limit.cpp18
-rwxr-xr-xsrc/mongo/db/pipeline/document_source_match.cpp56
-rwxr-xr-xsrc/mongo/db/pipeline/document_source_out.cpp18
-rwxr-xr-xsrc/mongo/db/pipeline/document_source_project.cpp122
-rw-r--r--src/mongo/db/pipeline/document_source_skip.cpp14
-rwxr-xr-xsrc/mongo/db/pipeline/document_source_sort.cpp202
-rwxr-xr-xsrc/mongo/db/pipeline/document_source_unwind.cpp256
-rwxr-xr-xsrc/mongo/db/pipeline/expression.cpp2494
-rwxr-xr-xsrc/mongo/db/pipeline/expression.h992
-rwxr-xr-xsrc/mongo/db/pipeline/expression_context.cpp6
-rwxr-xr-xsrc/mongo/db/pipeline/expression_context.h28
-rwxr-xr-xsrc/mongo/db/pipeline/field_path.cpp38
-rwxr-xr-xsrc/mongo/db/pipeline/field_path.h60
-rwxr-xr-xsrc/mongo/db/pipeline/value.cpp462
-rwxr-xr-xsrc/mongo/db/pipeline/value.h114
-rw-r--r--src/mongo/s/commands_public.cpp234
45 files changed, 4351 insertions, 4535 deletions
diff --git a/src/mongo/db/commands/aggregate.js b/src/mongo/db/commands/aggregate.js
deleted file mode 100755
index 7741e3121ff..00000000000
--- a/src/mongo/db/commands/aggregate.js
+++ /dev/null
@@ -1,184 +0,0 @@
-/* sample aggregate command queries */
-
-// make sure we're using the right db; this is the same as "use mydb;" in shell
-db = db.getSisterDB("mydb");
-
-// just passing through fields
-var p1 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- tags : 1,
- pageViews : 1
- }}
-]});
-
-// unwinding an array
-var p2 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- tag : { $unwind : "tags" },
- pageViews : 1
- }}
-]});
-
-// pulling values out of subdocuments
-var p3 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- otherfoo : "other.foo",
- otherbar : "other.bar"
- }}
-]});
-
-// projection includes a computed value
-var p4 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- daveWroteIt : { $eq:["$author", "dave"] }
- }}
-]});
-
-// projection includes a virtual (fabricated) document
-var p5 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- pageViews : 1,
- tag : { $unwind : "tags" }
- }},
- { $project : {
- author : 1,
- subDocument : { foo : "pageViews", bar : "tag" }
- }}
-]});
-
-// multi-step aggregate
-// nested expressions in computed fields
-var p6 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- tag : { $unwind : "tags" },
- pageViews : 1
- }},
- { $project : {
- author : 1,
- tag : 1,
- pageViews : 1,
- daveWroteIt : { $eq:["$author", "dave"] },
- weLikeIt : { $or:[ { $eq:["$author", "dave"] },
- { $eq:["$tag", "good"] } ] }
- }}
-]});
-
-// slightly more complex computed expression; $ifnull
-var p7 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- theSum : { $add:["$pageViews",
- { $ifnull:["$other.foo",
- "$other.bar"] } ] }
- }}
-]});
-
-// dotted path inclusion; _id exclusion
-var p8 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- _id : 0,
- author : 1,
- tag : { $unwind : "tags" },
- "comments.author" : 1
- }}
-]});
-
-
-// simple matching
-var m1 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $match : { author : "dave" } }
-]});
-
-// combining matching with a projection
-var m2 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- title : 1,
- author : 1,
- pageViews : 1,
- tag : { $unwind : "tags" },
- comments : 1
- }},
- { $match : { tag : "nasty" } }
-]});
-
-
-// group by tag
-var g1 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- tag : { $unwind : "tags" },
- pageViews : 1
- }},
- { $group : {
- _id: { tag : 1 },
- docsByTag : { $sum : 1 },
- viewsByTag : { $sum : "$pageViews" }
- }}
-]});
-
-// $max, and averaging in a final projection
-var g2 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- tag : { $unwind : "tags" },
- pageViews : 1
- }},
- { $group : {
- _id: { tag : 1 },
- docsByTag : { $sum : 1 },
- viewsByTag : { $sum : "$pageViews" },
- mostViewsByTag : { $max : "$pageViews" },
- }},
- { $project : {
- _id: false,
- tag : "_id.tag",
- mostViewsByTag : 1,
- docsByTag : 1,
- viewsByTag : 1,
- avgByTag : { $divide:["$viewsByTag", "$docsByTag"] }
- }}
-]});
-
-// $push as an accumulator; can pivot data
-var g3 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- tag : { $unwind : "tags" }
- }},
- { $group : {
- _id : { tag : 1 },
- authors : { $push : "$author" }
- }}
-]});
-
-// $avg, and averaging in a final projection
-var g4 = db.runCommand(
-{ aggregate : "article", pipeline : [
- { $project : {
- author : 1,
- tag : { $unwind : "tags" },
- pageViews : 1
- }},
- { $group : {
- _id: { tag : 1 },
- docsByTag : { $sum : 1 },
- viewsByTag : { $sum : "$pageViews" },
- avgByTag : { $avg : "$pageViews" },
- }}
-]});
diff --git a/src/mongo/db/commands/document_source_cursor.cpp b/src/mongo/db/commands/document_source_cursor.cpp
index a295819853a..e63bca57efc 100755
--- a/src/mongo/db/commands/document_source_cursor.cpp
+++ b/src/mongo/db/commands/document_source_cursor.cpp
@@ -27,80 +27,80 @@ namespace mongo {
}
bool DocumentSourceCursor::eof() {
- /* if we haven't gotten the first one yet, do so now */
- if (!pCurrent.get())
- findNext();
+ /* if we haven't gotten the first one yet, do so now */
+ if (!pCurrent.get())
+ findNext();
return (pCurrent.get() == NULL);
}
bool DocumentSourceCursor::advance() {
- /* if we haven't gotten the first one yet, do so now */
- if (!pCurrent.get())
- findNext();
+ /* if we haven't gotten the first one yet, do so now */
+ if (!pCurrent.get())
+ findNext();
- findNext();
+ findNext();
return (pCurrent.get() != NULL);
}
intrusive_ptr<Document> DocumentSourceCursor::getCurrent() {
- /* if we haven't gotten the first one yet, do so now */
- if (!pCurrent.get())
- findNext();
+ /* if we haven't gotten the first one yet, do so now */
+ if (!pCurrent.get())
+ findNext();
- return pCurrent;
+ return pCurrent;
}
void DocumentSourceCursor::findNext() {
- /* standard cursor usage pattern */
- while(pCursor->ok()) {
- CoveredIndexMatcher *pCIM; // save intermediate result
- if ((!(pCIM = pCursor->matcher()) ||
- pCIM->matchesCurrent(pCursor.get())) &&
- !pCursor->getsetdup(pCursor->currLoc())) {
-
- /* grab the matching document */
- BSONObj documentObj(pCursor->current());
- pCurrent = Document::createFromBsonObj(&documentObj);
- pCursor->advance();
- return;
- }
-
- pCursor->advance();
- }
-
- /* if we got here, there aren't any more documents */
- pCurrent.reset();
+ /* standard cursor usage pattern */
+ while(pCursor->ok()) {
+ CoveredIndexMatcher *pCIM; // save intermediate result
+ if ((!(pCIM = pCursor->matcher()) ||
+ pCIM->matchesCurrent(pCursor.get())) &&
+ !pCursor->getsetdup(pCursor->currLoc())) {
+
+ /* grab the matching document */
+ BSONObj documentObj(pCursor->current());
+ pCurrent = Document::createFromBsonObj(&documentObj);
+ pCursor->advance();
+ return;
+ }
+
+ pCursor->advance();
+ }
+
+ /* if we got here, there aren't any more documents */
+ pCurrent.reset();
}
void DocumentSourceCursor::setSource(
- const intrusive_ptr<DocumentSource> &pSource) {
- /* this doesn't take a source */
- assert(false);
+ const intrusive_ptr<DocumentSource> &pSource) {
+ /* this doesn't take a source */
+ assert(false);
}
void DocumentSourceCursor::sourceToBson(BSONObjBuilder *pBuilder) const {
- /* this has no analog in the BSON world */
- assert(false);
+ /* this has no analog in the BSON world */
+ assert(false);
}
DocumentSourceCursor::DocumentSourceCursor(
- const shared_ptr<Cursor> &pTheCursor):
- pCurrent(),
- bsonDependencies(),
+ const shared_ptr<Cursor> &pTheCursor):
+ pCurrent(),
+ bsonDependencies(),
pCursor(pTheCursor) {
}
intrusive_ptr<DocumentSourceCursor> DocumentSourceCursor::create(
- const shared_ptr<Cursor> &pCursor) {
- assert(pCursor.get());
- intrusive_ptr<DocumentSourceCursor> pSource(
- new DocumentSourceCursor(pCursor));
- return pSource;
+ const shared_ptr<Cursor> &pCursor) {
+ assert(pCursor.get());
+ intrusive_ptr<DocumentSourceCursor> pSource(
+ new DocumentSourceCursor(pCursor));
+ return pSource;
}
void DocumentSourceCursor::addBsonDependency(
- const shared_ptr<BSONObj> &pBsonObj) {
- bsonDependencies.push_back(pBsonObj);
+ const shared_ptr<BSONObj> &pBsonObj) {
+ bsonDependencies.push_back(pBsonObj);
}
}
diff --git a/src/mongo/db/commands/pipeline.cpp b/src/mongo/db/commands/pipeline.cpp
index 77821f9d0ca..f9e32ca8d70 100755
--- a/src/mongo/db/commands/pipeline.cpp
+++ b/src/mongo/db/commands/pipeline.cpp
@@ -37,8 +37,8 @@ namespace mongo {
}
Pipeline::Pipeline(const intrusive_ptr<ExpressionContext> &pTheCtx):
- collectionName(),
- sourceVector(),
+ collectionName(),
+ sourceVector(),
splitMongodPipeline(DEBUG_BUILD == 1), /* test: always split for DEV */
pCtx(pTheCtx) {
}
@@ -47,47 +47,47 @@ namespace mongo {
/* this structure is used to make a lookup table of operators */
struct StageDesc {
- const char *pName;
- intrusive_ptr<DocumentSource> (*pFactory)(
- BSONElement *, const intrusive_ptr<ExpressionContext> &);
+ const char *pName;
+ intrusive_ptr<DocumentSource> (*pFactory)(
+ BSONElement *, const intrusive_ptr<ExpressionContext> &);
};
/* this table must be in alphabetical order by name for bsearch() */
static const StageDesc stageDesc[] = {
#ifdef NEVER /* disabled for now in favor of $match */
- {DocumentSourceFilter::filterName,
- DocumentSourceFilter::createFromBson},
+ {DocumentSourceFilter::filterName,
+ DocumentSourceFilter::createFromBson},
#endif
- {DocumentSourceGroup::groupName,
- DocumentSourceGroup::createFromBson},
- {DocumentSourceLimit::limitName,
- DocumentSourceLimit::createFromBson},
- {DocumentSourceMatch::matchName,
- DocumentSourceMatch::createFromBson},
+ {DocumentSourceGroup::groupName,
+ DocumentSourceGroup::createFromBson},
+ {DocumentSourceLimit::limitName,
+ DocumentSourceLimit::createFromBson},
+ {DocumentSourceMatch::matchName,
+ DocumentSourceMatch::createFromBson},
#ifdef LATER /* https://jira.mongodb.org/browse/SERVER-3253 */
- {DocumentSourceOut::outName,
- DocumentSourceOut::createFromBson},
+ {DocumentSourceOut::outName,
+ DocumentSourceOut::createFromBson},
#endif
- {DocumentSourceProject::projectName,
- DocumentSourceProject::createFromBson},
- {DocumentSourceSkip::skipName,
- DocumentSourceSkip::createFromBson},
- {DocumentSourceSort::sortName,
- DocumentSourceSort::createFromBson},
- {DocumentSourceUnwind::unwindName,
- DocumentSourceUnwind::createFromBson},
+ {DocumentSourceProject::projectName,
+ DocumentSourceProject::createFromBson},
+ {DocumentSourceSkip::skipName,
+ DocumentSourceSkip::createFromBson},
+ {DocumentSourceSort::sortName,
+ DocumentSourceSort::createFromBson},
+ {DocumentSourceUnwind::unwindName,
+ DocumentSourceUnwind::createFromBson},
};
static const size_t nStageDesc = sizeof(stageDesc) / sizeof(StageDesc);
static int stageDescCmp(const void *pL, const void *pR) {
- return strcmp(((const StageDesc *)pL)->pName,
- ((const StageDesc *)pR)->pName);
+ return strcmp(((const StageDesc *)pL)->pName,
+ ((const StageDesc *)pR)->pName);
}
boost::shared_ptr<Pipeline> Pipeline::parseCommand(
- string &errmsg, BSONObj &cmdObj,
- const intrusive_ptr<ExpressionContext> &pCtx) {
- boost::shared_ptr<Pipeline> pPipeline(new Pipeline(pCtx));
+ string &errmsg, BSONObj &cmdObj,
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ boost::shared_ptr<Pipeline> pPipeline(new Pipeline(pCtx));
vector<BSONElement> pipeline;
/* gather the specification for the aggregation */
@@ -108,17 +108,17 @@ namespace mongo {
continue;
}
- /* if the request came from the router, we're in a shard */
- if (!strcmp(pFieldName, fromRouterName)) {
- pCtx->setInShard(cmdElement.Bool());
- continue;
- }
+ /* if the request came from the router, we're in a shard */
+ if (!strcmp(pFieldName, fromRouterName)) {
+ pCtx->setInShard(cmdElement.Bool());
+ continue;
+ }
- /* check for debug options */
- if (!strcmp(pFieldName, splitMongodPipelineName)) {
- pPipeline->splitMongodPipeline = true;
- continue;
- }
+ /* check for debug options */
+ if (!strcmp(pFieldName, splitMongodPipelineName)) {
+ pPipeline->splitMongodPipeline = true;
+ continue;
+ }
/* we didn't recognize a field in the command */
ostringstream sb;
@@ -126,7 +126,7 @@ namespace mongo {
"Pipeline::parseCommand(): unrecognized field \"" <<
cmdElement.fieldName();
errmsg = sb.str();
- return boost::shared_ptr<Pipeline>();
+ return boost::shared_ptr<Pipeline>();
}
/*
@@ -134,19 +134,19 @@ namespace mongo {
Set up the specified document source pipeline.
*/
- SourceVector *pSourceVector = &pPipeline->sourceVector; // shorthand
+ SourceVector *pSourceVector = &pPipeline->sourceVector; // shorthand
/* iterate over the steps in the pipeline */
const size_t nSteps = pipeline.size();
for(size_t iStep = 0; iStep < nSteps; ++iStep) {
/* pull out the pipeline element as an object */
BSONElement pipeElement(pipeline[iStep]);
- uassert(15942, str::stream() << "pipeline element " <<
- iStep << " is not an object",
- pipeElement.type() == Object);
+ uassert(15942, str::stream() << "pipeline element " <<
+ iStep << " is not an object",
+ pipeElement.type() == Object);
BSONObj bsonObj(pipeElement.Obj());
- intrusive_ptr<DocumentSource> pSource;
+ intrusive_ptr<DocumentSource> pSource;
/* use the object to add a DocumentSource to the processing chain */
BSONObjIterator bsonIterator(bsonObj);
@@ -155,196 +155,196 @@ namespace mongo {
const char *pFieldName = bsonElement.fieldName();
/* select the appropriate operation and instantiate */
- StageDesc key;
- key.pName = pFieldName;
- const StageDesc *pDesc = (const StageDesc *)
- bsearch(&key, stageDesc, nStageDesc, sizeof(StageDesc),
- stageDescCmp);
- if (pDesc)
- pSource = (*pDesc->pFactory)(&bsonElement, pCtx);
+ StageDesc key;
+ key.pName = pFieldName;
+ const StageDesc *pDesc = (const StageDesc *)
+ bsearch(&key, stageDesc, nStageDesc, sizeof(StageDesc),
+ stageDescCmp);
+ if (pDesc)
+ pSource = (*pDesc->pFactory)(&bsonElement, pCtx);
else {
ostringstream sb;
sb <<
"Pipeline::run(): unrecognized pipeline op \"" <<
pFieldName;
errmsg = sb.str();
- return shared_ptr<Pipeline>();
+ return shared_ptr<Pipeline>();
}
}
- pSourceVector->push_back(pSource);
+ pSourceVector->push_back(pSource);
+ }
+
+ /* if there aren't any pipeline stages, there's nothing more to do */
+ if (!pSourceVector->size())
+ return pPipeline;
+
+ /*
+ Move filters up where possible.
+
+ CW TODO -- move filter past projections where possible, and noting
+ corresponding field renaming.
+ */
+
+ /*
+ Wherever there is a match immediately following a sort, swap them.
+ This means we sort fewer items. Neither changes the documents in
+ the stream, so this transformation shouldn't affect the result.
+
+ We do this first, because then when we coalesce operators below,
+ any adjacent matches will be combined.
+ */
+ for(size_t srcn = pSourceVector->size(), srci = 1;
+ srci < srcn; ++srci) {
+ intrusive_ptr<DocumentSource> &pSource = pSourceVector->at(srci);
+ if (dynamic_cast<DocumentSourceMatch *>(pSource.get())) {
+ intrusive_ptr<DocumentSource> &pPrevious =
+ pSourceVector->at(srci - 1);
+ if (dynamic_cast<DocumentSourceSort *>(pPrevious.get())) {
+ /* swap this item with the previous */
+ intrusive_ptr<DocumentSource> pTemp(pPrevious);
+ pPrevious = pSource;
+ pSource = pTemp;
+ }
+ }
+ }
+
+ /*
+ Coalesce adjacent filters where possible. Two adjacent filters
+ are equivalent to one filter whose predicate is the conjunction of
+ the two original filters' predicates. For now, capture this by
+ giving any DocumentSource the option to absorb it's successor; this
+ will also allow adjacent projections to coalesce when possible.
+
+ Run through the DocumentSources, and give each one the opportunity
+ to coalesce with its successor. If successful, remove the
+ successor.
+
+ Move all document sources to a temporary list.
+ */
+ SourceVector tempVector(*pSourceVector);
+ pSourceVector->clear();
+
+ /* move the first one to the final list */
+ pSourceVector->push_back(tempVector[0]);
+
+ /* run through the sources, coalescing them or keeping them */
+ for(size_t tempn = tempVector.size(), tempi = 1;
+ tempi < tempn; ++tempi) {
+ /*
+ If we can't coalesce the source with the last, then move it
+ to the final list, and make it the new last. (If we succeeded,
+ then we're still on the same last, and there's no need to move
+ or do anything with the source -- the destruction of tempVector
+ will take care of the rest.)
+ */
+ intrusive_ptr<DocumentSource> &pLastSource = pSourceVector->back();
+ intrusive_ptr<DocumentSource> &pTemp = tempVector.at(tempi);
+ if (!pLastSource->coalesce(pTemp))
+ pSourceVector->push_back(pTemp);
}
- /* if there aren't any pipeline stages, there's nothing more to do */
- if (!pSourceVector->size())
- return pPipeline;
-
- /*
- Move filters up where possible.
-
- CW TODO -- move filter past projections where possible, and noting
- corresponding field renaming.
- */
-
- /*
- Wherever there is a match immediately following a sort, swap them.
- This means we sort fewer items. Neither changes the documents in
- the stream, so this transformation shouldn't affect the result.
-
- We do this first, because then when we coalesce operators below,
- any adjacent matches will be combined.
- */
- for(size_t srcn = pSourceVector->size(), srci = 1;
- srci < srcn; ++srci) {
- intrusive_ptr<DocumentSource> &pSource = pSourceVector->at(srci);
- if (dynamic_cast<DocumentSourceMatch *>(pSource.get())) {
- intrusive_ptr<DocumentSource> &pPrevious =
- pSourceVector->at(srci - 1);
- if (dynamic_cast<DocumentSourceSort *>(pPrevious.get())) {
- /* swap this item with the previous */
- intrusive_ptr<DocumentSource> pTemp(pPrevious);
- pPrevious = pSource;
- pSource = pTemp;
- }
- }
- }
-
- /*
- Coalesce adjacent filters where possible. Two adjacent filters
- are equivalent to one filter whose predicate is the conjunction of
- the two original filters' predicates. For now, capture this by
- giving any DocumentSource the option to absorb it's successor; this
- will also allow adjacent projections to coalesce when possible.
-
- Run through the DocumentSources, and give each one the opportunity
- to coalesce with its successor. If successful, remove the
- successor.
-
- Move all document sources to a temporary list.
- */
- SourceVector tempVector(*pSourceVector);
- pSourceVector->clear();
-
- /* move the first one to the final list */
- pSourceVector->push_back(tempVector[0]);
-
- /* run through the sources, coalescing them or keeping them */
- for(size_t tempn = tempVector.size(), tempi = 1;
- tempi < tempn; ++tempi) {
- /*
- If we can't coalesce the source with the last, then move it
- to the final list, and make it the new last. (If we succeeded,
- then we're still on the same last, and there's no need to move
- or do anything with the source -- the destruction of tempVector
- will take care of the rest.)
- */
- intrusive_ptr<DocumentSource> &pLastSource = pSourceVector->back();
- intrusive_ptr<DocumentSource> &pTemp = tempVector.at(tempi);
- if (!pLastSource->coalesce(pTemp))
- pSourceVector->push_back(pTemp);
- }
-
- /* optimize the elements in the pipeline */
- for(SourceVector::iterator iter(pSourceVector->begin()),
- listEnd(pSourceVector->end()); iter != listEnd; ++iter)
- (*iter)->optimize();
-
- return pPipeline;
+ /* optimize the elements in the pipeline */
+ for(SourceVector::iterator iter(pSourceVector->begin()),
+ listEnd(pSourceVector->end()); iter != listEnd; ++iter)
+ (*iter)->optimize();
+
+ return pPipeline;
}
shared_ptr<Pipeline> Pipeline::splitForSharded() {
- /* create an initialize the shard spec we'll return */
- shared_ptr<Pipeline> pShardPipeline(new Pipeline(pCtx));
- pShardPipeline->collectionName = collectionName;
-
- /* put the source list aside */
- SourceVector tempVector(sourceVector);
- sourceVector.clear();
-
- /*
- Run through the pipeline, looking for points to split it into
- shard pipelines, and the rest.
- */
- while(!tempVector.empty()) {
- intrusive_ptr<DocumentSource> &pSource = tempVector.front();
-
- /* hang on to this in advance, in case it is a group */
- DocumentSourceGroup *pGroup =
- dynamic_cast<DocumentSourceGroup *>(pSource.get());
-
- /* move the source from the tempVector to the shard sourceVector */
- pShardPipeline->sourceVector.push_back(pSource);
- tempVector.erase(tempVector.begin());
-
- /*
- If we found a group, that's a split point.
- */
- if (pGroup) {
- /* start this pipeline with the group merger */
- sourceVector.push_back(pGroup->createMerger());
-
- /* and then add everything that remains and quit */
- for(size_t tempn = tempVector.size(), tempi = 0;
- tempi < tempn; ++tempi)
- sourceVector.push_back(tempVector[tempi]);
- break;
- }
- }
-
- return pShardPipeline;
+ /* create an initialize the shard spec we'll return */
+ shared_ptr<Pipeline> pShardPipeline(new Pipeline(pCtx));
+ pShardPipeline->collectionName = collectionName;
+
+ /* put the source list aside */
+ SourceVector tempVector(sourceVector);
+ sourceVector.clear();
+
+ /*
+ Run through the pipeline, looking for points to split it into
+ shard pipelines, and the rest.
+ */
+ while(!tempVector.empty()) {
+ intrusive_ptr<DocumentSource> &pSource = tempVector.front();
+
+ /* hang on to this in advance, in case it is a group */
+ DocumentSourceGroup *pGroup =
+ dynamic_cast<DocumentSourceGroup *>(pSource.get());
+
+ /* move the source from the tempVector to the shard sourceVector */
+ pShardPipeline->sourceVector.push_back(pSource);
+ tempVector.erase(tempVector.begin());
+
+ /*
+ If we found a group, that's a split point.
+ */
+ if (pGroup) {
+ /* start this pipeline with the group merger */
+ sourceVector.push_back(pGroup->createMerger());
+
+ /* and then add everything that remains and quit */
+ for(size_t tempn = tempVector.size(), tempi = 0;
+ tempi < tempn; ++tempi)
+ sourceVector.push_back(tempVector[tempi]);
+ break;
+ }
+ }
+
+ return pShardPipeline;
}
bool Pipeline::getInitialQuery(BSONObjBuilder *pQueryBuilder) const
{
- if (!sourceVector.size())
- return false;
+ if (!sourceVector.size())
+ return false;
- /* look for an initial $match */
- const intrusive_ptr<DocumentSource> &pMC = sourceVector.front();
- const DocumentSourceMatch *pMatch =
- dynamic_cast<DocumentSourceMatch *>(pMC.get());
+ /* look for an initial $match */
+ const intrusive_ptr<DocumentSource> &pMC = sourceVector.front();
+ const DocumentSourceMatch *pMatch =
+ dynamic_cast<DocumentSourceMatch *>(pMC.get());
- if (!pMatch)
- return false;
+ if (!pMatch)
+ return false;
- /* build the query */
- pMatch->toMatcherBson(pQueryBuilder);
+ /* build the query */
+ pMatch->toMatcherBson(pQueryBuilder);
- return true;
+ return true;
}
void Pipeline::toBson(BSONObjBuilder *pBuilder) const {
- /* create an array out of the pipeline operations */
- BSONArrayBuilder arrayBuilder;
- for(SourceVector::const_iterator iter(sourceVector.begin()),
- listEnd(sourceVector.end()); iter != listEnd; ++iter) {
- intrusive_ptr<DocumentSource> pSource(*iter);
- pSource->addToBsonArray(&arrayBuilder);
- }
-
- /* add the top-level items to the command */
- pBuilder->append(commandName, getCollectionName());
- pBuilder->append(pipelineName, arrayBuilder.arr());
-
- bool btemp;
- if ((btemp = getSplitMongodPipeline())) {
- pBuilder->append(splitMongodPipelineName, btemp);
- }
- if ((btemp = pCtx->getInRouter())) {
- pBuilder->append(fromRouterName, btemp);
- }
+ /* create an array out of the pipeline operations */
+ BSONArrayBuilder arrayBuilder;
+ for(SourceVector::const_iterator iter(sourceVector.begin()),
+ listEnd(sourceVector.end()); iter != listEnd; ++iter) {
+ intrusive_ptr<DocumentSource> pSource(*iter);
+ pSource->addToBsonArray(&arrayBuilder);
+ }
+
+ /* add the top-level items to the command */
+ pBuilder->append(commandName, getCollectionName());
+ pBuilder->append(pipelineName, arrayBuilder.arr());
+
+ bool btemp;
+ if ((btemp = getSplitMongodPipeline())) {
+ pBuilder->append(splitMongodPipelineName, btemp);
+ }
+ if ((btemp = pCtx->getInRouter())) {
+ pBuilder->append(fromRouterName, btemp);
+ }
}
bool Pipeline::run(BSONObjBuilder &result, string &errmsg,
- intrusive_ptr<DocumentSource> pSource) {
- /* chain together the sources we found */
- for(SourceVector::iterator iter(sourceVector.begin()),
- listEnd(sourceVector.end()); iter != listEnd; ++iter) {
- intrusive_ptr<DocumentSource> pTemp(*iter);
- pTemp->setSource(pSource);
- pSource = pTemp;
- }
- /* pSource is left pointing at the last source in the chain */
+ intrusive_ptr<DocumentSource> pSource) {
+ /* chain together the sources we found */
+ for(SourceVector::iterator iter(sourceVector.begin()),
+ listEnd(sourceVector.end()); iter != listEnd; ++iter) {
+ intrusive_ptr<DocumentSource> pTemp(*iter);
+ pTemp->setSource(pSource);
+ pSource = pTemp;
+ }
+ /* pSource is left pointing at the last source in the chain */
/*
Iterate through the resulting documents, and add them to the result.
@@ -352,7 +352,7 @@ namespace mongo {
BSONArrayBuilder resultArray; // where we'll stash the results
for(bool hasDocument = !pSource->eof(); hasDocument;
hasDocument = pSource->advance()) {
- boost::intrusive_ptr<Document> pDocument(pSource->getCurrent());
+ boost::intrusive_ptr<Document> pDocument(pSource->getCurrent());
/* add the document to the result set */
BSONObjBuilder documentBuilder;
diff --git a/src/mongo/db/commands/pipeline.h b/src/mongo/db/commands/pipeline.h
index 6f396efd6fa..de3cd7d95d4 100755
--- a/src/mongo/db/commands/pipeline.h
+++ b/src/mongo/db/commands/pipeline.h
@@ -40,111 +40,111 @@ namespace mongo {
public:
virtual ~Pipeline();
- /**
- Create a pipeline from the command.
-
- @param errmsg where to write errors, if there are any
- @param cmdObj the command object sent from the client
- @returns the pipeline, if created, otherwise a NULL reference
- */
- static boost::shared_ptr<Pipeline> parseCommand(
- string &errmsg, BSONObj &cmdObj,
- const intrusive_ptr<ExpressionContext> &pCtx);
-
- /**
- Get the collection name from the command.
-
- @returns the collection name
- */
- string getCollectionName() const;
-
- /**
- Split the current Pipeline into a Pipeline for each shard, and
- a Pipeline that combines the results within mongos.
-
- This permanently alters this pipeline for the merging operation.
-
- @returns the Spec for the pipeline command that should be sent
- to the shards
- */
- boost::shared_ptr<Pipeline> splitForSharded();
-
- /**
- If the pipeline starts with a $match, dump its BSON predicate
- specification to the supplied builder and return true.
-
- @param pQueryBuilder the builder to put the match BSON into
- @returns true if a match was found and dumped to pQueryBuilder,
- false otherwise
- */
- bool getInitialQuery(BSONObjBuilder *pQueryBuilder) const;
-
- /**
- Write the Pipeline as a BSONObj command. This should be the
- inverse of parseCommand().
-
- This is only intended to be used by the shard command obtained
- from splitForSharded(). Some pipeline operations in the merge
- process do not have equivalent command forms, and using this on
- the mongos Pipeline will cause assertions.
-
- @param the builder to write the command to
- */
- void toBson(BSONObjBuilder *pBuilder) const;
-
- /**
- Run the Pipeline on the given source.
-
- @param result builder to write the result to
- @param errmsg place to put error messages, if any
- @param pSource the document source to use at the head of the chain
- @returns true on success, false if an error occurs
- */
- bool run(BSONObjBuilder &result, string &errmsg,
- intrusive_ptr<DocumentSource> pSource);
-
- /**
- Debugging: should the processing pipeline be split within
- mongod, simulating the real mongos/mongod split? This is determined
- by setting the splitMongodPipeline field in an "aggregate"
- command.
-
- The split itself is handled by the caller, which is currently
- pipeline_command.cpp.
-
- @returns true if the pipeline is to be split
- */
- bool getSplitMongodPipeline() const;
-
- /**
- The aggregation command name.
- */
- static const char commandName[];
-
- /*
- PipelineD is a "sister" class that has additional functionality
- for the Pipeline. It exists because of linkage requirements.
- Pipeline needs to function in mongod and mongos. PipelineD
- contains extra functionality required in mongod, and which can't
- appear in mongos because the required symbols are unavailable
- for linking there. Consider PipelineD to be an extension of this
- class for mongod only.
- */
- friend class PipelineD;
+ /**
+ Create a pipeline from the command.
+
+ @param errmsg where to write errors, if there are any
+ @param cmdObj the command object sent from the client
+ @returns the pipeline, if created, otherwise a NULL reference
+ */
+ static boost::shared_ptr<Pipeline> parseCommand(
+ string &errmsg, BSONObj &cmdObj,
+ const intrusive_ptr<ExpressionContext> &pCtx);
+
+ /**
+ Get the collection name from the command.
+
+ @returns the collection name
+ */
+ string getCollectionName() const;
+
+ /**
+ Split the current Pipeline into a Pipeline for each shard, and
+ a Pipeline that combines the results within mongos.
+
+ This permanently alters this pipeline for the merging operation.
+
+ @returns the Spec for the pipeline command that should be sent
+ to the shards
+ */
+ boost::shared_ptr<Pipeline> splitForSharded();
+
+ /**
+ If the pipeline starts with a $match, dump its BSON predicate
+ specification to the supplied builder and return true.
+
+ @param pQueryBuilder the builder to put the match BSON into
+ @returns true if a match was found and dumped to pQueryBuilder,
+ false otherwise
+ */
+ bool getInitialQuery(BSONObjBuilder *pQueryBuilder) const;
+
+ /**
+ Write the Pipeline as a BSONObj command. This should be the
+ inverse of parseCommand().
+
+ This is only intended to be used by the shard command obtained
+ from splitForSharded(). Some pipeline operations in the merge
+ process do not have equivalent command forms, and using this on
+ the mongos Pipeline will cause assertions.
+
+ @param the builder to write the command to
+ */
+ void toBson(BSONObjBuilder *pBuilder) const;
+
+ /**
+ Run the Pipeline on the given source.
+
+ @param result builder to write the result to
+ @param errmsg place to put error messages, if any
+ @param pSource the document source to use at the head of the chain
+ @returns true on success, false if an error occurs
+ */
+ bool run(BSONObjBuilder &result, string &errmsg,
+ intrusive_ptr<DocumentSource> pSource);
+
+ /**
+ Debugging: should the processing pipeline be split within
+ mongod, simulating the real mongos/mongod split? This is determined
+ by setting the splitMongodPipeline field in an "aggregate"
+ command.
+
+ The split itself is handled by the caller, which is currently
+ pipeline_command.cpp.
+
+ @returns true if the pipeline is to be split
+ */
+ bool getSplitMongodPipeline() const;
+
+ /**
+ The aggregation command name.
+ */
+ static const char commandName[];
+
+ /*
+ PipelineD is a "sister" class that has additional functionality
+ for the Pipeline. It exists because of linkage requirements.
+ Pipeline needs to function in mongod and mongos. PipelineD
+ contains extra functionality required in mongod, and which can't
+ appear in mongos because the required symbols are unavailable
+ for linking there. Consider PipelineD to be an extension of this
+ class for mongod only.
+ */
+ friend class PipelineD;
private:
- static const char pipelineName[];
- static const char fromRouterName[];
- static const char splitMongodPipelineName[];
+ static const char pipelineName[];
+ static const char fromRouterName[];
+ static const char splitMongodPipelineName[];
Pipeline(const intrusive_ptr<ExpressionContext> &pCtx);
- string collectionName;
- typedef vector<intrusive_ptr<DocumentSource> > SourceVector;
- SourceVector sourceVector;
+ string collectionName;
+ typedef vector<intrusive_ptr<DocumentSource> > SourceVector;
+ SourceVector sourceVector;
- bool splitMongodPipeline;
- intrusive_ptr<ExpressionContext> pCtx;
+ bool splitMongodPipeline;
+ intrusive_ptr<ExpressionContext> pCtx;
};
} // namespace mongo
@@ -155,14 +155,14 @@ namespace mongo {
namespace mongo {
inline string Pipeline::getCollectionName() const {
- return collectionName;
+ return collectionName;
}
inline bool Pipeline::getSplitMongodPipeline() const {
- if (!DEBUG_BUILD)
- return false;
+ if (!DEBUG_BUILD)
+ return false;
- return splitMongodPipeline;
+ return splitMongodPipeline;
}
} // namespace mongo
diff --git a/src/mongo/db/commands/pipeline_command.cpp b/src/mongo/db/commands/pipeline_command.cpp
index 682c1f01807..08a23830856 100755
--- a/src/mongo/db/commands/pipeline_command.cpp
+++ b/src/mongo/db/commands/pipeline_command.cpp
@@ -38,7 +38,7 @@ namespace mongo {
// virtuals from Command
virtual ~PipelineCommand();
virtual bool run(const string &db, BSONObj &cmdObj, int options,
- string &errmsg, BSONObjBuilder &result, bool fromRepl);
+ string &errmsg, BSONObjBuilder &result, bool fromRepl);
virtual LockType locktype() const;
virtual bool slaveOk() const;
virtual void help(stringstream &help) const;
@@ -69,94 +69,94 @@ namespace mongo {
}
bool PipelineCommand::run(const string &db, BSONObj &cmdObj,
- int options, string &errmsg,
- BSONObjBuilder &result, bool fromRepl) {
-
- intrusive_ptr<ExpressionContext> pCtx(ExpressionContext::create());
-
- /* try to parse the command; if this fails, then we didn't run */
- shared_ptr<Pipeline> pPipeline(
- Pipeline::parseCommand(errmsg, cmdObj, pCtx));
- if (!pPipeline.get())
- return false;
-
- intrusive_ptr<DocumentSource> pSource(
- PipelineD::prepareCursorSource(pPipeline, db));
-
- /* this is the normal non-debug path */
- if (!pPipeline->getSplitMongodPipeline())
- return pPipeline->run(result, errmsg, pSource);
-
- /* setup as if we're in the router */
- pCtx->setInRouter(true);
-
- /*
- Here, we'll split the pipeline in the same way we would for sharding,
- for testing purposes.
-
- Run the shard pipeline first, then feed the results into the remains
- of the existing pipeline.
-
- Start by splitting the pipeline.
- */
- shared_ptr<Pipeline> pShardSplit(
- pPipeline->splitForSharded());
-
- /*
- Write the split pipeline as we would in order to transmit it to
- the shard servers.
- */
- BSONObjBuilder shardBuilder;
- pShardSplit->toBson(&shardBuilder);
- BSONObj shardBson(shardBuilder.done());
-
- DEV (log() << "\n---- shardBson\n" <<
- shardBson.jsonString(Strict, 1) << "\n----\n").flush();
-
- /* for debugging purposes, show what the pipeline now looks like */
- DEV {
- BSONObjBuilder pipelineBuilder;
- pPipeline->toBson(&pipelineBuilder);
- BSONObj pipelineBson(pipelineBuilder.done());
- (log() << "\n---- pipelineBson\n" <<
- pipelineBson.jsonString(Strict, 1) << "\n----\n").flush();
- }
-
- /* on the shard servers, create the local pipeline */
- intrusive_ptr<ExpressionContext> pShardCtx(ExpressionContext::create());
- shared_ptr<Pipeline> pShardPipeline(
- Pipeline::parseCommand(errmsg, shardBson, pShardCtx));
- if (!pShardPipeline.get()) {
- return false;
- }
-
- /* run the shard pipeline */
- BSONObjBuilder shardResultBuilder;
- string shardErrmsg;
- pShardPipeline->run(shardResultBuilder, shardErrmsg, pSource);
- BSONObj shardResult(shardResultBuilder.done());
-
- /* pick out the shard result, and prepare to read it */
- intrusive_ptr<DocumentSourceBsonArray> pShardSource;
- BSONObjIterator shardIter(shardResult);
- while(shardIter.more()) {
- BSONElement shardElement(shardIter.next());
- const char *pFieldName = shardElement.fieldName();
-
- if (strcmp(pFieldName, "result") == 0) {
- pShardSource = DocumentSourceBsonArray::create(&shardElement);
-
- /*
- Connect the output of the shard pipeline with the mongos
- pipeline that will merge the results.
- */
- return pPipeline->run(result, errmsg, pShardSource);
- }
- }
-
- /* NOTREACHED */
- assert(false);
- return false;
+ int options, string &errmsg,
+ BSONObjBuilder &result, bool fromRepl) {
+
+ intrusive_ptr<ExpressionContext> pCtx(ExpressionContext::create());
+
+ /* try to parse the command; if this fails, then we didn't run */
+ shared_ptr<Pipeline> pPipeline(
+ Pipeline::parseCommand(errmsg, cmdObj, pCtx));
+ if (!pPipeline.get())
+ return false;
+
+ intrusive_ptr<DocumentSource> pSource(
+ PipelineD::prepareCursorSource(pPipeline, db));
+
+ /* this is the normal non-debug path */
+ if (!pPipeline->getSplitMongodPipeline())
+ return pPipeline->run(result, errmsg, pSource);
+
+ /* setup as if we're in the router */
+ pCtx->setInRouter(true);
+
+ /*
+ Here, we'll split the pipeline in the same way we would for sharding,
+ for testing purposes.
+
+ Run the shard pipeline first, then feed the results into the remains
+ of the existing pipeline.
+
+ Start by splitting the pipeline.
+ */
+ shared_ptr<Pipeline> pShardSplit(
+ pPipeline->splitForSharded());
+
+ /*
+ Write the split pipeline as we would in order to transmit it to
+ the shard servers.
+ */
+ BSONObjBuilder shardBuilder;
+ pShardSplit->toBson(&shardBuilder);
+ BSONObj shardBson(shardBuilder.done());
+
+ DEV (log() << "\n---- shardBson\n" <<
+ shardBson.jsonString(Strict, 1) << "\n----\n").flush();
+
+ /* for debugging purposes, show what the pipeline now looks like */
+ DEV {
+ BSONObjBuilder pipelineBuilder;
+ pPipeline->toBson(&pipelineBuilder);
+ BSONObj pipelineBson(pipelineBuilder.done());
+ (log() << "\n---- pipelineBson\n" <<
+ pipelineBson.jsonString(Strict, 1) << "\n----\n").flush();
+ }
+
+ /* on the shard servers, create the local pipeline */
+ intrusive_ptr<ExpressionContext> pShardCtx(ExpressionContext::create());
+ shared_ptr<Pipeline> pShardPipeline(
+ Pipeline::parseCommand(errmsg, shardBson, pShardCtx));
+ if (!pShardPipeline.get()) {
+ return false;
+ }
+
+ /* run the shard pipeline */
+ BSONObjBuilder shardResultBuilder;
+ string shardErrmsg;
+ pShardPipeline->run(shardResultBuilder, shardErrmsg, pSource);
+ BSONObj shardResult(shardResultBuilder.done());
+
+ /* pick out the shard result, and prepare to read it */
+ intrusive_ptr<DocumentSourceBsonArray> pShardSource;
+ BSONObjIterator shardIter(shardResult);
+ while(shardIter.more()) {
+ BSONElement shardElement(shardIter.next());
+ const char *pFieldName = shardElement.fieldName();
+
+ if (strcmp(pFieldName, "result") == 0) {
+ pShardSource = DocumentSourceBsonArray::create(&shardElement);
+
+ /*
+ Connect the output of the shard pipeline with the mongos
+ pipeline that will merge the results.
+ */
+ return pPipeline->run(result, errmsg, pShardSource);
+ }
+ }
+
+ /* NOTREACHED */
+ assert(false);
+ return false;
}
} // namespace mongo
diff --git a/src/mongo/db/commands/pipeline_d.cpp b/src/mongo/db/commands/pipeline_d.cpp
index 262a54f8de9..98da3bc6f2f 100755
--- a/src/mongo/db/commands/pipeline_d.cpp
+++ b/src/mongo/db/commands/pipeline_d.cpp
@@ -25,123 +25,123 @@
namespace mongo {
intrusive_ptr<DocumentSource> PipelineD::prepareCursorSource(
- const shared_ptr<Pipeline> &pPipeline,
- const string &dbName) {
-
- Pipeline::SourceVector *pSources = &pPipeline->sourceVector;
-
- /* look for an initial match */
- BSONObjBuilder queryBuilder;
- bool initQuery = pPipeline->getInitialQuery(&queryBuilder);
- if (initQuery) {
- /*
- This will get built in to the Cursor we'll create, so
- remove the match from the pipeline
- */
- pSources->erase(pSources->begin());
- }
-
- /*
- Create a query object.
-
- This works whether we got an initial query above or not; if not,
- it results in a "{}" query, which will be what we want in that case.
-
- We create a pointer to a shared object instead of a local
- object so that we can preserve it for the Cursor we're going to
- create below. See DocumentSourceCursor::addBsonDependency().
- */
- shared_ptr<BSONObj> pQueryObj(new BSONObj(queryBuilder.obj()));
-
- /*
- Look for an initial sort; we'll try to add this to the
- Cursor we create. If we're successful, then
- */
- const DocumentSourceSort *pSort = NULL;
- BSONObjBuilder sortBuilder;
- if (pSources->size()) {
- const intrusive_ptr<DocumentSource> &pSC = pSources->front();
- pSort = dynamic_cast<DocumentSourceSort *>(pSC.get());
-
- if (pSort) {
- /* build the sort key */
- pSort->sortKeyToBson(&sortBuilder, false);
- }
- }
-
- /* Create the sort object; see comments on the query object above */
- shared_ptr<BSONObj> pSortObj(new BSONObj(sortBuilder.obj()));
-
- /* get the full "namespace" name */
- string fullName(dbName + "." + pPipeline->getCollectionName());
-
- /* for debugging purposes, show what the query and sort are */
- DEV {
- (log() << "\n---- query BSON\n" <<
- pQueryObj->jsonString(Strict, 1) << "\n----\n").flush();
- (log() << "\n---- sort BSON\n" <<
- pSortObj->jsonString(Strict, 1) << "\n----\n").flush();
- (log() << "\n---- fullName\n" <<
- fullName << "\n----\n").flush();
- }
-
- /*
- Create the cursor.
-
- If we try to create a cursor that includes both the match and the
- sort, and the two are incompatible wrt the available indexes, then
- we don't get a cursor back.
-
- So we try to use both first. If that fails, try again, without the
- sort.
-
- If we don't have a sort, jump straight to just creating a cursor
- without the sort.
-
- If we are able to incorporate the sort into the cursor, remove it
- from the head of the pipeline.
-
- LATER - we should be able to find this out before we create the
- cursor. Either way, we can then apply other optimizations there
- are tickets for, such as SERVER-4507.
- */
- shared_ptr<Cursor> pCursor;
- bool initSort = false;
- if (pSort) {
- /* try to create the cursor with the query and the sort */
- shared_ptr<Cursor> pSortedCursor(
- pCursor = NamespaceDetailsTransient::getCursor(
- fullName.c_str(), *pQueryObj, *pSortObj));
-
- if (pSortedCursor.get()) {
- /* success: remove the sort from the pipeline */
- pSources->erase(pSources->begin());
-
- pCursor = pSortedCursor;
- initSort = true;
- }
- }
-
- if (!pCursor.get()) {
- /* try to create the cursor without the sort */
- shared_ptr<Cursor> pUnsortedCursor(
- pCursor = NamespaceDetailsTransient::getCursor(
- fullName.c_str(), *pQueryObj));
-
- pCursor = pUnsortedCursor;
- }
-
- /* wrap the cursor with a DocumentSource and return that */
- intrusive_ptr<DocumentSourceCursor> pSource(
- DocumentSourceCursor::create(pCursor));
-
- /* record any dependencies we created */
- if (initQuery)
- pSource->addBsonDependency(pQueryObj);
- if (initSort)
- pSource->addBsonDependency(pSortObj);
-
- return pSource;
+ const shared_ptr<Pipeline> &pPipeline,
+ const string &dbName) {
+
+ Pipeline::SourceVector *pSources = &pPipeline->sourceVector;
+
+ /* look for an initial match */
+ BSONObjBuilder queryBuilder;
+ bool initQuery = pPipeline->getInitialQuery(&queryBuilder);
+ if (initQuery) {
+ /*
+ This will get built in to the Cursor we'll create, so
+ remove the match from the pipeline
+ */
+ pSources->erase(pSources->begin());
+ }
+
+ /*
+ Create a query object.
+
+ This works whether we got an initial query above or not; if not,
+ it results in a "{}" query, which will be what we want in that case.
+
+ We create a pointer to a shared object instead of a local
+ object so that we can preserve it for the Cursor we're going to
+ create below. See DocumentSourceCursor::addBsonDependency().
+ */
+ shared_ptr<BSONObj> pQueryObj(new BSONObj(queryBuilder.obj()));
+
+ /*
+ Look for an initial sort; we'll try to add this to the
+ Cursor we create. If we're successful, then
+ */
+ const DocumentSourceSort *pSort = NULL;
+ BSONObjBuilder sortBuilder;
+ if (pSources->size()) {
+ const intrusive_ptr<DocumentSource> &pSC = pSources->front();
+ pSort = dynamic_cast<DocumentSourceSort *>(pSC.get());
+
+ if (pSort) {
+ /* build the sort key */
+ pSort->sortKeyToBson(&sortBuilder, false);
+ }
+ }
+
+ /* Create the sort object; see comments on the query object above */
+ shared_ptr<BSONObj> pSortObj(new BSONObj(sortBuilder.obj()));
+
+ /* get the full "namespace" name */
+ string fullName(dbName + "." + pPipeline->getCollectionName());
+
+ /* for debugging purposes, show what the query and sort are */
+ DEV {
+ (log() << "\n---- query BSON\n" <<
+ pQueryObj->jsonString(Strict, 1) << "\n----\n").flush();
+ (log() << "\n---- sort BSON\n" <<
+ pSortObj->jsonString(Strict, 1) << "\n----\n").flush();
+ (log() << "\n---- fullName\n" <<
+ fullName << "\n----\n").flush();
+ }
+
+ /*
+ Create the cursor.
+
+ If we try to create a cursor that includes both the match and the
+ sort, and the two are incompatible wrt the available indexes, then
+ we don't get a cursor back.
+
+ So we try to use both first. If that fails, try again, without the
+ sort.
+
+ If we don't have a sort, jump straight to just creating a cursor
+ without the sort.
+
+ If we are able to incorporate the sort into the cursor, remove it
+ from the head of the pipeline.
+
+ LATER - we should be able to find this out before we create the
+ cursor. Either way, we can then apply other optimizations there
+ are tickets for, such as SERVER-4507.
+ */
+ shared_ptr<Cursor> pCursor;
+ bool initSort = false;
+ if (pSort) {
+ /* try to create the cursor with the query and the sort */
+ shared_ptr<Cursor> pSortedCursor(
+ pCursor = NamespaceDetailsTransient::getCursor(
+ fullName.c_str(), *pQueryObj, *pSortObj));
+
+ if (pSortedCursor.get()) {
+ /* success: remove the sort from the pipeline */
+ pSources->erase(pSources->begin());
+
+ pCursor = pSortedCursor;
+ initSort = true;
+ }
+ }
+
+ if (!pCursor.get()) {
+ /* try to create the cursor without the sort */
+ shared_ptr<Cursor> pUnsortedCursor(
+ pCursor = NamespaceDetailsTransient::getCursor(
+ fullName.c_str(), *pQueryObj));
+
+ pCursor = pUnsortedCursor;
+ }
+
+ /* wrap the cursor with a DocumentSource and return that */
+ intrusive_ptr<DocumentSourceCursor> pSource(
+ DocumentSourceCursor::create(pCursor));
+
+ /* record any dependencies we created */
+ if (initQuery)
+ pSource->addBsonDependency(pQueryObj);
+ if (initSort)
+ pSource->addBsonDependency(pSortObj);
+
+ return pSource;
}
} // namespace mongo
diff --git a/src/mongo/db/commands/pipeline_d.h b/src/mongo/db/commands/pipeline_d.h
index 28b06df7db6..46bd28dd622 100755
--- a/src/mongo/db/commands/pipeline_d.h
+++ b/src/mongo/db/commands/pipeline_d.h
@@ -34,28 +34,28 @@ namespace mongo {
class PipelineD {
public:
- /**
- Create a Cursor wrapped in a DocumentSource, which is suitable
- to be the first source for a pipeline to begin with. This source
- will feed the execution of the pipeline.
-
- This method looks for early pipeline stages that can be folded into
- the underlying cursor, and when a cursor can absorb those, they
- are removed from the head of the pipeline. For example, an
- early match can be removed and replaced with a Cursor that will
- do an index scan.
-
- @param pPipeline the logical "this" for this operation
- @param dbName the name of the database
- @returns a document source that wraps an appropriate cursor to
- be at the beginning of this pipeline
- */
- static intrusive_ptr<DocumentSource> prepareCursorSource(
- const shared_ptr<Pipeline> &pPipeline,
- const string &dbName);
+ /**
+ Create a Cursor wrapped in a DocumentSource, which is suitable
+ to be the first source for a pipeline to begin with. This source
+ will feed the execution of the pipeline.
+
+ This method looks for early pipeline stages that can be folded into
+ the underlying cursor, and when a cursor can absorb those, they
+ are removed from the head of the pipeline. For example, an
+ early match can be removed and replaced with a Cursor that will
+ do an index scan.
+
+ @param pPipeline the logical "this" for this operation
+ @param dbName the name of the database
+ @returns a document source that wraps an appropriate cursor to
+ be at the beginning of this pipeline
+ */
+ static intrusive_ptr<DocumentSource> prepareCursorSource(
+ const shared_ptr<Pipeline> &pPipeline,
+ const string &dbName);
private:
- PipelineD(); // does not exist: prevent instantiation
+ PipelineD(); // does not exist: prevent instantiation
};
} // namespace mongo
diff --git a/src/mongo/db/pipeline/accumulator.cpp b/src/mongo/db/pipeline/accumulator.cpp
index 11bdf88d13b..6a38362d6b4 100755
--- a/src/mongo/db/pipeline/accumulator.cpp
+++ b/src/mongo/db/pipeline/accumulator.cpp
@@ -25,10 +25,10 @@ namespace mongo {
void Accumulator::addOperand(
const intrusive_ptr<Expression> &pExpression) {
- uassert(15943, str::stream() << "group accumulator " <<
- getOpName() << " only accepts one operand",
- vpOperand.size() < 1);
-
+ uassert(15943, str::stream() << "group accumulator " <<
+ getOpName() << " only accepts one operand",
+ vpOperand.size() < 1);
+
ExpressionNary::addOperand(pExpression);
}
@@ -37,54 +37,54 @@ namespace mongo {
}
void Accumulator::opToBson(
- BSONObjBuilder *pBuilder, string opName,
- string fieldName, unsigned depth) const {
- assert(vpOperand.size() == 1);
- BSONObjBuilder builder;
- vpOperand[0]->addToBsonObj(&builder, opName, depth);
- pBuilder->append(fieldName, builder.done());
+ BSONObjBuilder *pBuilder, string opName,
+ string fieldName, unsigned depth) const {
+ assert(vpOperand.size() == 1);
+ BSONObjBuilder builder;
+ vpOperand[0]->addToBsonObj(&builder, opName, depth);
+ pBuilder->append(fieldName, builder.done());
}
void Accumulator::addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const {
- opToBson(pBuilder, getOpName(), fieldName, depth);
+ BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const {
+ opToBson(pBuilder, getOpName(), fieldName, depth);
}
void Accumulator::addToBsonArray(
- BSONArrayBuilder *pBuilder, unsigned depth) const {
- assert(false); // these can't appear in arrays
+ BSONArrayBuilder *pBuilder, unsigned depth) const {
+ assert(false); // these can't appear in arrays
}
void agg_framework_reservedErrors() {
- uassert(16019, "reserved error", false);
- uassert(16020, "reserved error", false);
- uassert(16021, "reserved error", false);
- uassert(16022, "reserved error", false);
- uassert(16023, "reserved error", false);
- uassert(16024, "reserved error", false);
- uassert(16025, "reserved error", false);
- uassert(16026, "reserved error", false);
- uassert(16027, "reserved error", false);
- uassert(16028, "reserved error", false);
- uassert(16029, "reserved error", false);
- uassert(16030, "reserved error", false);
- uassert(16031, "reserved error", false);
- uassert(16032, "reserved error", false);
- uassert(16033, "reserved error", false);
+ uassert(16019, "reserved error", false);
+ uassert(16020, "reserved error", false);
+ uassert(16021, "reserved error", false);
+ uassert(16022, "reserved error", false);
+ uassert(16023, "reserved error", false);
+ uassert(16024, "reserved error", false);
+ uassert(16025, "reserved error", false);
+ uassert(16026, "reserved error", false);
+ uassert(16027, "reserved error", false);
+ uassert(16028, "reserved error", false);
+ uassert(16029, "reserved error", false);
+ uassert(16030, "reserved error", false);
+ uassert(16031, "reserved error", false);
+ uassert(16032, "reserved error", false);
+ uassert(16033, "reserved error", false);
- uassert(16036, "reserved error", false);
- uassert(16037, "reserved error", false);
- uassert(16038, "reserved error", false);
- uassert(16039, "reserved error", false);
- uassert(16040, "reserved error", false);
- uassert(16041, "reserved error", false);
- uassert(16042, "reserved error", false);
- uassert(16043, "reserved error", false);
- uassert(16044, "reserved error", false);
- uassert(16045, "reserved error", false);
- uassert(16046, "reserved error", false);
- uassert(16047, "reserved error", false);
- uassert(16048, "reserved error", false);
- uassert(16049, "reserved error", false);
+ uassert(16036, "reserved error", false);
+ uassert(16037, "reserved error", false);
+ uassert(16038, "reserved error", false);
+ uassert(16039, "reserved error", false);
+ uassert(16040, "reserved error", false);
+ uassert(16041, "reserved error", false);
+ uassert(16042, "reserved error", false);
+ uassert(16043, "reserved error", false);
+ uassert(16044, "reserved error", false);
+ uassert(16045, "reserved error", false);
+ uassert(16046, "reserved error", false);
+ uassert(16047, "reserved error", false);
+ uassert(16048, "reserved error", false);
+ uassert(16049, "reserved error", false);
}
}
diff --git a/src/mongo/db/pipeline/accumulator.h b/src/mongo/db/pipeline/accumulator.h
index a75b2c9abaa..fada64c2d4e 100755
--- a/src/mongo/db/pipeline/accumulator.h
+++ b/src/mongo/db/pipeline/accumulator.h
@@ -30,11 +30,11 @@ namespace mongo {
public ExpressionNary {
public:
// virtuals from ExpressionNary
- virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
- virtual void addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const;
- virtual void addToBsonArray(
- BSONArrayBuilder *pBuilder, unsigned depth) const;
+ virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
+ virtual void addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const;
+ virtual void addToBsonArray(
+ BSONArrayBuilder *pBuilder, unsigned depth) const;
/*
Get the accumulated value.
@@ -46,18 +46,18 @@ namespace mongo {
protected:
Accumulator();
- /*
- Convenience method for doing this for accumulators. The pattern
- is always the same, so a common implementation works, but requires
- knowing the operator name.
-
- @param pBuilder the builder to add to
- @param fieldName the projected name
- @param opName the operator name
- */
- void opToBson(
- BSONObjBuilder *pBuilder, string fieldName, string opName,
- unsigned depth) const;
+ /*
+ Convenience method for doing this for accumulators. The pattern
+ is always the same, so a common implementation works, but requires
+ knowing the operator name.
+
+ @param pBuilder the builder to add to
+ @param fieldName the projected name
+ @param opName the operator name
+ */
+ void opToBson(
+ BSONObjBuilder *pBuilder, string fieldName, string opName,
+ unsigned depth) const;
};
@@ -65,26 +65,26 @@ namespace mongo {
public Accumulator {
public:
// virtuals from Expression
- virtual intrusive_ptr<const Value> evaluate(
+ virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
virtual intrusive_ptr<const Value> getValue() const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
/*
Create an appending accumulator.
- @param pCtx the expression context
+ @param pCtx the expression context
@returns the created accumulator
*/
static intrusive_ptr<Accumulator> create(
- const intrusive_ptr<ExpressionContext> &pCtx);
+ const intrusive_ptr<ExpressionContext> &pCtx);
private:
AccumulatorAddToSet(const intrusive_ptr<ExpressionContext> &pTheCtx);
typedef boost::unordered_set<intrusive_ptr<const Value>, Value::Hash > SetType;
mutable SetType set;
mutable SetType::iterator itr;
- intrusive_ptr<ExpressionContext> pCtx;
+ intrusive_ptr<ExpressionContext> pCtx;
};
@@ -111,9 +111,9 @@ namespace mongo {
public AccumulatorSingleValue {
public:
// virtuals from Expression
- virtual intrusive_ptr<const Value> evaluate(
+ virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
/*
Create the accumulator.
@@ -121,7 +121,7 @@ namespace mongo {
@returns the created accumulator
*/
static intrusive_ptr<Accumulator> create(
- const intrusive_ptr<ExpressionContext> &pCtx);
+ const intrusive_ptr<ExpressionContext> &pCtx);
private:
AccumulatorFirst();
@@ -132,9 +132,9 @@ namespace mongo {
public AccumulatorSingleValue {
public:
// virtuals from Expression
- virtual intrusive_ptr<const Value> evaluate(
+ virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
/*
Create the accumulator.
@@ -142,7 +142,7 @@ namespace mongo {
@returns the created accumulator
*/
static intrusive_ptr<Accumulator> create(
- const intrusive_ptr<ExpressionContext> &pCtx);
+ const intrusive_ptr<ExpressionContext> &pCtx);
private:
AccumulatorLast();
@@ -153,19 +153,19 @@ namespace mongo {
public Accumulator {
public:
// virtuals from Accumulator
- virtual intrusive_ptr<const Value> evaluate(
+ virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
virtual intrusive_ptr<const Value> getValue() const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
/*
Create a summing accumulator.
- @param pCtx the expression context
+ @param pCtx the expression context
@returns the created accumulator
*/
static intrusive_ptr<Accumulator> create(
- const intrusive_ptr<ExpressionContext> &pCtx);
+ const intrusive_ptr<ExpressionContext> &pCtx);
protected: /* reused by AccumulatorAvg */
AccumulatorSum();
@@ -180,9 +180,9 @@ namespace mongo {
public AccumulatorSingleValue {
public:
// virtuals from Expression
- virtual intrusive_ptr<const Value> evaluate(
+ virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
/*
Create either the max or min accumulator.
@@ -190,9 +190,9 @@ namespace mongo {
@returns the created accumulator
*/
static intrusive_ptr<Accumulator> createMin(
- const intrusive_ptr<ExpressionContext> &pCtx);
+ const intrusive_ptr<ExpressionContext> &pCtx);
static intrusive_ptr<Accumulator> createMax(
- const intrusive_ptr<ExpressionContext> &pCtx);
+ const intrusive_ptr<ExpressionContext> &pCtx);
private:
AccumulatorMinMax(int theSense);
@@ -205,55 +205,55 @@ namespace mongo {
public Accumulator {
public:
// virtuals from Expression
- virtual intrusive_ptr<const Value> evaluate(
+ virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
virtual intrusive_ptr<const Value> getValue() const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
/*
Create an appending accumulator.
- @param pCtx the expression context
+ @param pCtx the expression context
@returns the created accumulator
*/
static intrusive_ptr<Accumulator> create(
- const intrusive_ptr<ExpressionContext> &pCtx);
+ const intrusive_ptr<ExpressionContext> &pCtx);
private:
AccumulatorPush(const intrusive_ptr<ExpressionContext> &pTheCtx);
mutable vector<intrusive_ptr<const Value> > vpValue;
- intrusive_ptr<ExpressionContext> pCtx;
+ intrusive_ptr<ExpressionContext> pCtx;
};
class AccumulatorAvg :
- public AccumulatorSum {
+ public AccumulatorSum {
typedef AccumulatorSum Super;
public:
// virtuals from Accumulator
- virtual intrusive_ptr<const Value> evaluate(
+ virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
virtual intrusive_ptr<const Value> getValue() const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
/*
Create an averaging accumulator.
- @param pCtx the expression context
+ @param pCtx the expression context
@returns the created accumulator
*/
static intrusive_ptr<Accumulator> create(
- const intrusive_ptr<ExpressionContext> &pCtx);
+ const intrusive_ptr<ExpressionContext> &pCtx);
private:
- static const char subTotalName[];
- static const char countName[];
+ static const char subTotalName[];
+ static const char countName[];
AccumulatorAvg(const intrusive_ptr<ExpressionContext> &pCtx);
- mutable long long count;
- intrusive_ptr<ExpressionContext> pCtx;
+ mutable long long count;
+ intrusive_ptr<ExpressionContext> pCtx;
};
}
diff --git a/src/mongo/db/pipeline/accumulator_add_to_set.cpp b/src/mongo/db/pipeline/accumulator_add_to_set.cpp
index 94df0293de4..2e006caf55d 100755
--- a/src/mongo/db/pipeline/accumulator_add_to_set.cpp
+++ b/src/mongo/db/pipeline/accumulator_add_to_set.cpp
@@ -24,27 +24,27 @@ namespace mongo {
intrusive_ptr<const Value> AccumulatorAddToSet::evaluate(
const intrusive_ptr<Document> &pDocument) const {
assert(vpOperand.size() == 1);
- intrusive_ptr<const Value> prhs(vpOperand[0]->evaluate(pDocument));
+ intrusive_ptr<const Value> prhs(vpOperand[0]->evaluate(pDocument));
- if (prhs->getType() == Undefined)
- ; /* nothing to add to the array */
- else if (!pCtx->getInRouter())
- set.insert(prhs);
- else {
- /*
- If we're in the router, we need to take apart the arrays we
- receive and put their elements into the array we are collecting.
- If we didn't, then we'd get an array of arrays, with one array
- from each shard that responds.
- */
- assert(prhs->getType() == Array);
-
- intrusive_ptr<ValueIterator> pvi(prhs->getArray());
- while(pvi->more()) {
- intrusive_ptr<const Value> pElement(pvi->next());
- set.insert(pElement);
- }
- }
+ if (prhs->getType() == Undefined)
+ ; /* nothing to add to the array */
+ else if (!pCtx->getInRouter())
+ set.insert(prhs);
+ else {
+ /*
+ If we're in the router, we need to take apart the arrays we
+ receive and put their elements into the array we are collecting.
+ If we didn't, then we'd get an array of arrays, with one array
+ from each shard that responds.
+ */
+ assert(prhs->getType() == Array);
+
+ intrusive_ptr<ValueIterator> pvi(prhs->getArray());
+ while(pvi->more()) {
+ intrusive_ptr<const Value> pElement(pvi->next());
+ set.insert(pElement);
+ }
+ }
return Value::getNull();
}
@@ -60,20 +60,20 @@ namespace mongo {
}
AccumulatorAddToSet::AccumulatorAddToSet(
- const intrusive_ptr<ExpressionContext> &pTheCtx):
+ const intrusive_ptr<ExpressionContext> &pTheCtx):
Accumulator(),
set(),
pCtx(pTheCtx) {
}
intrusive_ptr<Accumulator> AccumulatorAddToSet::create(
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<AccumulatorAddToSet> pAccumulator(
- new AccumulatorAddToSet(pCtx));
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<AccumulatorAddToSet> pAccumulator(
+ new AccumulatorAddToSet(pCtx));
return pAccumulator;
}
const char *AccumulatorAddToSet::getOpName() const {
- return "$addToSet";
+ return "$addToSet";
}
}
diff --git a/src/mongo/db/pipeline/accumulator_avg.cpp b/src/mongo/db/pipeline/accumulator_avg.cpp
index 9f18b1820c8..d9df112f52b 100755
--- a/src/mongo/db/pipeline/accumulator_avg.cpp
+++ b/src/mongo/db/pipeline/accumulator_avg.cpp
@@ -28,96 +28,96 @@ namespace mongo {
intrusive_ptr<const Value> AccumulatorAvg::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- if (!pCtx->getInRouter()) {
- Super::evaluate(pDocument);
- ++count;
- }
- else {
- /*
- If we're in the router, we expect an object that contains
- both a subtotal and a count. This is what getValue() produced
- below.
- */
- intrusive_ptr<const Value> prhs(
- vpOperand[0]->evaluate(pDocument));
- assert(prhs->getType() == Object);
- intrusive_ptr<Document> pShardDoc(prhs->getDocument());
-
- intrusive_ptr<const Value> pSubTotal(
- pShardDoc->getValue(subTotalName));
- assert(pSubTotal.get());
- BSONType subTotalType = pSubTotal->getType();
- if ((totalType == NumberLong) || (subTotalType == NumberLong))
- totalType = NumberLong;
- if ((totalType == NumberDouble) || (subTotalType == NumberDouble))
- totalType = NumberDouble;
-
- if (subTotalType == NumberInt) {
- int v = pSubTotal->getInt();
- longTotal += v;
- doubleTotal += v;
- }
- else if (subTotalType == NumberLong) {
- long long v = pSubTotal->getLong();
- longTotal += v;
- doubleTotal += v;
- }
- else {
- double v = pSubTotal->getDouble();
- doubleTotal += v;
- }
-
- intrusive_ptr<const Value> pCount(pShardDoc->getValue(countName));
- count += pCount->getLong();
- }
+ if (!pCtx->getInRouter()) {
+ Super::evaluate(pDocument);
+ ++count;
+ }
+ else {
+ /*
+ If we're in the router, we expect an object that contains
+ both a subtotal and a count. This is what getValue() produced
+ below.
+ */
+ intrusive_ptr<const Value> prhs(
+ vpOperand[0]->evaluate(pDocument));
+ assert(prhs->getType() == Object);
+ intrusive_ptr<Document> pShardDoc(prhs->getDocument());
+
+ intrusive_ptr<const Value> pSubTotal(
+ pShardDoc->getValue(subTotalName));
+ assert(pSubTotal.get());
+ BSONType subTotalType = pSubTotal->getType();
+ if ((totalType == NumberLong) || (subTotalType == NumberLong))
+ totalType = NumberLong;
+ if ((totalType == NumberDouble) || (subTotalType == NumberDouble))
+ totalType = NumberDouble;
+
+ if (subTotalType == NumberInt) {
+ int v = pSubTotal->getInt();
+ longTotal += v;
+ doubleTotal += v;
+ }
+ else if (subTotalType == NumberLong) {
+ long long v = pSubTotal->getLong();
+ longTotal += v;
+ doubleTotal += v;
+ }
+ else {
+ double v = pSubTotal->getDouble();
+ doubleTotal += v;
+ }
+
+ intrusive_ptr<const Value> pCount(pShardDoc->getValue(countName));
+ count += pCount->getLong();
+ }
return Value::getZero();
}
intrusive_ptr<Accumulator> AccumulatorAvg::create(
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<AccumulatorAvg> pA(new AccumulatorAvg(pCtx));
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<AccumulatorAvg> pA(new AccumulatorAvg(pCtx));
return pA;
}
intrusive_ptr<const Value> AccumulatorAvg::getValue() const {
- if (!pCtx->getInShard()) {
- double avg = 0;
- if (count) {
- if (totalType != NumberDouble)
- avg = static_cast<double>(longTotal / count);
- else
- avg = doubleTotal / count;
- }
-
- return Value::createDouble(avg);
- }
-
- intrusive_ptr<Document> pDocument(Document::create());
-
- intrusive_ptr<const Value> pSubTotal;
- if (totalType == NumberInt)
- pSubTotal = Value::createInt((int)longTotal);
- else if (totalType == NumberLong)
- pSubTotal = Value::createLong(longTotal);
- else
- pSubTotal = Value::createDouble(doubleTotal);
- pDocument->addField(subTotalName, pSubTotal);
-
- intrusive_ptr<const Value> pCount(Value::createLong(count));
- pDocument->addField(countName, pCount);
-
- return Value::createDocument(pDocument);
+ if (!pCtx->getInShard()) {
+ double avg = 0;
+ if (count) {
+ if (totalType != NumberDouble)
+ avg = static_cast<double>(longTotal / count);
+ else
+ avg = doubleTotal / count;
+ }
+
+ return Value::createDouble(avg);
+ }
+
+ intrusive_ptr<Document> pDocument(Document::create());
+
+ intrusive_ptr<const Value> pSubTotal;
+ if (totalType == NumberInt)
+ pSubTotal = Value::createInt((int)longTotal);
+ else if (totalType == NumberLong)
+ pSubTotal = Value::createLong(longTotal);
+ else
+ pSubTotal = Value::createDouble(doubleTotal);
+ pDocument->addField(subTotalName, pSubTotal);
+
+ intrusive_ptr<const Value> pCount(Value::createLong(count));
+ pDocument->addField(countName, pCount);
+
+ return Value::createDocument(pDocument);
}
AccumulatorAvg::AccumulatorAvg(
- const intrusive_ptr<ExpressionContext> &pTheCtx):
+ const intrusive_ptr<ExpressionContext> &pTheCtx):
AccumulatorSum(),
- count(0),
- pCtx(pTheCtx) {
+ count(0),
+ pCtx(pTheCtx) {
}
const char *AccumulatorAvg::getOpName() const {
- return "$avg";
+ return "$avg";
}
}
diff --git a/src/mongo/db/pipeline/accumulator_first.cpp b/src/mongo/db/pipeline/accumulator_first.cpp
index c947aa83996..9c45e409237 100755
--- a/src/mongo/db/pipeline/accumulator_first.cpp
+++ b/src/mongo/db/pipeline/accumulator_first.cpp
@@ -25,25 +25,25 @@ namespace mongo {
const intrusive_ptr<Document> &pDocument) const {
assert(vpOperand.size() == 1);
- /* only remember the first value seen */
- if (!pValue.get())
- pValue = vpOperand[0]->evaluate(pDocument);
+ /* only remember the first value seen */
+ if (!pValue.get())
+ pValue = vpOperand[0]->evaluate(pDocument);
return pValue;
}
AccumulatorFirst::AccumulatorFirst():
- AccumulatorSingleValue() {
+ AccumulatorSingleValue() {
}
intrusive_ptr<Accumulator> AccumulatorFirst::create(
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<AccumulatorFirst> pAccumulator(
- new AccumulatorFirst());
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<AccumulatorFirst> pAccumulator(
+ new AccumulatorFirst());
return pAccumulator;
}
const char *AccumulatorFirst::getOpName() const {
- return "$first";
+ return "$first";
}
}
diff --git a/src/mongo/db/pipeline/accumulator_last.cpp b/src/mongo/db/pipeline/accumulator_last.cpp
index c134fc83159..3d929fc57c5 100755
--- a/src/mongo/db/pipeline/accumulator_last.cpp
+++ b/src/mongo/db/pipeline/accumulator_last.cpp
@@ -25,24 +25,24 @@ namespace mongo {
const intrusive_ptr<Document> &pDocument) const {
assert(vpOperand.size() == 1);
- /* always remember the last value seen */
- pValue = vpOperand[0]->evaluate(pDocument);
+ /* always remember the last value seen */
+ pValue = vpOperand[0]->evaluate(pDocument);
return pValue;
}
AccumulatorLast::AccumulatorLast():
- AccumulatorSingleValue() {
+ AccumulatorSingleValue() {
}
intrusive_ptr<Accumulator> AccumulatorLast::create(
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<AccumulatorLast> pAccumulator(
- new AccumulatorLast());
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<AccumulatorLast> pAccumulator(
+ new AccumulatorLast());
return pAccumulator;
}
const char *AccumulatorLast::getOpName() const {
- return "$last";
+ return "$last";
}
}
diff --git a/src/mongo/db/pipeline/accumulator_min_max.cpp b/src/mongo/db/pipeline/accumulator_min_max.cpp
index 6f078187b44..ce0151847d4 100755
--- a/src/mongo/db/pipeline/accumulator_min_max.cpp
+++ b/src/mongo/db/pipeline/accumulator_min_max.cpp
@@ -24,7 +24,7 @@ namespace mongo {
intrusive_ptr<const Value> AccumulatorMinMax::evaluate(
const intrusive_ptr<Document> &pDocument) const {
assert(vpOperand.size() == 1);
- intrusive_ptr<const Value> prhs(vpOperand[0]->evaluate(pDocument));
+ intrusive_ptr<const Value> prhs(vpOperand[0]->evaluate(pDocument));
/* if this is the first value, just use it */
if (!pValue.get())
@@ -40,28 +40,28 @@ namespace mongo {
}
AccumulatorMinMax::AccumulatorMinMax(int theSense):
- AccumulatorSingleValue(),
+ AccumulatorSingleValue(),
sense(theSense) {
assert((sense == 1) || (sense == -1));
}
intrusive_ptr<Accumulator> AccumulatorMinMax::createMin(
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<AccumulatorMinMax> pAccumulator(
- new AccumulatorMinMax(1));
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<AccumulatorMinMax> pAccumulator(
+ new AccumulatorMinMax(1));
return pAccumulator;
}
intrusive_ptr<Accumulator> AccumulatorMinMax::createMax(
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<AccumulatorMinMax> pAccumulator(
- new AccumulatorMinMax(-1));
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<AccumulatorMinMax> pAccumulator(
+ new AccumulatorMinMax(-1));
return pAccumulator;
}
const char *AccumulatorMinMax::getOpName() const {
- if (sense == 1)
- return "$min";
- return "$max";
+ if (sense == 1)
+ return "$min";
+ return "$max";
}
}
diff --git a/src/mongo/db/pipeline/accumulator_push.cpp b/src/mongo/db/pipeline/accumulator_push.cpp
index 2640bc4ecfd..b0c11dcf70f 100755
--- a/src/mongo/db/pipeline/accumulator_push.cpp
+++ b/src/mongo/db/pipeline/accumulator_push.cpp
@@ -24,27 +24,27 @@ namespace mongo {
intrusive_ptr<const Value> AccumulatorPush::evaluate(
const intrusive_ptr<Document> &pDocument) const {
assert(vpOperand.size() == 1);
- intrusive_ptr<const Value> prhs(vpOperand[0]->evaluate(pDocument));
+ intrusive_ptr<const Value> prhs(vpOperand[0]->evaluate(pDocument));
- if (prhs->getType() == Undefined)
- ; /* nothing to add to the array */
- else if (!pCtx->getInRouter())
- vpValue.push_back(prhs);
- else {
- /*
- If we're in the router, we need to take apart the arrays we
- receive and put their elements into the array we are collecting.
- If we didn't, then we'd get an array of arrays, with one array
- from each shard that responds.
- */
- assert(prhs->getType() == Array);
-
- intrusive_ptr<ValueIterator> pvi(prhs->getArray());
- while(pvi->more()) {
- intrusive_ptr<const Value> pElement(pvi->next());
- vpValue.push_back(pElement);
- }
- }
+ if (prhs->getType() == Undefined)
+ ; /* nothing to add to the array */
+ else if (!pCtx->getInRouter())
+ vpValue.push_back(prhs);
+ else {
+ /*
+ If we're in the router, we need to take apart the arrays we
+ receive and put their elements into the array we are collecting.
+ If we didn't, then we'd get an array of arrays, with one array
+ from each shard that responds.
+ */
+ assert(prhs->getType() == Array);
+
+ intrusive_ptr<ValueIterator> pvi(prhs->getArray());
+ while(pvi->more()) {
+ intrusive_ptr<const Value> pElement(pvi->next());
+ vpValue.push_back(pElement);
+ }
+ }
return Value::getNull();
}
@@ -54,20 +54,20 @@ namespace mongo {
}
AccumulatorPush::AccumulatorPush(
- const intrusive_ptr<ExpressionContext> &pTheCtx):
+ const intrusive_ptr<ExpressionContext> &pTheCtx):
Accumulator(),
vpValue(),
pCtx(pTheCtx) {
}
intrusive_ptr<Accumulator> AccumulatorPush::create(
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<AccumulatorPush> pAccumulator(
- new AccumulatorPush(pCtx));
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<AccumulatorPush> pAccumulator(
+ new AccumulatorPush(pCtx));
return pAccumulator;
}
const char *AccumulatorPush::getOpName() const {
- return "$push";
+ return "$push";
}
}
diff --git a/src/mongo/db/pipeline/accumulator_single_value.cpp b/src/mongo/db/pipeline/accumulator_single_value.cpp
index bfec80387d3..7e2491d121f 100755
--- a/src/mongo/db/pipeline/accumulator_single_value.cpp
+++ b/src/mongo/db/pipeline/accumulator_single_value.cpp
@@ -22,11 +22,11 @@
namespace mongo {
intrusive_ptr<const Value> AccumulatorSingleValue::getValue() const {
- return pValue;
+ return pValue;
}
AccumulatorSingleValue::AccumulatorSingleValue():
- pValue(intrusive_ptr<const Value>()) {
+ pValue(intrusive_ptr<const Value>()) {
}
}
diff --git a/src/mongo/db/pipeline/accumulator_sum.cpp b/src/mongo/db/pipeline/accumulator_sum.cpp
index e6526ac254a..7f268efcb32 100755
--- a/src/mongo/db/pipeline/accumulator_sum.cpp
+++ b/src/mongo/db/pipeline/accumulator_sum.cpp
@@ -24,10 +24,10 @@ namespace mongo {
intrusive_ptr<const Value> AccumulatorSum::evaluate(
const intrusive_ptr<Document> &pDocument) const {
assert(vpOperand.size() == 1);
- intrusive_ptr<const Value> prhs(vpOperand[0]->evaluate(pDocument));
+ intrusive_ptr<const Value> prhs(vpOperand[0]->evaluate(pDocument));
/* upgrade to the widest type required to hold the result */
- totalType = Value::getWidestNumeric(totalType, prhs->getType());
+ totalType = Value::getWidestNumeric(totalType, prhs->getType());
if (totalType == NumberInt) {
int v = prhs->coerceToInt();
@@ -48,8 +48,8 @@ namespace mongo {
}
intrusive_ptr<Accumulator> AccumulatorSum::create(
- const intrusive_ptr<ExpressionContext> &pCtx) {
- intrusive_ptr<AccumulatorSum> pSummer(new AccumulatorSum());
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ intrusive_ptr<AccumulatorSum> pSummer(new AccumulatorSum());
return pSummer;
}
@@ -69,6 +69,6 @@ namespace mongo {
}
const char *AccumulatorSum::getOpName() const {
- return "$sum";
+ return "$sum";
}
}
diff --git a/src/mongo/db/pipeline/builder.cpp b/src/mongo/db/pipeline/builder.cpp
index cbde3705656..b3f7872ef94 100755
--- a/src/mongo/db/pipeline/builder.cpp
+++ b/src/mongo/db/pipeline/builder.cpp
@@ -23,94 +23,94 @@
namespace mongo {
void BuilderObj::append() {
- pBuilder->appendNull(fieldName);
+ pBuilder->appendNull(fieldName);
}
void BuilderObj::append(bool b) {
- pBuilder->append(fieldName, b);
+ pBuilder->append(fieldName, b);
}
void BuilderObj::append(int i) {
- pBuilder->append(fieldName, i);
+ pBuilder->append(fieldName, i);
}
void BuilderObj::append(long long ll) {
- pBuilder->append(fieldName, ll);
+ pBuilder->append(fieldName, ll);
}
void BuilderObj::append(double d) {
- pBuilder->append(fieldName, d);
+ pBuilder->append(fieldName, d);
}
void BuilderObj::append(string s) {
- pBuilder->append(fieldName, s);
+ pBuilder->append(fieldName, s);
}
void BuilderObj::append(const OID &o) {
- pBuilder->append(fieldName, o);
+ pBuilder->append(fieldName, o);
}
void BuilderObj::append(const Date_t &d) {
- pBuilder->append(fieldName, d);
+ pBuilder->append(fieldName, d);
}
void BuilderObj::append(BSONObjBuilder *pDone) {
- pBuilder->append(fieldName, pDone->done());
+ pBuilder->append(fieldName, pDone->done());
}
void BuilderObj::append(BSONArrayBuilder *pDone) {
- pBuilder->append(fieldName, pDone->arr());
+ pBuilder->append(fieldName, pDone->arr());
}
BuilderObj::BuilderObj(
- BSONObjBuilder *pObjBuilder, string theFieldName):
+ BSONObjBuilder *pObjBuilder, string theFieldName):
pBuilder(pObjBuilder),
fieldName(theFieldName) {
}
void BuilderArray::append() {
- pBuilder->appendNull();
+ pBuilder->appendNull();
}
void BuilderArray::append(bool b) {
- pBuilder->append(b);
+ pBuilder->append(b);
}
void BuilderArray::append(int i) {
- pBuilder->append(i);
+ pBuilder->append(i);
}
void BuilderArray::append(long long ll) {
- pBuilder->append(ll);
+ pBuilder->append(ll);
}
void BuilderArray::append(double d) {
- pBuilder->append(d);
+ pBuilder->append(d);
}
void BuilderArray::append(string s) {
- pBuilder->append(s);
+ pBuilder->append(s);
}
void BuilderArray::append(const OID &o) {
- pBuilder->append(o);
+ pBuilder->append(o);
}
void BuilderArray::append(const Date_t &d) {
- pBuilder->append(d);
+ pBuilder->append(d);
}
void BuilderArray::append(BSONObjBuilder *pDone) {
- pBuilder->append(pDone->done());
+ pBuilder->append(pDone->done());
}
void BuilderArray::append(BSONArrayBuilder *pDone) {
- pBuilder->append(pDone->arr());
+ pBuilder->append(pDone->arr());
}
BuilderArray::BuilderArray(
- BSONArrayBuilder *pArrayBuilder):
+ BSONArrayBuilder *pArrayBuilder):
pBuilder(pArrayBuilder) {
}
diff --git a/src/mongo/db/pipeline/builder.h b/src/mongo/db/pipeline/builder.h
index bdf71cd784c..fc37a72236f 100755
--- a/src/mongo/db/pipeline/builder.h
+++ b/src/mongo/db/pipeline/builder.h
@@ -36,60 +36,60 @@ namespace mongo {
class Builder :
boost::noncopyable {
public:
- virtual ~Builder() {};
+ virtual ~Builder() {};
virtual void append() = 0; // append a null
- virtual void append(bool b) = 0;
- virtual void append(int i) = 0;
- virtual void append(long long ll) = 0;
- virtual void append(double d) = 0;
- virtual void append(string s) = 0;
- virtual void append(const OID &o) = 0;
- virtual void append(const Date_t &d) = 0;
- virtual void append(BSONObjBuilder *pDone) = 0;
- virtual void append(BSONArrayBuilder *pDone) = 0;
+ virtual void append(bool b) = 0;
+ virtual void append(int i) = 0;
+ virtual void append(long long ll) = 0;
+ virtual void append(double d) = 0;
+ virtual void append(string s) = 0;
+ virtual void append(const OID &o) = 0;
+ virtual void append(const Date_t &d) = 0;
+ virtual void append(BSONObjBuilder *pDone) = 0;
+ virtual void append(BSONArrayBuilder *pDone) = 0;
};
class BuilderObj :
- public Builder {
+ public Builder {
public:
- // virtuals from Builder
+ // virtuals from Builder
virtual void append();
- virtual void append(bool b);
- virtual void append(int i);
- virtual void append(long long ll);
- virtual void append(double d);
- virtual void append(string s);
- virtual void append(const OID &o);
- virtual void append(const Date_t &d);
- virtual void append(BSONObjBuilder *pDone);
- virtual void append(BSONArrayBuilder *pDone);
+ virtual void append(bool b);
+ virtual void append(int i);
+ virtual void append(long long ll);
+ virtual void append(double d);
+ virtual void append(string s);
+ virtual void append(const OID &o);
+ virtual void append(const Date_t &d);
+ virtual void append(BSONObjBuilder *pDone);
+ virtual void append(BSONArrayBuilder *pDone);
- BuilderObj(BSONObjBuilder *pBuilder, string fieldName);
+ BuilderObj(BSONObjBuilder *pBuilder, string fieldName);
private:
- BSONObjBuilder *pBuilder;
- string fieldName;
+ BSONObjBuilder *pBuilder;
+ string fieldName;
};
class BuilderArray :
- public Builder {
+ public Builder {
public:
- // virtuals from Builder
+ // virtuals from Builder
virtual void append();
- virtual void append(bool b);
- virtual void append(int i);
- virtual void append(long long ll);
- virtual void append(double d);
- virtual void append(string s);
- virtual void append(const OID &o);
- virtual void append(const Date_t &d);
- virtual void append(BSONObjBuilder *pDone);
- virtual void append(BSONArrayBuilder *pDone);
+ virtual void append(bool b);
+ virtual void append(int i);
+ virtual void append(long long ll);
+ virtual void append(double d);
+ virtual void append(string s);
+ virtual void append(const OID &o);
+ virtual void append(const Date_t &d);
+ virtual void append(BSONObjBuilder *pDone);
+ virtual void append(BSONArrayBuilder *pDone);
- BuilderArray(BSONArrayBuilder *pBuilder);
+ BuilderArray(BSONArrayBuilder *pBuilder);
private:
- BSONArrayBuilder *pBuilder;
+ BSONArrayBuilder *pBuilder;
};
}
diff --git a/src/mongo/db/pipeline/doc_mem_monitor.cpp b/src/mongo/db/pipeline/doc_mem_monitor.cpp
index ffbe9c88854..e4e8323cf96 100755
--- a/src/mongo/db/pipeline/doc_mem_monitor.cpp
+++ b/src/mongo/db/pipeline/doc_mem_monitor.cpp
@@ -21,48 +21,48 @@
namespace mongo {
DocMemMonitor::DocMemMonitor(StringWriter *pW) {
- /*
- Use the default values.
+ /*
+ Use the default values.
- Currently, we warn in log at 5%, and assert at 10%.
- */
- size_t errorRam = SystemInfo::getPhysicalRam() / 10;
- size_t warnRam = errorRam / 2;
+ Currently, we warn in log at 5%, and assert at 10%.
+ */
+ size_t errorRam = SystemInfo::getPhysicalRam() / 10;
+ size_t warnRam = errorRam / 2;
- init(pW, warnRam, errorRam);
+ init(pW, warnRam, errorRam);
}
DocMemMonitor::DocMemMonitor(StringWriter *pW,
- size_t warnLimit, size_t errorLimit) {
- init(pW, warnLimit, errorLimit);
+ size_t warnLimit, size_t errorLimit) {
+ init(pW, warnLimit, errorLimit);
}
void DocMemMonitor::addToTotal(size_t amount) {
- totalUsed += amount;
+ totalUsed += amount;
- if (!warned) {
- if (warnLimit && (totalUsed > warnLimit)) {
- stringstream ss;
- ss << "warning, 5% of physical RAM used for ";
- pWriter->writeString(ss);
- ss << endl;
- warning() << ss.str();
- warned = true;
- }
- }
-
- if (errorLimit) {
- uassert(15944, "terminating request: request heap use exceeded 10% of physical RAM", (totalUsed <= errorLimit));
- }
+ if (!warned) {
+ if (warnLimit && (totalUsed > warnLimit)) {
+ stringstream ss;
+ ss << "warning, 5% of physical RAM used for ";
+ pWriter->writeString(ss);
+ ss << endl;
+ warning() << ss.str();
+ warned = true;
+ }
+ }
+
+ if (errorLimit) {
+ uassert(15944, "terminating request: request heap use exceeded 10% of physical RAM", (totalUsed <= errorLimit));
+ }
}
void DocMemMonitor::init(StringWriter *pW,
- size_t warnLimit, size_t errorLimit) {
- this->pWriter = pW;
- this->warnLimit = warnLimit;
- this->errorLimit = errorLimit;
+ size_t warnLimit, size_t errorLimit) {
+ this->pWriter = pW;
+ this->warnLimit = warnLimit;
+ this->errorLimit = errorLimit;
- warned = false;
- totalUsed = 0;
+ warned = false;
+ totalUsed = 0;
}
}
diff --git a/src/mongo/db/pipeline/doc_mem_monitor.h b/src/mongo/db/pipeline/doc_mem_monitor.h
index e368acc906a..ca5ac23e16b 100755
--- a/src/mongo/db/pipeline/doc_mem_monitor.h
+++ b/src/mongo/db/pipeline/doc_mem_monitor.h
@@ -39,56 +39,56 @@ namespace mongo {
*/
class DocMemMonitor {
public:
- /*
- Constructor.
+ /*
+ Constructor.
- Uses default limits for warnings and errors.
+ Uses default limits for warnings and errors.
- The StringWriter parameter must outlive the DocMemMonitor instance.
+ The StringWriter parameter must outlive the DocMemMonitor instance.
- @param pWriter string writer that provides information about the
- operation being monitored
- */
- DocMemMonitor(StringWriter *pWriter);
+ @param pWriter string writer that provides information about the
+ operation being monitored
+ */
+ DocMemMonitor(StringWriter *pWriter);
- /*
- Constructor.
+ /*
+ Constructor.
- This variant allows explicit selection of the limits. Note that
- limits of zero are treated as infinite.
+ This variant allows explicit selection of the limits. Note that
+ limits of zero are treated as infinite.
- The StringWriter parameter must outlive the DocMemMonitor instance.
+ The StringWriter parameter must outlive the DocMemMonitor instance.
- @param pWriter string writer that provides information about the
- operation being monitored
- @param warnLimit the amount of ram to issue (log) a warning for
- @param errorLimit the amount of ram to throw an error for
- */
- DocMemMonitor(StringWriter *pWriter, size_t warnLimit,
- size_t errorLimit);
+ @param pWriter string writer that provides information about the
+ operation being monitored
+ @param warnLimit the amount of ram to issue (log) a warning for
+ @param errorLimit the amount of ram to throw an error for
+ */
+ DocMemMonitor(StringWriter *pWriter, size_t warnLimit,
+ size_t errorLimit);
- /*
- Increment the total amount of memory used by the given amount. If
- the warning threshold is exceeded, a warning will be logged. If the
- error threshold is exceeded, an error will be thrown.
+ /*
+ Increment the total amount of memory used by the given amount. If
+ the warning threshold is exceeded, a warning will be logged. If the
+ error threshold is exceeded, an error will be thrown.
- @param amount the amount of memory to add to the current total
- */
- void addToTotal(size_t amount);
+ @param amount the amount of memory to add to the current total
+ */
+ void addToTotal(size_t amount);
private:
- /*
- Real constructor body.
-
- Provides common construction for all the variant constructors.
- */
- void init(StringWriter *pW, size_t warnLimit, size_t errorLimit);
-
- bool warned;
- size_t totalUsed;
- size_t warnLimit;
- size_t errorLimit;
- StringWriter *pWriter;
+ /*
+ Real constructor body.
+
+ Provides common construction for all the variant constructors.
+ */
+ void init(StringWriter *pW, size_t warnLimit, size_t errorLimit);
+
+ bool warned;
+ size_t totalUsed;
+ size_t warnLimit;
+ size_t errorLimit;
+ StringWriter *pWriter;
};
}
diff --git a/src/mongo/db/pipeline/document.cpp b/src/mongo/db/pipeline/document.cpp
index 11c6c64f71f..6416a341f41 100755
--- a/src/mongo/db/pipeline/document.cpp
+++ b/src/mongo/db/pipeline/document.cpp
@@ -29,7 +29,7 @@ namespace mongo {
string Document::idName("_id");
intrusive_ptr<Document> Document::createFromBsonObj(BSONObj *pBsonObj) {
- intrusive_ptr<Document> pDocument(new Document(pBsonObj));
+ intrusive_ptr<Document> pDocument(new Document(pBsonObj));
return pDocument;
}
@@ -40,7 +40,7 @@ namespace mongo {
while(bsonIterator.more()) {
BSONElement bsonElement(bsonIterator.next());
string fieldName(bsonElement.fieldName());
- intrusive_ptr<const Value> pValue(
+ intrusive_ptr<const Value> pValue(
Value::createFromBsonElement(&bsonElement));
vFieldName.push_back(fieldName);
@@ -55,7 +55,7 @@ namespace mongo {
}
intrusive_ptr<Document> Document::create(size_t sizeHint) {
- intrusive_ptr<Document> pDocument(new Document(sizeHint));
+ intrusive_ptr<Document> pDocument(new Document(sizeHint));
return pDocument;
}
@@ -70,7 +70,7 @@ namespace mongo {
intrusive_ptr<Document> Document::clone() {
const size_t n = vFieldName.size();
- intrusive_ptr<Document> pNew(Document::create(n));
+ intrusive_ptr<Document> pNew(Document::create(n));
for(size_t i = 0; i < n; ++i)
pNew->addField(vFieldName[i], vpValue[i]);
@@ -97,7 +97,7 @@ namespace mongo {
*/
const size_t n = vFieldName.size();
for(size_t i = 0; i < n; ++i) {
- if (fieldName.compare(vFieldName[i]) == 0)
+ if (fieldName.compare(vFieldName[i]) == 0)
return vpValue[i];
}
@@ -105,9 +105,9 @@ namespace mongo {
}
void Document::addField(const string &fieldName,
- const intrusive_ptr<const Value> &pValue) {
- uassert(15945, str::stream() << "cannot add undefined field " <<
- fieldName << " to document", pValue->getType() != Undefined);
+ const intrusive_ptr<const Value> &pValue) {
+ uassert(15945, str::stream() << "cannot add undefined field " <<
+ fieldName << " to document", pValue->getType() != Undefined);
vFieldName.push_back(fieldName);
vpValue.push_back(pValue);
@@ -115,60 +115,60 @@ namespace mongo {
void Document::setField(size_t index,
const string &fieldName,
- const intrusive_ptr<const Value> &pValue) {
- /* special case: should this field be removed? */
- if (!pValue.get()) {
- vFieldName.erase(vFieldName.begin() + index);
- vpValue.erase(vpValue.begin() + index);
- return;
- }
-
- /* make sure we have a valid value */
- uassert(15968, str::stream() << "cannot set undefined field " <<
- fieldName << " to document", pValue->getType() != Undefined);
-
- /* set the indicated field */
+ const intrusive_ptr<const Value> &pValue) {
+ /* special case: should this field be removed? */
+ if (!pValue.get()) {
+ vFieldName.erase(vFieldName.begin() + index);
+ vpValue.erase(vpValue.begin() + index);
+ return;
+ }
+
+ /* make sure we have a valid value */
+ uassert(15968, str::stream() << "cannot set undefined field " <<
+ fieldName << " to document", pValue->getType() != Undefined);
+
+ /* set the indicated field */
vFieldName[index] = fieldName;
vpValue[index] = pValue;
}
intrusive_ptr<const Value> Document::getField(const string &fieldName) const {
- const size_t n = vFieldName.size();
- for(size_t i = 0; i < n; ++i) {
- if (fieldName.compare(vFieldName[i]) == 0)
- return vpValue[i];
- }
+ const size_t n = vFieldName.size();
+ for(size_t i = 0; i < n; ++i) {
+ if (fieldName.compare(vFieldName[i]) == 0)
+ return vpValue[i];
+ }
- /* if we got here, there's no such field */
- return intrusive_ptr<const Value>();
+ /* if we got here, there's no such field */
+ return intrusive_ptr<const Value>();
}
size_t Document::getApproximateSize() const {
- size_t size = sizeof(Document);
- const size_t n = vpValue.size();
- for(size_t i = 0; i < n; ++i)
- size += vpValue[i]->getApproximateSize();
+ size_t size = sizeof(Document);
+ const size_t n = vpValue.size();
+ for(size_t i = 0; i < n; ++i)
+ size += vpValue[i]->getApproximateSize();
- return size;
+ return size;
}
size_t Document::getFieldIndex(const string &fieldName) const {
- const size_t n = vFieldName.size();
- size_t i = 0;
- for(; i < n; ++i) {
- if (fieldName.compare(vFieldName[i]) == 0)
- break;
- }
+ const size_t n = vFieldName.size();
+ size_t i = 0;
+ for(; i < n; ++i) {
+ if (fieldName.compare(vFieldName[i]) == 0)
+ break;
+ }
- return i;
+ return i;
}
void Document::hash_combine(size_t &seed) const {
- const size_t n = vFieldName.size();
- for(size_t i = 0; i < n; ++i) {
- boost::hash_combine(seed, vFieldName[i]);
- vpValue[i]->hash_combine(seed);
- }
+ const size_t n = vFieldName.size();
+ for(size_t i = 0; i < n; ++i) {
+ boost::hash_combine(seed, vFieldName[i]);
+ vpValue[i]->hash_combine(seed);
+ }
}
int Document::compare(const intrusive_ptr<Document> &rL,
diff --git a/src/mongo/db/pipeline/document.h b/src/mongo/db/pipeline/document.h
index f11a825151e..b00f7162cef 100755
--- a/src/mongo/db/pipeline/document.h
+++ b/src/mongo/db/pipeline/document.h
@@ -54,10 +54,10 @@ namespace mongo {
The new document shares all the fields' values with the original.
- This is not a deep copy. Only the fields on the top-level document
- are cloned.
+ This is not a deep copy. Only the fields on the top-level document
+ are cloned.
- @returns the shallow clone of the document
+ @returns the shallow clone of the document
*/
intrusive_ptr<Document> clone();
@@ -91,7 +91,7 @@ namespace mongo {
field.
*/
void addField(const string &fieldName,
- const intrusive_ptr<const Value> &pValue);
+ const intrusive_ptr<const Value> &pValue);
/*
Set the given field to be at the specified position in the
@@ -99,63 +99,63 @@ namespace mongo {
position. The index must be within the current range of field
indices.
- pValue.get() may be NULL, in which case the field will be
- removed. fieldName is ignored in this case.
+ pValue.get() may be NULL, in which case the field will be
+ removed. fieldName is ignored in this case.
- @param index the field index in the list of fields
- @param fieldName the new field name
- @param pValue the new Value
+ @param index the field index in the list of fields
+ @param fieldName the new field name
+ @param pValue the new Value
*/
void setField(size_t index,
const string &fieldName,
- const intrusive_ptr<const Value> &pValue);
+ const intrusive_ptr<const Value> &pValue);
- /*
- Convenience type for dealing with fields.
- */
- typedef pair<string, intrusive_ptr<const Value> > FieldPair;
+ /*
+ Convenience type for dealing with fields.
+ */
+ typedef pair<string, intrusive_ptr<const Value> > FieldPair;
- /*
- Get the indicated field.
+ /*
+ Get the indicated field.
- @param index the field index in the list of fields
- @returns the field name and value of the field
- */
- FieldPair getField(size_t index) const;
+ @param index the field index in the list of fields
+ @returns the field name and value of the field
+ */
+ FieldPair getField(size_t index) const;
- /*
- Get the number of fields in the Document.
+ /*
+ Get the number of fields in the Document.
- @returns the number of fields in the Document
- */
- size_t getFieldCount() const;
+ @returns the number of fields in the Document
+ */
+ size_t getFieldCount() const;
- /*
- Get the index of the given field.
+ /*
+ Get the index of the given field.
- @param fieldName the name of the field
- @returns the index of the field, or if it does not exist, the number
- of fields (getFieldCount())
- */
- size_t getFieldIndex(const string &fieldName) const;
+ @param fieldName the name of the field
+ @returns the index of the field, or if it does not exist, the number
+ of fields (getFieldCount())
+ */
+ size_t getFieldIndex(const string &fieldName) const;
- /*
- Get a field by name.
+ /*
+ Get a field by name.
- @param fieldName the name of the field
- @returns the value of the field
- */
- intrusive_ptr<const Value> getField(const string &fieldName) const;
+ @param fieldName the name of the field
+ @returns the value of the field
+ */
+ intrusive_ptr<const Value> getField(const string &fieldName) const;
- /*
- Get the approximate storage size of the document, in bytes.
+ /*
+ Get the approximate storage size of the document, in bytes.
- Under the assumption that field name strings are shared, they are
- not included in the total.
+ Under the assumption that field name strings are shared, they are
+ not included in the total.
- @returns the approximate storage
- */
- size_t getApproximateSize() const;
+ @returns the approximate storage
+ */
+ size_t getApproximateSize() const;
/*
Compare two documents.
@@ -168,17 +168,17 @@ namespace mongo {
static int compare(const intrusive_ptr<Document> &rL,
const intrusive_ptr<Document> &rR);
- static string idName; // shared "_id"
+ static string idName; // shared "_id"
- /*
- Calculate a hash value.
+ /*
+ Calculate a hash value.
- Meant to be used to create composite hashes suitable for
- boost classes such as unordered_map<>.
+ Meant to be used to create composite hashes suitable for
+ boost classes such as unordered_map<>.
- @param seed value to augment with this' hash
- */
- void hash_combine(size_t &seed) const;
+ @param seed value to augment with this' hash
+ */
+ void hash_combine(size_t &seed) const;
private:
friend class FieldIterator;
@@ -207,7 +207,7 @@ namespace mongo {
@return the next field's <name, Value>
*/
- Document::FieldPair next();
+ Document::FieldPair next();
private:
friend class Document;
@@ -224,7 +224,7 @@ namespace mongo {
We'll hang on to the original document to ensure we keep the
fieldPtr vector alive.
*/
- intrusive_ptr<Document> pDocument;
+ intrusive_ptr<Document> pDocument;
size_t index; // current field in iteration
};
}
@@ -235,7 +235,7 @@ namespace mongo {
namespace mongo {
inline size_t Document::getFieldCount() const {
- return vFieldName.size();
+ return vFieldName.size();
}
inline Document::FieldPair Document::getField(size_t index) const {
diff --git a/src/mongo/db/pipeline/document_source.cpp b/src/mongo/db/pipeline/document_source.cpp
index 813852e35c6..598de320b0e 100755
--- a/src/mongo/db/pipeline/document_source.cpp
+++ b/src/mongo/db/pipeline/document_source.cpp
@@ -23,30 +23,30 @@ namespace mongo {
}
void DocumentSource::setSource(
- const intrusive_ptr<DocumentSource> &pTheSource) {
- assert(!pSource.get());
- pSource = pTheSource;
+ const intrusive_ptr<DocumentSource> &pTheSource) {
+ assert(!pSource.get());
+ pSource = pTheSource;
}
bool DocumentSource::coalesce(
- const intrusive_ptr<DocumentSource> &pNextSource) {
- return false;
+ const intrusive_ptr<DocumentSource> &pNextSource) {
+ return false;
}
void DocumentSource::optimize() {
}
void DocumentSource::addToBsonArray(BSONArrayBuilder *pBuilder) const {
- BSONObjBuilder insides;
- sourceToBson(&insides);
- pBuilder->append(insides.done());
+ BSONObjBuilder insides;
+ sourceToBson(&insides);
+ pBuilder->append(insides.done());
}
void DocumentSource::writeString(stringstream &ss) const {
- BSONArrayBuilder bab;
- addToBsonArray(&bab);
- BSONArray ba(bab.arr());
- ss << ba.toString(/* isArray */true);
+ BSONArrayBuilder bab;
+ addToBsonArray(&bab);
+ BSONArray ba(bab.arr());
+ ss << ba.toString(/* isArray */true);
// our toString should use standard string types.....
}
}
diff --git a/src/mongo/db/pipeline/document_source.h b/src/mongo/db/pipeline/document_source.h
index 7b2134a41af..39f28d59f4d 100755
--- a/src/mongo/db/pipeline/document_source.h
+++ b/src/mongo/db/pipeline/document_source.h
@@ -39,114 +39,114 @@ namespace mongo {
class DocumentSource :
public IntrusiveCounterUnsigned,
- public StringWriter {
+ public StringWriter {
public:
- virtual ~DocumentSource();
+ virtual ~DocumentSource();
- // virtuals from StringWriter
- /**
- Write out a string representation of this pipeline operator.
+ // virtuals from StringWriter
+ /**
+ Write out a string representation of this pipeline operator.
- @param ss string stream to write the string representation to
- */
- virtual void writeString(stringstream &ss) const;
+ @param ss string stream to write the string representation to
+ */
+ virtual void writeString(stringstream &ss) const;
/**
- Is the source at EOF?
+ Is the source at EOF?
- @returns true if the source has no more Documents to return.
+ @returns true if the source has no more Documents to return.
*/
virtual bool eof() = 0;
/**
- Advance the state of the DocumentSource so that it will return the
- next Document.
+ Advance the state of the DocumentSource so that it will return the
+ next Document.
- @returns whether there is another document to fetch, i.e., whether or
- not getCurrent() will succeed.
+ @returns whether there is another document to fetch, i.e., whether or
+ not getCurrent() will succeed.
*/
virtual bool advance() = 0;
/**
Advance the source, and return the next Expression.
- @returns the current Document
+ @returns the current Document
TODO throws an exception if there are no more expressions to return.
*/
virtual intrusive_ptr<Document> getCurrent() = 0;
- /**
- Set the underlying source this source should use to get Documents
- from.
+ /**
+ Set the underlying source this source should use to get Documents
+ from.
- It is an error to set the source more than once. This is to
- prevent changing sources once the original source has been started;
- this could break the state maintained by the DocumentSource.
+ It is an error to set the source more than once. This is to
+ prevent changing sources once the original source has been started;
+ this could break the state maintained by the DocumentSource.
- @param pSource the underlying source to use
- */
- virtual void setSource(const intrusive_ptr<DocumentSource> &pSource);
+ @param pSource the underlying source to use
+ */
+ virtual void setSource(const intrusive_ptr<DocumentSource> &pSource);
- /**
- Attempt to coalesce this DocumentSource with its successor in the
- document processing pipeline. If successful, the successor
- DocumentSource should be removed from the pipeline and discarded.
+ /**
+ Attempt to coalesce this DocumentSource with its successor in the
+ document processing pipeline. If successful, the successor
+ DocumentSource should be removed from the pipeline and discarded.
- If successful, this operation can be applied repeatedly, in an
- attempt to coalesce several sources together.
+ If successful, this operation can be applied repeatedly, in an
+ attempt to coalesce several sources together.
- The default implementation is to do nothing, and return false.
+ The default implementation is to do nothing, and return false.
- @param pNextSource the next source in the document processing chain.
- @returns whether or not the attempt to coalesce was successful or not;
- if the attempt was not successful, nothing has been changed
- */
- virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource);
+ @param pNextSource the next source in the document processing chain.
+ @returns whether or not the attempt to coalesce was successful or not;
+ if the attempt was not successful, nothing has been changed
+ */
+ virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource);
- /**
- Optimize the pipeline operation, if possible. This is a local
- optimization that only looks within this DocumentSource. For best
- results, first coalesce compatible sources using coalesce().
+ /**
+ Optimize the pipeline operation, if possible. This is a local
+ optimization that only looks within this DocumentSource. For best
+ results, first coalesce compatible sources using coalesce().
- This is intended for any operations that include expressions, and
- provides a hook for those to optimize those operations.
+ This is intended for any operations that include expressions, and
+ provides a hook for those to optimize those operations.
- The default implementation is to do nothing.
- */
- virtual void optimize();
+ The default implementation is to do nothing.
+ */
+ virtual void optimize();
/**
- Add the DocumentSource to the array builder.
+ Add the DocumentSource to the array builder.
- The default implementation calls sourceToBson() in order to
- convert the inner part of the object which will be added to the
- array being built here.
+ The default implementation calls sourceToBson() in order to
+ convert the inner part of the object which will be added to the
+ array being built here.
- @param pBuilder the array builder to add the operation to.
+ @param pBuilder the array builder to add the operation to.
*/
- virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const;
-
+ virtual void addToBsonArray(BSONArrayBuilder *pBuilder) const;
+
protected:
- /**
- Create an object that represents the document source. The object
- will have a single field whose name is the source's name. This
- will be used by the default implementation of addToBsonArray()
- to add this object to a pipeline being represented in BSON.
-
- @param pBuilder a blank object builder to write to
- */
- virtual void sourceToBson(BSONObjBuilder *pBuilder) const = 0;
-
- /*
- Most DocumentSources have an underlying source they get their data
- from. This is a convenience for them.
-
- The default implementation of setSource() sets this; if you don't
- need a source, override that to assert(). The default is to
- assert() if this has already been set.
- */
- intrusive_ptr<DocumentSource> pSource;
+ /**
+ Create an object that represents the document source. The object
+ will have a single field whose name is the source's name. This
+ will be used by the default implementation of addToBsonArray()
+ to add this object to a pipeline being represented in BSON.
+
+ @param pBuilder a blank object builder to write to
+ */
+ virtual void sourceToBson(BSONObjBuilder *pBuilder) const = 0;
+
+ /*
+ Most DocumentSources have an underlying source they get their data
+ from. This is a convenience for them.
+
+ The default implementation of setSource() sets this; if you don't
+ need a source, override that to assert(). The default is to
+ assert() if this has already been set.
+ */
+ intrusive_ptr<DocumentSource> pSource;
};
@@ -158,83 +158,83 @@ namespace mongo {
virtual bool eof();
virtual bool advance();
virtual intrusive_ptr<Document> getCurrent();
- virtual void setSource(const intrusive_ptr<DocumentSource> &pSource);
+ virtual void setSource(const intrusive_ptr<DocumentSource> &pSource);
- /**
- Create a document source based on a BSON array.
+ /**
+ Create a document source based on a BSON array.
- This is usually put at the beginning of a chain of document sources
- in order to fetch data from the database.
+ This is usually put at the beginning of a chain of document sources
+ in order to fetch data from the database.
- CAUTION: the BSON is not read until the source is used. Any
- elements that appear after these documents must not be read until
- this source is exhausted.
+ CAUTION: the BSON is not read until the source is used. Any
+ elements that appear after these documents must not be read until
+ this source is exhausted.
- @param pBsonElement the BSON array to treat as a document source
- @returns the newly created document source
- */
- static intrusive_ptr<DocumentSourceBsonArray> create(
- BSONElement *pBsonElement);
+ @param pBsonElement the BSON array to treat as a document source
+ @returns the newly created document source
+ */
+ static intrusive_ptr<DocumentSourceBsonArray> create(
+ BSONElement *pBsonElement);
protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
private:
DocumentSourceBsonArray(BSONElement *pBsonElement);
- BSONObj embeddedObject;
- BSONObjIterator arrayIterator;
- BSONElement currentElement;
- bool haveCurrent;
+ BSONObj embeddedObject;
+ BSONObjIterator arrayIterator;
+ BSONElement currentElement;
+ bool haveCurrent;
};
class DocumentSourceCommandFutures :
- public DocumentSource {
+ public DocumentSource {
public:
- // virtuals from DocumentSource
- virtual ~DocumentSourceCommandFutures();
+ // virtuals from DocumentSource
+ virtual ~DocumentSourceCommandFutures();
virtual bool eof();
virtual bool advance();
virtual intrusive_ptr<Document> getCurrent();
- virtual void setSource(const intrusive_ptr<DocumentSource> &pSource);
+ virtual void setSource(const intrusive_ptr<DocumentSource> &pSource);
- /* convenient shorthand for a commonly used type */
- typedef list<shared_ptr<Future::CommandResult> > FuturesList;
+ /* convenient shorthand for a commonly used type */
+ typedef list<shared_ptr<Future::CommandResult> > FuturesList;
- /**
- Create a DocumentSource that wraps a list of Command::Futures.
+ /**
+ Create a DocumentSource that wraps a list of Command::Futures.
- @param errmsg place to write error messages to; must exist for the
- lifetime of the created DocumentSourceCommandFutures
- @param pList the list of futures
- */
- static intrusive_ptr<DocumentSourceCommandFutures> create(
- string &errmsg, FuturesList *pList);
+ @param errmsg place to write error messages to; must exist for the
+ lifetime of the created DocumentSourceCommandFutures
+ @param pList the list of futures
+ */
+ static intrusive_ptr<DocumentSourceCommandFutures> create(
+ string &errmsg, FuturesList *pList);
protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
private:
- DocumentSourceCommandFutures(string &errmsg, FuturesList *pList);
-
- /**
- Advance to the next document, setting pCurrent appropriately.
-
- Adjusts pCurrent, pBsonSource, and iterator, as needed. On exit,
- pCurrent is the Document to return, or NULL. If NULL, this
- indicates there is nothing more to return.
- */
- void getNextDocument();
-
- bool newSource; // set to true for the first item of a new source
- intrusive_ptr<DocumentSourceBsonArray> pBsonSource;
- intrusive_ptr<Document> pCurrent;
- FuturesList::iterator iterator;
- FuturesList::iterator listEnd;
- string &errmsg;
+ DocumentSourceCommandFutures(string &errmsg, FuturesList *pList);
+
+ /**
+ Advance to the next document, setting pCurrent appropriately.
+
+ Adjusts pCurrent, pBsonSource, and iterator, as needed. On exit,
+ pCurrent is the Document to return, or NULL. If NULL, this
+ indicates there is nothing more to return.
+ */
+ void getNextDocument();
+
+ bool newSource; // set to true for the first item of a new source
+ intrusive_ptr<DocumentSourceBsonArray> pBsonSource;
+ intrusive_ptr<Document> pCurrent;
+ FuturesList::iterator iterator;
+ FuturesList::iterator listEnd;
+ string &errmsg;
};
@@ -246,59 +246,59 @@ namespace mongo {
virtual bool eof();
virtual bool advance();
virtual intrusive_ptr<Document> getCurrent();
- virtual void setSource(const intrusive_ptr<DocumentSource> &pSource);
-
- /**
- Create a document source based on a cursor.
-
- This is usually put at the beginning of a chain of document sources
- in order to fetch data from the database.
-
- @param pCursor the cursor to use to fetch data
- */
- static intrusive_ptr<DocumentSourceCursor> create(
- const shared_ptr<Cursor> &pCursor);
-
- /**
- Add a BSONObj dependency.
-
- Some Cursor creation functions rely on BSON objects to specify
- their query predicate or sort. These often take a BSONObj
- by reference for these, but to not copy it. As a result, the
- BSONObjs specified must outlive the Cursor. In order to ensure
- that, use this to preserve a pointer to the BSONObj here.
-
- From the outside, you must also make sure the BSONObjBuilder
- creates a lasting copy of the data, otherwise it will go away
- when the builder goes out of scope. Therefore, the typical usage
- pattern for this is
- {
- BSONObjBuilder builder;
- // do stuff to the builder
- shared_ptr<BSONObj> pBsonObj(new BSONObj(builder.obj()));
- pDocumentSourceCursor->addBsonDependency(pBsonObj);
- }
-
- @param pBsonObj pointer to the BSON object to preserve
- */
- void addBsonDependency(const shared_ptr<BSONObj> &pBsonObj);
+ virtual void setSource(const intrusive_ptr<DocumentSource> &pSource);
+
+ /**
+ Create a document source based on a cursor.
+
+ This is usually put at the beginning of a chain of document sources
+ in order to fetch data from the database.
+
+ @param pCursor the cursor to use to fetch data
+ */
+ static intrusive_ptr<DocumentSourceCursor> create(
+ const shared_ptr<Cursor> &pCursor);
+
+ /**
+ Add a BSONObj dependency.
+
+ Some Cursor creation functions rely on BSON objects to specify
+ their query predicate or sort. These often take a BSONObj
+ by reference for these, but to not copy it. As a result, the
+ BSONObjs specified must outlive the Cursor. In order to ensure
+ that, use this to preserve a pointer to the BSONObj here.
+
+ From the outside, you must also make sure the BSONObjBuilder
+ creates a lasting copy of the data, otherwise it will go away
+ when the builder goes out of scope. Therefore, the typical usage
+ pattern for this is
+ {
+ BSONObjBuilder builder;
+ // do stuff to the builder
+ shared_ptr<BSONObj> pBsonObj(new BSONObj(builder.obj()));
+ pDocumentSourceCursor->addBsonDependency(pBsonObj);
+ }
+
+ @param pBsonObj pointer to the BSON object to preserve
+ */
+ void addBsonDependency(const shared_ptr<BSONObj> &pBsonObj);
protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
private:
DocumentSourceCursor(const shared_ptr<Cursor> &pTheCursor);
- void findNext();
- intrusive_ptr<Document> pCurrent;
+ void findNext();
+ intrusive_ptr<Document> pCurrent;
- /*
- The bsonDependencies must outlive the Cursor wrapped by this
- source. Therefore, bsonDependencies must appear before pCursor
- in order its destructor to be called *after* pCursor's.
- */
- vector<shared_ptr<BSONObj> > bsonDependencies;
+ /*
+ The bsonDependencies must outlive the Cursor wrapped by this
+ source. Therefore, bsonDependencies must appear before pCursor
+ in order for its destructor to be called *after* pCursor's.
+ */
+ vector<shared_ptr<BSONObj> > bsonDependencies;
shared_ptr<Cursor> pCursor;
};
@@ -318,30 +318,30 @@ namespace mongo {
virtual bool advance();
virtual intrusive_ptr<Document> getCurrent();
- /**
- Create a BSONObj suitable for Matcher construction.
+ /**
+ Create a BSONObj suitable for Matcher construction.
- This is used after filter analysis has moved as many filters to
- as early a point as possible in the document processing pipeline.
- See db/Matcher.h and the associated wiki documentation for the
- format. This conversion is used to move back to the low-level
- find() Cursor mechanism.
+ This is used after filter analysis has moved as many filters to
+ as early a point as possible in the document processing pipeline.
+ See db/Matcher.h and the associated wiki documentation for the
+ format. This conversion is used to move back to the low-level
+ find() Cursor mechanism.
- @param pBuilder the builder to write to
- */
- virtual void toMatcherBson(BSONObjBuilder *pBuilder) const = 0;
+ @param pBuilder the builder to write to
+ */
+ virtual void toMatcherBson(BSONObjBuilder *pBuilder) const = 0;
protected:
DocumentSourceFilterBase();
- /**
- Test the given document against the predicate and report if it
- should be accepted or not.
+ /**
+ Test the given document against the predicate and report if it
+ should be accepted or not.
- @param pDocument the document to test
- @returns true if the document matches the filter, false otherwise
- */
- virtual bool accept(const intrusive_ptr<Document> &pDocument) const = 0;
+ @param pDocument the document to test
+ @returns true if the document matches the filter, false otherwise
+ */
+ virtual bool accept(const intrusive_ptr<Document> &pDocument) const = 0;
private:
@@ -358,18 +358,18 @@ namespace mongo {
public:
// virtuals from DocumentSource
virtual ~DocumentSourceFilter();
- virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource);
- virtual void optimize();
+ virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource);
+ virtual void optimize();
- /**
- Create a filter.
+ /**
+ Create a filter.
@param pBsonElement the raw BSON specification for the filter
@returns the filter
- */
- static intrusive_ptr<DocumentSource> createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx);
+ */
+ static intrusive_ptr<DocumentSource> createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pCtx);
/**
Create a filter.
@@ -380,27 +380,27 @@ namespace mongo {
static intrusive_ptr<DocumentSourceFilter> create(
const intrusive_ptr<Expression> &pFilter);
- /**
- Create a BSONObj suitable for Matcher construction.
+ /**
+ Create a BSONObj suitable for Matcher construction.
- This is used after filter analysis has moved as many filters to
- as early a point as possible in the document processing pipeline.
- See db/Matcher.h and the associated wiki documentation for the
- format. This conversion is used to move back to the low-level
- find() Cursor mechanism.
+ This is used after filter analysis has moved as many filters to
+ as early a point as possible in the document processing pipeline.
+ See db/Matcher.h and the associated wiki documentation for the
+ format. This conversion is used to move back to the low-level
+ find() Cursor mechanism.
- @param pBuilder the builder to write to
- */
- void toMatcherBson(BSONObjBuilder *pBuilder) const;
+ @param pBuilder the builder to write to
+ */
+ void toMatcherBson(BSONObjBuilder *pBuilder) const;
- static const char filterName[];
+ static const char filterName[];
protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
- // virtuals from DocumentSourceFilterBase
- virtual bool accept(const intrusive_ptr<Document> &pDocument) const;
+ // virtuals from DocumentSourceFilterBase
+ virtual bool accept(const intrusive_ptr<Document> &pDocument) const;
private:
DocumentSourceFilter(const intrusive_ptr<Expression> &pFilter);
@@ -420,12 +420,12 @@ namespace mongo {
/**
Create a new grouping DocumentSource.
-
- @param pCtx the expression context
- @returns the DocumentSource
+
+ @param pCtx the expression context
+ @returns the DocumentSource
*/
static intrusive_ptr<DocumentSourceGroup> create(
- const intrusive_ptr<ExpressionContext> &pCtx);
+ const intrusive_ptr<ExpressionContext> &pCtx);
/**
Set the Id Expression.
@@ -451,56 +451,56 @@ namespace mongo {
group field
*/
void addAccumulator(string fieldName,
- intrusive_ptr<Accumulator> (*pAccumulatorFactory)(
- const intrusive_ptr<ExpressionContext> &),
+ intrusive_ptr<Accumulator> (*pAccumulatorFactory)(
+ const intrusive_ptr<ExpressionContext> &),
const intrusive_ptr<Expression> &pExpression);
- /**
- Create a grouping DocumentSource from BSON.
+ /**
+ Create a grouping DocumentSource from BSON.
- This is a convenience method that uses the above, and operates on
- a BSONElement that has been deteremined to be an Object with an
- element named $group.
+ This is a convenience method that uses the above, and operates on
+ a BSONElement that has been determined to be an Object with an
+ element named $group.
- @param pBsonElement the BSONELement that defines the group
- @param pCtx the expression context
- @returns the grouping DocumentSource
- */
+ @param pBsonElement the BSONElement that defines the group
+ @param pCtx the expression context
+ @returns the grouping DocumentSource
+ */
static intrusive_ptr<DocumentSource> createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx);
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pCtx);
- /**
- Create a unifying group that can be used to combine group results
- from shards.
+ /**
+ Create a unifying group that can be used to combine group results
+ from shards.
- @returns the grouping DocumentSource
- */
- intrusive_ptr<DocumentSource> createMerger();
+ @returns the grouping DocumentSource
+ */
+ intrusive_ptr<DocumentSource> createMerger();
- static const char groupName[];
+ static const char groupName[];
protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
private:
DocumentSourceGroup(const intrusive_ptr<ExpressionContext> &pCtx);
- /*
- Before returning anything, this source must fetch everything from
- the underlying source and group it. populate() is used to do that
- on the first call to any method on this source. The populated
- boolean indicates that this has been done.
- */
+ /*
+ Before returning anything, this source must fetch everything from
+ the underlying source and group it. populate() is used to do that
+ on the first call to any method on this source. The populated
+ boolean indicates that this has been done.
+ */
void populate();
bool populated;
intrusive_ptr<Expression> pIdExpression;
- typedef boost::unordered_map<intrusive_ptr<const Value>,
- vector<intrusive_ptr<Accumulator> >, Value::Hash> GroupsType;
+ typedef boost::unordered_map<intrusive_ptr<const Value>,
+ vector<intrusive_ptr<Accumulator> >, Value::Hash> GroupsType;
GroupsType groups;
/*
@@ -517,17 +517,17 @@ namespace mongo {
*/
vector<string> vFieldName;
vector<intrusive_ptr<Accumulator> (*)(
- const intrusive_ptr<ExpressionContext> &)> vpAccumulatorFactory;
+ const intrusive_ptr<ExpressionContext> &)> vpAccumulatorFactory;
vector<intrusive_ptr<Expression> > vpExpression;
intrusive_ptr<Document> makeDocument(
- const GroupsType::iterator &rIter);
+ const GroupsType::iterator &rIter);
GroupsType::iterator groupsIterator;
intrusive_ptr<Document> pCurrent;
- intrusive_ptr<ExpressionContext> pCtx;
+ intrusive_ptr<ExpressionContext> pCtx;
};
@@ -537,42 +537,42 @@ namespace mongo {
// virtuals from DocumentSource
virtual ~DocumentSourceMatch();
- /**
- Create a filter.
+ /**
+ Create a filter.
@param pBsonElement the raw BSON specification for the filter
@returns the filter
- */
- static intrusive_ptr<DocumentSource> createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx);
+ */
+ static intrusive_ptr<DocumentSource> createFromBson(
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pCtx);
- /**
- Create a BSONObj suitable for Matcher construction.
+ /**
+ Create a BSONObj suitable for Matcher construction.
- This is used after filter analysis has moved as many filters to
- as early a point as possible in the document processing pipeline.
- See db/Matcher.h and the associated wiki documentation for the
- format. This conversion is used to move back to the low-level
- find() Cursor mechanism.
+ This is used after filter analysis has moved as many filters to
+ as early a point as possible in the document processing pipeline.
+ See db/Matcher.h and the associated wiki documentation for the
+ format. This conversion is used to move back to the low-level
+ find() Cursor mechanism.
- @param pBuilder the builder to write to
- */
- void toMatcherBson(BSONObjBuilder *pBuilder) const;
+ @param pBuilder the builder to write to
+ */
+ void toMatcherBson(BSONObjBuilder *pBuilder) const;
- static const char matchName[];
+ static const char matchName[];
protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
- // virtuals from DocumentSourceFilterBase
- virtual bool accept(const intrusive_ptr<Document> &pDocument) const;
+ // virtuals from DocumentSourceFilterBase
+ virtual bool accept(const intrusive_ptr<Document> &pDocument) const;
private:
DocumentSourceMatch(const BSONObj &query);
- Matcher matcher;
+ Matcher matcher;
};
@@ -585,22 +585,22 @@ namespace mongo {
virtual bool advance();
virtual intrusive_ptr<Document> getCurrent();
- /**
- Create a document source for output and pass-through.
+ /**
+ Create a document source for output and pass-through.
- This can be put anywhere in a pipeline and will store content as
- well as pass it on.
+ This can be put anywhere in a pipeline and will store content as
+ well as pass it on.
- @returns the newly created document source
- */
- static intrusive_ptr<DocumentSourceOut> createFromBson(
- BSONElement *pBsonElement);
+ @returns the newly created document source
+ */
+ static intrusive_ptr<DocumentSourceOut> createFromBson(
+ BSONElement *pBsonElement);
- static const char outName[];
+ static const char outName[];
protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
private:
DocumentSourceOut(BSONElement *pBsonElement);
@@ -616,28 +616,28 @@ namespace mongo {
virtual bool eof();
virtual bool advance();
virtual intrusive_ptr<Document> getCurrent();
- virtual void optimize();
+ virtual void optimize();
/**
Create a new DocumentSource that can implement projection.
- @returns the projection DocumentSource
+ @returns the projection DocumentSource
*/
static intrusive_ptr<DocumentSourceProject> create();
- /**
- Include a field path in a projection.
+ /**
+ Include a field path in a projection.
- @param fieldPath the path of the field to include
- */
- void includePath(const string &fieldPath);
+ @param fieldPath the path of the field to include
+ */
+ void includePath(const string &fieldPath);
- /**
- Exclude a field path from the projection.
+ /**
+ Exclude a field path from the projection.
- @param fieldPath the path of the field to exclude
- */
- void excludePath(const string &fieldPath);
+ @param fieldPath the path of the field to exclude
+ */
+ void excludePath(const string &fieldPath);
/**
Add an output Expression in the projection.
@@ -649,33 +649,33 @@ namespace mongo {
@param pExpression the expression used to compute the field
*/
void addField(const string &fieldName,
- const intrusive_ptr<Expression> &pExpression);
+ const intrusive_ptr<Expression> &pExpression);
- /**
- Create a new projection DocumentSource from BSON.
+ /**
+ Create a new projection DocumentSource from BSON.
- This is a convenience for directly handling BSON, and relies on the
- above methods.
+ This is a convenience for directly handling BSON, and relies on the
+ above methods.
- @param pBsonElement the BSONElement with an object named $project
- @returns the created projection
- */
+ @param pBsonElement the BSONElement with an object named $project
+ @returns the created projection
+ */
static intrusive_ptr<DocumentSource> createFromBson(
BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx);
+ const intrusive_ptr<ExpressionContext> &pCtx);
- static const char projectName[];
+ static const char projectName[];
protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
private:
DocumentSourceProject();
// configuration state
- bool excludeId;
- intrusive_ptr<ExpressionObject> pEO;
+ bool excludeId;
+ intrusive_ptr<ExpressionObject> pEO;
};
@@ -687,114 +687,114 @@ namespace mongo {
virtual bool eof();
virtual bool advance();
virtual intrusive_ptr<Document> getCurrent();
- /*
- TODO
- Adjacent sorts should reduce to the last sort.
- virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource);
- */
+ /*
+ TODO
+ Adjacent sorts should reduce to the last sort.
+ virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource);
+ */
/**
Create a new sorting DocumentSource.
-
- @param pCtx the expression context
- @returns the DocumentSource
+
+ @param pCtx the expression context
+ @returns the DocumentSource
*/
static intrusive_ptr<DocumentSourceSort> create(
- const intrusive_ptr<ExpressionContext> &pCtx);
+ const intrusive_ptr<ExpressionContext> &pCtx);
- /**
- Add sort key field.
+ /**
+ Add sort key field.
- Adds a sort key field to the key being built up. A concatenated
- key is built up by calling this repeatedly.
+ Adds a sort key field to the key being built up. A concatenated
+ key is built up by calling this repeatedly.
- @param fieldPath the field path to the key component
- @param ascending if true, use the key for an ascending sort,
- otherwise, use it for descending
- */
- void addKey(const string &fieldPath, bool ascending);
+ @param fieldPath the field path to the key component
+ @param ascending if true, use the key for an ascending sort,
+ otherwise, use it for descending
+ */
+ void addKey(const string &fieldPath, bool ascending);
- /**
- Write out an object whose contents are the sort key.
+ /**
+ Write out an object whose contents are the sort key.
- @param pBuilder initialized object builder.
- @param fieldPrefix specify whether or not to include the field prefix
- */
- void sortKeyToBson(BSONObjBuilder *pBuilder, bool usePrefix) const;
+ @param pBuilder initialized object builder.
+ @param fieldPrefix specify whether or not to include the field prefix
+ */
+ void sortKeyToBson(BSONObjBuilder *pBuilder, bool usePrefix) const;
- /**
- Create a sorting DocumentSource from BSON.
+ /**
+ Create a sorting DocumentSource from BSON.
- This is a convenience method that uses the above, and operates on
- a BSONElement that has been deteremined to be an Object with an
- element named $group.
+ This is a convenience method that uses the above, and operates on
+ a BSONElement that has been deteremined to be an Object with an
+ element named $group.
- @param pBsonElement the BSONELement that defines the group
- @param pCtx the expression context
- @returns the grouping DocumentSource
- */
+ @param pBsonElement the BSONELement that defines the group
+ @param pCtx the expression context
+ @returns the grouping DocumentSource
+ */
static intrusive_ptr<DocumentSource> createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx);
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pCtx);
- static const char sortName[];
+ static const char sortName[];
protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
private:
DocumentSourceSort(const intrusive_ptr<ExpressionContext> &pCtx);
- /*
- Before returning anything, this source must fetch everything from
- the underlying source and group it. populate() is used to do that
- on the first call to any method on this source. The populated
- boolean indicates that this has been done.
- */
+ /*
+ Before returning anything, this source must fetch everything from
+ the underlying source and group it. populate() is used to do that
+ on the first call to any method on this source. The populated
+ boolean indicates that this has been done.
+ */
void populate();
bool populated;
long long count;
- /* these two parallel each other */
- vector<intrusive_ptr<ExpressionFieldPath> > vSortKey;
- vector<bool> vAscending;
+ /* these two parallel each other */
+ vector<intrusive_ptr<ExpressionFieldPath> > vSortKey;
+ vector<bool> vAscending;
- class Carrier {
- public:
- /*
- We need access to the key for compares, so we have to carry
- this around.
- */
- DocumentSourceSort *pSort;
+ class Carrier {
+ public:
+ /*
+ We need access to the key for compares, so we have to carry
+ this around.
+ */
+ DocumentSourceSort *pSort;
- intrusive_ptr<Document> pDocument;
+ intrusive_ptr<Document> pDocument;
- Carrier(DocumentSourceSort *pSort,
- const intrusive_ptr<Document> &pDocument);
+ Carrier(DocumentSourceSort *pSort,
+ const intrusive_ptr<Document> &pDocument);
- static bool lessThan(const Carrier &rL, const Carrier &rR);
- };
+ static bool lessThan(const Carrier &rL, const Carrier &rR);
+ };
- /*
- Compare two documents according to the specified sort key.
+ /*
+ Compare two documents according to the specified sort key.
- @param rL reference to the left document
- @param rR reference to the right document
- @returns a number less than, equal to, or greater than zero,
- indicating pL < pR, pL == pR, or pL > pR, respectively
- */
- int compare(const intrusive_ptr<Document> &pL,
- const intrusive_ptr<Document> &pR);
+ @param rL reference to the left document
+ @param rR reference to the right document
+ @returns a number less than, equal to, or greater than zero,
+ indicating pL < pR, pL == pR, or pL > pR, respectively
+ */
+ int compare(const intrusive_ptr<Document> &pL,
+ const intrusive_ptr<Document> &pR);
- typedef list<Carrier> ListType;
- ListType documents;
+ typedef list<Carrier> ListType;
+ ListType documents;
ListType::iterator listIterator;
intrusive_ptr<Document> pCurrent;
- intrusive_ptr<ExpressionContext> pCtx;
+ intrusive_ptr<ExpressionContext> pCtx;
};
@@ -810,33 +810,33 @@ namespace mongo {
/**
Create a new limiting DocumentSource.
- @param pCtx the expression context
- @returns the DocumentSource
+ @param pCtx the expression context
+ @returns the DocumentSource
*/
static intrusive_ptr<DocumentSourceLimit> create(
- const intrusive_ptr<ExpressionContext> &pCtx);
+ const intrusive_ptr<ExpressionContext> &pCtx);
- /**
- Create a limiting DocumentSource from BSON.
+ /**
+ Create a limiting DocumentSource from BSON.
- This is a convenience method that uses the above, and operates on
- a BSONElement that has been deteremined to be an Object with an
- element named $limit.
+ This is a convenience method that uses the above, and operates on
+ a BSONElement that has been deteremined to be an Object with an
+ element named $limit.
- @param pBsonElement the BSONELement that defines the limit
- @param pCtx the expression context
- @returns the grouping DocumentSource
- */
+ @param pBsonElement the BSONELement that defines the limit
+ @param pCtx the expression context
+ @returns the grouping DocumentSource
+ */
static intrusive_ptr<DocumentSource> createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx);
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pCtx);
- static const char limitName[];
+ static const char limitName[];
protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
private:
DocumentSourceLimit(const intrusive_ptr<ExpressionContext> &pCtx);
@@ -845,7 +845,7 @@ namespace mongo {
long long count;
intrusive_ptr<Document> pCurrent;
- intrusive_ptr<ExpressionContext> pCtx;
+ intrusive_ptr<ExpressionContext> pCtx;
};
class DocumentSourceSkip :
@@ -860,33 +860,33 @@ namespace mongo {
/**
Create a new skipping DocumentSource.
- @param pCtx the expression context
- @returns the DocumentSource
+ @param pCtx the expression context
+ @returns the DocumentSource
*/
static intrusive_ptr<DocumentSourceSkip> create(
- const intrusive_ptr<ExpressionContext> &pCtx);
+ const intrusive_ptr<ExpressionContext> &pCtx);
- /**
- Create a skipping DocumentSource from BSON.
+ /**
+ Create a skipping DocumentSource from BSON.
- This is a convenience method that uses the above, and operates on
- a BSONElement that has been deteremined to be an Object with an
- element named $skip.
+ This is a convenience method that uses the above, and operates on
+ a BSONElement that has been deteremined to be an Object with an
+ element named $skip.
- @param pBsonElement the BSONELement that defines the skip
- @param pCtx the expression context
- @returns the grouping DocumentSource
- */
+ @param pBsonElement the BSONELement that defines the skip
+ @param pCtx the expression context
+ @returns the grouping DocumentSource
+ */
static intrusive_ptr<DocumentSource> createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx);
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pCtx);
- static const char skipName[];
+ static const char skipName[];
protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
private:
DocumentSourceSkip(const intrusive_ptr<ExpressionContext> &pCtx);
@@ -900,7 +900,7 @@ namespace mongo {
long long count;
intrusive_ptr<Document> pCurrent;
- intrusive_ptr<ExpressionContext> pCtx;
+ intrusive_ptr<ExpressionContext> pCtx;
};
@@ -917,45 +917,45 @@ namespace mongo {
/**
Create a new DocumentSource that can implement unwind.
- @returns the projection DocumentSource
+ @returns the projection DocumentSource
*/
static intrusive_ptr<DocumentSourceUnwind> create();
/**
- Specify the field to unwind. There must be exactly one before
- the pipeline begins execution.
+ Specify the field to unwind. There must be exactly one before
+ the pipeline begins execution.
- @param rFieldPath - path to the field to unwind
+ @param rFieldPath - path to the field to unwind
*/
- void unwindField(const FieldPath &rFieldPath);
+ void unwindField(const FieldPath &rFieldPath);
- /**
- Create a new projection DocumentSource from BSON.
+ /**
+ Create a new projection DocumentSource from BSON.
- This is a convenience for directly handling BSON, and relies on the
- above methods.
+ This is a convenience for directly handling BSON, and relies on the
+ above methods.
- @param pBsonElement the BSONElement with an object named $project
- @returns the created projection
- */
+ @param pBsonElement the BSONElement with an object named $project
+ @returns the created projection
+ */
static intrusive_ptr<DocumentSource> createFromBson(
BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx);
+ const intrusive_ptr<ExpressionContext> &pCtx);
- static const char unwindName[];
+ static const char unwindName[];
protected:
- // virtuals from DocumentSource
- virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
+ // virtuals from DocumentSource
+ virtual void sourceToBson(BSONObjBuilder *pBuilder) const;
private:
DocumentSourceUnwind();
// configuration state
- FieldPath unwindPath;
+ FieldPath unwindPath;
- vector<int> fieldIndex; /* for the current document, the indices
- leading down to the field being unwound */
+ vector<int> fieldIndex; /* for the current document, the indices
+ leading down to the field being unwound */
// iteration state
intrusive_ptr<Document> pNoUnwindDocument;
@@ -964,27 +964,27 @@ namespace mongo {
intrusive_ptr<ValueIterator> pUnwinder; // iterator used for unwinding
intrusive_ptr<const Value> pUnwindValue; // current value
- /*
- Clear all the state related to unwinding an array.
- */
- void resetArray();
+ /*
+ Clear all the state related to unwinding an array.
+ */
+ void resetArray();
- /*
- Clone the current document being unwound.
+ /*
+ Clone the current document being unwound.
- This is a partial deep clone. Because we're going to replace the
- value at the end, we have to replace everything along the path
- leading to that in order to not share that change with any other
- clones (or the original) that we've made.
+ This is a partial deep clone. Because we're going to replace the
+ value at the end, we have to replace everything along the path
+ leading to that in order to not share that change with any other
+ clones (or the original) that we've made.
- This expects pUnwindValue to have been set by a prior call to
- advance(). However, pUnwindValue may also be NULL, in which case
- the field will be removed -- this is the action for an empty
- array.
+ This expects pUnwindValue to have been set by a prior call to
+ advance(). However, pUnwindValue may also be NULL, in which case
+ the field will be removed -- this is the action for an empty
+ array.
- @returns a partial deep clone of pNoUnwindDocument
- */
- intrusive_ptr<Document> clonePath() const;
+ @returns a partial deep clone of pNoUnwindDocument
+ */
+ intrusive_ptr<Document> clonePath() const;
};
@@ -1001,16 +1001,16 @@ namespace mongo {
}
inline void DocumentSourceUnwind::resetArray() {
- pNoUnwindDocument.reset();
- pUnwindArray.reset();
- pUnwinder.reset();
- pUnwindValue.reset();
+ pNoUnwindDocument.reset();
+ pUnwindArray.reset();
+ pUnwinder.reset();
+ pUnwindValue.reset();
}
inline DocumentSourceSort::Carrier::Carrier(
- DocumentSourceSort *pTheSort,
- const intrusive_ptr<Document> &pTheDocument):
- pSort(pTheSort),
- pDocument(pTheDocument) {
+ DocumentSourceSort *pTheSort,
+ const intrusive_ptr<Document> &pTheDocument):
+ pSort(pTheSort),
+ pDocument(pTheDocument) {
}
}
diff --git a/src/mongo/db/pipeline/document_source_bson_array.cpp b/src/mongo/db/pipeline/document_source_bson_array.cpp
index 5d187b03ef9..70ac266d268 100755
--- a/src/mongo/db/pipeline/document_source_bson_array.cpp
+++ b/src/mongo/db/pipeline/document_source_bson_array.cpp
@@ -26,24 +26,24 @@ namespace mongo {
}
bool DocumentSourceBsonArray::eof() {
- return !haveCurrent;
+ return !haveCurrent;
}
bool DocumentSourceBsonArray::advance() {
- if (eof())
- return false;
+ if (eof())
+ return false;
- if (!arrayIterator.more()) {
- haveCurrent = false;
- return false;
- }
+ if (!arrayIterator.more()) {
+ haveCurrent = false;
+ return false;
+ }
- currentElement = arrayIterator.next();
- return true;
+ currentElement = arrayIterator.next();
+ return true;
}
intrusive_ptr<Document> DocumentSourceBsonArray::getCurrent() {
- assert(haveCurrent);
+ assert(haveCurrent);
BSONObj documentObj(currentElement.Obj());
intrusive_ptr<Document> pDocument(
Document::createFromBsonObj(&documentObj));
@@ -51,33 +51,33 @@ namespace mongo {
}
void DocumentSourceBsonArray::setSource(
- const intrusive_ptr<DocumentSource> &pSource) {
- /* this doesn't take a source */
- assert(false);
+ const intrusive_ptr<DocumentSource> &pSource) {
+ /* this doesn't take a source */
+ assert(false);
}
DocumentSourceBsonArray::DocumentSourceBsonArray(
- BSONElement *pBsonElement):
+ BSONElement *pBsonElement):
embeddedObject(pBsonElement->embeddedObject()),
arrayIterator(embeddedObject),
haveCurrent(false) {
- if (arrayIterator.more()) {
- currentElement = arrayIterator.next();
- haveCurrent = true;
- }
+ if (arrayIterator.more()) {
+ currentElement = arrayIterator.next();
+ haveCurrent = true;
+ }
}
intrusive_ptr<DocumentSourceBsonArray> DocumentSourceBsonArray::create(
- BSONElement *pBsonElement) {
+ BSONElement *pBsonElement) {
- assert(pBsonElement->type() == Array);
- intrusive_ptr<DocumentSourceBsonArray> pSource(
- new DocumentSourceBsonArray(pBsonElement));
+ assert(pBsonElement->type() == Array);
+ intrusive_ptr<DocumentSourceBsonArray> pSource(
+ new DocumentSourceBsonArray(pBsonElement));
- return pSource;
+ return pSource;
}
void DocumentSourceBsonArray::sourceToBson(BSONObjBuilder *pBuilder) const {
- assert(false); // this has no analog in the BSON world
+ assert(false); // this has no analog in the BSON world
}
}
diff --git a/src/mongo/db/pipeline/document_source_command_futures.cpp b/src/mongo/db/pipeline/document_source_command_futures.cpp
index 61a257cf16f..692f5de5a10 100755
--- a/src/mongo/db/pipeline/document_source_command_futures.cpp
+++ b/src/mongo/db/pipeline/document_source_command_futures.cpp
@@ -24,42 +24,42 @@ namespace mongo {
}
bool DocumentSourceCommandFutures::eof() {
- /* if we haven't even started yet, do so */
- if (!pCurrent.get())
- getNextDocument();
+ /* if we haven't even started yet, do so */
+ if (!pCurrent.get())
+ getNextDocument();
- return (pCurrent.get() == NULL);
+ return (pCurrent.get() == NULL);
}
bool DocumentSourceCommandFutures::advance() {
- if (eof())
- return false;
+ if (eof())
+ return false;
- /* advance */
- getNextDocument();
+ /* advance */
+ getNextDocument();
- return (pCurrent.get() != NULL);
+ return (pCurrent.get() != NULL);
}
intrusive_ptr<Document> DocumentSourceCommandFutures::getCurrent() {
- assert(!eof());
- return pCurrent;
+ assert(!eof());
+ return pCurrent;
}
void DocumentSourceCommandFutures::setSource(
- const intrusive_ptr<DocumentSource> &pSource) {
- /* this doesn't take a source */
- assert(false);
+ const intrusive_ptr<DocumentSource> &pSource) {
+ /* this doesn't take a source */
+ assert(false);
}
void DocumentSourceCommandFutures::sourceToBson(
- BSONObjBuilder *pBuilder) const {
+ BSONObjBuilder *pBuilder) const {
/* this has no BSON equivalent */
- assert(false);
+ assert(false);
}
DocumentSourceCommandFutures::DocumentSourceCommandFutures(
- string &theErrmsg, FuturesList *pList):
+ string &theErrmsg, FuturesList *pList):
newSource(false),
pBsonSource(),
pCurrent(),
@@ -70,63 +70,63 @@ namespace mongo {
intrusive_ptr<DocumentSourceCommandFutures>
DocumentSourceCommandFutures::create(
- string &errmsg, FuturesList *pList) {
- intrusive_ptr<DocumentSourceCommandFutures> pSource(
- new DocumentSourceCommandFutures(errmsg, pList));
- return pSource;
+ string &errmsg, FuturesList *pList) {
+ intrusive_ptr<DocumentSourceCommandFutures> pSource(
+ new DocumentSourceCommandFutures(errmsg, pList));
+ return pSource;
}
void DocumentSourceCommandFutures::getNextDocument() {
- while(true) {
- if (!pBsonSource.get()) {
- /* if there aren't any more futures, we're done */
- if (iterator == listEnd) {
- pCurrent.reset();
- return;
- }
-
- /* grab the next command result */
- shared_ptr<Future::CommandResult> pResult(*iterator);
- ++iterator;
-
- /* try to wait for it */
- if (!pResult->join()) {
- error() << "sharded pipeline failed on shard: " <<
- pResult->getServer() << " error: " <<
- pResult->result() << endl;
- errmsg += "-- mongod pipeline failed: ";
- errmsg += pResult->result().toString();
-
- /* move on to the next command future */
- continue;
- }
-
- /* grab the result array out of the shard server's response */
- BSONObj shardResult(pResult->result());
- BSONObjIterator objIterator(shardResult);
- while(objIterator.more()) {
- BSONElement element(objIterator.next());
- const char *pFieldName = element.fieldName();
-
- /* find the result array and quit this loop */
- if (strcmp(pFieldName, "result") == 0) {
- pBsonSource = DocumentSourceBsonArray::create(&element);
- newSource = true;
- break;
- }
- }
- }
-
- /* if we're done with this shard's results, try the next */
- if (pBsonSource->eof() ||
- (!newSource && !pBsonSource->advance())) {
- pBsonSource.reset();
- continue;
- }
-
- pCurrent = pBsonSource->getCurrent();
- newSource = false;
- return;
- }
+ while(true) {
+ if (!pBsonSource.get()) {
+ /* if there aren't any more futures, we're done */
+ if (iterator == listEnd) {
+ pCurrent.reset();
+ return;
+ }
+
+ /* grab the next command result */
+ shared_ptr<Future::CommandResult> pResult(*iterator);
+ ++iterator;
+
+ /* try to wait for it */
+ if (!pResult->join()) {
+ error() << "sharded pipeline failed on shard: " <<
+ pResult->getServer() << " error: " <<
+ pResult->result() << endl;
+ errmsg += "-- mongod pipeline failed: ";
+ errmsg += pResult->result().toString();
+
+ /* move on to the next command future */
+ continue;
+ }
+
+ /* grab the result array out of the shard server's response */
+ BSONObj shardResult(pResult->result());
+ BSONObjIterator objIterator(shardResult);
+ while(objIterator.more()) {
+ BSONElement element(objIterator.next());
+ const char *pFieldName = element.fieldName();
+
+ /* find the result array and quit this loop */
+ if (strcmp(pFieldName, "result") == 0) {
+ pBsonSource = DocumentSourceBsonArray::create(&element);
+ newSource = true;
+ break;
+ }
+ }
+ }
+
+ /* if we're done with this shard's results, try the next */
+ if (pBsonSource->eof() ||
+ (!newSource && !pBsonSource->advance())) {
+ pBsonSource.reset();
+ continue;
+ }
+
+ pCurrent = pBsonSource->getCurrent();
+ newSource = false;
+ return;
+ }
}
}
diff --git a/src/mongo/db/pipeline/document_source_filter.cpp b/src/mongo/db/pipeline/document_source_filter.cpp
index 66e57ba2e93..d4c8b75cf75 100755
--- a/src/mongo/db/pipeline/document_source_filter.cpp
+++ b/src/mongo/db/pipeline/document_source_filter.cpp
@@ -30,49 +30,49 @@ namespace mongo {
}
bool DocumentSourceFilter::coalesce(
- const intrusive_ptr<DocumentSource> &pNextSource) {
-
- /* we only know how to coalesce other filters */
- DocumentSourceFilter *pDocFilter =
- dynamic_cast<DocumentSourceFilter *>(pNextSource.get());
- if (!pDocFilter)
- return false;
-
- /*
- Two adjacent filters can be combined by creating a conjunction of
- their predicates.
- */
- intrusive_ptr<ExpressionNary> pAnd(ExpressionAnd::create());
- pAnd->addOperand(pFilter);
- pAnd->addOperand(pDocFilter->pFilter);
- pFilter = pAnd;
-
- return true;
+ const intrusive_ptr<DocumentSource> &pNextSource) {
+
+ /* we only know how to coalesce other filters */
+ DocumentSourceFilter *pDocFilter =
+ dynamic_cast<DocumentSourceFilter *>(pNextSource.get());
+ if (!pDocFilter)
+ return false;
+
+ /*
+ Two adjacent filters can be combined by creating a conjunction of
+ their predicates.
+ */
+ intrusive_ptr<ExpressionNary> pAnd(ExpressionAnd::create());
+ pAnd->addOperand(pFilter);
+ pAnd->addOperand(pDocFilter->pFilter);
+ pFilter = pAnd;
+
+ return true;
}
void DocumentSourceFilter::optimize() {
- pFilter = pFilter->optimize();
+ pFilter = pFilter->optimize();
}
void DocumentSourceFilter::sourceToBson(BSONObjBuilder *pBuilder) const {
- pFilter->addToBsonObj(pBuilder, filterName, 0);
+ pFilter->addToBsonObj(pBuilder, filterName, 0);
}
bool DocumentSourceFilter::accept(
- const intrusive_ptr<Document> &pDocument) const {
- intrusive_ptr<const Value> pValue(pFilter->evaluate(pDocument));
- return pValue->coerceToBool();
+ const intrusive_ptr<Document> &pDocument) const {
+ intrusive_ptr<const Value> pValue(pFilter->evaluate(pDocument));
+ return pValue->coerceToBool();
}
intrusive_ptr<DocumentSource> DocumentSourceFilter::createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx) {
- uassert(15946, "a document filter expression must be an object",
- pBsonElement->type() == Object);
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ uassert(15946, "a document filter expression must be an object",
+ pBsonElement->type() == Object);
- Expression::ObjectCtx oCtx(0);
+ Expression::ObjectCtx oCtx(0);
intrusive_ptr<Expression> pExpression(
- Expression::parseObject(pBsonElement, &oCtx));
+ Expression::parseObject(pBsonElement, &oCtx));
intrusive_ptr<DocumentSourceFilter> pFilter(
DocumentSourceFilter::create(pExpression));
@@ -88,11 +88,11 @@ namespace mongo {
DocumentSourceFilter::DocumentSourceFilter(
const intrusive_ptr<Expression> &pTheFilter):
- DocumentSourceFilterBase(),
+ DocumentSourceFilterBase(),
pFilter(pTheFilter) {
}
void DocumentSourceFilter::toMatcherBson(BSONObjBuilder *pBuilder) const {
- pFilter->toMatcherBson(pBuilder, 0);
+ pFilter->toMatcherBson(pBuilder, 0);
}
}
diff --git a/src/mongo/db/pipeline/document_source_group.cpp b/src/mongo/db/pipeline/document_source_group.cpp
index 244561589da..0624bbce646 100755
--- a/src/mongo/db/pipeline/document_source_group.cpp
+++ b/src/mongo/db/pipeline/document_source_group.cpp
@@ -62,31 +62,31 @@ namespace mongo {
}
void DocumentSourceGroup::sourceToBson(BSONObjBuilder *pBuilder) const {
- BSONObjBuilder insides;
+ BSONObjBuilder insides;
- /* add the _id */
- pIdExpression->addToBsonObj(&insides, Document::idName.c_str(), 0);
+ /* add the _id */
+ pIdExpression->addToBsonObj(&insides, Document::idName.c_str(), 0);
- /* add the remaining fields */
- const size_t n = vFieldName.size();
- for(size_t i = 0; i < n; ++i) {
- intrusive_ptr<Accumulator> pA((*vpAccumulatorFactory[i])(pCtx));
- pA->addOperand(vpExpression[i]);
- pA->addToBsonObj(&insides, vFieldName[i], 0);
- }
+ /* add the remaining fields */
+ const size_t n = vFieldName.size();
+ for(size_t i = 0; i < n; ++i) {
+ intrusive_ptr<Accumulator> pA((*vpAccumulatorFactory[i])(pCtx));
+ pA->addOperand(vpExpression[i]);
+ pA->addToBsonObj(&insides, vFieldName[i], 0);
+ }
- pBuilder->append(groupName, insides.done());
+ pBuilder->append(groupName, insides.done());
}
intrusive_ptr<DocumentSourceGroup> DocumentSourceGroup::create(
- const intrusive_ptr<ExpressionContext> &pCtx) {
+ const intrusive_ptr<ExpressionContext> &pCtx) {
intrusive_ptr<DocumentSourceGroup> pSource(
new DocumentSourceGroup(pCtx));
return pSource;
}
DocumentSourceGroup::DocumentSourceGroup(
- const intrusive_ptr<ExpressionContext> &pTheCtx):
+ const intrusive_ptr<ExpressionContext> &pTheCtx):
populated(false),
pIdExpression(),
groups(),
@@ -99,7 +99,7 @@ namespace mongo {
void DocumentSourceGroup::addAccumulator(
string fieldName,
intrusive_ptr<Accumulator> (*pAccumulatorFactory)(
- const intrusive_ptr<ExpressionContext> &),
+ const intrusive_ptr<ExpressionContext> &),
const intrusive_ptr<Expression> &pExpression) {
vFieldName.push_back(fieldName);
vpAccumulatorFactory.push_back(pAccumulatorFactory);
@@ -110,7 +110,7 @@ namespace mongo {
struct GroupOpDesc {
const char *pName;
intrusive_ptr<Accumulator> (*pFactory)(
- const intrusive_ptr<ExpressionContext> &);
+ const intrusive_ptr<ExpressionContext> &);
};
static int GroupOpDescCmp(const void *pL, const void *pR) {
@@ -136,13 +136,13 @@ namespace mongo {
static const size_t NGroupOp = sizeof(GroupOpTable)/sizeof(GroupOpTable[0]);
intrusive_ptr<DocumentSource> DocumentSourceGroup::createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx) {
- uassert(15947, "a group's fields must be specified in an object",
- pBsonElement->type() == Object);
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ uassert(15947, "a group's fields must be specified in an object",
+ pBsonElement->type() == Object);
intrusive_ptr<DocumentSourceGroup> pGroup(
- DocumentSourceGroup::create(pCtx));
+ DocumentSourceGroup::create(pCtx));
bool idSet = false;
BSONObj groupObj(pBsonElement->Obj());
@@ -152,84 +152,84 @@ namespace mongo {
const char *pFieldName = groupField.fieldName();
if (strcmp(pFieldName, Document::idName.c_str()) == 0) {
- uassert(15948, "a group's _id may only be specified once",
- !idSet);
-
- BSONType groupType = groupField.type();
-
- if (groupType == Object) {
- /*
- Use the projection-like set of field paths to create the
- group-by key.
- */
- Expression::ObjectCtx oCtx(
- Expression::ObjectCtx::DOCUMENT_OK);
- intrusive_ptr<Expression> pId(
- Expression::parseObject(&groupField, &oCtx));
-
- pGroup->setIdExpression(pId);
- idSet = true;
- }
- else if (groupType == String) {
- string groupString(groupField.String());
- const char *pGroupString = groupString.c_str();
- if ((groupString.length() == 0) ||
- (pGroupString[0] != '$'))
- goto StringConstantId;
-
- string pathString(
- Expression::removeFieldPrefix(groupString));
- intrusive_ptr<ExpressionFieldPath> pFieldPath(
- ExpressionFieldPath::create(pathString));
- pGroup->setIdExpression(pFieldPath);
- idSet = true;
- }
- else {
- /* pick out the constant types that are allowed */
- switch(groupType) {
- case NumberDouble:
- case String:
- case Object:
- case Array:
- case jstOID:
- case Bool:
- case Date:
- case NumberInt:
- case Timestamp:
- case NumberLong:
- case jstNULL:
- StringConstantId: // from string case above
- {
- intrusive_ptr<const Value> pValue(
- Value::createFromBsonElement(&groupField));
- intrusive_ptr<ExpressionConstant> pConstant(
- ExpressionConstant::create(pValue));
- pGroup->setIdExpression(pConstant);
- idSet = true;
- break;
- }
-
- default:
- uassert(15949, str::stream() <<
- "a group's _id may not include fields of BSON type " << groupType,
- false);
- }
- }
+ uassert(15948, "a group's _id may only be specified once",
+ !idSet);
+
+ BSONType groupType = groupField.type();
+
+ if (groupType == Object) {
+ /*
+ Use the projection-like set of field paths to create the
+ group-by key.
+ */
+ Expression::ObjectCtx oCtx(
+ Expression::ObjectCtx::DOCUMENT_OK);
+ intrusive_ptr<Expression> pId(
+ Expression::parseObject(&groupField, &oCtx));
+
+ pGroup->setIdExpression(pId);
+ idSet = true;
+ }
+ else if (groupType == String) {
+ string groupString(groupField.String());
+ const char *pGroupString = groupString.c_str();
+ if ((groupString.length() == 0) ||
+ (pGroupString[0] != '$'))
+ goto StringConstantId;
+
+ string pathString(
+ Expression::removeFieldPrefix(groupString));
+ intrusive_ptr<ExpressionFieldPath> pFieldPath(
+ ExpressionFieldPath::create(pathString));
+ pGroup->setIdExpression(pFieldPath);
+ idSet = true;
+ }
+ else {
+ /* pick out the constant types that are allowed */
+ switch(groupType) {
+ case NumberDouble:
+ case String:
+ case Object:
+ case Array:
+ case jstOID:
+ case Bool:
+ case Date:
+ case NumberInt:
+ case Timestamp:
+ case NumberLong:
+ case jstNULL:
+ StringConstantId: // from string case above
+ {
+ intrusive_ptr<const Value> pValue(
+ Value::createFromBsonElement(&groupField));
+ intrusive_ptr<ExpressionConstant> pConstant(
+ ExpressionConstant::create(pValue));
+ pGroup->setIdExpression(pConstant);
+ idSet = true;
+ break;
+ }
+
+ default:
+ uassert(15949, str::stream() <<
+ "a group's _id may not include fields of BSON type " << groupType,
+ false);
+ }
+ }
}
else {
/*
Treat as a projection field with the additional ability to
add aggregation operators.
*/
- uassert(15950, str::stream() <<
- "the group aggregate field name " <<
- *pFieldName << " cannot be an operator name",
- *pFieldName != '$');
+ uassert(15950, str::stream() <<
+ "the group aggregate field name " <<
+ *pFieldName << " cannot be an operator name",
+ *pFieldName != '$');
- uassert(15951, str::stream() <<
- "the group aggregate field " << *pFieldName <<
- "must be defined as an expression inside an object",
- groupField.type() == Object);
+ uassert(15951, str::stream() <<
+ "the group aggregate field " << *pFieldName <<
+ "must be defined as an expression inside an object",
+ groupField.type() == Object);
BSONObj subField(groupField.Obj());
BSONObjIterator subIterator(subField);
@@ -241,28 +241,28 @@ namespace mongo {
GroupOpDesc key;
key.pName = subElement.fieldName();
const GroupOpDesc *pOp =
- (const GroupOpDesc *)bsearch(
+ (const GroupOpDesc *)bsearch(
&key, GroupOpTable, NGroupOp, sizeof(GroupOpDesc),
GroupOpDescCmp);
- uassert(15952, str::stream() <<
- "unknown group operator \"" <<
- key.pName << "\"",
- pOp);
+ uassert(15952, str::stream() <<
+ "unknown group operator \"" <<
+ key.pName << "\"",
+ pOp);
intrusive_ptr<Expression> pGroupExpr;
BSONType elementType = subElement.type();
if (elementType == Object) {
- Expression::ObjectCtx oCtx(
- Expression::ObjectCtx::DOCUMENT_OK);
+ Expression::ObjectCtx oCtx(
+ Expression::ObjectCtx::DOCUMENT_OK);
pGroupExpr = Expression::parseObject(
- &subElement, &oCtx);
- }
+ &subElement, &oCtx);
+ }
else if (elementType == Array) {
- uassert(15953, str::stream() <<
- "aggregating group operators are unary (" <<
- key.pName << ")", false);
+ uassert(15953, str::stream() <<
+ "aggregating group operators are unary (" <<
+ key.pName << ")", false);
}
else { /* assume its an atomic single operand */
pGroupExpr = Expression::parseOperand(&subElement);
@@ -272,14 +272,14 @@ namespace mongo {
pFieldName, pOp->pFactory, pGroupExpr);
}
- uassert(15954, str::stream() <<
- "the computed aggregate \"" <<
- pFieldName << "\" must specify exactly one operator",
- subCount == 1);
+ uassert(15954, str::stream() <<
+ "the computed aggregate \"" <<
+ pFieldName << "\" must specify exactly one operator",
+ subCount == 1);
}
}
- uassert(15955, "a group specification must include an _id", idSet);
+ uassert(15955, "a group specification must include an _id", idSet);
return pGroup;
}
@@ -291,12 +291,12 @@ namespace mongo {
/* get the _id document */
intrusive_ptr<const Value> pId(pIdExpression->evaluate(pDocument));
- uassert(15956, "the _id field for a group must not be undefined",
- pId->getType() != Undefined);
+ uassert(15956, "the _id field for a group must not be undefined",
+ pId->getType() != Undefined);
/*
Look for the _id value in the map; if it's not there, add a
- new entry with a blank accumulator.
+ new entry with a blank accumulator.
*/
vector<intrusive_ptr<Accumulator> > *pGroup;
GroupsType::iterator it(groups.find(pId));
@@ -353,38 +353,38 @@ namespace mongo {
/* add the rest of the fields */
for(size_t i = 0; i < n; ++i) {
- intrusive_ptr<const Value> pValue((*pGroup)[i]->getValue());
- if (pValue->getType() != Undefined)
- pResult->addField(vFieldName[i], pValue);
- }
+ intrusive_ptr<const Value> pValue((*pGroup)[i]->getValue());
+ if (pValue->getType() != Undefined)
+ pResult->addField(vFieldName[i], pValue);
+ }
return pResult;
}
intrusive_ptr<DocumentSource> DocumentSourceGroup::createMerger() {
- intrusive_ptr<DocumentSourceGroup> pMerger(
- DocumentSourceGroup::create(pCtx));
-
- /* the merger will use the same grouping key */
- pMerger->setIdExpression(ExpressionFieldPath::create(
- Document::idName.c_str()));
-
- const size_t n = vFieldName.size();
- for(size_t i = 0; i < n; ++i) {
- /*
- The merger's output field names will be the same, as will the
- accumulator factories. However, for some accumulators, the
- expression to be accumulated will be different. The original
- accumulator may be collecting an expression based on a field
- expression or constant. Here, we accumulate the output of the
- same name from the prior group.
- */
- pMerger->addAccumulator(
- vFieldName[i], vpAccumulatorFactory[i],
- ExpressionFieldPath::create(vFieldName[i]));
- }
-
- return pMerger;
+ intrusive_ptr<DocumentSourceGroup> pMerger(
+ DocumentSourceGroup::create(pCtx));
+
+ /* the merger will use the same grouping key */
+ pMerger->setIdExpression(ExpressionFieldPath::create(
+ Document::idName.c_str()));
+
+ const size_t n = vFieldName.size();
+ for(size_t i = 0; i < n; ++i) {
+ /*
+ The merger's output field names will be the same, as will the
+ accumulator factories. However, for some accumulators, the
+ expression to be accumulated will be different. The original
+ accumulator may be collecting an expression based on a field
+ expression or constant. Here, we accumulate the output of the
+ same name from the prior group.
+ */
+ pMerger->addAccumulator(
+ vFieldName[i], vpAccumulatorFactory[i],
+ ExpressionFieldPath::create(vFieldName[i]));
+ }
+
+ return pMerger;
}
}
diff --git a/src/mongo/db/pipeline/document_source_limit.cpp b/src/mongo/db/pipeline/document_source_limit.cpp
index a73d4da2005..fd977cc126a 100644
--- a/src/mongo/db/pipeline/document_source_limit.cpp
+++ b/src/mongo/db/pipeline/document_source_limit.cpp
@@ -55,28 +55,28 @@ namespace mongo {
}
void DocumentSourceLimit::sourceToBson(BSONObjBuilder *pBuilder) const {
- pBuilder->append("$limit", limit);
+ pBuilder->append("$limit", limit);
}
intrusive_ptr<DocumentSourceLimit> DocumentSourceLimit::create(
- const intrusive_ptr<ExpressionContext> &pCtx) {
+ const intrusive_ptr<ExpressionContext> &pCtx) {
intrusive_ptr<DocumentSourceLimit> pSource(
new DocumentSourceLimit(pCtx));
return pSource;
}
intrusive_ptr<DocumentSource> DocumentSourceLimit::createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx) {
- uassert(15957, "the limit must be specified as a number",
- pBsonElement->isNumber());
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ uassert(15957, "the limit must be specified as a number",
+ pBsonElement->isNumber());
intrusive_ptr<DocumentSourceLimit> pLimit(
- DocumentSourceLimit::create(pCtx));
+ DocumentSourceLimit::create(pCtx));
pLimit->limit = (int)pBsonElement->numberLong();
- uassert(15958, "the limit must be positive",
- pLimit->limit > 0);
+ uassert(15958, "the limit must be positive",
+ pLimit->limit > 0);
return pLimit;
}
diff --git a/src/mongo/db/pipeline/document_source_match.cpp b/src/mongo/db/pipeline/document_source_match.cpp
index bedac3ef717..76d1305c7f1 100755
--- a/src/mongo/db/pipeline/document_source_match.cpp
+++ b/src/mongo/db/pipeline/document_source_match.cpp
@@ -31,50 +31,50 @@ namespace mongo {
}
void DocumentSourceMatch::sourceToBson(BSONObjBuilder *pBuilder) const {
- const BSONObj *pQuery = matcher.getQuery();
- pBuilder->append(matchName, *pQuery);
+ const BSONObj *pQuery = matcher.getQuery();
+ pBuilder->append(matchName, *pQuery);
}
bool DocumentSourceMatch::accept(
- const intrusive_ptr<Document> &pDocument) const {
-
- /*
- The matcher only takes BSON documents, so we have to make one.
-
- LATER
- We could optimize this by making a document with only the
- fields referenced by the Matcher. We could do this by looking inside
- the Matcher's BSON before it is created, and recording those. The
- easiest implementation might be to hold onto an ExpressionDocument
- in here, and give that pDocument to create the created subset of
- fields, and then convert that instead.
- */
- BSONObjBuilder objBuilder;
- pDocument->toBson(&objBuilder);
- BSONObj obj(objBuilder.done());
-
- return matcher.matches(obj);
+ const intrusive_ptr<Document> &pDocument) const {
+
+ /*
+ The matcher only takes BSON documents, so we have to make one.
+
+ LATER
+ We could optimize this by making a document with only the
+ fields referenced by the Matcher. We could do this by looking inside
+ the Matcher's BSON before it is created, and recording those. The
+ easiest implementation might be to hold onto an ExpressionDocument
+ in here, and give that pDocument to create the created subset of
+ fields, and then convert that instead.
+ */
+ BSONObjBuilder objBuilder;
+ pDocument->toBson(&objBuilder);
+ BSONObj obj(objBuilder.done());
+
+ return matcher.matches(obj);
}
intrusive_ptr<DocumentSource> DocumentSourceMatch::createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx) {
- uassert(15959, "the match filter must be an expression in an object",
- pBsonElement->type() == Object);
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ uassert(15959, "the match filter must be an expression in an object",
+ pBsonElement->type() == Object);
intrusive_ptr<DocumentSourceMatch> pMatcher(
- new DocumentSourceMatch(pBsonElement->Obj()));
+ new DocumentSourceMatch(pBsonElement->Obj()));
return pMatcher;
}
void DocumentSourceMatch::toMatcherBson(BSONObjBuilder *pBuilder) const {
- const BSONObj *pQuery = matcher.getQuery();
- pBuilder->appendElements(*pQuery);
+ const BSONObj *pQuery = matcher.getQuery();
+ pBuilder->appendElements(*pQuery);
}
DocumentSourceMatch::DocumentSourceMatch(const BSONObj &query):
- DocumentSourceFilterBase(),
+ DocumentSourceFilterBase(),
matcher(query) {
}
}
diff --git a/src/mongo/db/pipeline/document_source_out.cpp b/src/mongo/db/pipeline/document_source_out.cpp
index 5a30342d25c..c668d60fd3c 100755
--- a/src/mongo/db/pipeline/document_source_out.cpp
+++ b/src/mongo/db/pipeline/document_source_out.cpp
@@ -27,30 +27,30 @@ namespace mongo {
}
bool DocumentSourceOut::eof() {
- return pSource->eof();
+ return pSource->eof();
}
bool DocumentSourceOut::advance() {
- return pSource->advance();
+ return pSource->advance();
}
boost::intrusive_ptr<Document> DocumentSourceOut::getCurrent() {
- return pSource->getCurrent();
+ return pSource->getCurrent();
}
DocumentSourceOut::DocumentSourceOut(BSONElement *pBsonElement) {
- assert(false && "unimplemented");
+ assert(false && "unimplemented");
}
intrusive_ptr<DocumentSourceOut> DocumentSourceOut::createFromBson(
- BSONElement *pBsonElement) {
- intrusive_ptr<DocumentSourceOut> pSource(
- new DocumentSourceOut(pBsonElement));
+ BSONElement *pBsonElement) {
+ intrusive_ptr<DocumentSourceOut> pSource(
+ new DocumentSourceOut(pBsonElement));
- return pSource;
+ return pSource;
}
void DocumentSourceOut::sourceToBson(BSONObjBuilder *pBuilder) const {
- assert(false); // CW TODO
+ assert(false); // CW TODO
}
}
diff --git a/src/mongo/db/pipeline/document_source_project.cpp b/src/mongo/db/pipeline/document_source_project.cpp
index bb7a0b5a6d9..c69843c4784 100755
--- a/src/mongo/db/pipeline/document_source_project.cpp
+++ b/src/mongo/db/pipeline/document_source_project.cpp
@@ -30,8 +30,8 @@ namespace mongo {
}
DocumentSourceProject::DocumentSourceProject():
- excludeId(false),
- pEO(ExpressionObject::create()) {
+ excludeId(false),
+ pEO(ExpressionObject::create()) {
}
bool DocumentSourceProject::eof() {
@@ -43,36 +43,36 @@ namespace mongo {
}
intrusive_ptr<Document> DocumentSourceProject::getCurrent() {
- intrusive_ptr<Document> pInDocument(pSource->getCurrent());
+ intrusive_ptr<Document> pInDocument(pSource->getCurrent());
- /* create the result document */
- const size_t sizeHint =
- pEO->getSizeHint(pInDocument) + (excludeId ? 0 : 1);
- intrusive_ptr<Document> pResultDocument(Document::create(sizeHint));
+ /* create the result document */
+ const size_t sizeHint =
+ pEO->getSizeHint(pInDocument) + (excludeId ? 0 : 1);
+ intrusive_ptr<Document> pResultDocument(Document::create(sizeHint));
- if (!excludeId) {
- intrusive_ptr<const Value> pId(
- pInDocument->getField(Document::idName));
- pResultDocument->addField(Document::idName, pId);
- }
+ if (!excludeId) {
+ intrusive_ptr<const Value> pId(
+ pInDocument->getField(Document::idName));
+ pResultDocument->addField(Document::idName, pId);
+ }
- /* use the ExpressionObject to create the base result */
- pEO->addToDocument(pResultDocument, pInDocument);
+ /* use the ExpressionObject to create the base result */
+ pEO->addToDocument(pResultDocument, pInDocument);
return pResultDocument;
}
void DocumentSourceProject::optimize() {
- intrusive_ptr<Expression> pE(pEO->optimize());
- pEO = dynamic_pointer_cast<ExpressionObject>(pE);
+ intrusive_ptr<Expression> pE(pEO->optimize());
+ pEO = dynamic_pointer_cast<ExpressionObject>(pE);
}
void DocumentSourceProject::sourceToBson(BSONObjBuilder *pBuilder) const {
- BSONObjBuilder insides;
- if (excludeId)
- insides.append(Document::idName, false);
- pEO->documentToBson(&insides, 0);
- pBuilder->append(projectName, insides.done());
+ BSONObjBuilder insides;
+ if (excludeId)
+ insides.append(Document::idName, false);
+ pEO->documentToBson(&insides, 0);
+ pBuilder->append(projectName, insides.done());
}
intrusive_ptr<DocumentSourceProject> DocumentSourceProject::create() {
@@ -83,45 +83,45 @@ namespace mongo {
void DocumentSourceProject::addField(
const string &fieldName, const intrusive_ptr<Expression> &pExpression) {
- uassert(15960,
- "projection fields must be defined by non-empty expressions",
- pExpression);
+ uassert(15960,
+ "projection fields must be defined by non-empty expressions",
+ pExpression);
- pEO->addField(fieldName, pExpression);
+ pEO->addField(fieldName, pExpression);
}
void DocumentSourceProject::includePath(const string &fieldPath) {
- if (Document::idName.compare(fieldPath) == 0) {
- uassert(15961, str::stream() << projectName <<
- ": _id cannot be included once it has been excluded",
- !excludeId);
+ if (Document::idName.compare(fieldPath) == 0) {
+ uassert(15961, str::stream() << projectName <<
+ ": _id cannot be included once it has been excluded",
+ !excludeId);
- return;
- }
+ return;
+ }
- pEO->includePath(fieldPath);
+ pEO->includePath(fieldPath);
}
void DocumentSourceProject::excludePath(const string &fieldPath) {
- if (Document::idName.compare(fieldPath) == 0) {
- excludeId = true;
- return;
- }
+ if (Document::idName.compare(fieldPath) == 0) {
+ excludeId = true;
+ return;
+ }
- pEO->excludePath(fieldPath);
+ pEO->excludePath(fieldPath);
}
intrusive_ptr<DocumentSource> DocumentSourceProject::createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx) {
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pCtx) {
/* validate */
- uassert(15969, str::stream() << projectName <<
- " specification must be an object",
- pBsonElement->type() == Object);
+ uassert(15969, str::stream() << projectName <<
+ " specification must be an object",
+ pBsonElement->type() == Object);
/* chain the projection onto the original source */
intrusive_ptr<DocumentSourceProject> pProject(
- DocumentSourceProject::create());
+ DocumentSourceProject::create());
/*
Pull out the $project object. This should just be a list of
@@ -130,8 +130,8 @@ namespace mongo {
*/
BSONObj projectObj(pBsonElement->Obj());
BSONObjIterator fieldIterator(projectObj);
- Expression::ObjectCtx objectCtx(
- Expression::ObjectCtx::DOCUMENT_OK);
+ Expression::ObjectCtx objectCtx(
+ Expression::ObjectCtx::DOCUMENT_OK);
while(fieldIterator.more()) {
BSONElement outFieldElement(fieldIterator.next());
string outFieldPath(outFieldElement.fieldName());
@@ -142,7 +142,7 @@ namespace mongo {
switch(specType) {
case NumberDouble: {
double inclusion = outFieldElement.numberDouble();
- fieldInclusion = static_cast<int>(inclusion);
+ fieldInclusion = static_cast<int>(inclusion);
goto IncludeExclude;
}
@@ -151,14 +151,14 @@ namespace mongo {
fieldInclusion = outFieldElement.numberInt();
IncludeExclude:
- uassert(15970, str::stream() <<
- "field inclusion or exclusion specification for \"" <<
- outFieldPath <<
- "\" must be true, 1, false, or zero",
- ((fieldInclusion == 0) || (fieldInclusion == 1)));
+ uassert(15970, str::stream() <<
+ "field inclusion or exclusion specification for \"" <<
+ outFieldPath <<
+ "\" must be true, 1, false, or zero",
+ ((fieldInclusion == 0) || (fieldInclusion == 1)));
if (fieldInclusion == 0)
- pProject->excludePath(outFieldPath);
+ pProject->excludePath(outFieldPath);
else
pProject->includePath(outFieldPath);
break;
@@ -172,11 +172,11 @@ IncludeExclude:
/* include a field, with rename */
fieldInclusion = 1;
inFieldName = outFieldElement.String();
- pProject->addField(
- outFieldPath,
- ExpressionFieldPath::create(
- Expression::removeFieldPrefix(inFieldName)));
- break;
+ pProject->addField(
+ outFieldPath,
+ ExpressionFieldPath::create(
+ Expression::removeFieldPrefix(inFieldName)));
+ break;
case Object: {
intrusive_ptr<Expression> pDocument(
@@ -188,10 +188,10 @@ IncludeExclude:
}
default:
- uassert(15971, str::stream() <<
- "invalid BSON type (" << specType <<
- ") for " << projectName <<
- " field " << outFieldPath, false);
+ uassert(15971, str::stream() <<
+ "invalid BSON type (" << specType <<
+ ") for " << projectName <<
+ " field " << outFieldPath, false);
}
}
diff --git a/src/mongo/db/pipeline/document_source_skip.cpp b/src/mongo/db/pipeline/document_source_skip.cpp
index 74bf2360ce9..f26486e75a2 100644
--- a/src/mongo/db/pipeline/document_source_skip.cpp
+++ b/src/mongo/db/pipeline/document_source_skip.cpp
@@ -72,24 +72,24 @@ namespace mongo {
}
void DocumentSourceSkip::sourceToBson(BSONObjBuilder *pBuilder) const {
- pBuilder->append("$skip", skip);
+ pBuilder->append("$skip", skip);
}
intrusive_ptr<DocumentSourceSkip> DocumentSourceSkip::create(
- const intrusive_ptr<ExpressionContext> &pCtx) {
+ const intrusive_ptr<ExpressionContext> &pCtx) {
intrusive_ptr<DocumentSourceSkip> pSource(
new DocumentSourceSkip(pCtx));
return pSource;
}
intrusive_ptr<DocumentSource> DocumentSourceSkip::createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx) {
- uassert(15972, str::stream() << "the value to " <<
- skipName << " must be a number", pBsonElement->isNumber());
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ uassert(15972, str::stream() << "the value to " <<
+ skipName << " must be a number", pBsonElement->isNumber());
intrusive_ptr<DocumentSourceSkip> pSkip(
- DocumentSourceSkip::create(pCtx));
+ DocumentSourceSkip::create(pCtx));
pSkip->skip = (int)pBsonElement->numberLong();
assert(pSkip->skip > 0); // CW TODO error code
diff --git a/src/mongo/db/pipeline/document_source_sort.cpp b/src/mongo/db/pipeline/document_source_sort.cpp
index bf4739af7d1..8e89617805c 100755
--- a/src/mongo/db/pipeline/document_source_sort.cpp
+++ b/src/mongo/db/pipeline/document_source_sort.cpp
@@ -51,7 +51,7 @@ namespace mongo {
count = 0;
return false;
}
- pCurrent = listIterator->pDocument;
+ pCurrent = listIterator->pDocument;
return true;
}
@@ -64,100 +64,100 @@ namespace mongo {
}
void DocumentSourceSort::sourceToBson(BSONObjBuilder *pBuilder) const {
- BSONObjBuilder insides;
- sortKeyToBson(&insides, false);
- pBuilder->append(sortName, insides.done());
+ BSONObjBuilder insides;
+ sortKeyToBson(&insides, false);
+ pBuilder->append(sortName, insides.done());
}
intrusive_ptr<DocumentSourceSort> DocumentSourceSort::create(
- const intrusive_ptr<ExpressionContext> &pCtx) {
+ const intrusive_ptr<ExpressionContext> &pCtx) {
intrusive_ptr<DocumentSourceSort> pSource(
new DocumentSourceSort(pCtx));
return pSource;
}
DocumentSourceSort::DocumentSourceSort(
- const intrusive_ptr<ExpressionContext> &pTheCtx):
+ const intrusive_ptr<ExpressionContext> &pTheCtx):
populated(false),
pCtx(pTheCtx) {
}
void DocumentSourceSort::addKey(const string &fieldPath, bool ascending) {
- intrusive_ptr<ExpressionFieldPath> pE(
- ExpressionFieldPath::create(fieldPath));
- vSortKey.push_back(pE);
- vAscending.push_back(ascending);
+ intrusive_ptr<ExpressionFieldPath> pE(
+ ExpressionFieldPath::create(fieldPath));
+ vSortKey.push_back(pE);
+ vAscending.push_back(ascending);
}
void DocumentSourceSort::sortKeyToBson(
- BSONObjBuilder *pBuilder, bool usePrefix) const {
- /* add the key fields */
- const size_t n = vSortKey.size();
- for(size_t i = 0; i < n; ++i) {
- /* create the "field name" */
- stringstream ss;
- vSortKey[i]->writeFieldPath(ss, usePrefix);
-
- /* append a named integer based on the sort order */
- pBuilder->append(ss.str(), (vAscending[i] ? 1 : -1));
- }
+ BSONObjBuilder *pBuilder, bool usePrefix) const {
+ /* add the key fields */
+ const size_t n = vSortKey.size();
+ for(size_t i = 0; i < n; ++i) {
+ /* create the "field name" */
+ stringstream ss;
+ vSortKey[i]->writeFieldPath(ss, usePrefix);
+
+ /* append a named integer based on the sort order */
+ pBuilder->append(ss.str(), (vAscending[i] ? 1 : -1));
+ }
}
intrusive_ptr<DocumentSource> DocumentSourceSort::createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx) {
- uassert(15973, str::stream() << " the " <<
- sortName << " key specification must be an object",
- pBsonElement->type() == Object);
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pCtx) {
+ uassert(15973, str::stream() << " the " <<
+ sortName << " key specification must be an object",
+ pBsonElement->type() == Object);
intrusive_ptr<DocumentSourceSort> pSort(
- DocumentSourceSort::create(pCtx));
+ DocumentSourceSort::create(pCtx));
/* check for then iterate over the sort object */
- size_t sortKeys = 0;
- for(BSONObjIterator keyIterator(pBsonElement->Obj().begin());
- keyIterator.more();) {
- BSONElement keyField(keyIterator.next());
- const char *pKeyFieldName = keyField.fieldName();
- int sortOrder = 0;
-
- uassert(15974, str::stream() << sortName <<
- " key ordering must be specified using a number",
- keyField.isNumber());
- sortOrder = (int)keyField.numberInt();
-
- uassert(15975, str::stream() << sortName <<
- " key ordering must be 1 (for ascending) or -1 (for descending",
- ((sortOrder == 1) || (sortOrder == -1)));
-
- pSort->addKey(pKeyFieldName, (sortOrder > 0));
- ++sortKeys;
- }
-
- uassert(15976, str::stream() << sortName <<
- " must have at least one sort key", (sortKeys > 0));
+ size_t sortKeys = 0;
+ for(BSONObjIterator keyIterator(pBsonElement->Obj().begin());
+ keyIterator.more();) {
+ BSONElement keyField(keyIterator.next());
+ const char *pKeyFieldName = keyField.fieldName();
+ int sortOrder = 0;
+
+ uassert(15974, str::stream() << sortName <<
+ " key ordering must be specified using a number",
+ keyField.isNumber());
+ sortOrder = (int)keyField.numberInt();
+
+ uassert(15975, str::stream() << sortName <<
+ " key ordering must be 1 (for ascending) or -1 (for descending",
+ ((sortOrder == 1) || (sortOrder == -1)));
+
+ pSort->addKey(pKeyFieldName, (sortOrder > 0));
+ ++sortKeys;
+ }
+
+ uassert(15976, str::stream() << sortName <<
+ " must have at least one sort key", (sortKeys > 0));
return pSort;
}
void DocumentSourceSort::populate() {
- /* make sure we've got a sort key */
- assert(vSortKey.size());
+ /* make sure we've got a sort key */
+ assert(vSortKey.size());
- /* track and warn about how much physical memory has been used */
- DocMemMonitor dmm(this);
+ /* track and warn about how much physical memory has been used */
+ DocMemMonitor dmm(this);
- /* pull everything from the underlying source */
+ /* pull everything from the underlying source */
for(bool hasNext = !pSource->eof(); hasNext;
- hasNext = pSource->advance()) {
- intrusive_ptr<Document> pDocument(pSource->getCurrent());
- documents.push_back(Carrier(this, pDocument));
+ hasNext = pSource->advance()) {
+ intrusive_ptr<Document> pDocument(pSource->getCurrent());
+ documents.push_back(Carrier(this, pDocument));
- dmm.addToTotal(pDocument->getApproximateSize());
- }
+ dmm.addToTotal(pDocument->getApproximateSize());
+ }
- /* sort the list */
- documents.sort(Carrier::lessThan);
+ /* sort the list */
+ documents.sort(Carrier::lessThan);
/* start the sort iterator */
listIterator = documents.begin();
@@ -168,49 +168,49 @@ namespace mongo {
}
int DocumentSourceSort::compare(
- const intrusive_ptr<Document> &pL, const intrusive_ptr<Document> &pR) {
-
- /*
- populate() already checked that there is a non-empty sort key,
- so we shouldn't have to worry about that here.
-
- However, the tricky part is what to do is none of the sort keys are
- present. In this case, consider the document less.
- */
- const size_t n = vSortKey.size();
- for(size_t i = 0; i < n; ++i) {
- /* evaluate the sort keys */
- ExpressionFieldPath *pE = vSortKey[i].get();
- intrusive_ptr<const Value> pLeft(pE->evaluate(pL));
- intrusive_ptr<const Value> pRight(pE->evaluate(pR));
-
- /*
- Compare the two values; if they differ, return. If they are
- the same, move on to the next key.
- */
- int cmp = Value::compare(pLeft, pRight);
- if (cmp) {
- /* if necessary, adjust the return value by the key ordering */
- if (!vAscending[i])
- cmp = -cmp;
-
- return cmp;
- }
- }
-
- /*
- If we got here, everything matched (or didn't exist), so we'll
- consider the documents equal for purposes of this sort.
- */
- return 0;
+ const intrusive_ptr<Document> &pL, const intrusive_ptr<Document> &pR) {
+
+ /*
+ populate() already checked that there is a non-empty sort key,
+ so we shouldn't have to worry about that here.
+
+ However, the tricky part is what to do is none of the sort keys are
+ present. In this case, consider the document less.
+ */
+ const size_t n = vSortKey.size();
+ for(size_t i = 0; i < n; ++i) {
+ /* evaluate the sort keys */
+ ExpressionFieldPath *pE = vSortKey[i].get();
+ intrusive_ptr<const Value> pLeft(pE->evaluate(pL));
+ intrusive_ptr<const Value> pRight(pE->evaluate(pR));
+
+ /*
+ Compare the two values; if they differ, return. If they are
+ the same, move on to the next key.
+ */
+ int cmp = Value::compare(pLeft, pRight);
+ if (cmp) {
+ /* if necessary, adjust the return value by the key ordering */
+ if (!vAscending[i])
+ cmp = -cmp;
+
+ return cmp;
+ }
+ }
+
+ /*
+ If we got here, everything matched (or didn't exist), so we'll
+ consider the documents equal for purposes of this sort.
+ */
+ return 0;
}
bool DocumentSourceSort::Carrier::lessThan(
- const Carrier &rL, const Carrier &rR) {
- /* make sure these aren't from different lists */
- assert(rL.pSort == rR.pSort);
+ const Carrier &rL, const Carrier &rR) {
+ /* make sure these aren't from different lists */
+ assert(rL.pSort == rR.pSort);
- /* compare the documents according to the sort key */
- return (rL.pSort->compare(rL.pDocument, rR.pDocument) < 0);
+ /* compare the documents according to the sort key */
+ return (rL.pSort->compare(rL.pDocument, rR.pDocument) < 0);
}
}
diff --git a/src/mongo/db/pipeline/document_source_unwind.cpp b/src/mongo/db/pipeline/document_source_unwind.cpp
index 3978a334e88..f1d3b4fb420 100755
--- a/src/mongo/db/pipeline/document_source_unwind.cpp
+++ b/src/mongo/db/pipeline/document_source_unwind.cpp
@@ -30,7 +30,7 @@ namespace mongo {
}
DocumentSourceUnwind::DocumentSourceUnwind():
- unwindPath(),
+ unwindPath(),
pNoUnwindDocument(),
pUnwindArray(),
pUnwinder(),
@@ -55,7 +55,7 @@ namespace mongo {
}
/* release the last document and advance */
- resetArray();
+ resetArray();
return pSource->advance();
}
@@ -63,83 +63,83 @@ namespace mongo {
if (!pNoUnwindDocument.get()) {
intrusive_ptr<Document> pInDocument(pSource->getCurrent());
- /* create the result document */
- pNoUnwindDocument = pInDocument;
- fieldIndex.clear();
-
- /*
- First we'll look to see if the path is there. If it isn't,
- we'll pass this document through. If it is, we record the
- indexes of the fields down the field path so that we can
- quickly replace them as we clone the documents along the
- field path.
-
- We have to clone all the documents along the field path so
- that we don't share the end value across documents that have
- come out of this pipeline operator.
- */
- intrusive_ptr<Document> pCurrent(pInDocument);
- const size_t pathLength = unwindPath.getPathLength();
- for(size_t i = 0; i < pathLength; ++i) {
- size_t idx = pCurrent->getFieldIndex(
- unwindPath.getFieldName(i));
- if (idx == pCurrent->getFieldCount() ) {
- /* this document doesn't contain the target field */
- resetArray();
- return pInDocument;
- break;
- }
-
- fieldIndex.push_back(idx);
- Document::FieldPair fp(pCurrent->getField(idx));
- intrusive_ptr<const Value> pPathValue(fp.second);
- if (i < pathLength - 1) {
- if (pPathValue->getType() != Object) {
- /* can't walk down the field path */
- resetArray();
- uassert(15977, str::stream() << unwindName <<
- ": cannot traverse field path past scalar value for \"" <<
- fp.first << "\"", false);
- break;
- }
-
- /* move down the object tree */
- pCurrent = pPathValue->getDocument();
- }
- else /* (i == pathLength - 1) */ {
- if (pPathValue->getType() != Array) {
- /* last item on path must be an array to unwind */
- resetArray();
- uassert(15978, str::stream() << unwindName <<
- ": value at end of field path must be an array",
- false);
- break;
- }
-
- /* keep track of the array we're unwinding */
- pUnwindArray = pPathValue;
- if (pUnwindArray->getArrayLength() == 0) {
- /*
- The $unwind of an empty array is a NULL value. If we
- encounter this, use the non-unwind path, but replace
- pOutField with a null.
-
- Make sure unwind value is clear so the array is
- removed.
- */
- pUnwindValue.reset();
- intrusive_ptr<Document> pClone(clonePath());
- resetArray();
- return pClone;
- }
-
- /* get the iterator we'll use to unwind the array */
- pUnwinder = pUnwindArray->getArray();
- assert(pUnwinder->more()); // we just checked above...
- pUnwindValue = pUnwinder->next();
- }
- }
- }
+ /* create the result document */
+ pNoUnwindDocument = pInDocument;
+ fieldIndex.clear();
+
+ /*
+ First we'll look to see if the path is there. If it isn't,
+ we'll pass this document through. If it is, we record the
+ indexes of the fields down the field path so that we can
+ quickly replace them as we clone the documents along the
+ field path.
+
+ We have to clone all the documents along the field path so
+ that we don't share the end value across documents that have
+ come out of this pipeline operator.
+ */
+ intrusive_ptr<Document> pCurrent(pInDocument);
+ const size_t pathLength = unwindPath.getPathLength();
+ for(size_t i = 0; i < pathLength; ++i) {
+ size_t idx = pCurrent->getFieldIndex(
+ unwindPath.getFieldName(i));
+ if (idx == pCurrent->getFieldCount() ) {
+ /* this document doesn't contain the target field */
+ resetArray();
+ return pInDocument;
+ break;
+ }
+
+ fieldIndex.push_back(idx);
+ Document::FieldPair fp(pCurrent->getField(idx));
+ intrusive_ptr<const Value> pPathValue(fp.second);
+ if (i < pathLength - 1) {
+ if (pPathValue->getType() != Object) {
+ /* can't walk down the field path */
+ resetArray();
+ uassert(15977, str::stream() << unwindName <<
+ ": cannot traverse field path past scalar value for \"" <<
+ fp.first << "\"", false);
+ break;
+ }
+
+ /* move down the object tree */
+ pCurrent = pPathValue->getDocument();
+ }
+ else /* (i == pathLength - 1) */ {
+ if (pPathValue->getType() != Array) {
+ /* last item on path must be an array to unwind */
+ resetArray();
+ uassert(15978, str::stream() << unwindName <<
+ ": value at end of field path must be an array",
+ false);
+ break;
+ }
+
+ /* keep track of the array we're unwinding */
+ pUnwindArray = pPathValue;
+ if (pUnwindArray->getArrayLength() == 0) {
+ /*
+ The $unwind of an empty array is a NULL value. If we
+ encounter this, use the non-unwind path, but replace
+ pOutField with a null.
+
+ Make sure unwind value is clear so the array is
+ removed.
+ */
+ pUnwindValue.reset();
+ intrusive_ptr<Document> pClone(clonePath());
+ resetArray();
+ return pClone;
+ }
+
+ /* get the iterator we'll use to unwind the array */
+ pUnwinder = pUnwindArray->getArray();
+ assert(pUnwinder->more()); // we just checked above...
+ pUnwindValue = pUnwinder->next();
+ }
+ }
+ }
/*
If we're unwinding a field, create an alternate document. In the
@@ -157,40 +157,40 @@ namespace mongo {
}
intrusive_ptr<Document> DocumentSourceUnwind::clonePath() const {
- /*
- For this to be valid, we must already have pNoUnwindDocument set,
- and have set up the vector of indices for that document in fieldIndex.
- */
- assert(pNoUnwindDocument.get());
-
- intrusive_ptr<Document> pClone(pNoUnwindDocument->clone());
- intrusive_ptr<Document> pCurrent(pClone);
- const size_t n = fieldIndex.size();
- assert(n);
- for(size_t i = 0; i < n; ++i) {
- const size_t fi = fieldIndex[i];
- Document::FieldPair fp(pCurrent->getField(fi));
- if (i + 1 < n) {
- /*
- For every object in the path but the last, clone it and
- continue on down.
- */
- intrusive_ptr<Document> pNext(
- fp.second->getDocument()->clone());
- pCurrent->setField(fi, fp.first, Value::createDocument(pNext));
- pCurrent = pNext;
- }
- else {
- /* for the last, subsitute the next unwound value */
- pCurrent->setField(fi, fp.first, pUnwindValue);
- }
- }
-
- return pClone;
+ /*
+ For this to be valid, we must already have pNoUnwindDocument set,
+ and have set up the vector of indices for that document in fieldIndex.
+ */
+ assert(pNoUnwindDocument.get());
+
+ intrusive_ptr<Document> pClone(pNoUnwindDocument->clone());
+ intrusive_ptr<Document> pCurrent(pClone);
+ const size_t n = fieldIndex.size();
+ assert(n);
+ for(size_t i = 0; i < n; ++i) {
+ const size_t fi = fieldIndex[i];
+ Document::FieldPair fp(pCurrent->getField(fi));
+ if (i + 1 < n) {
+ /*
+ For every object in the path but the last, clone it and
+ continue on down.
+ */
+ intrusive_ptr<Document> pNext(
+ fp.second->getDocument()->clone());
+ pCurrent->setField(fi, fp.first, Value::createDocument(pNext));
+ pCurrent = pNext;
+ }
+ else {
+ /* for the last, subsitute the next unwound value */
+ pCurrent->setField(fi, fp.first, pUnwindValue);
+ }
+ }
+
+ return pClone;
}
void DocumentSourceUnwind::sourceToBson(BSONObjBuilder *pBuilder) const {
- pBuilder->append(unwindName, unwindPath.getPath(true));
+ pBuilder->append(unwindName, unwindPath.getPath(true));
}
intrusive_ptr<DocumentSourceUnwind> DocumentSourceUnwind::create() {
@@ -200,33 +200,33 @@ namespace mongo {
}
void DocumentSourceUnwind::unwindField(const FieldPath &rFieldPath) {
- /* can't set more than one unwind field */
- uassert(15979, str::stream() << unwindName <<
- "can't unwind more than one path at once",
- !unwindPath.getPathLength());
+ /* can't set more than one unwind field */
+ uassert(15979, str::stream() << unwindName <<
+ "can't unwind more than one path at once",
+ !unwindPath.getPathLength());
- uassert(15980, "the path of the field to unwind cannot be empty",
- false);
+ uassert(15980, "the path of the field to unwind cannot be empty",
+ false);
- /* record the field path */
- unwindPath = rFieldPath;
+ /* record the field path */
+ unwindPath = rFieldPath;
}
intrusive_ptr<DocumentSource> DocumentSourceUnwind::createFromBson(
- BSONElement *pBsonElement,
- const intrusive_ptr<ExpressionContext> &pCtx) {
+ BSONElement *pBsonElement,
+ const intrusive_ptr<ExpressionContext> &pCtx) {
/*
- The value of $unwind should just be a field path.
+ The value of $unwind should just be a field path.
*/
- uassert(15981, str::stream() << "the " << unwindName <<
- " field path must be specified as a string",
- pBsonElement->type() == String);
+ uassert(15981, str::stream() << "the " << unwindName <<
+ " field path must be specified as a string",
+ pBsonElement->type() == String);
- string prefixedPathString(pBsonElement->String());
- string pathString(Expression::removeFieldPrefix(prefixedPathString));
+ string prefixedPathString(pBsonElement->String());
+ string pathString(Expression::removeFieldPrefix(prefixedPathString));
intrusive_ptr<DocumentSourceUnwind> pUnwind(
- DocumentSourceUnwind::create());
- pUnwind->unwindPath = FieldPath(pathString);
+ DocumentSourceUnwind::create());
+ pUnwind->unwindPath = FieldPath(pathString);
return pUnwind;
}
diff --git a/src/mongo/db/pipeline/expression.cpp b/src/mongo/db/pipeline/expression.cpp
index b3caefcf899..1b5d0a4d771 100755
--- a/src/mongo/db/pipeline/expression.cpp
+++ b/src/mongo/db/pipeline/expression.cpp
@@ -31,8 +31,8 @@ namespace mongo {
/* --------------------------- Expression ------------------------------ */
void Expression::toMatcherBson(
- BSONObjBuilder *pBuilder, unsigned depth) const {
- assert(false && "Expression::toMatcherBson()");
+ BSONObjBuilder *pBuilder, unsigned depth) const {
+ assert(false && "Expression::toMatcherBson()");
}
Expression::ObjectCtx::ObjectCtx(int theOptions):
@@ -54,12 +54,12 @@ namespace mongo {
const char Expression::unwindName[] = "$unwind";
string Expression::removeFieldPrefix(const string &prefixedField) {
- const char *pPrefixedField = prefixedField.c_str();
- uassert(15982, str::stream() <<
- "field path references must be prefixed with a '$' (\"" <<
- prefixedField << "\"", pPrefixedField[0] == '$');
+ const char *pPrefixedField = prefixedField.c_str();
+ uassert(15982, str::stream() <<
+ "field path references must be prefixed with a '$' (\"" <<
+ prefixedField << "\"", pPrefixedField[0] == '$');
- return string(pPrefixedField + 1);
+ return string(pPrefixedField + 1);
}
intrusive_ptr<Expression> Expression::parseObject(
@@ -88,10 +88,10 @@ namespace mongo {
const char *pFieldName = fieldElement.fieldName();
if (pFieldName[0] == '$') {
- uassert(15983, str::stream() <<
- "the operator must be the only field in a pipeline object (at \""
- << pFieldName << "\"",
- fieldCount == 0);
+ uassert(15983, str::stream() <<
+ "the operator must be the only field in a pipeline object (at \""
+ << pFieldName << "\"",
+ fieldCount == 0);
/* we've determined this "object" is an operator expression */
isOp = 1;
@@ -110,18 +110,18 @@ namespace mongo {
assert(fieldElement.type() == String);
// CW TODO $unwind operand must be single field name
- string fieldPath(removeFieldPrefix(fieldElement.String()));
+ string fieldPath(removeFieldPrefix(fieldElement.String()));
pExpression = ExpressionFieldPath::create(fieldPath);
pCtx->unwind(fieldPath);
}
}
else {
- uassert(15984, str::stream() << "this object is already an operator expression, and can't be used as a document expression (at \"" <<
- pFieldName << "\")",
- isOp != 1);
- uassert(15990, str::stream() << "this object is already an operator expression, and can't be used as a document expression (at \"" <<
- pFieldName << "\")",
- kind != OPERATOR);
+ uassert(15984, str::stream() << "this object is already an operator expression, and can't be used as a document expression (at \"" <<
+ pFieldName << "\")",
+ isOp != 1);
+ uassert(15990, str::stream() << "this object is already an operator expression, and can't be used as a document expression (at \"" <<
+ pFieldName << "\")",
+ kind != OPERATOR);
/* if it's our first time, create the document expression */
if (!pExpression.get()) {
@@ -140,45 +140,45 @@ namespace mongo {
string fieldName(pFieldName);
if (fieldType == Object) {
/* it's a nested document */
- ObjectCtx oCtx(
- (pCtx->documentOk() ? ObjectCtx::DOCUMENT_OK : 0));
+ ObjectCtx oCtx(
+ (pCtx->documentOk() ? ObjectCtx::DOCUMENT_OK : 0));
intrusive_ptr<Expression> pNested(
parseObject(&fieldElement, &oCtx));
pExpressionObject->addField(fieldName, pNested);
}
else if (fieldType == String) {
/* it's a renamed field */
- // CW TODO could also be a constant
+ // CW TODO could also be a constant
intrusive_ptr<Expression> pPath(
ExpressionFieldPath::create(
- removeFieldPrefix(fieldElement.String())));
+ removeFieldPrefix(fieldElement.String())));
pExpressionObject->addField(fieldName, pPath);
}
else if (fieldType == NumberDouble) {
/* it's an inclusion specification */
int inclusion = static_cast<int>(fieldElement.Double());
- if (inclusion == 0)
- pExpressionObject->excludePath(fieldName);
- else if (inclusion == 1)
- pExpressionObject->includePath(fieldName);
- else
- uassert(15991, str::stream() <<
- "\"" << fieldName <<
- "\" numeric inclusion or exclusion must be 1 or 0 (or boolean)",
- false);
+ if (inclusion == 0)
+ pExpressionObject->excludePath(fieldName);
+ else if (inclusion == 1)
+ pExpressionObject->includePath(fieldName);
+ else
+ uassert(15991, str::stream() <<
+ "\"" << fieldName <<
+ "\" numeric inclusion or exclusion must be 1 or 0 (or boolean)",
+ false);
}
else if (fieldType == Bool) {
- bool inclusion = fieldElement.Bool();
- if (!inclusion)
- pExpressionObject->excludePath(fieldName);
- else
- pExpressionObject->includePath(fieldName);
- }
- else { /* nothing else is allowed */
- uassert(15992, str::stream() <<
- "disallowed field type " << fieldType <<
- " in object expression (at \"" <<
- fieldName << "\")", false);
+ bool inclusion = fieldElement.Bool();
+ if (!inclusion)
+ pExpressionObject->excludePath(fieldName);
+ else
+ pExpressionObject->includePath(fieldName);
+ }
+ else { /* nothing else is allowed */
+ uassert(15992, str::stream() <<
+ "disallowed field type " << fieldType <<
+ " in object expression (at \"" <<
+ fieldName << "\")", false);
}
}
}
@@ -204,8 +204,8 @@ namespace mongo {
{"$add", ExpressionAdd::create},
{"$and", ExpressionAnd::create},
{"$cmp", ExpressionCompare::createCmp},
- {"$cond", ExpressionCond::create},
- {"$const", ExpressionNoOp::create},
+ {"$cond", ExpressionCond::create},
+ {"$const", ExpressionNoOp::create},
{"$dayOfMonth", ExpressionDayOfMonth::create},
{"$dayOfWeek", ExpressionDayOfWeek::create},
{"$dayOfYear", ExpressionDayOfYear::create},
@@ -244,8 +244,8 @@ namespace mongo {
const OpDesc *pOp = (const OpDesc *)bsearch(
&key, OpTable, NOp, sizeof(OpDesc), OpDescCmp);
- uassert(15999, str::stream() << "invalid operator \"" <<
- pOpName << "\"", pOp);
+ uassert(15999, str::stream() << "invalid operator \"" <<
+ pOpName << "\"", pOp);
/* make the expression node */
intrusive_ptr<ExpressionNary> pExpression((*pOp->pFactory)());
@@ -255,7 +255,7 @@ namespace mongo {
if (elementType == Object) {
/* the operator must be unary and accept an object argument */
BSONObj objOperand(pBsonElement->Obj());
- ObjectCtx oCtx(ObjectCtx::DOCUMENT_OK);
+ ObjectCtx oCtx(ObjectCtx::DOCUMENT_OK);
intrusive_ptr<Expression> pOperand(
Expression::parseObject(pBsonElement, &oCtx));
pExpression->addOperand(pOperand);
@@ -267,13 +267,13 @@ namespace mongo {
for(size_t i = 0; i < n; ++i) {
BSONElement *pBsonOperand = &bsonArray[i];
intrusive_ptr<Expression> pOperand(
- Expression::parseOperand(pBsonOperand));
+ Expression::parseOperand(pBsonOperand));
pExpression->addOperand(pOperand);
}
}
else { /* assume it's an atomic operand */
intrusive_ptr<Expression> pOperand(
- Expression::parseOperand(pBsonElement));
+ Expression::parseOperand(pBsonElement));
pExpression->addOperand(pOperand);
}
@@ -297,25 +297,25 @@ namespace mongo {
string value(opCopy.String());
/* check for a field path */
- if (value[0] != '$')
+ if (value[0] != '$')
goto ExpectConstant; // assume plain string constant
/* if we got here, this is a field path expression */
- string fieldPath(removeFieldPrefix(value));
+ string fieldPath(removeFieldPrefix(value));
intrusive_ptr<Expression> pFieldExpr(
ExpressionFieldPath::create(fieldPath));
return pFieldExpr;
}
case Object: {
- ObjectCtx oCtx(ObjectCtx::DOCUMENT_OK);
+ ObjectCtx oCtx(ObjectCtx::DOCUMENT_OK);
intrusive_ptr<Expression> pSubExpression(
Expression::parseObject(pBsonElement, &oCtx));
return pSubExpression;
}
default:
- ExpectConstant: {
+ ExpectConstant: {
intrusive_ptr<Expression> pOperand(
ExpressionConstant::createFromBsonElement(pBsonElement));
return pOperand;
@@ -334,15 +334,15 @@ namespace mongo {
}
intrusive_ptr<Expression> ExpressionAdd::optimize() {
- intrusive_ptr<Expression> pE(ExpressionNary::optimize());
- ExpressionAdd *pA = dynamic_cast<ExpressionAdd *>(pE.get());
- if (pA) {
- /* don't create a circular reference */
- if (pA != this)
- pA->pAdd = this;
- }
+ intrusive_ptr<Expression> pE(ExpressionNary::optimize());
+ ExpressionAdd *pA = dynamic_cast<ExpressionAdd *>(pE.get());
+ if (pA) {
+ /* don't create a circular reference */
+ if (pA != this)
+ pA->pAdd = this;
+ }
- return pE;
+ return pE;
}
intrusive_ptr<ExpressionNary> ExpressionAdd::create() {
@@ -358,57 +358,57 @@ namespace mongo {
intrusive_ptr<const Value> ExpressionAdd::evaluate(
const intrusive_ptr<Document> &pDocument) const {
unsigned stringCount = 0;
- unsigned nonConstStringCount = 0;
+ unsigned nonConstStringCount = 0;
unsigned dateCount = 0;
const size_t n = vpOperand.size();
- vector<intrusive_ptr<const Value> > vpValue; /* evaluated operands */
+ vector<intrusive_ptr<const Value> > vpValue; /* evaluated operands */
- /* use the original, if we've been told to do so */
- if (useOriginal) {
- return pAdd->evaluate(pDocument);
- }
+ /* use the original, if we've been told to do so */
+ if (useOriginal) {
+ return pAdd->evaluate(pDocument);
+ }
for (size_t i = 0; i < n; ++i) {
intrusive_ptr<const Value> pValue(
- vpOperand[i]->evaluate(pDocument));
- vpValue.push_back(pValue);
+ vpOperand[i]->evaluate(pDocument));
+ vpValue.push_back(pValue);
- BSONType valueType = pValue->getType();
+ BSONType valueType = pValue->getType();
if (valueType == String) {
++stringCount;
- if (!dynamic_cast<ExpressionConstant *>(vpOperand[i].get()))
- ++nonConstStringCount;
- }
+ if (!dynamic_cast<ExpressionConstant *>(vpOperand[i].get()))
+ ++nonConstStringCount;
+ }
else if (valueType == Date)
++dateCount;
}
/*
- We don't allow adding two dates because it doesn't make sense
- especially since they are in epoch time. However, if there is a
- string present then we would be appending the dates to a string so
- having many would not be not a problem.
+ We don't allow adding two dates because it doesn't make sense
+ especially since they are in epoch time. However, if there is a
+ string present then we would be appending the dates to a string so
+ having many would not be a problem.
*/
if ((dateCount > 1) && !stringCount) {
- uassert(16000, "can't add two dates together", false);
+ uassert(16000, "can't add two dates together", false);
return Value::getNull();
}
- /*
- If there are non-constant strings, and we've got a copy of the
- original, then use that from this point forward. This is necessary
- to keep the order of strings the same for string concatenation;
- constant-folding would violate the order preservation.
-
- This is a one-way conversion we do if we see one of these. It is
- possible that these could vary from document to document, but any
- sane schema probably isn't going to do that, so once we see a string,
- we can probably assume they're going to be strings all the way down.
- */
- if (nonConstStringCount && pAdd.get()) {
- useOriginal = true;
- return pAdd->evaluate(pDocument);
- }
+ /*
+ If there are non-constant strings, and we've got a copy of the
+ original, then use that from this point forward. This is necessary
+ to keep the order of strings the same for string concatenation;
+ constant-folding would violate the order preservation.
+
+ This is a one-way conversion we do if we see one of these. It is
+ possible that these could vary from document to document, but any
+ sane schema probably isn't going to do that, so once we see a string,
+ we can probably assume they're going to be strings all the way down.
+ */
+ if (nonConstStringCount && pAdd.get()) {
+ useOriginal = true;
+ return pAdd->evaluate(pDocument);
+ }
if (stringCount) {
stringstream stringTotal;
@@ -458,20 +458,20 @@ namespace mongo {
}
const char *ExpressionAdd::getOpName() const {
- return "$add";
+ return "$add";
}
intrusive_ptr<ExpressionNary> (*ExpressionAdd::getFactory() const)() {
- return ExpressionAdd::create;
+ return ExpressionAdd::create;
}
void ExpressionAdd::toBson(
- BSONObjBuilder *pBuilder, const char *pOpName, unsigned depth) const {
+ BSONObjBuilder *pBuilder, const char *pOpName, unsigned depth) const {
- if (pAdd)
- pAdd->toBson(pBuilder, pOpName, depth);
- else
- ExpressionNary::toBson(pBuilder, pOpName, depth);
+ if (pAdd)
+ pAdd->toBson(pBuilder, pOpName, depth);
+ else
+ ExpressionNary::toBson(pBuilder, pOpName, depth);
}
@@ -490,58 +490,58 @@ namespace mongo {
}
intrusive_ptr<Expression> ExpressionAnd::optimize() {
- /* optimize the conjunction as much as possible */
- intrusive_ptr<Expression> pE(ExpressionNary::optimize());
-
- /* if the result isn't a conjunction, we can't do anything */
- ExpressionAnd *pAnd = dynamic_cast<ExpressionAnd *>(pE.get());
- if (!pAnd)
- return pE;
-
- /*
- Check the last argument on the result; if it's not constant (as
- promised by ExpressionNary::optimize(),) then there's nothing
- we can do.
- */
- const size_t n = pAnd->vpOperand.size();
- intrusive_ptr<Expression> pLast(pAnd->vpOperand[n - 1]);
- const ExpressionConstant *pConst =
- dynamic_cast<ExpressionConstant *>(pLast.get());
- if (!pConst)
- return pE;
-
- /*
- Evaluate and coerce the last argument to a boolean. If it's false,
- then we can replace this entire expression.
- */
- bool last = pLast->evaluate(intrusive_ptr<Document>())->coerceToBool();
- if (!last) {
- intrusive_ptr<ExpressionConstant> pFinal(
- ExpressionConstant::create(Value::getFalse()));
- return pFinal;
- }
-
- /*
- If we got here, the final operand was true, so we don't need it
- anymore. If there was only one other operand, we don't need the
- conjunction either. Note we still need to keep the promise that
- the result will be a boolean.
- */
- if (n == 2) {
- intrusive_ptr<Expression> pFinal(
- ExpressionCoerceToBool::create(pAnd->vpOperand[0]));
- return pFinal;
- }
-
- /*
- Remove the final "true" value, and return the new expression.
-
- CW TODO:
- Note that because of any implicit conversions, we may need to
- apply an implicit boolean conversion.
- */
- pAnd->vpOperand.resize(n - 1);
- return pE;
+ /* optimize the conjunction as much as possible */
+ intrusive_ptr<Expression> pE(ExpressionNary::optimize());
+
+ /* if the result isn't a conjunction, we can't do anything */
+ ExpressionAnd *pAnd = dynamic_cast<ExpressionAnd *>(pE.get());
+ if (!pAnd)
+ return pE;
+
+ /*
+ Check the last argument on the result; if it's not constant (as
+ promised by ExpressionNary::optimize()), then there's nothing
+ we can do.
+ */
+ const size_t n = pAnd->vpOperand.size();
+ intrusive_ptr<Expression> pLast(pAnd->vpOperand[n - 1]);
+ const ExpressionConstant *pConst =
+ dynamic_cast<ExpressionConstant *>(pLast.get());
+ if (!pConst)
+ return pE;
+
+ /*
+ Evaluate and coerce the last argument to a boolean. If it's false,
+ then we can replace this entire expression.
+ */
+ bool last = pLast->evaluate(intrusive_ptr<Document>())->coerceToBool();
+ if (!last) {
+ intrusive_ptr<ExpressionConstant> pFinal(
+ ExpressionConstant::create(Value::getFalse()));
+ return pFinal;
+ }
+
+ /*
+ If we got here, the final operand was true, so we don't need it
+ anymore. If there was only one other operand, we don't need the
+ conjunction either. Note we still need to keep the promise that
+ the result will be a boolean.
+ */
+ if (n == 2) {
+ intrusive_ptr<Expression> pFinal(
+ ExpressionCoerceToBool::create(pAnd->vpOperand[0]));
+ return pFinal;
+ }
+
+ /*
+ Remove the final "true" value, and return the new expression.
+
+ CW TODO:
+ Note that because of any implicit conversions, we may need to
+ apply an implicit boolean conversion.
+ */
+ pAnd->vpOperand.resize(n - 1);
+ return pE;
}
intrusive_ptr<const Value> ExpressionAnd::evaluate(
@@ -557,24 +557,24 @@ namespace mongo {
}
const char *ExpressionAnd::getOpName() const {
- return "$and";
+ return "$and";
}
void ExpressionAnd::toMatcherBson(
- BSONObjBuilder *pBuilder, unsigned depth) const {
- /*
- There are two patterns we can handle:
- (1) one or two comparisons on the same field: { a:{$gte:3, $lt:7} }
- (2) multiple field comparisons: {a:7, b:{$lte:6}, c:2}
- This can be recognized as a conjunction of a set of range
- expressions. Direct equality is a degenerate range expression;
- range expressions can be open-ended.
- */
- assert(false && "unimplemented");
+ BSONObjBuilder *pBuilder, unsigned depth) const {
+ /*
+ There are two patterns we can handle:
+ (1) one or two comparisons on the same field: { a:{$gte:3, $lt:7} }
+ (2) multiple field comparisons: {a:7, b:{$lte:6}, c:2}
+ This can be recognized as a conjunction of a set of range
+ expressions. Direct equality is a degenerate range expression;
+ range expressions can be open-ended.
+ */
+ assert(false && "unimplemented");
}
intrusive_ptr<ExpressionNary> (*ExpressionAnd::getFactory() const)() {
- return ExpressionAnd::create;
+ return ExpressionAnd::create;
}
/* -------------------- ExpressionCoerceToBool ------------------------- */
@@ -583,38 +583,38 @@ namespace mongo {
}
intrusive_ptr<ExpressionCoerceToBool> ExpressionCoerceToBool::create(
- const intrusive_ptr<Expression> &pExpression) {
+ const intrusive_ptr<Expression> &pExpression) {
intrusive_ptr<ExpressionCoerceToBool> pNew(
- new ExpressionCoerceToBool(pExpression));
+ new ExpressionCoerceToBool(pExpression));
return pNew;
}
ExpressionCoerceToBool::ExpressionCoerceToBool(
- const intrusive_ptr<Expression> &pTheExpression):
+ const intrusive_ptr<Expression> &pTheExpression):
Expression(),
pExpression(pTheExpression) {
}
intrusive_ptr<Expression> ExpressionCoerceToBool::optimize() {
- /* optimize the operand */
- pExpression = pExpression->optimize();
+ /* optimize the operand */
+ pExpression = pExpression->optimize();
- /* if the operand already produces a boolean, then we don't need this */
- /* LATER - Expression to support a "typeof" query? */
- Expression *pE = pExpression.get();
- if (dynamic_cast<ExpressionAnd *>(pE) ||
- dynamic_cast<ExpressionOr *>(pE) ||
- dynamic_cast<ExpressionNot *>(pE) ||
- dynamic_cast<ExpressionCoerceToBool *>(pE))
- return pExpression;
+ /* if the operand already produces a boolean, then we don't need this */
+ /* LATER - Expression to support a "typeof" query? */
+ Expression *pE = pExpression.get();
+ if (dynamic_cast<ExpressionAnd *>(pE) ||
+ dynamic_cast<ExpressionOr *>(pE) ||
+ dynamic_cast<ExpressionNot *>(pE) ||
+ dynamic_cast<ExpressionCoerceToBool *>(pE))
+ return pExpression;
- return intrusive_ptr<Expression>(this);
+ return intrusive_ptr<Expression>(this);
}
intrusive_ptr<const Value> ExpressionCoerceToBool::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- intrusive_ptr<const Value> pResult(pExpression->evaluate(pDocument));
+ intrusive_ptr<const Value> pResult(pExpression->evaluate(pDocument));
bool b = pResult->coerceToBool();
if (b)
return Value::getTrue();
@@ -622,13 +622,13 @@ namespace mongo {
}
void ExpressionCoerceToBool::addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const {
- assert(false && "not possible"); // no equivalent of this
+ BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const {
+ assert(false && "not possible"); // no equivalent of this
}
void ExpressionCoerceToBool::addToBsonArray(
- BSONArrayBuilder *pBuilder, unsigned depth) const {
- assert(false && "not possible"); // no equivalent of this
+ BSONArrayBuilder *pBuilder, unsigned depth) const {
+ assert(false && "not possible"); // no equivalent of this
}
/* ----------------------- ExpressionCompare --------------------------- */
@@ -684,8 +684,8 @@ namespace mongo {
}
void ExpressionCompare::addOperand(
- const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(2);
+ const intrusive_ptr<Expression> &pExpression) {
+ checkArgLimit(2);
ExpressionNary::addOperand(pExpression);
}
@@ -693,9 +693,9 @@ namespace mongo {
Lookup table for truth value returns
*/
struct CmpLookup {
- bool truthValue[3]; /* truth value for -1, 0, 1 */
- Expression::CmpOp reverse; /* reverse comparison operator */
- char name[5]; /* string name (w/trailing '\0') */
+ bool truthValue[3]; /* truth value for -1, 0, 1 */
+ Expression::CmpOp reverse; /* reverse comparison operator */
+ char name[5]; /* string name (w/trailing '\0') */
};
static const CmpLookup cmpLookup[7] = {
/* -1 0 1 reverse name */
@@ -709,72 +709,72 @@ namespace mongo {
};
intrusive_ptr<Expression> ExpressionCompare::optimize() {
- /* first optimize the comparison operands */
- intrusive_ptr<Expression> pE(ExpressionNary::optimize());
-
- /*
- If the result of optimization is no longer a comparison, there's
- nothing more we can do.
- */
- ExpressionCompare *pCmp = dynamic_cast<ExpressionCompare *>(pE.get());
- if (!pCmp)
- return pE;
-
- /* check to see if optimizing comparison operator is supported */
- CmpOp newOp = pCmp->cmpOp;
- if (newOp == CMP)
- return pE; // not reversible: there's nothing more we can do
-
- /*
- There's one localized optimization we recognize: a comparison
- between a field and a constant. If we recognize that pattern,
- replace it with an ExpressionFieldRange.
-
- When looking for this pattern, note that the operands could appear
- in any order. If we need to reverse the sense of the comparison to
- put it into the required canonical form, do so.
- */
- intrusive_ptr<Expression> pLeft(pCmp->vpOperand[0]);
- intrusive_ptr<Expression> pRight(pCmp->vpOperand[1]);
- intrusive_ptr<ExpressionFieldPath> pFieldPath(
- dynamic_pointer_cast<ExpressionFieldPath>(pLeft));
- intrusive_ptr<ExpressionConstant> pConstant;
- if (pFieldPath.get()) {
- pConstant = dynamic_pointer_cast<ExpressionConstant>(pRight);
- if (!pConstant.get())
- return pE; // there's nothing more we can do
- }
- else {
- /* if the first operand wasn't a path, see if it's a constant */
- pConstant = dynamic_pointer_cast<ExpressionConstant>(pLeft);
- if (!pConstant.get())
- return pE; // there's nothing more we can do
-
- /* the left operand was a constant; see if the right is a path */
- pFieldPath = dynamic_pointer_cast<ExpressionFieldPath>(pRight);
- if (!pFieldPath.get())
- return pE; // there's nothing more we can do
-
- /* these were not in canonical order, so reverse the sense */
- newOp = cmpLookup[newOp].reverse;
- }
-
- return ExpressionFieldRange::create(
- pFieldPath, newOp, pConstant->getValue());
+ /* first optimize the comparison operands */
+ intrusive_ptr<Expression> pE(ExpressionNary::optimize());
+
+ /*
+ If the result of optimization is no longer a comparison, there's
+ nothing more we can do.
+ */
+ ExpressionCompare *pCmp = dynamic_cast<ExpressionCompare *>(pE.get());
+ if (!pCmp)
+ return pE;
+
+ /* check to see if optimizing comparison operator is supported */
+ CmpOp newOp = pCmp->cmpOp;
+ if (newOp == CMP)
+ return pE; // not reversible: there's nothing more we can do
+
+ /*
+ There's one localized optimization we recognize: a comparison
+ between a field and a constant. If we recognize that pattern,
+ replace it with an ExpressionFieldRange.
+
+ When looking for this pattern, note that the operands could appear
+ in any order. If we need to reverse the sense of the comparison to
+ put it into the required canonical form, do so.
+ */
+ intrusive_ptr<Expression> pLeft(pCmp->vpOperand[0]);
+ intrusive_ptr<Expression> pRight(pCmp->vpOperand[1]);
+ intrusive_ptr<ExpressionFieldPath> pFieldPath(
+ dynamic_pointer_cast<ExpressionFieldPath>(pLeft));
+ intrusive_ptr<ExpressionConstant> pConstant;
+ if (pFieldPath.get()) {
+ pConstant = dynamic_pointer_cast<ExpressionConstant>(pRight);
+ if (!pConstant.get())
+ return pE; // there's nothing more we can do
+ }
+ else {
+ /* if the first operand wasn't a path, see if it's a constant */
+ pConstant = dynamic_pointer_cast<ExpressionConstant>(pLeft);
+ if (!pConstant.get())
+ return pE; // there's nothing more we can do
+
+ /* the left operand was a constant; see if the right is a path */
+ pFieldPath = dynamic_pointer_cast<ExpressionFieldPath>(pRight);
+ if (!pFieldPath.get())
+ return pE; // there's nothing more we can do
+
+ /* these were not in canonical order, so reverse the sense */
+ newOp = cmpLookup[newOp].reverse;
+ }
+
+ return ExpressionFieldRange::create(
+ pFieldPath, newOp, pConstant->getValue());
}
intrusive_ptr<const Value> ExpressionCompare::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(2);
+ checkArgCount(2);
intrusive_ptr<const Value> pLeft(vpOperand[0]->evaluate(pDocument));
intrusive_ptr<const Value> pRight(vpOperand[1]->evaluate(pDocument));
BSONType leftType = pLeft->getType();
BSONType rightType = pRight->getType();
- uassert(15994, str::stream() << getOpName() <<
- ": no automatic conversion for types " <<
- leftType << " and " << rightType,
- leftType == rightType);
+ uassert(15994, str::stream() << getOpName() <<
+ ": no automatic conversion for types " <<
+ leftType << " and " << rightType,
+ leftType == rightType);
// CW TODO at least for now. later, handle automatic conversions
int cmp = 0;
@@ -809,8 +809,8 @@ namespace mongo {
}
default:
- uassert(15995, str::stream() <<
- "can't compare values of type " << leftType, false);
+ uassert(15995, str::stream() <<
+ "can't compare values of type " << leftType, false);
break;
}
@@ -836,7 +836,7 @@ namespace mongo {
}
const char *ExpressionCompare::getOpName() const {
- return cmpLookup[cmpOp].name;
+ return cmpLookup[cmpOp].name;
}
/* ----------------------- ExpressionCond ------------------------------ */
@@ -854,21 +854,21 @@ namespace mongo {
}
void ExpressionCond::addOperand(
- const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(3);
+ const intrusive_ptr<Expression> &pExpression) {
+ checkArgLimit(3);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionCond::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(3);
+ checkArgCount(3);
intrusive_ptr<const Value> pCond(vpOperand[0]->evaluate(pDocument));
- int idx = pCond->coerceToBool() ? 1 : 2;
- return vpOperand[idx]->evaluate(pDocument);
+ int idx = pCond->coerceToBool() ? 1 : 2;
+ return vpOperand[idx]->evaluate(pDocument);
}
const char *ExpressionCond::getOpName() const {
- return "$cond";
+ return "$cond";
}
/* ---------------------- ExpressionConstant --------------------------- */
@@ -894,14 +894,14 @@ namespace mongo {
}
ExpressionConstant::ExpressionConstant(
- const intrusive_ptr<const Value> &pTheValue):
+ const intrusive_ptr<const Value> &pTheValue):
pValue(pTheValue) {
}
intrusive_ptr<Expression> ExpressionConstant::optimize() {
- /* nothing to do */
- return intrusive_ptr<Expression>(this);
+ /* nothing to do */
+ return intrusive_ptr<Expression>(this);
}
intrusive_ptr<const Value> ExpressionConstant::evaluate(
@@ -910,43 +910,43 @@ namespace mongo {
}
void ExpressionConstant::addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const {
-
- /*
- For depth greater than one, do the regular thing
-
- This will be one because any top level expression will actually
- be an operator node, so by the time we get to an expression
- constant, we're at level 1 (counting up as we go down the
- expression tree).
-
- See the comment below for more on why this happens.
- */
- if (depth > 1) {
- pValue->addToBsonObj(pBuilder, fieldName);
- return;
- }
-
- /*
- If this happens at the top level, we don't have any direct way
- to express it. However, we may need to if constant folding
- reduced expressions to constants, and we need to re-materialize
- the pipeline in order to ship it to a shard server. This has
- forced the introduction of {$const: ...}.
- */
- BSONObjBuilder constBuilder;
- pValue->addToBsonObj(&constBuilder, "$const");
- pBuilder->append(fieldName, constBuilder.done());
+ BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const {
+
+ /*
+ For depth greater than one, do the regular thing
+
+ This will be one because any top level expression will actually
+ be an operator node, so by the time we get to an expression
+ constant, we're at level 1 (counting up as we go down the
+ expression tree).
+
+ See the comment below for more on why this happens.
+ */
+ if (depth > 1) {
+ pValue->addToBsonObj(pBuilder, fieldName);
+ return;
+ }
+
+ /*
+ If this happens at the top level, we don't have any direct way
+ to express it. However, we may need to if constant folding
+ reduced expressions to constants, and we need to re-materialize
+ the pipeline in order to ship it to a shard server. This has
+ forced the introduction of {$const: ...}.
+ */
+ BSONObjBuilder constBuilder;
+ pValue->addToBsonObj(&constBuilder, "$const");
+ pBuilder->append(fieldName, constBuilder.done());
}
void ExpressionConstant::addToBsonArray(
- BSONArrayBuilder *pBuilder, unsigned depth) const {
- pValue->addToBsonArray(pBuilder);
+ BSONArrayBuilder *pBuilder, unsigned depth) const {
+ pValue->addToBsonArray(pBuilder);
}
const char *ExpressionConstant::getOpName() const {
- assert(false); // this has no name
- return NULL;
+ assert(false); // this has no name
+ return NULL;
}
/* ---------------------- ExpressionDayOfMonth ------------------------- */
@@ -964,14 +964,14 @@ namespace mongo {
}
void ExpressionDayOfMonth::addOperand(const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(1);
+ checkArgLimit(1);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionDayOfMonth::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(1);
+ checkArgCount(1);
intrusive_ptr<const Value> pDate(vpOperand[0]->evaluate(pDocument));
tm date;
(pDate->coerceToDate()).toTm(&date);
@@ -979,7 +979,7 @@ namespace mongo {
}
const char *ExpressionDayOfMonth::getOpName() const {
- return "$dayOfMonth";
+ return "$dayOfMonth";
}
/* ------------------------- ExpressionDayOfWeek ----------------------------- */
@@ -997,13 +997,13 @@ namespace mongo {
}
void ExpressionDayOfWeek::addOperand(const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(1);
+ checkArgLimit(1);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionDayOfWeek::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(1);
+ checkArgCount(1);
intrusive_ptr<const Value> pDate(vpOperand[0]->evaluate(pDocument));
tm date;
(pDate->coerceToDate()).toTm(&date);
@@ -1011,7 +1011,7 @@ namespace mongo {
}
const char *ExpressionDayOfWeek::getOpName() const {
- return "$dayOfWeek";
+ return "$dayOfWeek";
}
/* ------------------------- ExpressionDayOfYear ----------------------------- */
@@ -1029,13 +1029,13 @@ namespace mongo {
}
void ExpressionDayOfYear::addOperand(const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(1);
+ checkArgLimit(1);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionDayOfYear::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(1);
+ checkArgCount(1);
intrusive_ptr<const Value> pDate(vpOperand[0]->evaluate(pDocument));
tm date;
(pDate->coerceToDate()).toTm(&date);
@@ -1043,7 +1043,7 @@ namespace mongo {
}
const char *ExpressionDayOfYear::getOpName() const {
- return "$dayOfYear";
+ return "$dayOfYear";
}
/* ----------------------- ExpressionDivide ---------------------------- */
@@ -1061,20 +1061,20 @@ namespace mongo {
}
void ExpressionDivide::addOperand(
- const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(2);
+ const intrusive_ptr<Expression> &pExpression) {
+ checkArgLimit(2);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionDivide::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(2);
+ checkArgCount(2);
intrusive_ptr<const Value> pLeft(vpOperand[0]->evaluate(pDocument));
intrusive_ptr<const Value> pRight(vpOperand[1]->evaluate(pDocument));
double right = pRight->coerceToDouble();
- if (right == 0)
- return Value::getUndefined();
+ if (right == 0)
+ return Value::getUndefined();
double left = pLeft->coerceToDouble();
@@ -1082,7 +1082,7 @@ namespace mongo {
}
const char *ExpressionDivide::getOpName() const {
- return "$divide";
+ return "$divide";
}
/* ---------------------- ExpressionObject --------------------------- */
@@ -1096,402 +1096,402 @@ namespace mongo {
}
ExpressionObject::ExpressionObject():
- excludePaths(false),
- path(),
+ excludePaths(false),
+ path(),
vFieldName(),
vpExpression() {
}
intrusive_ptr<Expression> ExpressionObject::optimize() {
- const size_t n = vpExpression.size();
- for(size_t i = 0; i < n; ++i) {
- intrusive_ptr<Expression> pE(vpExpression[i]->optimize());
- vpExpression[i] = pE;
- }
+ const size_t n = vpExpression.size();
+ for(size_t i = 0; i < n; ++i) {
+ intrusive_ptr<Expression> pE(vpExpression[i]->optimize());
+ vpExpression[i] = pE;
+ }
- return intrusive_ptr<Expression>(this);
+ return intrusive_ptr<Expression>(this);
}
void ExpressionObject::addToDocument(
- const intrusive_ptr<Document> &pResult,
+ const intrusive_ptr<Document> &pResult,
const intrusive_ptr<Document> &pDocument) const {
- const size_t pathSize = path.size();
- set<string>::const_iterator end(path.end());
-
- /*
- Take care of inclusions or exclusions. Note that _id is special,
- that that it is always included, unless it is specifically excluded.
- we use excludeId for that in case excludePaths if false, which means
- to include paths.
- */
- if (pathSize) {
- auto_ptr<FieldIterator> pIter(pDocument->createFieldIterator());
- if (excludePaths) {
- while(pIter->more()) {
- pair<string, intrusive_ptr<const Value> > field(pIter->next());
-
- /*
- If the field in the document is not in the exclusion set,
- add it to the result document.
-
- Note that exclusions are only allowed on leaves, so we
- can assume we don't have to descend recursively here.
- */
- if (path.find(field.first) != end)
- continue; // we found it, so don't add it
-
- pResult->addField(field.first, field.second);
- }
- }
- else { /* !excludePaths */
- while(pIter->more()) {
- pair<string, intrusive_ptr<const Value> > field(
- pIter->next());
- /*
- If the field in the document is in the inclusion set,
- add it to the result document. Or, if we're not
- excluding _id, and it is _id, include it.
-
- Note that this could be an inclusion along a pathway,
- so we look for an ExpressionObject in vpExpression; when
- we find one, we populate the result with the evaluation
- of that on the nested object, yielding relative paths.
- This also allows us to handle intermediate arrays; if we
- encounter one, we repeat this for each array element.
- */
- if (path.find(field.first) != end) {
- /* find the Expression */
- const size_t n = vFieldName.size();
- size_t i;
- Expression *pE = NULL;
- for(i = 0; i < n; ++i) {
- if (field.first.compare(vFieldName[i]) == 0) {
- pE = vpExpression[i].get();
- break;
- }
- }
-
- /*
- If we didn't find an expression, it's the last path
- element to include.
- */
- if (!pE) {
- pResult->addField(field.first, field.second);
- continue;
- }
-
- ExpressionObject *pChild =
- dynamic_cast<ExpressionObject *>(pE);
- assert(pChild);
-
- /*
- Check on the type of the result object. If it's an
- object, just walk down into that recursively, and
- add it to the result.
- */
- BSONType valueType = field.second->getType();
- if (valueType == Object) {
- intrusive_ptr<Document> pD(
- pChild->evaluateDocument(
- field.second->getDocument()));
- pResult->addField(vFieldName[i],
- Value::createDocument(pD));
- }
- else if (valueType == Array) {
- /*
- If it's an array, we have to do the same thing,
- but to each array element. Then, add the array
- of results to the current document.
- */
- vector<intrusive_ptr<const Value> > result;
- intrusive_ptr<ValueIterator> pVI(
- field.second->getArray());
- while(pVI->more()) {
- intrusive_ptr<Document> pD(
- pChild->evaluateDocument(
- pVI->next()->getDocument()));
- result.push_back(Value::createDocument(pD));
- }
-
- pResult->addField(vFieldName[i],
- Value::createArray(result));
- }
- }
- }
- }
- }
-
- /* add any remaining fields we haven't already taken care of */
+ const size_t pathSize = path.size();
+ set<string>::const_iterator end(path.end());
+
+ /*
+ Take care of inclusions or exclusions. Note that _id is special,
+ that that it is always included, unless it is specifically excluded.
+ we use excludeId for that in case excludePaths if false, which means
+ to include paths.
+ */
+ if (pathSize) {
+ auto_ptr<FieldIterator> pIter(pDocument->createFieldIterator());
+ if (excludePaths) {
+ while(pIter->more()) {
+ pair<string, intrusive_ptr<const Value> > field(pIter->next());
+
+ /*
+ If the field in the document is not in the exclusion set,
+ add it to the result document.
+
+ Note that exclusions are only allowed on leaves, so we
+ can assume we don't have to descend recursively here.
+ */
+ if (path.find(field.first) != end)
+ continue; // we found it, so don't add it
+
+ pResult->addField(field.first, field.second);
+ }
+ }
+ else { /* !excludePaths */
+ while(pIter->more()) {
+ pair<string, intrusive_ptr<const Value> > field(
+ pIter->next());
+ /*
+ If the field in the document is in the inclusion set,
+ add it to the result document. Or, if we're not
+ excluding _id, and it is _id, include it.
+
+ Note that this could be an inclusion along a pathway,
+ so we look for an ExpressionObject in vpExpression; when
+ we find one, we populate the result with the evaluation
+ of that on the nested object, yielding relative paths.
+ This also allows us to handle intermediate arrays; if we
+ encounter one, we repeat this for each array element.
+ */
+ if (path.find(field.first) != end) {
+ /* find the Expression */
+ const size_t n = vFieldName.size();
+ size_t i;
+ Expression *pE = NULL;
+ for(i = 0; i < n; ++i) {
+ if (field.first.compare(vFieldName[i]) == 0) {
+ pE = vpExpression[i].get();
+ break;
+ }
+ }
+
+ /*
+ If we didn't find an expression, it's the last path
+ element to include.
+ */
+ if (!pE) {
+ pResult->addField(field.first, field.second);
+ continue;
+ }
+
+ ExpressionObject *pChild =
+ dynamic_cast<ExpressionObject *>(pE);
+ assert(pChild);
+
+ /*
+ Check on the type of the result object. If it's an
+ object, just walk down into that recursively, and
+ add it to the result.
+ */
+ BSONType valueType = field.second->getType();
+ if (valueType == Object) {
+ intrusive_ptr<Document> pD(
+ pChild->evaluateDocument(
+ field.second->getDocument()));
+ pResult->addField(vFieldName[i],
+ Value::createDocument(pD));
+ }
+ else if (valueType == Array) {
+ /*
+ If it's an array, we have to do the same thing,
+ but to each array element. Then, add the array
+ of results to the current document.
+ */
+ vector<intrusive_ptr<const Value> > result;
+ intrusive_ptr<ValueIterator> pVI(
+ field.second->getArray());
+ while(pVI->more()) {
+ intrusive_ptr<Document> pD(
+ pChild->evaluateDocument(
+ pVI->next()->getDocument()));
+ result.push_back(Value::createDocument(pD));
+ }
+
+ pResult->addField(vFieldName[i],
+ Value::createArray(result));
+ }
+ }
+ }
+ }
+ }
+
+ /* add any remaining fields we haven't already taken care of */
const size_t n = vFieldName.size();
for(size_t i = 0; i < n; ++i) {
- string fieldName(vFieldName[i]);
+ string fieldName(vFieldName[i]);
- /* if we've already dealt with this field, above, do nothing */
- if (path.find(fieldName) != end)
- continue;
+ /* if we've already dealt with this field, above, do nothing */
+ if (path.find(fieldName) != end)
+ continue;
- intrusive_ptr<const Value> pValue(
- vpExpression[i]->evaluate(pDocument));
+ intrusive_ptr<const Value> pValue(
+ vpExpression[i]->evaluate(pDocument));
- /*
- Don't add non-existent values (note: different from NULL);
- this is consistent with existing selection syntax which doesn't
- force the appearnance of non-existent fields.
- */
- if (pValue->getType() == Undefined)
- continue;
+ /*
+ Don't add non-existent values (note: different from NULL);
+ this is consistent with existing selection syntax which doesn't
+ force the appearnance of non-existent fields.
+ */
+ if (pValue->getType() == Undefined)
+ continue;
- pResult->addField(fieldName, pValue);
+ pResult->addField(fieldName, pValue);
}
}
size_t ExpressionObject::getSizeHint(
- const intrusive_ptr<Document> &pDocument) const {
- size_t sizeHint = pDocument->getFieldCount();
- const size_t pathSize = path.size();
- if (!excludePaths)
- sizeHint += pathSize;
- else {
- size_t excludeCount = pathSize;
- if (sizeHint > excludeCount)
- sizeHint -= excludeCount;
- else
- sizeHint = 0;
- }
+ const intrusive_ptr<Document> &pDocument) const {
+ size_t sizeHint = pDocument->getFieldCount();
+ const size_t pathSize = path.size();
+ if (!excludePaths)
+ sizeHint += pathSize;
+ else {
+ size_t excludeCount = pathSize;
+ if (sizeHint > excludeCount)
+ sizeHint -= excludeCount;
+ else
+ sizeHint = 0;
+ }
- /* account for the additional computed fields */
- sizeHint += vFieldName.size();
+ /* account for the additional computed fields */
+ sizeHint += vFieldName.size();
- return sizeHint;
+ return sizeHint;
}
intrusive_ptr<Document> ExpressionObject::evaluateDocument(
const intrusive_ptr<Document> &pDocument) const {
- /* create and populate the result */
+ /* create and populate the result */
intrusive_ptr<Document> pResult(
- Document::create(getSizeHint(pDocument)));
- addToDocument(pResult, pDocument);
+ Document::create(getSizeHint(pDocument)));
+ addToDocument(pResult, pDocument);
return pResult;
}
intrusive_ptr<const Value> ExpressionObject::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- return Value::createDocument(evaluateDocument(pDocument));
+ return Value::createDocument(evaluateDocument(pDocument));
}
void ExpressionObject::addField(const string &fieldName,
- const intrusive_ptr<Expression> &pExpression) {
- /* must have an expression */
- assert(pExpression.get());
-
- /* parse the field path */
- FieldPath fieldPath(fieldName);
- uassert(16008, str::stream() <<
- "an expression object's field names cannot be field paths (at \"" <<
- fieldName << "\")", fieldPath.getPathLength() == 1);
-
- /* make sure it isn't a name we've included or excluded */
- set<string>::iterator ex(path.find(fieldName));
- uassert(16009, str::stream() <<
- "can't add a field to an object expression that has already been excluded (at \"" <<
- fieldName << "\")", ex == path.end());
-
- /* make sure it isn't a name we've already got */
- const size_t n = vFieldName.size();
- for(size_t i = 0; i < n; ++i) {
- uassert(16010, str::stream() <<
- "can't add the same field to an object expression more than once (at \"" <<
- fieldName << "\")",
- fieldName.compare(vFieldName[i]) != 0);
- }
-
- vFieldName.push_back(fieldName);
- vpExpression.push_back(pExpression);
+ const intrusive_ptr<Expression> &pExpression) {
+ /* must have an expression */
+ assert(pExpression.get());
+
+ /* parse the field path */
+ FieldPath fieldPath(fieldName);
+ uassert(16008, str::stream() <<
+ "an expression object's field names cannot be field paths (at \"" <<
+ fieldName << "\")", fieldPath.getPathLength() == 1);
+
+ /* make sure it isn't a name we've included or excluded */
+ set<string>::iterator ex(path.find(fieldName));
+ uassert(16009, str::stream() <<
+ "can't add a field to an object expression that has already been excluded (at \"" <<
+ fieldName << "\")", ex == path.end());
+
+ /* make sure it isn't a name we've already got */
+ const size_t n = vFieldName.size();
+ for(size_t i = 0; i < n; ++i) {
+ uassert(16010, str::stream() <<
+ "can't add the same field to an object expression more than once (at \"" <<
+ fieldName << "\")",
+ fieldName.compare(vFieldName[i]) != 0);
+ }
+
+ vFieldName.push_back(fieldName);
+ vpExpression.push_back(pExpression);
}
void ExpressionObject::includePath(
- const FieldPath *pPath, size_t pathi, size_t pathn, bool excludeLast) {
-
- /* get the current path field name */
- string fieldName(pPath->getFieldName(pathi));
- uassert(16011,
- "an object expression can't include an empty field-name",
- fieldName.length());
-
- const size_t pathCount = path.size();
-
- /* if this is the leaf-most object, stop */
- if (pathi == pathn - 1) {
- /*
- Make sure the exclusion configuration of this node matches
- the requested result. Or, that this is the first (determining)
- specification.
- */
- uassert(16012, str::stream() <<
- "incompatible exclusion for \"" <<
- pPath->getPath(false) <<
- "\" because of a prior inclusion that includes a common sub-path",
- ((excludePaths == excludeLast) || !pathCount));
-
- excludePaths = excludeLast; // if (!pathCount), set this
- path.insert(fieldName);
- return;
- }
-
- /* this level had better be about inclusions */
- uassert(16013, str::stream() <<
- "incompatible inclusion for \"" << pPath->getPath(false) <<
- "\" because of a prior exclusion that includes a common sub-path",
- !excludePaths);
-
- /* see if we already know about this field */
- const size_t n = vFieldName.size();
- size_t i;
- for(i = 0; i < n; ++i) {
- if (fieldName.compare(vFieldName[i]) == 0)
- break;
- }
-
- /* find the right object, and continue */
- ExpressionObject *pChild;
- if (i < n) {
- /* the intermediate child already exists */
- pChild = dynamic_cast<ExpressionObject *>(vpExpression[i].get());
- assert(pChild);
- }
- else {
- /*
- If we get here, the intervening child isn't already there,
- so create it.
- */
- intrusive_ptr<ExpressionObject> pSharedChild(
- ExpressionObject::create());
- path.insert(fieldName);
- vFieldName.push_back(fieldName);
- vpExpression.push_back(pSharedChild);
- pChild = pSharedChild.get();
- }
-
- // LATER CW TODO turn this into a loop
- pChild->includePath(pPath, pathi + 1, pathn, excludeLast);
+ const FieldPath *pPath, size_t pathi, size_t pathn, bool excludeLast) {
+
+ /* get the current path field name */
+ string fieldName(pPath->getFieldName(pathi));
+ uassert(16011,
+ "an object expression can't include an empty field-name",
+ fieldName.length());
+
+ const size_t pathCount = path.size();
+
+ /* if this is the leaf-most object, stop */
+ if (pathi == pathn - 1) {
+ /*
+ Make sure the exclusion configuration of this node matches
+ the requested result. Or, that this is the first (determining)
+ specification.
+ */
+ uassert(16012, str::stream() <<
+ "incompatible exclusion for \"" <<
+ pPath->getPath(false) <<
+ "\" because of a prior inclusion that includes a common sub-path",
+ ((excludePaths == excludeLast) || !pathCount));
+
+ excludePaths = excludeLast; // if (!pathCount), set this
+ path.insert(fieldName);
+ return;
+ }
+
+ /* this level had better be about inclusions */
+ uassert(16013, str::stream() <<
+ "incompatible inclusion for \"" << pPath->getPath(false) <<
+ "\" because of a prior exclusion that includes a common sub-path",
+ !excludePaths);
+
+ /* see if we already know about this field */
+ const size_t n = vFieldName.size();
+ size_t i;
+ for(i = 0; i < n; ++i) {
+ if (fieldName.compare(vFieldName[i]) == 0)
+ break;
+ }
+
+ /* find the right object, and continue */
+ ExpressionObject *pChild;
+ if (i < n) {
+ /* the intermediate child already exists */
+ pChild = dynamic_cast<ExpressionObject *>(vpExpression[i].get());
+ assert(pChild);
+ }
+ else {
+ /*
+ If we get here, the intervening child isn't already there,
+ so create it.
+ */
+ intrusive_ptr<ExpressionObject> pSharedChild(
+ ExpressionObject::create());
+ path.insert(fieldName);
+ vFieldName.push_back(fieldName);
+ vpExpression.push_back(pSharedChild);
+ pChild = pSharedChild.get();
+ }
+
+ // LATER CW TODO turn this into a loop
+ pChild->includePath(pPath, pathi + 1, pathn, excludeLast);
}
void ExpressionObject::includePath(const string &theFieldPath) {
- /* parse the field path */
- FieldPath fieldPath(theFieldPath);
- includePath(&fieldPath, 0, fieldPath.getPathLength(), false);
+ /* parse the field path */
+ FieldPath fieldPath(theFieldPath);
+ includePath(&fieldPath, 0, fieldPath.getPathLength(), false);
}
void ExpressionObject::excludePath(const string &theFieldPath) {
- /* parse the field path */
- FieldPath fieldPath(theFieldPath);
- includePath(&fieldPath, 0, fieldPath.getPathLength(), true);
+ /* parse the field path */
+ FieldPath fieldPath(theFieldPath);
+ includePath(&fieldPath, 0, fieldPath.getPathLength(), true);
}
intrusive_ptr<Expression> ExpressionObject::getField(
- const string &fieldName) const {
- const size_t n = vFieldName.size();
- for(size_t i = 0; i < n; ++i) {
- if (fieldName.compare(vFieldName[i]) == 0)
- return vpExpression[i];
- }
+ const string &fieldName) const {
+ const size_t n = vFieldName.size();
+ for(size_t i = 0; i < n; ++i) {
+ if (fieldName.compare(vFieldName[i]) == 0)
+ return vpExpression[i];
+ }
- /* if we got here, we didn't find it */
- return intrusive_ptr<Expression>();
+ /* if we got here, we didn't find it */
+ return intrusive_ptr<Expression>();
}
void ExpressionObject::emitPaths(
- BSONObjBuilder *pBuilder, vector<string> *pvPath) const {
- if (!path.size())
- return;
-
- /* we use these for loops */
- const size_t nField = vFieldName.size();
- const size_t nPath = pvPath->size();
-
- /*
- We can iterate over the inclusion/exclusion paths in their
- (random) set order because they don't affect the order that
- fields are listed in the result. That comes from the underlying
- Document they are fetched from.
- */
- for(set<string>::const_iterator end(path.end()),
- iter(path.begin()); iter != end; ++iter) {
-
- /* find the matching field description */
- size_t iField = 0;
- for(; iField < nField; ++iField) {
- if (iter->compare(vFieldName[iField]) == 0)
- break;
- }
-
- if (iField == nField) {
- /*
- If we didn't find a matching field description, this is the
- leaf, so add the path.
- */
- stringstream ss;
-
- for(size_t iPath = 0; iPath < nPath; ++iPath)
- ss << (*pvPath)[iPath] << ".";
- ss << *iter;
-
- pBuilder->append(ss.str(), !excludePaths);
- }
- else {
- /*
- If we found a matching field description, then we need to
- descend into the next level.
- */
- Expression *pE = vpExpression[iField].get();
- ExpressionObject *pEO = dynamic_cast<ExpressionObject *>(pE);
- assert(pEO);
-
- /*
- Add the current field name to the path being built up,
- then go down into the next level.
- */
- PathPusher pathPusher(pvPath, vFieldName[iField]);
- pEO->emitPaths(pBuilder, pvPath);
- }
- }
+ BSONObjBuilder *pBuilder, vector<string> *pvPath) const {
+ if (!path.size())
+ return;
+
+ /* we use these for loops */
+ const size_t nField = vFieldName.size();
+ const size_t nPath = pvPath->size();
+
+ /*
+ We can iterate over the inclusion/exclusion paths in their
+ (random) set order because they don't affect the order that
+ fields are listed in the result. That comes from the underlying
+ Document they are fetched from.
+ */
+ for(set<string>::const_iterator end(path.end()),
+ iter(path.begin()); iter != end; ++iter) {
+
+ /* find the matching field description */
+ size_t iField = 0;
+ for(; iField < nField; ++iField) {
+ if (iter->compare(vFieldName[iField]) == 0)
+ break;
+ }
+
+ if (iField == nField) {
+ /*
+ If we didn't find a matching field description, this is the
+ leaf, so add the path.
+ */
+ stringstream ss;
+
+ for(size_t iPath = 0; iPath < nPath; ++iPath)
+ ss << (*pvPath)[iPath] << ".";
+ ss << *iter;
+
+ pBuilder->append(ss.str(), !excludePaths);
+ }
+ else {
+ /*
+ If we found a matching field description, then we need to
+ descend into the next level.
+ */
+ Expression *pE = vpExpression[iField].get();
+ ExpressionObject *pEO = dynamic_cast<ExpressionObject *>(pE);
+ assert(pEO);
+
+ /*
+ Add the current field name to the path being built up,
+ then go down into the next level.
+ */
+ PathPusher pathPusher(pvPath, vFieldName[iField]);
+ pEO->emitPaths(pBuilder, pvPath);
+ }
+ }
}
void ExpressionObject::documentToBson(
- BSONObjBuilder *pBuilder, unsigned depth) const {
+ BSONObjBuilder *pBuilder, unsigned depth) const {
- /* emit any inclusion/exclusion paths */
- vector<string> vPath;
- emitPaths(pBuilder, &vPath);
+ /* emit any inclusion/exclusion paths */
+ vector<string> vPath;
+ emitPaths(pBuilder, &vPath);
- /* then add any expressions */
- const size_t nField = vFieldName.size();
- const set<string>::const_iterator pathEnd(path.end());
- for(size_t iField = 0; iField < nField; ++iField) {
- string fieldName(vFieldName[iField]);
+ /* then add any expressions */
+ const size_t nField = vFieldName.size();
+ const set<string>::const_iterator pathEnd(path.end());
+ for(size_t iField = 0; iField < nField; ++iField) {
+ string fieldName(vFieldName[iField]);
- /* if we already took care of this, don't repeat it */
- if (path.find(fieldName) != pathEnd)
- continue;
+ /* if we already took care of this, don't repeat it */
+ if (path.find(fieldName) != pathEnd)
+ continue;
- vpExpression[iField]->addToBsonObj(pBuilder, fieldName, depth + 1);
- }
+ vpExpression[iField]->addToBsonObj(pBuilder, fieldName, depth + 1);
+ }
}
void ExpressionObject::addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const {
+ BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const {
- BSONObjBuilder objBuilder;
- documentToBson(&objBuilder, depth);
- pBuilder->append(fieldName, objBuilder.done());
+ BSONObjBuilder objBuilder;
+ documentToBson(&objBuilder, depth);
+ pBuilder->append(fieldName, objBuilder.done());
}
void ExpressionObject::addToBsonArray(
- BSONArrayBuilder *pBuilder, unsigned depth) const {
+ BSONArrayBuilder *pBuilder, unsigned depth) const {
- BSONObjBuilder objBuilder;
- documentToBson(&objBuilder, depth);
- pBuilder->append(objBuilder.done());
+ BSONObjBuilder objBuilder;
+ documentToBson(&objBuilder, depth);
+ pBuilder->append(objBuilder.done());
}
/* --------------------- ExpressionFieldPath --------------------------- */
@@ -1507,93 +1507,93 @@ namespace mongo {
}
ExpressionFieldPath::ExpressionFieldPath(
- const string &theFieldPath):
+ const string &theFieldPath):
fieldPath(theFieldPath) {
}
intrusive_ptr<Expression> ExpressionFieldPath::optimize() {
- /* nothing can be done for these */
- return intrusive_ptr<Expression>(this);
+ /* nothing can be done for these */
+ return intrusive_ptr<Expression>(this);
}
intrusive_ptr<const Value> ExpressionFieldPath::evaluatePath(
- size_t index, const size_t pathLength,
- intrusive_ptr<Document> pDocument) const {
+ size_t index, const size_t pathLength,
+ intrusive_ptr<Document> pDocument) const {
intrusive_ptr<const Value> pValue; /* the return value */
- pValue = pDocument->getValue(fieldPath.getFieldName(index));
-
- /* if the field doesn't exist, quit with an undefined value */
- if (!pValue.get())
- return Value::getUndefined();
-
- /* if we've hit the end of the path, stop */
- ++index;
- if (index >= pathLength)
- return pValue;
-
- /*
- We're diving deeper. If the value was null, return null.
- */
- BSONType type = pValue->getType();
- if ((type == Undefined) || (type == jstNULL))
- return Value::getUndefined();
-
- if (type == Object) {
- /* extract from the next level down */
- return evaluatePath(index, pathLength, pValue->getDocument());
- }
-
- if (type == Array) {
- /*
- We're going to repeat this for each member of the array,
- building up a new array as we go.
- */
- vector<intrusive_ptr<const Value> > result;
- intrusive_ptr<ValueIterator> pIter(pValue->getArray());
- while(pIter->more()) {
- intrusive_ptr<const Value> pItem(pIter->next());
- BSONType iType = pItem->getType();
- if ((iType == Undefined) || (iType == jstNULL)) {
- result.push_back(pItem);
- continue;
- }
-
- uassert(16014, str::stream() <<
- "the element \"" << fieldPath.getFieldName(index) <<
- "\" along the dotted path \"" <<
- fieldPath.getPath(false) <<
- "\" is not an object, and cannot be navigated",
- iType == Object);
- intrusive_ptr<const Value> itemResult(
- evaluatePath(index, pathLength, pItem->getDocument()));
- result.push_back(itemResult);
- }
-
- return Value::createArray(result);
- }
-
- uassert(16015, str::stream() <<
- "can't navigate into value of type " << type <<
- "at \"" << fieldPath.getFieldName(index) <<
- "\" in dotted path \"" << fieldPath.getPath(false),
- false);
- return intrusive_ptr<const Value>();
+ pValue = pDocument->getValue(fieldPath.getFieldName(index));
+
+ /* if the field doesn't exist, quit with an undefined value */
+ if (!pValue.get())
+ return Value::getUndefined();
+
+ /* if we've hit the end of the path, stop */
+ ++index;
+ if (index >= pathLength)
+ return pValue;
+
+ /*
+ We're diving deeper. If the value was null, return null.
+ */
+ BSONType type = pValue->getType();
+ if ((type == Undefined) || (type == jstNULL))
+ return Value::getUndefined();
+
+ if (type == Object) {
+ /* extract from the next level down */
+ return evaluatePath(index, pathLength, pValue->getDocument());
+ }
+
+ if (type == Array) {
+ /*
+ We're going to repeat this for each member of the array,
+ building up a new array as we go.
+ */
+ vector<intrusive_ptr<const Value> > result;
+ intrusive_ptr<ValueIterator> pIter(pValue->getArray());
+ while(pIter->more()) {
+ intrusive_ptr<const Value> pItem(pIter->next());
+ BSONType iType = pItem->getType();
+ if ((iType == Undefined) || (iType == jstNULL)) {
+ result.push_back(pItem);
+ continue;
+ }
+
+ uassert(16014, str::stream() <<
+ "the element \"" << fieldPath.getFieldName(index) <<
+ "\" along the dotted path \"" <<
+ fieldPath.getPath(false) <<
+ "\" is not an object, and cannot be navigated",
+ iType == Object);
+ intrusive_ptr<const Value> itemResult(
+ evaluatePath(index, pathLength, pItem->getDocument()));
+ result.push_back(itemResult);
+ }
+
+ return Value::createArray(result);
+ }
+
+ uassert(16015, str::stream() <<
+ "can't navigate into value of type " << type <<
+ "at \"" << fieldPath.getFieldName(index) <<
+ "\" in dotted path \"" << fieldPath.getPath(false),
+ false);
+ return intrusive_ptr<const Value>();
}
intrusive_ptr<const Value> ExpressionFieldPath::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- return evaluatePath(0, fieldPath.getPathLength(), pDocument);
+ return evaluatePath(0, fieldPath.getPathLength(), pDocument);
}
void ExpressionFieldPath::addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const {
- pBuilder->append(fieldName, fieldPath.getPath(true));
+ BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const {
+ pBuilder->append(fieldName, fieldPath.getPath(true));
}
void ExpressionFieldPath::addToBsonArray(
- BSONArrayBuilder *pBuilder, unsigned depth) const {
- pBuilder->append(getFieldPath(true));
+ BSONArrayBuilder *pBuilder, unsigned depth) const {
+ pBuilder->append(getFieldPath(true));
}
/* --------------------- ExpressionFieldPath --------------------------- */
@@ -1602,310 +1602,310 @@ namespace mongo {
}
intrusive_ptr<Expression> ExpressionFieldRange::optimize() {
- /* if there is no range to match, this will never evaluate true */
- if (!pRange.get())
- return ExpressionConstant::create(Value::getFalse());
-
- /*
- If we ended up with a double un-ended range, anything matches. I
- don't know how that can happen, given intersect()'s interface, but
- here it is, just in case.
- */
- if (!pRange->pBottom.get() && !pRange->pTop.get())
- return ExpressionConstant::create(Value::getTrue());
-
- /*
- In all other cases, we have to test candidate values. The
- intersect() method has already optimized those tests, so there
- aren't any more optimizations to look for here.
- */
- return intrusive_ptr<Expression>(this);
+ /* if there is no range to match, this will never evaluate true */
+ if (!pRange.get())
+ return ExpressionConstant::create(Value::getFalse());
+
+ /*
+ If we ended up with a double un-ended range, anything matches. I
+ don't know how that can happen, given intersect()'s interface, but
+ here it is, just in case.
+ */
+ if (!pRange->pBottom.get() && !pRange->pTop.get())
+ return ExpressionConstant::create(Value::getTrue());
+
+ /*
+ In all other cases, we have to test candidate values. The
+ intersect() method has already optimized those tests, so there
+ aren't any more optimizations to look for here.
+ */
+ return intrusive_ptr<Expression>(this);
}
intrusive_ptr<const Value> ExpressionFieldRange::evaluate(
- const intrusive_ptr<Document> &pDocument) const {
- /* if there's no range, there can't be a match */
- if (!pRange.get())
- return Value::getFalse();
+ const intrusive_ptr<Document> &pDocument) const {
+ /* if there's no range, there can't be a match */
+ if (!pRange.get())
+ return Value::getFalse();
- /* get the value of the specified field */
- intrusive_ptr<const Value> pValue(pFieldPath->evaluate(pDocument));
+ /* get the value of the specified field */
+ intrusive_ptr<const Value> pValue(pFieldPath->evaluate(pDocument));
- /* see if it fits within any of the ranges */
- if (pRange->contains(pValue))
- return Value::getTrue();
+ /* see if it fits within any of the ranges */
+ if (pRange->contains(pValue))
+ return Value::getTrue();
- return Value::getFalse();
+ return Value::getFalse();
}
void ExpressionFieldRange::addToBson(
- Builder *pBuilder, unsigned depth) const {
- if (!pRange.get()) {
- /* nothing will satisfy this predicate */
- pBuilder->append(false);
- return;
- }
-
- if (!pRange->pTop.get() && !pRange->pBottom.get()) {
- /* any value will satisfy this predicate */
- pBuilder->append(true);
- return;
- }
-
- if (pRange->pTop.get() == pRange->pBottom.get()) {
- BSONArrayBuilder operands;
- pFieldPath->addToBsonArray(&operands, depth);
- pRange->pTop->addToBsonArray(&operands);
-
- BSONObjBuilder equals;
- equals.append("$eq", operands.arr());
- pBuilder->append(&equals);
- return;
- }
-
- BSONObjBuilder leftOperator;
- if (pRange->pBottom.get()) {
- BSONArrayBuilder leftOperands;
- pFieldPath->addToBsonArray(&leftOperands, depth);
- pRange->pBottom->addToBsonArray(&leftOperands);
- leftOperator.append(
- (pRange->bottomOpen ? "$gt" : "$gte"),
- leftOperands.arr());
-
- if (!pRange->pTop.get()) {
- pBuilder->append(&leftOperator);
- return;
- }
- }
-
- BSONObjBuilder rightOperator;
- if (pRange->pTop.get()) {
- BSONArrayBuilder rightOperands;
- pFieldPath->addToBsonArray(&rightOperands, depth);
- pRange->pTop->addToBsonArray(&rightOperands);
- rightOperator.append(
- (pRange->topOpen ? "$lt" : "$lte"),
- rightOperands.arr());
-
- if (!pRange->pBottom.get()) {
- pBuilder->append(&rightOperator);
- return;
- }
- }
-
- BSONArrayBuilder andOperands;
- andOperands.append(leftOperator.done());
- andOperands.append(rightOperator.done());
- BSONObjBuilder andOperator;
- andOperator.append("$and", andOperands.arr());
- pBuilder->append(&andOperator);
+ Builder *pBuilder, unsigned depth) const {
+ if (!pRange.get()) {
+ /* nothing will satisfy this predicate */
+ pBuilder->append(false);
+ return;
+ }
+
+ if (!pRange->pTop.get() && !pRange->pBottom.get()) {
+ /* any value will satisfy this predicate */
+ pBuilder->append(true);
+ return;
+ }
+
+ if (pRange->pTop.get() == pRange->pBottom.get()) {
+ BSONArrayBuilder operands;
+ pFieldPath->addToBsonArray(&operands, depth);
+ pRange->pTop->addToBsonArray(&operands);
+
+ BSONObjBuilder equals;
+ equals.append("$eq", operands.arr());
+ pBuilder->append(&equals);
+ return;
+ }
+
+ BSONObjBuilder leftOperator;
+ if (pRange->pBottom.get()) {
+ BSONArrayBuilder leftOperands;
+ pFieldPath->addToBsonArray(&leftOperands, depth);
+ pRange->pBottom->addToBsonArray(&leftOperands);
+ leftOperator.append(
+ (pRange->bottomOpen ? "$gt" : "$gte"),
+ leftOperands.arr());
+
+ if (!pRange->pTop.get()) {
+ pBuilder->append(&leftOperator);
+ return;
+ }
+ }
+
+ BSONObjBuilder rightOperator;
+ if (pRange->pTop.get()) {
+ BSONArrayBuilder rightOperands;
+ pFieldPath->addToBsonArray(&rightOperands, depth);
+ pRange->pTop->addToBsonArray(&rightOperands);
+ rightOperator.append(
+ (pRange->topOpen ? "$lt" : "$lte"),
+ rightOperands.arr());
+
+ if (!pRange->pBottom.get()) {
+ pBuilder->append(&rightOperator);
+ return;
+ }
+ }
+
+ BSONArrayBuilder andOperands;
+ andOperands.append(leftOperator.done());
+ andOperands.append(rightOperator.done());
+ BSONObjBuilder andOperator;
+ andOperator.append("$and", andOperands.arr());
+ pBuilder->append(&andOperator);
}
void ExpressionFieldRange::addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const {
- BuilderObj builder(pBuilder, fieldName);
- addToBson(&builder, depth);
+ BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const {
+ BuilderObj builder(pBuilder, fieldName);
+ addToBson(&builder, depth);
}
void ExpressionFieldRange::addToBsonArray(
- BSONArrayBuilder *pBuilder, unsigned depth) const {
- BuilderArray builder(pBuilder);
- addToBson(&builder, depth);
+ BSONArrayBuilder *pBuilder, unsigned depth) const {
+ BuilderArray builder(pBuilder);
+ addToBson(&builder, depth);
}
void ExpressionFieldRange::toMatcherBson(
- BSONObjBuilder *pBuilder, unsigned depth) const {
- assert(pRange.get()); // otherwise, we can't do anything
-
- /* if there are no endpoints, then every value is accepted */
- if (!pRange->pBottom.get() && !pRange->pTop.get())
- return; // nothing to add to the predicate
-
- /* we're going to need the field path */
- string fieldPath(pFieldPath->getFieldPath(false));
-
- BSONObjBuilder range;
- if (pRange->pBottom.get()) {
- /* the test for equality doesn't generate a subobject */
- if (pRange->pBottom.get() == pRange->pTop.get()) {
- pRange->pBottom->addToBsonObj(pBuilder, fieldPath);
- return;
- }
+ BSONObjBuilder *pBuilder, unsigned depth) const {
+ assert(pRange.get()); // otherwise, we can't do anything
+
+ /* if there are no endpoints, then every value is accepted */
+ if (!pRange->pBottom.get() && !pRange->pTop.get())
+ return; // nothing to add to the predicate
+
+ /* we're going to need the field path */
+ string fieldPath(pFieldPath->getFieldPath(false));
+
+ BSONObjBuilder range;
+ if (pRange->pBottom.get()) {
+ /* the test for equality doesn't generate a subobject */
+ if (pRange->pBottom.get() == pRange->pTop.get()) {
+ pRange->pBottom->addToBsonObj(pBuilder, fieldPath);
+ return;
+ }
- pRange->pBottom->addToBsonObj(
- pBuilder, (pRange->bottomOpen ? "$gt" : "$gte"));
- }
+ pRange->pBottom->addToBsonObj(
+ pBuilder, (pRange->bottomOpen ? "$gt" : "$gte"));
+ }
- if (pRange->pTop.get()) {
- pRange->pTop->addToBsonObj(
- pBuilder, (pRange->topOpen ? "$lt" : "$lte"));
- }
+ if (pRange->pTop.get()) {
+ pRange->pTop->addToBsonObj(
+ pBuilder, (pRange->topOpen ? "$lt" : "$lte"));
+ }
- pBuilder->append(fieldPath, range.done());
+ pBuilder->append(fieldPath, range.done());
}
intrusive_ptr<ExpressionFieldRange> ExpressionFieldRange::create(
- const intrusive_ptr<ExpressionFieldPath> &pFieldPath, CmpOp cmpOp,
- const intrusive_ptr<const Value> &pValue) {
- intrusive_ptr<ExpressionFieldRange> pE(
- new ExpressionFieldRange(pFieldPath, cmpOp, pValue));
- return pE;
+ const intrusive_ptr<ExpressionFieldPath> &pFieldPath, CmpOp cmpOp,
+ const intrusive_ptr<const Value> &pValue) {
+ intrusive_ptr<ExpressionFieldRange> pE(
+ new ExpressionFieldRange(pFieldPath, cmpOp, pValue));
+ return pE;
}
ExpressionFieldRange::ExpressionFieldRange(
- const intrusive_ptr<ExpressionFieldPath> &pTheFieldPath, CmpOp cmpOp,
- const intrusive_ptr<const Value> &pValue):
+ const intrusive_ptr<ExpressionFieldPath> &pTheFieldPath, CmpOp cmpOp,
+ const intrusive_ptr<const Value> &pValue):
pFieldPath(pTheFieldPath),
- pRange(new Range(cmpOp, pValue)) {
+ pRange(new Range(cmpOp, pValue)) {
}
void ExpressionFieldRange::intersect(
- CmpOp cmpOp, const intrusive_ptr<const Value> &pValue) {
+ CmpOp cmpOp, const intrusive_ptr<const Value> &pValue) {
- /* create the new range */
- scoped_ptr<Range> pNew(new Range(cmpOp, pValue));
+ /* create the new range */
+ scoped_ptr<Range> pNew(new Range(cmpOp, pValue));
- /*
- Go through the range list. For every range, either add the
- intersection of that to the range list, or if there is none, the
- original range. This has the effect of restricting overlapping
- ranges, but leaving non-overlapping ones as-is.
- */
- pRange.reset(pRange->intersect(pNew.get()));
+ /*
+ Go through the range list. For every range, either add the
+ intersection of that to the range list, or if there is none, the
+ original range. This has the effect of restricting overlapping
+ ranges, but leaving non-overlapping ones as-is.
+ */
+ pRange.reset(pRange->intersect(pNew.get()));
}
ExpressionFieldRange::Range::Range(
- CmpOp cmpOp, const intrusive_ptr<const Value> &pValue):
- bottomOpen(false),
- topOpen(false),
- pBottom(),
- pTop() {
- switch(cmpOp) {
- case NE:
- bottomOpen = topOpen = true;
- /* FALLTHROUGH */
- case EQ:
- pBottom = pTop = pValue;
- break;
-
- case GT:
- bottomOpen = true;
- /* FALLTHROUGH */
- case GTE:
- topOpen = true;
- pBottom = pValue;
- break;
-
- case LT:
- topOpen = true;
- /* FALLTHROUGH */
- case LTE:
- bottomOpen = true;
- pTop = pValue;
- break;
-
- case CMP:
- assert(false); // not allowed
- break;
- }
+ CmpOp cmpOp, const intrusive_ptr<const Value> &pValue):
+ bottomOpen(false),
+ topOpen(false),
+ pBottom(),
+ pTop() {
+ switch(cmpOp) {
+ case NE:
+ bottomOpen = topOpen = true;
+ /* FALLTHROUGH */
+ case EQ:
+ pBottom = pTop = pValue;
+ break;
+
+ case GT:
+ bottomOpen = true;
+ /* FALLTHROUGH */
+ case GTE:
+ topOpen = true;
+ pBottom = pValue;
+ break;
+
+ case LT:
+ topOpen = true;
+ /* FALLTHROUGH */
+ case LTE:
+ bottomOpen = true;
+ pTop = pValue;
+ break;
+
+ case CMP:
+ assert(false); // not allowed
+ break;
+ }
}
ExpressionFieldRange::Range::Range(const Range &rRange):
- bottomOpen(rRange.bottomOpen),
- topOpen(rRange.topOpen),
- pBottom(rRange.pBottom),
- pTop(rRange.pTop) {
+ bottomOpen(rRange.bottomOpen),
+ topOpen(rRange.topOpen),
+ pBottom(rRange.pBottom),
+ pTop(rRange.pTop) {
}
ExpressionFieldRange::Range::Range(
- const intrusive_ptr<const Value> &pTheBottom, bool theBottomOpen,
- const intrusive_ptr<const Value> &pTheTop, bool theTopOpen):
- bottomOpen(theBottomOpen),
- topOpen(theTopOpen),
- pBottom(pTheBottom),
- pTop(pTheTop) {
- }
-
+ const intrusive_ptr<const Value> &pTheBottom, bool theBottomOpen,
+ const intrusive_ptr<const Value> &pTheTop, bool theTopOpen):
+ bottomOpen(theBottomOpen),
+ topOpen(theTopOpen),
+ pBottom(pTheBottom),
+ pTop(pTheTop) {
+ }
+
ExpressionFieldRange::Range *ExpressionFieldRange::Range::intersect(
- const Range *pRange) const {
- /*
- Find the max of the bottom end of the ranges.
-
- Start by assuming the maximum is from pRange. Then, if we have
- values of our own, see if they're greater.
- */
- intrusive_ptr<const Value> pMaxBottom(pRange->pBottom);
- bool maxBottomOpen = pRange->bottomOpen;
- if (pBottom.get()) {
- if (!pRange->pBottom.get()) {
- pMaxBottom = pBottom;
- maxBottomOpen = bottomOpen;
- }
- else {
- const int cmp = Value::compare(pBottom, pRange->pBottom);
- if (cmp == 0)
- maxBottomOpen = bottomOpen || pRange->bottomOpen;
- else if (cmp > 0) {
- pMaxBottom = pBottom;
- maxBottomOpen = bottomOpen;
- }
- }
- }
-
- /*
- Find the minimum of the tops of the ranges.
-
- Start by assuming the minimum is from pRange. Then, if we have
- values of our own, see if they are less.
- */
- intrusive_ptr<const Value> pMinTop(pRange->pTop);
- bool minTopOpen = pRange->topOpen;
- if (pTop.get()) {
- if (!pRange->pTop.get()) {
- pMinTop = pTop;
- minTopOpen = topOpen;
- }
- else {
- const int cmp = Value::compare(pTop, pRange->pTop);
- if (cmp == 0)
- minTopOpen = topOpen || pRange->topOpen;
- else if (cmp < 0) {
- pMinTop = pTop;
- minTopOpen = topOpen;
- }
- }
- }
-
- /*
- If the intersections didn't create a disjoint set, create the
- new range.
- */
- if (Value::compare(pMaxBottom, pMinTop) <= 0)
- return new Range(pMaxBottom, maxBottomOpen, pMinTop, minTopOpen);
-
- /* if we got here, the intersection is empty */
- return NULL;
+ const Range *pRange) const {
+ /*
+ Find the max of the bottom end of the ranges.
+
+ Start by assuming the maximum is from pRange. Then, if we have
+ values of our own, see if they're greater.
+ */
+ intrusive_ptr<const Value> pMaxBottom(pRange->pBottom);
+ bool maxBottomOpen = pRange->bottomOpen;
+ if (pBottom.get()) {
+ if (!pRange->pBottom.get()) {
+ pMaxBottom = pBottom;
+ maxBottomOpen = bottomOpen;
+ }
+ else {
+ const int cmp = Value::compare(pBottom, pRange->pBottom);
+ if (cmp == 0)
+ maxBottomOpen = bottomOpen || pRange->bottomOpen;
+ else if (cmp > 0) {
+ pMaxBottom = pBottom;
+ maxBottomOpen = bottomOpen;
+ }
+ }
+ }
+
+ /*
+ Find the minimum of the tops of the ranges.
+
+ Start by assuming the minimum is from pRange. Then, if we have
+ values of our own, see if they are less.
+ */
+ intrusive_ptr<const Value> pMinTop(pRange->pTop);
+ bool minTopOpen = pRange->topOpen;
+ if (pTop.get()) {
+ if (!pRange->pTop.get()) {
+ pMinTop = pTop;
+ minTopOpen = topOpen;
+ }
+ else {
+ const int cmp = Value::compare(pTop, pRange->pTop);
+ if (cmp == 0)
+ minTopOpen = topOpen || pRange->topOpen;
+ else if (cmp < 0) {
+ pMinTop = pTop;
+ minTopOpen = topOpen;
+ }
+ }
+ }
+
+ /*
+ If the intersections didn't create a disjoint set, create the
+ new range.
+ */
+ if (Value::compare(pMaxBottom, pMinTop) <= 0)
+ return new Range(pMaxBottom, maxBottomOpen, pMinTop, minTopOpen);
+
+ /* if we got here, the intersection is empty */
+ return NULL;
}
bool ExpressionFieldRange::Range::contains(
- const intrusive_ptr<const Value> &pValue) const {
- if (pBottom.get()) {
- const int cmp = Value::compare(pValue, pBottom);
- if (cmp < 0)
- return false;
- if (bottomOpen && (cmp == 0))
- return false;
- }
-
- if (pTop.get()) {
- const int cmp = Value::compare(pValue, pTop);
- if (cmp > 0)
- return false;
- if (topOpen && (cmp == 0))
- return false;
- }
-
- return true;
+ const intrusive_ptr<const Value> &pValue) const {
+ if (pBottom.get()) {
+ const int cmp = Value::compare(pValue, pBottom);
+ if (cmp < 0)
+ return false;
+ if (bottomOpen && (cmp == 0))
+ return false;
+ }
+
+ if (pTop.get()) {
+ const int cmp = Value::compare(pValue, pTop);
+ if (cmp > 0)
+ return false;
+ if (topOpen && (cmp == 0))
+ return false;
+ }
+
+ return true;
}
/* ------------------------- ExpressionMinute ----------------------------- */
@@ -1923,13 +1923,13 @@ namespace mongo {
}
void ExpressionMinute::addOperand(const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(1);
+ checkArgLimit(1);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionMinute::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(1);
+ checkArgCount(1);
intrusive_ptr<const Value> pDate(vpOperand[0]->evaluate(pDocument));
tm date;
(pDate->coerceToDate()).toTm(&date);
@@ -1937,7 +1937,7 @@ namespace mongo {
}
const char *ExpressionMinute::getOpName() const {
- return "$minute";
+ return "$minute";
}
/* ----------------------- ExpressionMod ---------------------------- */
@@ -1955,23 +1955,23 @@ namespace mongo {
}
void ExpressionMod::addOperand(
- const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(2);
+ const intrusive_ptr<Expression> &pExpression) {
+ checkArgLimit(2);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionMod::evaluate(
const intrusive_ptr<Document> &pDocument) const {
BSONType productType;
- checkArgCount(2);
+ checkArgCount(2);
intrusive_ptr<const Value> pLeft(vpOperand[0]->evaluate(pDocument));
intrusive_ptr<const Value> pRight(vpOperand[1]->evaluate(pDocument));
productType = Value::getWidestNumeric(pRight->getType(), pLeft->getType());
long long right = pRight->coerceToLong();
- if (right == 0)
- return Value::getUndefined();
+ if (right == 0)
+ return Value::getUndefined();
long long left = pLeft->coerceToLong();
if (productType == NumberLong)
@@ -1980,7 +1980,7 @@ namespace mongo {
}
const char *ExpressionMod::getOpName() const {
- return "$mod";
+ return "$mod";
}
/* ------------------------- ExpressionMonth ----------------------------- */
@@ -1998,13 +1998,13 @@ namespace mongo {
}
void ExpressionMonth::addOperand(const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(1);
+ checkArgLimit(1);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionMonth::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(1);
+ checkArgCount(1);
intrusive_ptr<const Value> pDate(vpOperand[0]->evaluate(pDocument));
tm date;
(pDate->coerceToDate()).toTm(&date);
@@ -2012,7 +2012,7 @@ namespace mongo {
}
const char *ExpressionMonth::getOpName() const {
- return "$month";
+ return "$month";
}
/* ------------------------- ExpressionMultiply ----------------------------- */
@@ -2080,13 +2080,13 @@ namespace mongo {
}
void ExpressionHour::addOperand(const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(1);
+ checkArgLimit(1);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionHour::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(1);
+ checkArgCount(1);
intrusive_ptr<const Value> pDate(vpOperand[0]->evaluate(pDocument));
tm date;
(pDate->coerceToDate()).toTm(&date);
@@ -2094,7 +2094,7 @@ namespace mongo {
}
const char *ExpressionHour::getOpName() const {
- return "$hour";
+ return "$hour";
}
/* ----------------------- ExpressionIfNull ---------------------------- */
@@ -2112,16 +2112,16 @@ namespace mongo {
}
void ExpressionIfNull::addOperand(
- const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(2);
+ const intrusive_ptr<Expression> &pExpression) {
+ checkArgLimit(2);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionIfNull::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(2);
+ checkArgCount(2);
intrusive_ptr<const Value> pLeft(vpOperand[0]->evaluate(pDocument));
- BSONType leftType = pLeft->getType();
+ BSONType leftType = pLeft->getType();
if ((leftType != Undefined) && (leftType != jstNULL))
return pLeft;
@@ -2131,7 +2131,7 @@ namespace mongo {
}
const char *ExpressionIfNull::getOpName() const {
- return "$ifNull";
+ return "$ifNull";
}
/* ------------------------ ExpressionNary ----------------------------- */
@@ -2141,135 +2141,135 @@ namespace mongo {
}
intrusive_ptr<Expression> ExpressionNary::optimize() {
- unsigned constCount = 0; // count of constant operands
- unsigned stringCount = 0; // count of constant string operands
- const size_t n = vpOperand.size();
- for(size_t i = 0; i < n; ++i) {
- intrusive_ptr<Expression> pNew(vpOperand[i]->optimize());
-
- /* subsitute the optimized expression */
- vpOperand[i] = pNew;
-
- /* check to see if the result was a constant */
- const ExpressionConstant *pConst =
- dynamic_cast<ExpressionConstant *>(pNew.get());
- if (pConst) {
- ++constCount;
- if (pConst->getValue()->getType() == String)
- ++stringCount;
- }
- }
-
- /*
- If all the operands are constant, we can replace this expression
- with a constant. We can find the value by evaluating this
- expression over a NULL Document because evaluating the
- ExpressionConstant never refers to the argument Document.
- */
- if (constCount == n) {
- intrusive_ptr<const Value> pResult(
- evaluate(intrusive_ptr<Document>()));
- intrusive_ptr<Expression> pReplacement(
- ExpressionConstant::create(pResult));
- return pReplacement;
- }
-
- /*
- If there are any strings, we can't re-arrange anything, so stop
- now.
-
- LATER: we could concatenate adjacent strings as a special case.
- */
- if (stringCount)
- return intrusive_ptr<Expression>(this);
-
- /*
- If there's no more than one constant, then we can't do any
- constant folding, so don't bother going any further.
- */
- if (constCount <= 1)
- return intrusive_ptr<Expression>(this);
-
- /*
- If the operator isn't commutative or associative, there's nothing
- more we can do. We test that by seeing if we can get a factory;
- if we can, we can use it to construct a temporary expression which
- we'll evaluate to collapse as many constants as we can down to
- a single one.
- */
- intrusive_ptr<ExpressionNary> (*const pFactory)() = getFactory();
- if (!pFactory)
- return intrusive_ptr<Expression>(this);
-
- /*
- Create a new Expression that will be the replacement for this one.
- We actually create two: one to hold constant expressions, and
- one to hold non-constants. Once we've got these, we evaluate
- the constant expression to produce a single value, as above.
- We then add this operand to the end of the non-constant expression,
- and return that.
- */
- intrusive_ptr<ExpressionNary> pNew((*pFactory)());
- intrusive_ptr<ExpressionNary> pConst((*pFactory)());
- for(size_t i = 0; i < n; ++i) {
- intrusive_ptr<Expression> pE(vpOperand[i]);
- if (dynamic_cast<ExpressionConstant *>(pE.get()))
- pConst->addOperand(pE);
- else {
- /*
- If the child operand is the same type as this, then we can
- extract its operands and inline them here because we already
- know this is commutative and associative because it has a
- factory. We can detect sameness of the child operator by
- checking for equality of the factory
-
- Note we don't have to do this recursively, because we
- called optimize() on all the children first thing in
- this call to optimize().
- */
- ExpressionNary *pNary =
- dynamic_cast<ExpressionNary *>(pE.get());
- if (!pNary)
- pNew->addOperand(pE);
- else {
- intrusive_ptr<ExpressionNary> (*const pChildFactory)() =
- pNary->getFactory();
- if (pChildFactory != pFactory)
- pNew->addOperand(pE);
- else {
- /* same factory, so flatten */
- size_t nChild = pNary->vpOperand.size();
- for(size_t iChild = 0; iChild < nChild; ++iChild) {
- intrusive_ptr<Expression> pCE(
- pNary->vpOperand[iChild]);
- if (dynamic_cast<ExpressionConstant *>(pCE.get()))
- pConst->addOperand(pCE);
- else
- pNew->addOperand(pCE);
- }
- }
- }
- }
- }
-
- /*
- If there was only one constant, add it to the end of the expression
- operand vector.
- */
- if (pConst->vpOperand.size() == 1)
- pNew->addOperand(pConst->vpOperand[0]);
- else if (pConst->vpOperand.size() > 1) {
- /*
- If there was more than one constant, collapse all the constants
- together before adding the result to the end of the expression
- operand vector.
- */
- intrusive_ptr<const Value> pResult(
- pConst->evaluate(intrusive_ptr<Document>()));
- pNew->addOperand(ExpressionConstant::create(pResult));
- }
-
- return pNew;
+ unsigned constCount = 0; // count of constant operands
+ unsigned stringCount = 0; // count of constant string operands
+ const size_t n = vpOperand.size();
+ for(size_t i = 0; i < n; ++i) {
+ intrusive_ptr<Expression> pNew(vpOperand[i]->optimize());
+
+ /* substitute the optimized expression */
+ vpOperand[i] = pNew;
+
+ /* check to see if the result was a constant */
+ const ExpressionConstant *pConst =
+ dynamic_cast<ExpressionConstant *>(pNew.get());
+ if (pConst) {
+ ++constCount;
+ if (pConst->getValue()->getType() == String)
+ ++stringCount;
+ }
+ }
+
+ /*
+ If all the operands are constant, we can replace this expression
+ with a constant. We can find the value by evaluating this
+ expression over a NULL Document because evaluating the
+ ExpressionConstant never refers to the argument Document.
+ */
+ if (constCount == n) {
+ intrusive_ptr<const Value> pResult(
+ evaluate(intrusive_ptr<Document>()));
+ intrusive_ptr<Expression> pReplacement(
+ ExpressionConstant::create(pResult));
+ return pReplacement;
+ }
+
+ /*
+ If there are any strings, we can't re-arrange anything, so stop
+ now.
+
+ LATER: we could concatenate adjacent strings as a special case.
+ */
+ if (stringCount)
+ return intrusive_ptr<Expression>(this);
+
+ /*
+ If there's no more than one constant, then we can't do any
+ constant folding, so don't bother going any further.
+ */
+ if (constCount <= 1)
+ return intrusive_ptr<Expression>(this);
+
+ /*
+ If the operator isn't commutative or associative, there's nothing
+ more we can do. We test that by seeing if we can get a factory;
+ if we can, we can use it to construct a temporary expression which
+ we'll evaluate to collapse as many constants as we can down to
+ a single one.
+ */
+ intrusive_ptr<ExpressionNary> (*const pFactory)() = getFactory();
+ if (!pFactory)
+ return intrusive_ptr<Expression>(this);
+
+ /*
+ Create a new Expression that will be the replacement for this one.
+ We actually create two: one to hold constant expressions, and
+ one to hold non-constants. Once we've got these, we evaluate
+ the constant expression to produce a single value, as above.
+ We then add this operand to the end of the non-constant expression,
+ and return that.
+ */
+ intrusive_ptr<ExpressionNary> pNew((*pFactory)());
+ intrusive_ptr<ExpressionNary> pConst((*pFactory)());
+ for(size_t i = 0; i < n; ++i) {
+ intrusive_ptr<Expression> pE(vpOperand[i]);
+ if (dynamic_cast<ExpressionConstant *>(pE.get()))
+ pConst->addOperand(pE);
+ else {
+ /*
+ If the child operand is the same type as this, then we can
+ extract its operands and inline them here because we already
+ know this is commutative and associative because it has a
+ factory. We can detect sameness of the child operator by
+ checking for equality of the factory
+
+ Note we don't have to do this recursively, because we
+ called optimize() on all the children first thing in
+ this call to optimize().
+ */
+ ExpressionNary *pNary =
+ dynamic_cast<ExpressionNary *>(pE.get());
+ if (!pNary)
+ pNew->addOperand(pE);
+ else {
+ intrusive_ptr<ExpressionNary> (*const pChildFactory)() =
+ pNary->getFactory();
+ if (pChildFactory != pFactory)
+ pNew->addOperand(pE);
+ else {
+ /* same factory, so flatten */
+ size_t nChild = pNary->vpOperand.size();
+ for(size_t iChild = 0; iChild < nChild; ++iChild) {
+ intrusive_ptr<Expression> pCE(
+ pNary->vpOperand[iChild]);
+ if (dynamic_cast<ExpressionConstant *>(pCE.get()))
+ pConst->addOperand(pCE);
+ else
+ pNew->addOperand(pCE);
+ }
+ }
+ }
+ }
+ }
+
+ /*
+ If there was only one constant, add it to the end of the expression
+ operand vector.
+ */
+ if (pConst->vpOperand.size() == 1)
+ pNew->addOperand(pConst->vpOperand[0]);
+ else if (pConst->vpOperand.size() > 1) {
+ /*
+ If there was more than one constant, collapse all the constants
+ together before adding the result to the end of the expression
+ operand vector.
+ */
+ intrusive_ptr<const Value> pResult(
+ pConst->evaluate(intrusive_ptr<Document>()));
+ pNew->addOperand(ExpressionConstant::create(pResult));
+ }
+
+ return pNew;
}
void ExpressionNary::addOperand(
@@ -2278,52 +2278,52 @@ namespace mongo {
}
intrusive_ptr<ExpressionNary> (*ExpressionNary::getFactory() const)() {
- return NULL;
+ return NULL;
}
void ExpressionNary::toBson(
- BSONObjBuilder *pBuilder, const char *pOpName, unsigned depth) const {
- const size_t nOperand = vpOperand.size();
- assert(nOperand > 0);
- if (nOperand == 1) {
- vpOperand[0]->addToBsonObj(pBuilder, pOpName, depth + 1);
- return;
- }
+ BSONObjBuilder *pBuilder, const char *pOpName, unsigned depth) const {
+ const size_t nOperand = vpOperand.size();
+ assert(nOperand > 0);
+ if (nOperand == 1) {
+ vpOperand[0]->addToBsonObj(pBuilder, pOpName, depth + 1);
+ return;
+ }
- /* build up the array */
- BSONArrayBuilder arrBuilder;
- for(size_t i = 0; i < nOperand; ++i)
- vpOperand[i]->addToBsonArray(&arrBuilder, depth + 1);
+ /* build up the array */
+ BSONArrayBuilder arrBuilder;
+ for(size_t i = 0; i < nOperand; ++i)
+ vpOperand[i]->addToBsonArray(&arrBuilder, depth + 1);
- pBuilder->append(pOpName, arrBuilder.arr());
+ pBuilder->append(pOpName, arrBuilder.arr());
}
void ExpressionNary::addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const {
- BSONObjBuilder exprBuilder;
- toBson(&exprBuilder, getOpName(), depth);
- pBuilder->append(fieldName, exprBuilder.done());
+ BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const {
+ BSONObjBuilder exprBuilder;
+ toBson(&exprBuilder, getOpName(), depth);
+ pBuilder->append(fieldName, exprBuilder.done());
}
void ExpressionNary::addToBsonArray(
- BSONArrayBuilder *pBuilder, unsigned depth) const {
- BSONObjBuilder exprBuilder;
- toBson(&exprBuilder, getOpName(), depth);
- pBuilder->append(exprBuilder.done());
+ BSONArrayBuilder *pBuilder, unsigned depth) const {
+ BSONObjBuilder exprBuilder;
+ toBson(&exprBuilder, getOpName(), depth);
+ pBuilder->append(exprBuilder.done());
}
void ExpressionNary::checkArgLimit(unsigned maxArgs) const {
- uassert(15993, str::stream() << getOpName() <<
- " only takes " << maxArgs <<
- " operand" << (maxArgs == 1 ? "" : "s"),
- vpOperand.size() < maxArgs);
+ uassert(15993, str::stream() << getOpName() <<
+ " only takes " << maxArgs <<
+ " operand" << (maxArgs == 1 ? "" : "s"),
+ vpOperand.size() < maxArgs);
}
void ExpressionNary::checkArgCount(unsigned reqArgs) const {
- uassert(15997, str::stream() << getOpName() <<
- ": insufficient operands; " << reqArgs <<
- " required, only got " << vpOperand.size(),
- vpOperand.size() == reqArgs);
+ uassert(15997, str::stream() << getOpName() <<
+ ": insufficient operands; " << reqArgs <<
+ " required, only got " << vpOperand.size(),
+ vpOperand.size() == reqArgs);
}
/* ----------------------- ExpressionNoOp ------------------------------ */
@@ -2337,9 +2337,9 @@ namespace mongo {
}
intrusive_ptr<Expression> ExpressionNoOp::optimize() {
- checkArgCount(1);
- intrusive_ptr<Expression> pR(vpOperand[0]->optimize());
- return pR;
+ checkArgCount(1);
+ intrusive_ptr<Expression> pR(vpOperand[0]->optimize());
+ return pR;
}
ExpressionNoOp::ExpressionNoOp():
@@ -2347,19 +2347,19 @@ namespace mongo {
}
void ExpressionNoOp::addOperand(const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(1);
+ checkArgLimit(1);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionNoOp::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(1);
+ checkArgCount(1);
intrusive_ptr<const Value> pValue(vpOperand[0]->evaluate(pDocument));
- return pValue;
+ return pValue;
}
const char *ExpressionNoOp::getOpName() const {
- return "$noOp";
+ return "$noOp";
}
/* ------------------------- ExpressionNot ----------------------------- */
@@ -2377,13 +2377,13 @@ namespace mongo {
}
void ExpressionNot::addOperand(const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(1);
+ checkArgLimit(1);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionNot::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(1);
+ checkArgCount(1);
intrusive_ptr<const Value> pOp(vpOperand[0]->evaluate(pDocument));
bool b = pOp->coerceToBool();
@@ -2393,7 +2393,7 @@ namespace mongo {
}
const char *ExpressionNot::getOpName() const {
- return "$not";
+ return "$not";
}
/* -------------------------- ExpressionOr ----------------------------- */
@@ -2423,72 +2423,72 @@ namespace mongo {
}
void ExpressionOr::toMatcherBson(
- BSONObjBuilder *pBuilder, unsigned depth) const {
- BSONObjBuilder opArray;
- const size_t n = vpOperand.size();
- for(size_t i = 0; i < n; ++i)
- vpOperand[i]->toMatcherBson(&opArray, depth + 1);
+ BSONObjBuilder *pBuilder, unsigned depth) const {
+ BSONObjBuilder opArray;
+ const size_t n = vpOperand.size();
+ for(size_t i = 0; i < n; ++i)
+ vpOperand[i]->toMatcherBson(&opArray, depth + 1);
- pBuilder->append("$or", opArray.done());
+ pBuilder->append("$or", opArray.done());
}
intrusive_ptr<ExpressionNary> (*ExpressionOr::getFactory() const)() {
- return ExpressionOr::create;
+ return ExpressionOr::create;
}
intrusive_ptr<Expression> ExpressionOr::optimize() {
- /* optimize the disjunction as much as possible */
- intrusive_ptr<Expression> pE(ExpressionNary::optimize());
-
- /* if the result isn't a conjunction, we can't do anything */
- ExpressionOr *pOr = dynamic_cast<ExpressionOr *>(pE.get());
- if (!pOr)
- return pE;
-
- /*
- Check the last argument on the result; if it's not constant (as
- promised by ExpressionNary::optimize(),) then there's nothing
- we can do.
- */
- const size_t n = pOr->vpOperand.size();
- intrusive_ptr<Expression> pLast(pOr->vpOperand[n - 1]);
- const ExpressionConstant *pConst =
- dynamic_cast<ExpressionConstant *>(pLast.get());
- if (!pConst)
- return pE;
-
- /*
- Evaluate and coerce the last argument to a boolean. If it's true,
- then we can replace this entire expression.
- */
- bool last = pLast->evaluate(intrusive_ptr<Document>())->coerceToBool();
- if (last) {
- intrusive_ptr<ExpressionConstant> pFinal(
- ExpressionConstant::create(Value::getTrue()));
- return pFinal;
- }
-
- /*
- If we got here, the final operand was false, so we don't need it
- anymore. If there was only one other operand, we don't need the
- conjunction either. Note we still need to keep the promise that
- the result will be a boolean.
- */
- if (n == 2) {
- intrusive_ptr<Expression> pFinal(
- ExpressionCoerceToBool::create(pOr->vpOperand[0]));
- return pFinal;
- }
-
- /*
- Remove the final "false" value, and return the new expression.
- */
- pOr->vpOperand.resize(n - 1);
- return pE;
+ /* optimize the disjunction as much as possible */
+ intrusive_ptr<Expression> pE(ExpressionNary::optimize());
+
+ /* if the result isn't a conjunction, we can't do anything */
+ ExpressionOr *pOr = dynamic_cast<ExpressionOr *>(pE.get());
+ if (!pOr)
+ return pE;
+
+ /*
+ Check the last argument on the result; if it's not constant (as
+ promised by ExpressionNary::optimize(),) then there's nothing
+ we can do.
+ */
+ const size_t n = pOr->vpOperand.size();
+ intrusive_ptr<Expression> pLast(pOr->vpOperand[n - 1]);
+ const ExpressionConstant *pConst =
+ dynamic_cast<ExpressionConstant *>(pLast.get());
+ if (!pConst)
+ return pE;
+
+ /*
+ Evaluate and coerce the last argument to a boolean. If it's true,
+ then we can replace this entire expression.
+ */
+ bool last = pLast->evaluate(intrusive_ptr<Document>())->coerceToBool();
+ if (last) {
+ intrusive_ptr<ExpressionConstant> pFinal(
+ ExpressionConstant::create(Value::getTrue()));
+ return pFinal;
+ }
+
+ /*
+ If we got here, the final operand was false, so we don't need it
+ anymore. If there was only one other operand, we don't need the
+ conjunction either. Note we still need to keep the promise that
+ the result will be a boolean.
+ */
+ if (n == 2) {
+ intrusive_ptr<Expression> pFinal(
+ ExpressionCoerceToBool::create(pOr->vpOperand[0]));
+ return pFinal;
+ }
+
+ /*
+ Remove the final "false" value, and return the new expression.
+ */
+ pOr->vpOperand.resize(n - 1);
+ return pE;
}
const char *ExpressionOr::getOpName() const {
- return "$or";
+ return "$or";
}
/* ------------------------- ExpressionSecond ----------------------------- */
@@ -2506,13 +2506,13 @@ namespace mongo {
}
void ExpressionSecond::addOperand(const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(1);
+ checkArgLimit(1);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionSecond::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(1);
+ checkArgCount(1);
intrusive_ptr<const Value> pDate(vpOperand[0]->evaluate(pDocument));
tm date;
(pDate->coerceToDate()).toTm(&date);
@@ -2520,7 +2520,7 @@ namespace mongo {
}
const char *ExpressionSecond::getOpName() const {
- return "$second";
+ return "$second";
}
/* ----------------------- ExpressionStrcasecmp ---------------------------- */
@@ -2538,14 +2538,14 @@ namespace mongo {
}
void ExpressionStrcasecmp::addOperand(
- const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(2);
+ const intrusive_ptr<Expression> &pExpression) {
+ checkArgLimit(2);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionStrcasecmp::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(2);
+ checkArgCount(2);
intrusive_ptr<const Value> pString1(vpOperand[0]->evaluate(pDocument));
intrusive_ptr<const Value> pString2(vpOperand[1]->evaluate(pDocument));
@@ -2562,7 +2562,7 @@ namespace mongo {
}
const char *ExpressionStrcasecmp::getOpName() const {
- return "$strcasecmp";
+ return "$strcasecmp";
}
/* ----------------------- ExpressionSubstr ---------------------------- */
@@ -2580,38 +2580,38 @@ namespace mongo {
}
void ExpressionSubstr::addOperand(
- const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(3);
+ const intrusive_ptr<Expression> &pExpression) {
+ checkArgLimit(3);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionSubstr::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(3);
+ checkArgCount(3);
intrusive_ptr<const Value> pString(vpOperand[0]->evaluate(pDocument));
intrusive_ptr<const Value> pLower(vpOperand[1]->evaluate(pDocument));
intrusive_ptr<const Value> pLength(vpOperand[2]->evaluate(pDocument));
string str = pString->coerceToString();
- uassert(16034, str::stream() << getOpName() <<
- ": starting index must be a numeric type (is BSON type " <<
- pLower->getType() << ")",
- (pLower->getType() == NumberInt
- || pLower->getType() == NumberLong
- || pLower->getType() == NumberDouble));
- uassert(16035, str::stream() << getOpName() <<
- ": length must be a numeric type (is BSON type " <<
- pLength->getType() << ")",
- (pLength->getType() == NumberInt
- || pLength->getType() == NumberLong
- || pLength->getType() == NumberDouble));
+ uassert(16034, str::stream() << getOpName() <<
+ ": starting index must be a numeric type (is BSON type " <<
+ pLower->getType() << ")",
+ (pLower->getType() == NumberInt
+ || pLower->getType() == NumberLong
+ || pLower->getType() == NumberDouble));
+ uassert(16035, str::stream() << getOpName() <<
+ ": length must be a numeric type (is BSON type " <<
+ pLength->getType() << ")",
+ (pLength->getType() == NumberInt
+ || pLength->getType() == NumberLong
+ || pLength->getType() == NumberDouble));
string::size_type lower = static_cast< string::size_type >( pLower->coerceToLong() );
string::size_type length = static_cast< string::size_type >( pLength->coerceToLong() );
return Value::createString( str.substr(lower, length) );
}
const char *ExpressionSubstr::getOpName() const {
- return "$substr";
+ return "$substr";
}
/* ----------------------- ExpressionSubtract ---------------------------- */
@@ -2629,15 +2629,15 @@ namespace mongo {
}
void ExpressionSubtract::addOperand(
- const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(2);
+ const intrusive_ptr<Expression> &pExpression) {
+ checkArgLimit(2);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionSubtract::evaluate(
const intrusive_ptr<Document> &pDocument) const {
BSONType productType;
- checkArgCount(2);
+ checkArgCount(2);
intrusive_ptr<const Value> pLeft(vpOperand[0]->evaluate(pDocument));
intrusive_ptr<const Value> pRight(vpOperand[1]->evaluate(pDocument));
if (pLeft->getType() == Date) {
@@ -2650,11 +2650,11 @@ namespace mongo {
return Value::createDate(Date_t(left-right));
}
- uassert(15996, "cannot subtract one date from another",
- pRight->getType() != Date);
+ uassert(15996, "cannot subtract one date from another",
+ pRight->getType() != Date);
productType = Value::getWidestNumeric(
- pRight->getType(), pLeft->getType());
+ pRight->getType(), pLeft->getType());
if (productType == NumberDouble) {
@@ -2671,7 +2671,7 @@ namespace mongo {
}
const char *ExpressionSubtract::getOpName() const {
- return "$subtract";
+ return "$subtract";
}
/* ------------------------- ExpressionToLower ----------------------------- */
@@ -2689,13 +2689,13 @@ namespace mongo {
}
void ExpressionToLower::addOperand(const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(1);
+ checkArgLimit(1);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionToLower::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(1);
+ checkArgCount(1);
intrusive_ptr<const Value> pString(vpOperand[0]->evaluate(pDocument));
string str = pString->coerceToString();
boost::to_lower(str);
@@ -2703,7 +2703,7 @@ namespace mongo {
}
const char *ExpressionToLower::getOpName() const {
- return "$toLower";
+ return "$toLower";
}
/* ------------------------- ExpressionToUpper -------------------------- */
@@ -2721,14 +2721,14 @@ namespace mongo {
}
void ExpressionToUpper::addOperand(
- const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(1);
+ const intrusive_ptr<Expression> &pExpression) {
+ checkArgLimit(1);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionToUpper::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(1);
+ checkArgCount(1);
intrusive_ptr<const Value> pString(vpOperand[0]->evaluate(pDocument));
string str(pString->coerceToString());
boost::to_upper(str);
@@ -2736,7 +2736,7 @@ namespace mongo {
}
const char *ExpressionToUpper::getOpName() const {
- return "$toUpper";
+ return "$toUpper";
}
/* ------------------------- ExpressionWeek ----------------------------- */
@@ -2754,13 +2754,13 @@ namespace mongo {
}
void ExpressionWeek::addOperand(const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(1);
+ checkArgLimit(1);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionWeek::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(1);
+ checkArgCount(1);
intrusive_ptr<const Value> pDate(vpOperand[0]->evaluate(pDocument));
tm date;
(pDate->coerceToDate()).toTm(&date);
@@ -2777,7 +2777,7 @@ namespace mongo {
}
const char *ExpressionWeek::getOpName() const {
- return "$week";
+ return "$week";
}
/* ------------------------- ExpressionYear ----------------------------- */
@@ -2795,14 +2795,14 @@ namespace mongo {
}
void ExpressionYear::addOperand(
- const intrusive_ptr<Expression> &pExpression) {
- checkArgLimit(1);
+ const intrusive_ptr<Expression> &pExpression) {
+ checkArgLimit(1);
ExpressionNary::addOperand(pExpression);
}
intrusive_ptr<const Value> ExpressionYear::evaluate(
const intrusive_ptr<Document> &pDocument) const {
- checkArgCount(1);
+ checkArgCount(1);
intrusive_ptr<const Value> pDate(vpOperand[0]->evaluate(pDocument));
tm date;
(pDate->coerceToDate()).toTm(&date);
@@ -2810,6 +2810,6 @@ namespace mongo {
}
const char *ExpressionYear::getOpName() const {
- return "$year";
+ return "$year";
}
}
diff --git a/src/mongo/db/pipeline/expression.h b/src/mongo/db/pipeline/expression.h
index c49e385a3c7..dd63df779ee 100755
--- a/src/mongo/db/pipeline/expression.h
+++ b/src/mongo/db/pipeline/expression.h
@@ -36,20 +36,20 @@ namespace mongo {
public:
virtual ~Expression() {};
- /*
- Optimize the Expression.
+ /*
+ Optimize the Expression.
- This provides an opportunity to do constant folding, or to
- collapse nested operators that have the same precedence, such as
- $add, $and, or $or.
+ This provides an opportunity to do constant folding, or to
+ collapse nested operators that have the same precedence, such as
+ $add, $and, or $or.
- The Expression should be replaced with the return value, which may
- or may not be the same object. In the case of constant folding,
- a computed expression may be replaced by a constant.
+ The Expression should be replaced with the return value, which may
+ or may not be the same object. In the case of constant folding,
+ a computed expression may be replaced by a constant.
- @returns the optimized Expression
- */
- virtual intrusive_ptr<Expression> optimize() = 0;
+ @returns the optimized Expression
+ */
+ virtual intrusive_ptr<Expression> optimize() = 0;
/*
Evaluate the Expression using the given document as input.
@@ -59,67 +59,67 @@ namespace mongo {
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const = 0;
- /*
- Add the Expression (and any descendant Expressions) into a BSON
- object that is under construction.
-
- Unevaluated Expressions always materialize as objects. Evaluation
- may produce a scalar or another object, either of which will be
- substituted inline.
-
- @param pBuilder the builder to add the expression to
- @param fieldName the name the object should be given
- */
- virtual void addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName,
- unsigned depth) const = 0;
-
- /*
- Add the Expression (and any descendant Expressions) into a BSON
- array that is under construction.
-
- Unevaluated Expressions always materialize as objects. Evaluation
- may produce a scalar or another object, either of which will be
- substituted inline.
-
- @param pBuilder the builder to add the expression to
- */
- virtual void addToBsonArray(BSONArrayBuilder *pBuilder,
- unsigned depth) const = 0;
-
- /*
- Convert the expression into a BSONObj that corresponds to the
- db.collection.find() predicate language. This is intended for
- use by DocumentSourceFilter.
-
- This is more limited than the full expression language supported
- by all available expressions in a DocumentSource processing
- pipeline, and will fail with an assertion if an attempt is made
- to go outside the bounds of the recognized patterns, which don't
- include full computed expressions. There are other methods available
- on DocumentSourceFilter which can be used to analyze a filter
- predicate and break it up into appropriate expressions which can
- be translated within these constraints. As a result, the default
- implementation is to fail with an assertion; only a subset of
- operators will be able to fulfill this request.
-
- @param pBuilder the builder to add the expression to.
- */
- virtual void toMatcherBson(
- BSONObjBuilder *pBuilder, unsigned depth) const;
-
- /*
- Utility class for parseObject() below.
-
- Only one array can be unwound in a processing pipeline. If the
- UNWIND_OK option is used, unwindOk() will return true, and a field
- can be declared as unwound using unwind(), after which unwindUsed()
- will return true. Only specify UNWIND_OK if it is OK to unwind an
- array in the current context.
-
- DOCUMENT_OK indicates that it is OK to use a Document in the current
- context.
- */
+ /*
+ Add the Expression (and any descendant Expressions) into a BSON
+ object that is under construction.
+
+ Unevaluated Expressions always materialize as objects. Evaluation
+ may produce a scalar or another object, either of which will be
+ substituted inline.
+
+ @param pBuilder the builder to add the expression to
+ @param fieldName the name the object should be given
+ */
+ virtual void addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName,
+ unsigned depth) const = 0;
+
+ /*
+ Add the Expression (and any descendant Expressions) into a BSON
+ array that is under construction.
+
+ Unevaluated Expressions always materialize as objects. Evaluation
+ may produce a scalar or another object, either of which will be
+ substituted inline.
+
+ @param pBuilder the builder to add the expression to
+ */
+ virtual void addToBsonArray(BSONArrayBuilder *pBuilder,
+ unsigned depth) const = 0;
+
+ /*
+ Convert the expression into a BSONObj that corresponds to the
+ db.collection.find() predicate language. This is intended for
+ use by DocumentSourceFilter.
+
+ This is more limited than the full expression language supported
+ by all available expressions in a DocumentSource processing
+ pipeline, and will fail with an assertion if an attempt is made
+ to go outside the bounds of the recognized patterns, which don't
+ include full computed expressions. There are other methods available
+ on DocumentSourceFilter which can be used to analyze a filter
+ predicate and break it up into appropriate expressions which can
+ be translated within these constraints. As a result, the default
+ implementation is to fail with an assertion; only a subset of
+ operators will be able to fulfill this request.
+
+ @param pBuilder the builder to add the expression to.
+ */
+ virtual void toMatcherBson(
+ BSONObjBuilder *pBuilder, unsigned depth) const;
+
+ /*
+ Utility class for parseObject() below.
+
+ Only one array can be unwound in a processing pipeline. If the
+ UNWIND_OK option is used, unwindOk() will return true, and a field
+ can be declared as unwound using unwind(), after which unwindUsed()
+ will return true. Only specify UNWIND_OK if it is OK to unwind an
+ array in the current context.
+
+ DOCUMENT_OK indicates that it is OK to use a Document in the current
+ context.
+ */
class ObjectCtx {
public:
ObjectCtx(int options);
@@ -137,81 +137,81 @@ namespace mongo {
string unwindField;
};
- /*
- Parse a BSONElement Object. The object could represent a functional
- expression or a Document expression.
+ /*
+ Parse a BSONElement Object. The object could represent a functional
+ expression or a Document expression.
- @param pBsonElement the element representing the object
- @param pCtx a MiniCtx representing the options above
- @returns the parsed Expression
- */
+ @param pBsonElement the element representing the object
+ @param pCtx a MiniCtx representing the options above
+ @returns the parsed Expression
+ */
static intrusive_ptr<Expression> parseObject(
BSONElement *pBsonElement, ObjectCtx *pCtx);
- static const char unwindName[];
+ static const char unwindName[];
/*
- Parse a BSONElement Object which has already been determined to be
- functional expression.
+ Parse a BSONElement Object which has already been determined to be
+ functional expression.
- @param pOpName the name of the (prefix) operator
- @param pBsonElement the BSONElement to parse
- @returns the parsed Expression
- */
+ @param pOpName the name of the (prefix) operator
+ @param pBsonElement the BSONElement to parse
+ @returns the parsed Expression
+ */
static intrusive_ptr<Expression> parseExpression(
const char *pOpName, BSONElement *pBsonElement);
- /*
- Parse a BSONElement which is an operand in an Expression.
+ /*
+ Parse a BSONElement which is an operand in an Expression.
- @param pBsonElement the expected operand's BSONElement
- @returns the parsed operand, as an Expression
- */
+ @param pBsonElement the expected operand's BSONElement
+ @returns the parsed operand, as an Expression
+ */
static intrusive_ptr<Expression> parseOperand(
- BSONElement *pBsonElement);
-
- /*
- Produce a field path string with the field prefix removed.
-
- Throws an error if the field prefix is not present.
-
- @param prefixedField the prefixed field
- @returns the field path with the prefix removed
- */
- static string removeFieldPrefix(const string &prefixedField);
-
- /*
- Enumeration of comparison operators. These are shared between a
- few expression implementations, so they are factored out here.
-
- Any changes to these values require adjustment of the lookup
- table in the implementation.
- */
- enum CmpOp {
- EQ = 0, // return true for a == b, false otherwise
- NE = 1, // return true for a != b, false otherwise
- GT = 2, // return true for a > b, false otherwise
- GTE = 3, // return true for a >= b, false otherwise
- LT = 4, // return true for a < b, false otherwise
- LTE = 5, // return true for a <= b, false otherwise
- CMP = 6, // return -1, 0, 1 for a < b, a == b, a > b
- };
-
- static int signum(int i);
+ BSONElement *pBsonElement);
+
+ /*
+ Produce a field path string with the field prefix removed.
+
+ Throws an error if the field prefix is not present.
+
+ @param prefixedField the prefixed field
+ @returns the field path with the prefix removed
+ */
+ static string removeFieldPrefix(const string &prefixedField);
+
+ /*
+ Enumeration of comparison operators. These are shared between a
+ few expression implementations, so they are factored out here.
+
+ Any changes to these values require adjustment of the lookup
+ table in the implementation.
+ */
+ enum CmpOp {
+ EQ = 0, // return true for a == b, false otherwise
+ NE = 1, // return true for a != b, false otherwise
+ GT = 2, // return true for a > b, false otherwise
+ GTE = 3, // return true for a >= b, false otherwise
+ LT = 4, // return true for a < b, false otherwise
+ LTE = 5, // return true for a <= b, false otherwise
+ CMP = 6, // return -1, 0, 1 for a < b, a == b, a > b
+ };
+
+ static int signum(int i);
};
class ExpressionNary :
- public Expression,
+ public Expression,
public boost::enable_shared_from_this<ExpressionNary> {
public:
// virtuals from Expression
- virtual intrusive_ptr<Expression> optimize();
- virtual void addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const;
- virtual void addToBsonArray(
- BSONArrayBuilder *pBuilder, unsigned depth) const;
+ virtual intrusive_ptr<Expression> optimize();
+ virtual void addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const;
+ virtual void addToBsonArray(
+ BSONArrayBuilder *pBuilder, unsigned depth) const;
/*
Add an operand to the n-ary expression.
@@ -220,75 +220,75 @@ namespace mongo {
*/
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
- /*
- Return a factory function that will make Expression nodes of
- the same type as this. This will be used to create constant
- expressions for constant folding for optimize(). Only return
- a factory function if this operator is both associative and
- commutative. The default implementation returns NULL; optimize()
- will recognize that and stop.
-
- Note that ExpressionNary::optimize() promises that if it uses this
- to fold constants, then if optimize() returns an ExpressionNary,
- any remaining constant will be the last one in vpOperand. Derived
- classes may take advantage of this to do further optimizations in
- their optimize().
-
- @returns pointer to a factory function or NULL
- */
- virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
-
- /*
- Get the name of the operator.
-
- @returns the name of the operator; this string belongs to the class
- implementation, and should not be deleted
- and should not
- */
- virtual const char *getOpName() const = 0;
+ /*
+ Return a factory function that will make Expression nodes of
+ the same type as this. This will be used to create constant
+ expressions for constant folding for optimize(). Only return
+ a factory function if this operator is both associative and
+ commutative. The default implementation returns NULL; optimize()
+ will recognize that and stop.
+
+ Note that ExpressionNary::optimize() promises that if it uses this
+ to fold constants, then if optimize() returns an ExpressionNary,
+ any remaining constant will be the last one in vpOperand. Derived
+ classes may take advantage of this to do further optimizations in
+ their optimize().
+
+ @returns pointer to a factory function or NULL
+ */
+ virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
+
+ /*
+ Get the name of the operator.
+
+ @returns the name of the operator; this string belongs to the class
+ implementation, and should not be deleted
+ and should not
+ */
+ virtual const char *getOpName() const = 0;
protected:
ExpressionNary();
vector<intrusive_ptr<Expression> > vpOperand;
- /*
- Add the expression to the builder.
+ /*
+ Add the expression to the builder.
- If there is only one operand (a unary operator), then the operand
- is added directly, without an array. For more than one operand,
- a named array is created. In both cases, the result is an object.
+ If there is only one operand (a unary operator), then the operand
+ is added directly, without an array. For more than one operand,
+ a named array is created. In both cases, the result is an object.
- @param pBuilder the (blank) builder to add the expression to
- @param pOpName the name of the operator
- */
- virtual void toBson(BSONObjBuilder *pBuilder,
- const char *pOpName, unsigned depth) const;
+ @param pBuilder the (blank) builder to add the expression to
+ @param pOpName the name of the operator
+ */
+ virtual void toBson(BSONObjBuilder *pBuilder,
+ const char *pOpName, unsigned depth) const;
- /*
- Checks the current size of vpOperand; if the size equal to or
- greater than maxArgs, fires a user assertion indicating that this
- operator cannot have this many arguments.
+ /*
+ Checks the current size of vpOperand; if the size equal to or
+ greater than maxArgs, fires a user assertion indicating that this
+ operator cannot have this many arguments.
- The equal is there because this is intended to be used in
- addOperand() to check for the limit *before* adding the requested
- argument.
+ The equal is there because this is intended to be used in
+ addOperand() to check for the limit *before* adding the requested
+ argument.
- @param maxArgs the maximum number of arguments the operator accepts
- */
- void checkArgLimit(unsigned maxArgs) const;
+ @param maxArgs the maximum number of arguments the operator accepts
+ */
+ void checkArgLimit(unsigned maxArgs) const;
- /*
- Checks the current size of vpOperand; if the size is not equal to
- reqArgs, fires a user assertion indicating that this must have
- exactly reqArgs arguments.
+ /*
+ Checks the current size of vpOperand; if the size is not equal to
+ reqArgs, fires a user assertion indicating that this must have
+ exactly reqArgs arguments.
- This is meant to be used in evaluate(), *before* the evaluation
- takes place.
+ This is meant to be used in evaluate(), *before* the evaluation
+ takes place.
- @param reqArgs the number of arguments this operator requires
- */
- void checkArgCount(unsigned reqArgs) const;
+ @param reqArgs the number of arguments this operator requires
+ */
+ void checkArgCount(unsigned reqArgs) const;
};
@@ -297,13 +297,13 @@ namespace mongo {
public:
// virtuals from Expression
virtual ~ExpressionAdd();
- virtual intrusive_ptr<Expression> optimize();
+ virtual intrusive_ptr<Expression> optimize();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
- // virtuals from ExpressionNary
- virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
+ // virtuals from ExpressionNary
+ virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
/*
Create an expression that finds the sum of n operands.
@@ -313,22 +313,22 @@ namespace mongo {
static intrusive_ptr<ExpressionNary> create();
protected:
- // virtuals from ExpressionNary
- virtual void toBson(BSONObjBuilder *pBuilder,
- const char *pOpName, unsigned depth) const;
+ // virtuals from ExpressionNary
+ virtual void toBson(BSONObjBuilder *pBuilder,
+ const char *pOpName, unsigned depth) const;
private:
ExpressionAdd();
- /*
- If the operator can be optimized, we save the original here.
+ /*
+ If the operator can be optimized, we save the original here.
- This is necessary because addition must follow its original operand
- ordering strictly if a string is detected, otherwise string
- concatenation may appear to have re-ordered the operands.
- */
- intrusive_ptr<ExpressionAdd> pAdd;
- mutable bool useOriginal;
+ This is necessary because addition must follow its original operand
+ ordering strictly if a string is detected, otherwise string
+ concatenation may appear to have re-ordered the operands.
+ */
+ intrusive_ptr<ExpressionAdd> pAdd;
+ mutable bool useOriginal;
};
@@ -337,15 +337,15 @@ namespace mongo {
public:
// virtuals from Expression
virtual ~ExpressionAnd();
- virtual intrusive_ptr<Expression> optimize();
+ virtual intrusive_ptr<Expression> optimize();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void toMatcherBson(
- BSONObjBuilder *pBuilder, unsigned depth) const;
+ virtual const char *getOpName() const;
+ virtual void toMatcherBson(
+ BSONObjBuilder *pBuilder, unsigned depth) const;
- // virtuals from ExpressionNary
- virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
+ // virtuals from ExpressionNary
+ virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
/*
Create an expression that finds the conjunction of n operands.
@@ -364,26 +364,26 @@ namespace mongo {
class ExpressionCoerceToBool :
- public Expression,
+ public Expression,
public boost::enable_shared_from_this<ExpressionCoerceToBool> {
public:
// virtuals from ExpressionNary
virtual ~ExpressionCoerceToBool();
- virtual intrusive_ptr<Expression> optimize();
+ virtual intrusive_ptr<Expression> optimize();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual void addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const;
- virtual void addToBsonArray(
- BSONArrayBuilder *pBuilder, unsigned depth) const;
+ virtual void addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const;
+ virtual void addToBsonArray(
+ BSONArrayBuilder *pBuilder, unsigned depth) const;
static intrusive_ptr<ExpressionCoerceToBool> create(
- const intrusive_ptr<Expression> &pExpression);
+ const intrusive_ptr<Expression> &pExpression);
private:
ExpressionCoerceToBool(const intrusive_ptr<Expression> &pExpression);
- intrusive_ptr<Expression> pExpression;
+ intrusive_ptr<Expression> pExpression;
};
@@ -392,10 +392,10 @@ namespace mongo {
public:
// virtuals from ExpressionNary
virtual ~ExpressionCompare();
- virtual intrusive_ptr<Expression> optimize();
+ virtual intrusive_ptr<Expression> optimize();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
/*
@@ -415,7 +415,7 @@ namespace mongo {
static intrusive_ptr<ExpressionNary> createLte();
private:
- friend class ExpressionFieldRange;
+ friend class ExpressionFieldRange;
ExpressionCompare(CmpOp cmpOp);
CmpOp cmpOp;
@@ -429,7 +429,7 @@ namespace mongo {
virtual ~ExpressionCond();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -445,30 +445,30 @@ namespace mongo {
public:
// virtuals from Expression
virtual ~ExpressionConstant();
- virtual intrusive_ptr<Expression> optimize();
+ virtual intrusive_ptr<Expression> optimize();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const;
- virtual void addToBsonArray(
- BSONArrayBuilder *pBuilder, unsigned depth) const;
+ virtual const char *getOpName() const;
+ virtual void addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const;
+ virtual void addToBsonArray(
+ BSONArrayBuilder *pBuilder, unsigned depth) const;
static intrusive_ptr<ExpressionConstant> createFromBsonElement(
BSONElement *pBsonElement);
- static intrusive_ptr<ExpressionConstant> create(
- const intrusive_ptr<const Value> &pValue);
+ static intrusive_ptr<ExpressionConstant> create(
+ const intrusive_ptr<const Value> &pValue);
- /*
- Get the constant value represented by this Expression.
+ /*
+ Get the constant value represented by this Expression.
- @returns the value
- */
- intrusive_ptr<const Value> getValue() const;
+ @returns the value
+ */
+ intrusive_ptr<const Value> getValue() const;
private:
ExpressionConstant(BSONElement *pBsonElement);
- ExpressionConstant(const intrusive_ptr<const Value> &pValue);
+ ExpressionConstant(const intrusive_ptr<const Value> &pValue);
intrusive_ptr<const Value> pValue;
};
@@ -481,7 +481,7 @@ namespace mongo {
virtual ~ExpressionDayOfMonth();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -498,7 +498,7 @@ namespace mongo {
virtual ~ExpressionDayOfWeek();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -515,7 +515,7 @@ namespace mongo {
virtual ~ExpressionDayOfYear();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -532,7 +532,7 @@ namespace mongo {
virtual ~ExpressionDivide();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -548,161 +548,161 @@ namespace mongo {
public:
// virtuals from Expression
virtual ~ExpressionFieldPath();
- virtual intrusive_ptr<Expression> optimize();
+ virtual intrusive_ptr<Expression> optimize();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual void addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const;
- virtual void addToBsonArray(
- BSONArrayBuilder *pBuilder, unsigned depth) const;
+ virtual void addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const;
+ virtual void addToBsonArray(
+ BSONArrayBuilder *pBuilder, unsigned depth) const;
- /*
- Create a field path expression.
+ /*
+ Create a field path expression.
- Evaluation will extract the value associated with the given field
- path from the source document.
+ Evaluation will extract the value associated with the given field
+ path from the source document.
- @param fieldPath the field path string, without any leading document
- indicator
- @returns the newly created field path expression
- */
+ @param fieldPath the field path string, without any leading document
+ indicator
+ @returns the newly created field path expression
+ */
static intrusive_ptr<ExpressionFieldPath> create(
- const string &fieldPath);
+ const string &fieldPath);
- /*
- Return a string representation of the field path.
+ /*
+ Return a string representation of the field path.
- @param fieldPrefix whether or not to include the document field
- indicator prefix
- @returns the dot-delimited field path
- */
- string getFieldPath(bool fieldPrefix) const;
+ @param fieldPrefix whether or not to include the document field
+ indicator prefix
+ @returns the dot-delimited field path
+ */
+ string getFieldPath(bool fieldPrefix) const;
- /*
- Write a string representation of the field path to a stream.
+ /*
+ Write a string representation of the field path to a stream.
- @param the stream to write to
- @param fieldPrefix whether or not to include the document field
- indicator prefix
- */
- void writeFieldPath(ostream &outStream, bool fieldPrefix) const;
+ @param the stream to write to
+ @param fieldPrefix whether or not to include the document field
+ indicator prefix
+ */
+ void writeFieldPath(ostream &outStream, bool fieldPrefix) const;
private:
ExpressionFieldPath(const string &fieldPath);
- /*
- Internal implementation of evaluate(), used recursively.
-
- The internal implementation doesn't just use a loop because of
- the possibility that we need to skip over an array. If the path
- is "a.b.c", and a is an array, then we fan out from there, and
- traverse "b.c" for each element of a:[...]. This requires that
- a be an array of objects in order to navigate more deeply.
-
- @param index current path field index to extract
- @param pathLength maximum number of fields on field path
- @param pDocument current document traversed to (not the top-level one)
- @returns the field found; could be an array
- */
- intrusive_ptr<const Value> evaluatePath(
- size_t index, const size_t pathLength,
- intrusive_ptr<Document> pDocument) const;
-
- FieldPath fieldPath;
+ /*
+ Internal implementation of evaluate(), used recursively.
+
+ The internal implementation doesn't just use a loop because of
+ the possibility that we need to skip over an array. If the path
+ is "a.b.c", and a is an array, then we fan out from there, and
+ traverse "b.c" for each element of a:[...]. This requires that
+ a be an array of objects in order to navigate more deeply.
+
+ @param index current path field index to extract
+ @param pathLength maximum number of fields on field path
+ @param pDocument current document traversed to (not the top-level one)
+ @returns the field found; could be an array
+ */
+ intrusive_ptr<const Value> evaluatePath(
+ size_t index, const size_t pathLength,
+ intrusive_ptr<Document> pDocument) const;
+
+ FieldPath fieldPath;
};
class ExpressionFieldRange :
- public Expression,
- public boost::enable_shared_from_this<ExpressionFieldRange> {
+ public Expression,
+ public boost::enable_shared_from_this<ExpressionFieldRange> {
public:
- // virtuals from expression
+ // virtuals from expression
virtual ~ExpressionFieldRange();
- virtual intrusive_ptr<Expression> optimize();
+ virtual intrusive_ptr<Expression> optimize();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual void addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const;
- virtual void addToBsonArray(
- BSONArrayBuilder *pBuilder, unsigned depth) const;
- virtual void toMatcherBson(
- BSONObjBuilder *pBuilder, unsigned depth) const;
-
- /*
- Create a field range expression.
-
- Field ranges are meant to match up with classic Matcher semantics,
- and therefore are conjunctions. For example, these appear in
- mongo shell predicates in one of these forms:
- { a : C } -> (a == C) // degenerate "point" range
- { a : { $lt : C } } -> (a < C) // open range
- { a : { $gt : C1, $lte : C2 } } -> ((a > C1) && (a <= C2)) // closed
-
- When initially created, a field range only includes one end of
- the range. Additional points may be added via intersect().
-
- Note that NE and CMP are not supported.
-
- @param pFieldPath the field path for extracting the field value
- @param cmpOp the comparison operator
- @param pValue the value to compare against
- @returns the newly created field range expression
- */
- static intrusive_ptr<ExpressionFieldRange> create(
- const intrusive_ptr<ExpressionFieldPath> &pFieldPath,
- CmpOp cmpOp, const intrusive_ptr<const Value> &pValue);
-
- /*
- Add an intersecting range.
-
- This can be done any number of times after creation. The
- range is internally optimized for each new addition. If the new
- intersection extends or reduces the values within the range, the
- internal representation is adjusted to reflect that.
-
- Note that NE and CMP are not supported.
-
- @param cmpOp the comparison operator
- @param pValue the value to compare against
- */
- void intersect(CmpOp cmpOp, const intrusive_ptr<const Value> &pValue);
+ virtual void addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const;
+ virtual void addToBsonArray(
+ BSONArrayBuilder *pBuilder, unsigned depth) const;
+ virtual void toMatcherBson(
+ BSONObjBuilder *pBuilder, unsigned depth) const;
+
+ /*
+ Create a field range expression.
+
+ Field ranges are meant to match up with classic Matcher semantics,
+ and therefore are conjunctions. For example, these appear in
+ mongo shell predicates in one of these forms:
+ { a : C } -> (a == C) // degenerate "point" range
+ { a : { $lt : C } } -> (a < C) // open range
+ { a : { $gt : C1, $lte : C2 } } -> ((a > C1) && (a <= C2)) // closed
+
+ When initially created, a field range only includes one end of
+ the range. Additional points may be added via intersect().
+
+ Note that NE and CMP are not supported.
+
+ @param pFieldPath the field path for extracting the field value
+ @param cmpOp the comparison operator
+ @param pValue the value to compare against
+ @returns the newly created field range expression
+ */
+ static intrusive_ptr<ExpressionFieldRange> create(
+ const intrusive_ptr<ExpressionFieldPath> &pFieldPath,
+ CmpOp cmpOp, const intrusive_ptr<const Value> &pValue);
+
+ /*
+ Add an intersecting range.
+
+ This can be done any number of times after creation. The
+ range is internally optimized for each new addition. If the new
+ intersection extends or reduces the values within the range, the
+ internal representation is adjusted to reflect that.
+
+ Note that NE and CMP are not supported.
+
+ @param cmpOp the comparison operator
+ @param pValue the value to compare against
+ */
+ void intersect(CmpOp cmpOp, const intrusive_ptr<const Value> &pValue);
private:
- ExpressionFieldRange(const intrusive_ptr<ExpressionFieldPath> &pFieldPath,
- CmpOp cmpOp,
- const intrusive_ptr<const Value> &pValue);
+ ExpressionFieldRange(const intrusive_ptr<ExpressionFieldPath> &pFieldPath,
+ CmpOp cmpOp,
+ const intrusive_ptr<const Value> &pValue);
- intrusive_ptr<ExpressionFieldPath> pFieldPath;
+ intrusive_ptr<ExpressionFieldPath> pFieldPath;
- class Range {
- public:
- Range(CmpOp cmpOp, const intrusive_ptr<const Value> &pValue);
- Range(const Range &rRange);
+ class Range {
+ public:
+ Range(CmpOp cmpOp, const intrusive_ptr<const Value> &pValue);
+ Range(const Range &rRange);
- Range *intersect(const Range *pRange) const;
- bool contains(const intrusive_ptr<const Value> &pValue) const;
+ Range *intersect(const Range *pRange) const;
+ bool contains(const intrusive_ptr<const Value> &pValue) const;
- Range(const intrusive_ptr<const Value> &pBottom, bool bottomOpen,
- const intrusive_ptr<const Value> &pTop, bool topOpen);
+ Range(const intrusive_ptr<const Value> &pBottom, bool bottomOpen,
+ const intrusive_ptr<const Value> &pTop, bool topOpen);
- bool bottomOpen;
- bool topOpen;
- intrusive_ptr<const Value> pBottom;
- intrusive_ptr<const Value> pTop;
- };
+ bool bottomOpen;
+ bool topOpen;
+ intrusive_ptr<const Value> pBottom;
+ intrusive_ptr<const Value> pTop;
+ };
- scoped_ptr<Range> pRange;
+ scoped_ptr<Range> pRange;
- /*
- Add to a generic Builder.
+ /*
+ Add to a generic Builder.
- The methods to append items to an object and an array differ by
- their inclusion of a field name. For more complicated objects,
- it makes sense to abstract that out and use a generic builder that
- always looks the same, and then implement addToBsonObj() and
- addToBsonArray() by using the common method.
- */
- void addToBson(Builder *pBuilder, unsigned depth) const;
+ The methods to append items to an object and an array differ by
+ their inclusion of a field name. For more complicated objects,
+ it makes sense to abstract that out and use a generic builder that
+ always looks the same, and then implement addToBsonObj() and
+ addToBsonArray() by using the common method.
+ */
+ void addToBson(Builder *pBuilder, unsigned depth) const;
};
@@ -713,7 +713,7 @@ namespace mongo {
virtual ~ExpressionHour();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -730,7 +730,7 @@ namespace mongo {
virtual ~ExpressionIfNull();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -747,7 +747,7 @@ namespace mongo {
virtual ~ExpressionMinute();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -764,7 +764,7 @@ namespace mongo {
virtual ~ExpressionMod();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -805,7 +805,7 @@ namespace mongo {
virtual ~ExpressionMonth();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -820,10 +820,10 @@ namespace mongo {
public:
// virtuals from ExpressionNary
virtual ~ExpressionNoOp();
- virtual intrusive_ptr<Expression> optimize();
+ virtual intrusive_ptr<Expression> optimize();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -840,7 +840,7 @@ namespace mongo {
virtual ~ExpressionNot();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -856,46 +856,46 @@ namespace mongo {
public:
// virtuals from Expression
virtual ~ExpressionObject();
- virtual intrusive_ptr<Expression> optimize();
+ virtual intrusive_ptr<Expression> optimize();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual void addToBsonObj(
- BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const;
- virtual void addToBsonArray(
- BSONArrayBuilder *pBuilder, unsigned depth) const;
-
- /*
- evaluate(), but return a Document instead of a Value-wrapped
- Document.
-
- @param pDocument the input Document
- @returns the result document
- */
- intrusive_ptr<Document> evaluateDocument(
- const intrusive_ptr<Document> &pDocument) const;
-
- /*
- evaluate(), but add the evaluated fields to a given document
- instead of creating a new one.
-
- @param pResult the Document to add the evaluated expressions to
- @param pDocument the input Document
- */
- void addToDocument(const intrusive_ptr<Document> &pResult,
- const intrusive_ptr<Document> &pDocument) const;
-
- /*
- Estimate the number of fields that will result from evaluating
- this over pDocument. Does not include _id. This is an estimate
- (really an upper bound) because we can't account for undefined
- fields without actually doing the evaluation. But this is still
- useful as an argument to Document::create(), if you plan to use
- addToDocument().
-
- @param pDocument the input document
- @returns estimated number of fields that will result
- */
- size_t getSizeHint(const intrusive_ptr<Document> &pDocument) const;
+ virtual void addToBsonObj(
+ BSONObjBuilder *pBuilder, string fieldName, unsigned depth) const;
+ virtual void addToBsonArray(
+ BSONArrayBuilder *pBuilder, unsigned depth) const;
+
+ /*
+ evaluate(), but return a Document instead of a Value-wrapped
+ Document.
+
+ @param pDocument the input Document
+ @returns the result document
+ */
+ intrusive_ptr<Document> evaluateDocument(
+ const intrusive_ptr<Document> &pDocument) const;
+
+ /*
+ evaluate(), but add the evaluated fields to a given document
+ instead of creating a new one.
+
+ @param pResult the Document to add the evaluated expressions to
+ @param pDocument the input Document
+ */
+ void addToDocument(const intrusive_ptr<Document> &pResult,
+ const intrusive_ptr<Document> &pDocument) const;
+
+ /*
+ Estimate the number of fields that will result from evaluating
+ this over pDocument. Does not include _id. This is an estimate
+ (really an upper bound) because we can't account for undefined
+ fields without actually doing the evaluation. But this is still
+ useful as an argument to Document::create(), if you plan to use
+ addToDocument().
+
+ @param pDocument the input document
+ @returns estimated number of fields that will result
+ */
+ size_t getSizeHint(const intrusive_ptr<Document> &pDocument) const;
/*
Create an empty expression. Until fields are added, this
@@ -912,96 +912,96 @@ namespace mongo {
Value in the result Document
*/
void addField(const string &fieldPath,
- const intrusive_ptr<Expression> &pExpression);
+ const intrusive_ptr<Expression> &pExpression);
- /*
- Add a field path to the set of those to be included.
+ /*
+ Add a field path to the set of those to be included.
- Note that including a nested field implies including everything on
- the path leading down to it.
+ Note that including a nested field implies including everything on
+ the path leading down to it.
- @param fieldPath the name of the field to be included
- */
- void includePath(const string &fieldPath);
+ @param fieldPath the name of the field to be included
+ */
+ void includePath(const string &fieldPath);
- /*
- Add a field path to the set of those to be excluded.
+ /*
+ Add a field path to the set of those to be excluded.
- Note that excluding a nested field implies including everything on
- the path leading down to it (because you're stating you want to see
- all the other fields that aren't being excluded).
+ Note that excluding a nested field implies including everything on
+ the path leading down to it (because you're stating you want to see
+ all the other fields that aren't being excluded).
- @param fieldName the name of the field to be excluded
- */
- void excludePath(const string &fieldPath);
+ @param fieldName the name of the field to be excluded
+ */
+ void excludePath(const string &fieldPath);
- /*
- Return the expression for a field.
+ /*
+ Return the expression for a field.
- @param fieldName the field name for the expression to return
- @returns the expression used to compute the field, if it is present,
- otherwise NULL.
- */
- intrusive_ptr<Expression> getField(const string &fieldName) const;
+ @param fieldName the field name for the expression to return
+ @returns the expression used to compute the field, if it is present,
+ otherwise NULL.
+ */
+ intrusive_ptr<Expression> getField(const string &fieldName) const;
- /*
- Get a count of the added fields.
+ /*
+ Get a count of the added fields.
- @returns how many fields have been added
- */
- size_t getFieldCount() const;
+ @returns how many fields have been added
+ */
+ size_t getFieldCount() const;
- /*
- Get a count of the exclusions.
+ /*
+ Get a count of the exclusions.
- @returns how many fields have been excluded.
- */
- size_t getExclusionCount() const;
+ @returns how many fields have been excluded.
+ */
+ size_t getExclusionCount() const;
- /*
- Specialized BSON conversion that allows for writing out a
- $project specification. This creates a standalone object, which must
- be added to a containing object with a name
+ /*
+ Specialized BSON conversion that allows for writing out a
+ $project specification. This creates a standalone object, which must
+ be added to a containing object with a name
- @param pBuilder where to write the object to
- */
- void documentToBson(BSONObjBuilder *pBuilder, unsigned depth) const;
+ @param pBuilder where to write the object to
+ */
+ void documentToBson(BSONObjBuilder *pBuilder, unsigned depth) const;
private:
ExpressionObject();
- void includePath(
- const FieldPath *pPath, size_t pathi, size_t pathn,
- bool excludeLast);
+ void includePath(
+ const FieldPath *pPath, size_t pathi, size_t pathn,
+ bool excludeLast);
- bool excludePaths;
- set<string> path;
+ bool excludePaths;
+ set<string> path;
/* these two vectors are maintained in parallel */
vector<string> vFieldName;
vector<intrusive_ptr<Expression> > vpExpression;
- /*
- Utility function used by documentToBson(). Emits inclusion
- and exclusion paths by recursively walking down the nested
- ExpressionObject trees these have created.
-
- @param pBuilder the builder to write boolean valued path "fields" to
- @param pvPath pointer to a vector of strings describing the path on
- descent; the top-level call should pass an empty vector
- */
- void emitPaths(BSONObjBuilder *pBuilder, vector<string> *pvPath) const;
-
- /* utility class used by emitPaths() */
- class PathPusher :
- boost::noncopyable {
- public:
- PathPusher(vector<string> *pvPath, const string &s);
- ~PathPusher();
-
- private:
- vector<string> *pvPath;
- };
+ /*
+ Utility function used by documentToBson(). Emits inclusion
+ and exclusion paths by recursively walking down the nested
+ ExpressionObject trees these have created.
+
+ @param pBuilder the builder to write boolean valued path "fields" to
+ @param pvPath pointer to a vector of strings describing the path on
+ descent; the top-level call should pass an empty vector
+ */
+ void emitPaths(BSONObjBuilder *pBuilder, vector<string> *pvPath) const;
+
+ /* utility class used by emitPaths() */
+ class PathPusher :
+ boost::noncopyable {
+ public:
+ PathPusher(vector<string> *pvPath, const string &s);
+ ~PathPusher();
+
+ private:
+ vector<string> *pvPath;
+ };
};
@@ -1010,15 +1010,15 @@ namespace mongo {
public:
// virtuals from Expression
virtual ~ExpressionOr();
- virtual intrusive_ptr<Expression> optimize();
+ virtual intrusive_ptr<Expression> optimize();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
- virtual void toMatcherBson(
- BSONObjBuilder *pBuilder, unsigned depth) const;
+ virtual const char *getOpName() const;
+ virtual void toMatcherBson(
+ BSONObjBuilder *pBuilder, unsigned depth) const;
- // virtuals from ExpressionNary
- virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
+ // virtuals from ExpressionNary
+ virtual intrusive_ptr<ExpressionNary> (*getFactory() const)();
/*
Create an expression that finds the conjunction of n operands.
@@ -1043,7 +1043,7 @@ namespace mongo {
virtual ~ExpressionSecond();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -1060,7 +1060,7 @@ namespace mongo {
virtual ~ExpressionStrcasecmp();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -1077,7 +1077,7 @@ namespace mongo {
virtual ~ExpressionSubstr();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -1094,7 +1094,7 @@ namespace mongo {
virtual ~ExpressionSubtract();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -1111,7 +1111,7 @@ namespace mongo {
virtual ~ExpressionToLower();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -1128,7 +1128,7 @@ namespace mongo {
virtual ~ExpressionToUpper();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -1145,7 +1145,7 @@ namespace mongo {
virtual ~ExpressionWeek();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -1162,7 +1162,7 @@ namespace mongo {
virtual ~ExpressionYear();
virtual intrusive_ptr<const Value> evaluate(
const intrusive_ptr<Document> &pDocument) const;
- virtual const char *getOpName() const;
+ virtual const char *getOpName() const;
virtual void addOperand(const intrusive_ptr<Expression> &pExpression);
static intrusive_ptr<ExpressionNary> create();
@@ -1186,38 +1186,38 @@ namespace mongo {
}
inline int Expression::signum(int i) {
- if (i < 0)
- return -1;
- if (i > 0)
- return 1;
- return 0;
+ if (i < 0)
+ return -1;
+ if (i > 0)
+ return 1;
+ return 0;
}
inline intrusive_ptr<const Value> ExpressionConstant::getValue() const {
- return pValue;
+ return pValue;
}
inline string ExpressionFieldPath::getFieldPath(bool fieldPrefix) const {
- return fieldPath.getPath(fieldPrefix);
+ return fieldPath.getPath(fieldPrefix);
}
inline void ExpressionFieldPath::writeFieldPath(
- ostream &outStream, bool fieldPrefix) const {
- return fieldPath.writePath(outStream, fieldPrefix);
+ ostream &outStream, bool fieldPrefix) const {
+ return fieldPath.writePath(outStream, fieldPrefix);
}
inline size_t ExpressionObject::getFieldCount() const {
- return vFieldName.size();
+ return vFieldName.size();
}
inline ExpressionObject::PathPusher::PathPusher(
- vector<string> *pTheVPath, const string &s):
- pvPath(pTheVPath) {
- pvPath->push_back(s);
+ vector<string> *pTheVPath, const string &s):
+ pvPath(pTheVPath) {
+ pvPath->push_back(s);
}
inline ExpressionObject::PathPusher::~PathPusher() {
- pvPath->pop_back();
+ pvPath->pop_back();
}
}
diff --git a/src/mongo/db/pipeline/expression_context.cpp b/src/mongo/db/pipeline/expression_context.cpp
index 4835dcfa5a9..315e4e55bbf 100755
--- a/src/mongo/db/pipeline/expression_context.cpp
+++ b/src/mongo/db/pipeline/expression_context.cpp
@@ -24,12 +24,12 @@ namespace mongo {
}
inline ExpressionContext::ExpressionContext():
- inShard(false),
- inRouter(false) {
+ inShard(false),
+ inRouter(false) {
}
ExpressionContext *ExpressionContext::create() {
- return new ExpressionContext();
+ return new ExpressionContext();
}
}
diff --git a/src/mongo/db/pipeline/expression_context.h b/src/mongo/db/pipeline/expression_context.h
index 0277039c80b..89e2878b54c 100755
--- a/src/mongo/db/pipeline/expression_context.h
+++ b/src/mongo/db/pipeline/expression_context.h
@@ -25,21 +25,21 @@ namespace mongo {
class ExpressionContext :
public IntrusiveCounterUnsigned {
public:
- virtual ~ExpressionContext();
+ virtual ~ExpressionContext();
- void setInShard(bool b);
- void setInRouter(bool b);
+ void setInShard(bool b);
+ void setInRouter(bool b);
- bool getInShard() const;
- bool getInRouter() const;
+ bool getInShard() const;
+ bool getInRouter() const;
- static ExpressionContext *create();
+ static ExpressionContext *create();
private:
- ExpressionContext();
-
- bool inShard;
- bool inRouter;
+ ExpressionContext();
+
+ bool inShard;
+ bool inRouter;
};
}
@@ -49,19 +49,19 @@ namespace mongo {
namespace mongo {
inline void ExpressionContext::setInShard(bool b) {
- inShard = b;
+ inShard = b;
}
inline void ExpressionContext::setInRouter(bool b) {
- inRouter = b;
+ inRouter = b;
}
inline bool ExpressionContext::getInShard() const {
- return inShard;
+ return inShard;
}
inline bool ExpressionContext::getInRouter() const {
- return inRouter;
+ return inRouter;
}
};
diff --git a/src/mongo/db/pipeline/field_path.cpp b/src/mongo/db/pipeline/field_path.cpp
index 96e1fc92f83..52baefa560c 100755
--- a/src/mongo/db/pipeline/field_path.cpp
+++ b/src/mongo/db/pipeline/field_path.cpp
@@ -25,11 +25,11 @@ namespace mongo {
}
FieldPath::FieldPath():
- vFieldName() {
+ vFieldName() {
}
FieldPath::FieldPath(const string &fieldPath):
- vFieldName() {
+ vFieldName() {
/*
The field path could be using dot notation.
Break the field path up by peeling off successive pieces.
@@ -47,10 +47,10 @@ namespace mongo {
/* use the string up to the dot */
const size_t length = dotpos - startpos;
- uassert(15998, str::stream() <<
- "field names cannot be zero length (in path \"" <<
- fieldPath << "\")",
- length > 0);
+ uassert(15998, str::stream() <<
+ "field names cannot be zero length (in path \"" <<
+ fieldPath << "\")",
+ length > 0);
vFieldName.push_back(fieldPath.substr(startpos, length));
@@ -60,28 +60,28 @@ namespace mongo {
}
string FieldPath::getPath(bool fieldPrefix) const {
- stringstream ss;
- writePath(ss, fieldPrefix);
- return ss.str();
+ stringstream ss;
+ writePath(ss, fieldPrefix);
+ return ss.str();
}
void FieldPath::writePath(ostream &outStream, bool fieldPrefix) const {
- if (fieldPrefix)
- outStream << "$";
+ if (fieldPrefix)
+ outStream << "$";
- outStream << vFieldName[0];
+ outStream << vFieldName[0];
- const size_t n = vFieldName.size();
- for(size_t i = 1; i < n; ++i)
- outStream << "." << vFieldName[i];
+ const size_t n = vFieldName.size();
+ for(size_t i = 1; i < n; ++i)
+ outStream << "." << vFieldName[i];
}
FieldPath &FieldPath::operator=(const FieldPath &rRHS) {
- if (this != &rRHS) {
- vFieldName = rRHS.vFieldName;
- }
+ if (this != &rRHS) {
+ vFieldName = rRHS.vFieldName;
+ }
- return *this;
+ return *this;
}
}
diff --git a/src/mongo/db/pipeline/field_path.h b/src/mongo/db/pipeline/field_path.h
index 810c5d0c7ea..d54c2edd1da 100755
--- a/src/mongo/db/pipeline/field_path.h
+++ b/src/mongo/db/pipeline/field_path.h
@@ -22,46 +22,46 @@ namespace mongo {
class FieldPath {
public:
- virtual ~FieldPath();
+ virtual ~FieldPath();
- FieldPath(const string &fieldPath);
- FieldPath();
+ FieldPath(const string &fieldPath);
+ FieldPath();
- /*
- Get the number of path elements in the field path.
+ /*
+ Get the number of path elements in the field path.
- @returns the number of path elements
- */
- size_t getPathLength() const;
+ @returns the number of path elements
+ */
+ size_t getPathLength() const;
- /*
- Get a particular path element from the path.
+ /*
+ Get a particular path element from the path.
- @param i the index of the path element
- @returns the path element
- */
- string getFieldName(size_t i) const;
+ @param i the index of the path element
+ @returns the path element
+ */
+ string getFieldName(size_t i) const;
- /*
- Get the full path.
+ /*
+ Get the full path.
- @param fieldPrefix whether or not to include the field prefix
- @returns the complete field path
- */
- string getPath(bool fieldPrefix) const;
+ @param fieldPrefix whether or not to include the field prefix
+ @returns the complete field path
+ */
+ string getPath(bool fieldPrefix) const;
- /*
- Write the full path.
+ /*
+ Write the full path.
- @param outStream where to write the path to
- @param fieldPrefix whether or not to include the field prefix
- */
- void writePath(ostream &outStream, bool fieldPrefix) const;
+ @param outStream where to write the path to
+ @param fieldPrefix whether or not to include the field prefix
+ */
+ void writePath(ostream &outStream, bool fieldPrefix) const;
- FieldPath &operator=(const FieldPath &rRHS);
+ FieldPath &operator=(const FieldPath &rRHS);
private:
- vector<string> vFieldName;
+ vector<string> vFieldName;
};
}
@@ -71,11 +71,11 @@ namespace mongo {
namespace mongo {
inline size_t FieldPath::getPathLength() const {
- return vFieldName.size();
+ return vFieldName.size();
}
inline string FieldPath::getFieldName(size_t i) const {
- return vFieldName[i];
+ return vFieldName[i];
}
}
diff --git a/src/mongo/db/pipeline/value.cpp b/src/mongo/db/pipeline/value.cpp
index 1d3e84aad73..ae4faea9a9f 100755
--- a/src/mongo/db/pipeline/value.cpp
+++ b/src/mongo/db/pipeline/value.cpp
@@ -27,7 +27,7 @@ namespace mongo {
using namespace mongoutils;
const intrusive_ptr<const Value> Value::pFieldUndefined(
- new ValueStatic(Undefined));
+ new ValueStatic(Undefined));
const intrusive_ptr<const Value> Value::pFieldNull(new ValueStatic());
const intrusive_ptr<const Value> Value::pFieldTrue(new ValueStatic(true));
const intrusive_ptr<const Value> Value::pFieldFalse(new ValueStatic(false));
@@ -54,39 +54,39 @@ namespace mongo {
stringValue(),
pDocumentValue(),
vpValue() {
- switch(type) {
- case Undefined:
- case jstNULL:
- case Object: // empty
- case Array: // empty
- break;
-
- case NumberDouble:
- simple.doubleValue = 0;
- break;
-
- case Bool:
- simple.boolValue = false;
- break;
-
- case NumberInt:
- simple.intValue = 0;
- break;
-
- case Timestamp:
- simple.timestampValue = 0;
- break;
-
- case NumberLong:
- simple.longValue = 0;
- break;
-
- default:
- // nothing else is allowed
- uassert(16001, str::stream() <<
- "can't create empty Value of type " << type, false);
- break;
- }
+ switch(type) {
+ case Undefined:
+ case jstNULL:
+ case Object: // empty
+ case Array: // empty
+ break;
+
+ case NumberDouble:
+ simple.doubleValue = 0;
+ break;
+
+ case Bool:
+ simple.boolValue = false;
+ break;
+
+ case NumberInt:
+ simple.intValue = 0;
+ break;
+
+ case Timestamp:
+ simple.timestampValue = 0;
+ break;
+
+ case NumberLong:
+ simple.longValue = 0;
+ break;
+
+ default:
+ // nothing else is allowed
+ uassert(16001, str::stream() <<
+ "can't create empty Value of type " << type, false);
+ break;
+ }
}
Value::Value(bool boolValue):
@@ -164,14 +164,14 @@ namespace mongo {
break;
case jstNULL:
- break;
+ break;
case BinData:
case Symbol:
case CodeWScope:
- uassert(16002, str::stream() <<
- "can't create Value of type " << type, false);
- break;
+ uassert(16002, str::stream() <<
+ "can't create Value of type " << type, false);
+ break;
/* these shouldn't happen in this context */
case MinKey:
@@ -315,7 +315,7 @@ namespace mongo {
intrusive_ptr<ValueIterator> Value::getArray() const {
assert(getType() == Array);
intrusive_ptr<ValueIterator> pVI(
- new vi(intrusive_ptr<const Value>(this), &vpValue));
+ new vi(intrusive_ptr<const Value>(this), &vpValue));
return pVI;
}
@@ -434,9 +434,9 @@ namespace mongo {
pBuilder->append(getLong());
break;
- case jstNULL:
- pBuilder->append();
- break;
+ case jstNULL:
+ pBuilder->append();
+ break;
/* these shouldn't appear in this context */
case MinKey:
@@ -451,13 +451,13 @@ namespace mongo {
}
void Value::addToBsonObj(BSONObjBuilder *pBuilder, string fieldName) const {
- BuilderObj objBuilder(pBuilder, fieldName);
- addToBson(&objBuilder);
+ BuilderObj objBuilder(pBuilder, fieldName);
+ addToBson(&objBuilder);
}
void Value::addToBsonArray(BSONArrayBuilder *pBuilder) const {
- BuilderArray arrBuilder(pBuilder);
- addToBson(&arrBuilder);
+ BuilderArray arrBuilder(pBuilder);
+ addToBson(&arrBuilder);
}
bool Value::coerceToBool() const {
@@ -536,16 +536,16 @@ namespace mongo {
case NumberLong:
return (int)simple.longValue;
- case jstNULL:
- case Undefined:
- break;
+ case jstNULL:
+ case Undefined:
+ break;
case String:
default:
- uassert(16003, str::stream() <<
- "can't convert from BSON type " << type <<
- " to int",
- false);
+ uassert(16003, str::stream() <<
+ "can't convert from BSON type " << type <<
+ " to int",
+ false);
} // switch(type)
return (int)0;
@@ -562,16 +562,16 @@ namespace mongo {
case NumberLong:
return simple.longValue;
- case jstNULL:
- case Undefined:
- break;
+ case jstNULL:
+ case Undefined:
+ break;
case String:
default:
- uassert(16004, str::stream() <<
- "can't convert from BSON type " << type <<
- " to long",
- false);
+ uassert(16004, str::stream() <<
+ "can't convert from BSON type " << type <<
+ " to long",
+ false);
} // switch(type)
return (long long)0;
@@ -588,16 +588,16 @@ namespace mongo {
case NumberLong:
return (double)simple.longValue;
- case jstNULL:
- case Undefined:
- break;
+ case jstNULL:
+ case Undefined:
+ break;
case String:
default:
- uassert(16005, str::stream() <<
- "can't convert from BSON type " << type <<
- " to double",
- false);
+ uassert(16005, str::stream() <<
+ "can't convert from BSON type " << type <<
+ " to double",
+ false);
} // switch(type)
return (double)0;
@@ -609,15 +609,15 @@ namespace mongo {
case Date:
return dateValue;
- case jstNULL:
- case Undefined:
- break;
+ case jstNULL:
+ case Undefined:
+ break;
default:
- uassert(16006, str::stream() <<
- "can't convert from BSON type " << type <<
- " to double",
- false);
+ uassert(16006, str::stream() <<
+ "can't convert from BSON type " << type <<
+ " to double",
+ false);
} // switch(type)
assert(false); // CW TODO no conversion available
@@ -645,15 +645,15 @@ namespace mongo {
case Date:
return dateValue.toString();
- case jstNULL:
- case Undefined:
- break;
+ case jstNULL:
+ case Undefined:
+ break;
default:
- uassert(16007, str::stream() <<
- "can't convert from BSON type " << type <<
- " to double",
- false);
+ uassert(16007, str::stream() <<
+ "can't convert from BSON type " << type <<
+ " to double",
+ false);
} // switch(type)
return "";
@@ -662,46 +662,46 @@ namespace mongo {
int Value::compare(const intrusive_ptr<const Value> &rL,
const intrusive_ptr<const Value> &rR) {
BSONType lType = rL->getType();
- BSONType rType = rR->getType();
-
- /*
- Special handling for Undefined and NULL values; these are types,
- so it's easier to handle them here before we go below to handle
- values of the same types. This allows us to compare Undefined and
- NULL values with everything else. As coded now:
- (*) Undefined is less than everything except itself (which is equal)
- (*) NULL is less than everything except Undefined and itself
- */
- if (lType == Undefined) {
- if (rType == Undefined)
- return 0;
-
- /* if rType is anything else, the left value is less */
- return -1;
- }
-
- if (lType == jstNULL) {
- if (rType == Undefined)
- return 1;
- if (rType == jstNULL)
- return 0;
-
- return -1;
- }
-
- if ((rType == Undefined) || (rType == jstNULL)) {
- /*
- We know the left value isn't Undefined, because of the above.
- Count a NULL value as greater than an undefined one.
- */
- return 1;
- }
+ BSONType rType = rR->getType();
+
+ /*
+ Special handling for Undefined and NULL values; these are types,
+ so it's easier to handle them here before we go below to handle
+ values of the same types. This allows us to compare Undefined and
+ NULL values with everything else. As coded now:
+ (*) Undefined is less than everything except itself (which is equal)
+ (*) NULL is less than everything except Undefined and itself
+ */
+ if (lType == Undefined) {
+ if (rType == Undefined)
+ return 0;
+
+ /* if rType is anything else, the left value is less */
+ return -1;
+ }
+
+ if (lType == jstNULL) {
+ if (rType == Undefined)
+ return 1;
+ if (rType == jstNULL)
+ return 0;
+
+ return -1;
+ }
+
+ if ((rType == Undefined) || (rType == jstNULL)) {
+ /*
+ We know the left value isn't Undefined, because of the above.
+ Count a NULL value as greater than an undefined one.
+ */
+ return 1;
+ }
// CW TODO for now, only compare like values
- uassert(16016, str::stream() <<
- "can't compare values of BSON types " << lType <<
- " and " << rType,
- lType == rType);
+ uassert(16016, str::stream() <<
+ "can't compare values of BSON types " << lType <<
+ " and " << rType,
+ lType == rType);
switch(lType) {
case NumberDouble:
@@ -750,9 +750,9 @@ namespace mongo {
case BinData:
case Symbol:
case CodeWScope:
- uassert(16017, str::stream() <<
- "comparisons of values of BSON type " << lType <<
- " are not supported", false);
+ uassert(16017, str::stream() <<
+ "comparisons of values of BSON type " << lType <<
+ " are not supported", false);
// pBuilder->appendBinData(fieldName, ...);
break;
@@ -803,7 +803,7 @@ namespace mongo {
case Undefined:
case jstNULL:
- return 0; // treat two Undefined or NULL values as equal
+ return 0; // treat two Undefined or NULL values as equal
/* these shouldn't happen in this context */
case MinKey:
@@ -820,70 +820,70 @@ namespace mongo {
}
void Value::hash_combine(size_t &seed) const {
- BSONType type = getType();
- boost::hash_combine(seed, (int)type);
+ BSONType type = getType();
+ boost::hash_combine(seed, (int)type);
switch(type) {
case NumberDouble:
- boost::hash_combine(seed, simple.doubleValue);
- break;
+ boost::hash_combine(seed, simple.doubleValue);
+ break;
case String:
- boost::hash_combine(seed, stringValue);
- break;
+ boost::hash_combine(seed, stringValue);
+ break;
case Object:
- getDocument()->hash_combine(seed);
- break;
+ getDocument()->hash_combine(seed);
+ break;
case Array: {
- intrusive_ptr<ValueIterator> pIter(getArray());
- while(pIter->more()) {
- intrusive_ptr<const Value> pValue(pIter->next());
- pValue->hash_combine(seed);
- };
+ intrusive_ptr<ValueIterator> pIter(getArray());
+ while(pIter->more()) {
+ intrusive_ptr<const Value> pValue(pIter->next());
+ pValue->hash_combine(seed);
+ };
break;
}
case BinData:
case Symbol:
case CodeWScope:
- uassert(16018, str::stream() <<
- "hashes of values of BSON type " << type <<
- " are not supported", false);
+ uassert(16018, str::stream() <<
+ "hashes of values of BSON type " << type <<
+ " are not supported", false);
break;
case jstOID:
- oidValue.hash_combine(seed);
- break;
+ oidValue.hash_combine(seed);
+ break;
case Bool:
- boost::hash_combine(seed, simple.boolValue);
- break;
+ boost::hash_combine(seed, simple.boolValue);
+ break;
case Date:
- boost::hash_combine(seed, (unsigned long long)dateValue);
- break;
+ boost::hash_combine(seed, (unsigned long long)dateValue);
+ break;
case RegEx:
- boost::hash_combine(seed, stringValue);
- break;
+ boost::hash_combine(seed, stringValue);
+ break;
case NumberInt:
- boost::hash_combine(seed, simple.intValue);
- break;
+ boost::hash_combine(seed, simple.intValue);
+ break;
case Timestamp:
- boost::hash_combine(seed, (unsigned long long)dateValue);
- break;
+ boost::hash_combine(seed, (unsigned long long)dateValue);
+ break;
case NumberLong:
- boost::hash_combine(seed, simple.longValue);
- break;
+ boost::hash_combine(seed, simple.longValue);
+ break;
case Undefined:
case jstNULL:
- break;
+ break;
/* these shouldn't happen in this context */
case MinKey:
@@ -897,66 +897,66 @@ namespace mongo {
}
BSONType Value::getWidestNumeric(BSONType lType, BSONType rType) {
- if (lType == NumberDouble) {
- switch(rType) {
- case NumberDouble:
- case NumberLong:
- case NumberInt:
- case jstNULL:
- case Undefined:
- return NumberDouble;
-
- default:
- break;
- }
- }
- else if (lType == NumberLong) {
- switch(rType) {
- case NumberDouble:
- return NumberDouble;
-
- case NumberLong:
- case NumberInt:
- case jstNULL:
- case Undefined:
- return NumberLong;
-
- default:
- break;
- }
- }
- else if (lType == NumberInt) {
- switch(rType) {
- case NumberDouble:
- return NumberDouble;
-
- case NumberLong:
- return NumberLong;
-
- case NumberInt:
- case jstNULL:
- case Undefined:
- return NumberInt;
-
- default:
- break;
- }
- }
- else if ((lType == jstNULL) || (lType == Undefined)) {
- switch(rType) {
- case NumberDouble:
- return NumberDouble;
-
- case NumberLong:
- return NumberLong;
-
- case NumberInt:
- return NumberInt;
-
- default:
- break;
- }
- }
+ if (lType == NumberDouble) {
+ switch(rType) {
+ case NumberDouble:
+ case NumberLong:
+ case NumberInt:
+ case jstNULL:
+ case Undefined:
+ return NumberDouble;
+
+ default:
+ break;
+ }
+ }
+ else if (lType == NumberLong) {
+ switch(rType) {
+ case NumberDouble:
+ return NumberDouble;
+
+ case NumberLong:
+ case NumberInt:
+ case jstNULL:
+ case Undefined:
+ return NumberLong;
+
+ default:
+ break;
+ }
+ }
+ else if (lType == NumberInt) {
+ switch(rType) {
+ case NumberDouble:
+ return NumberDouble;
+
+ case NumberLong:
+ return NumberLong;
+
+ case NumberInt:
+ case jstNULL:
+ case Undefined:
+ return NumberInt;
+
+ default:
+ break;
+ }
+ }
+ else if ((lType == jstNULL) || (lType == Undefined)) {
+ switch(rType) {
+ case NumberDouble:
+ return NumberDouble;
+
+ case NumberLong:
+ return NumberLong;
+
+ case NumberInt:
+ return NumberInt;
+
+ default:
+ break;
+ }
+ }
/* NOTREACHED */
return Undefined;
@@ -965,21 +965,21 @@ namespace mongo {
size_t Value::getApproximateSize() const {
switch(type) {
case String:
- return sizeof(Value) + stringValue.length();
+ return sizeof(Value) + stringValue.length();
case Object:
- return sizeof(Value) + pDocumentValue->getApproximateSize();
+ return sizeof(Value) + pDocumentValue->getApproximateSize();
case Array: {
- size_t size = sizeof(Value);
+ size_t size = sizeof(Value);
const size_t n = vpValue.size();
for(size_t i = 0; i < n; ++i) {
- size += vpValue[i]->getApproximateSize();
+ size += vpValue[i]->getApproximateSize();
}
- return size;
+ return size;
}
- case NumberDouble:
+ case NumberDouble:
case BinData:
case jstOID:
case Bool:
@@ -992,7 +992,7 @@ namespace mongo {
case NumberLong:
case jstNULL:
case Undefined:
- return sizeof(Value);
+ return sizeof(Value);
/* these shouldn't happen in this context */
case MinKey:
@@ -1001,18 +1001,18 @@ namespace mongo {
case Code:
case MaxKey:
assert(false); // CW TODO better message
- return sizeof(Value);
+ return sizeof(Value);
}
- /*
- We shouldn't get here. In order to make the implementor think about
- these cases, they are all listed explicitly, above. The compiler
- should complain if they aren't all listed, because there's no
- default. However, not all the compilers seem to do that. Therefore,
- this final catch-all is here.
- */
- assert(false);
- return sizeof(Value);
+ /*
+ We shouldn't get here. In order to make the implementor think about
+ these cases, they are all listed explicitly, above. The compiler
+ should complain if they aren't all listed, because there's no
+ default. However, not all the compilers seem to do that. Therefore,
+ this final catch-all is here.
+ */
+ assert(false);
+ return sizeof(Value);
}
diff --git a/src/mongo/db/pipeline/value.h b/src/mongo/db/pipeline/value.h
index 52ff8f368f8..a638bd012a3 100755
--- a/src/mongo/db/pipeline/value.h
+++ b/src/mongo/db/pipeline/value.h
@@ -60,8 +60,8 @@ namespace mongo {
/*
Construct a Value from a BSONElement.
- This ignores the name of the element, and only uses the value,
- whatever type it is.
+ This ignores the name of the element, and only uses the value,
+ whatever type it is.
@returns a new Value initialized from the bsonElement
*/
@@ -160,13 +160,13 @@ namespace mongo {
unsigned long long getTimestamp() const;
long long getLong() const;
- /*
- Get the length of an array value.
+ /*
+ Get the length of an array value.
- @returns the length of the array, if this is array-valued; otherwise
- throws an error
- */
- size_t getArrayLength() const;
+ @returns the length of the array, if this is array-valued; otherwise
+ throws an error
+ */
+ size_t getArrayLength() const;
/*
Add this value to the BSON object under construction.
@@ -183,7 +183,7 @@ namespace mongo {
/*
Get references to singleton instances of commonly used field values.
*/
- static intrusive_ptr<const Value> getUndefined();
+ static intrusive_ptr<const Value> getUndefined();
static intrusive_ptr<const Value> getNull();
static intrusive_ptr<const Value> getTrue();
static intrusive_ptr<const Value> getFalse();
@@ -264,39 +264,39 @@ namespace mongo {
*/
static BSONType getWidestNumeric(BSONType lType, BSONType rType);
- /*
- Get the approximate storage size of the value, in bytes.
+ /*
+ Get the approximate storage size of the value, in bytes.
- @returns approximate storage size of the value.
- */
- size_t getApproximateSize() const;
+ @returns approximate storage size of the value.
+ */
+ size_t getApproximateSize() const;
- /*
- Calculate a hash value.
+ /*
+ Calculate a hash value.
- Meant to be used to create composite hashes suitable for
- boost classes such as unordered_map<>.
+ Meant to be used to create composite hashes suitable for
+ boost classes such as unordered_map<>.
- @param seed value to augment with this' hash
- */
- void hash_combine(size_t &seed) const;
+ @param seed value to augment with this' hash
+ */
+ void hash_combine(size_t &seed) const;
- /*
- struct Hash is defined to enable the use of Values as
- keys in boost::unordered_map<>.
+ /*
+ struct Hash is defined to enable the use of Values as
+ keys in boost::unordered_map<>.
- Values are always referenced as immutables in the form
- intrusive_ptr<const Value>, so these operate on that construction.
- */
- struct Hash :
- unary_function<intrusive_ptr<const Value>, size_t> {
- size_t operator()(const intrusive_ptr<const Value> &rV) const;
- };
+ Values are always referenced as immutables in the form
+ intrusive_ptr<const Value>, so these operate on that construction.
+ */
+ struct Hash :
+ unary_function<intrusive_ptr<const Value>, size_t> {
+ size_t operator()(const intrusive_ptr<const Value> &rV) const;
+ };
protected:
Value(); // creates null value
- Value(BSONType type); // creates an empty (unitialized value) of type
- // mostly useful for Undefined
+ Value(BSONType type); // creates an empty (unitialized value) of type
+ // mostly useful for Undefined
Value(bool boolValue);
Value(int intValue);
@@ -310,7 +310,7 @@ namespace mongo {
Value(const intrusive_ptr<Document> &pDocument);
Value(const vector<intrusive_ptr<const Value> > &vpValue);
- void addToBson(Builder *pBuilder) const;
+ void addToBson(Builder *pBuilder) const;
BSONType type;
@@ -336,7 +336,7 @@ namespace mongo {
These are obtained via public static getters defined above.
*/
- static const intrusive_ptr<const Value> pFieldUndefined;
+ static const intrusive_ptr<const Value> pFieldUndefined;
static const intrusive_ptr<const Value> pFieldNull;
static const intrusive_ptr<const Value> pFieldTrue;
static const intrusive_ptr<const Value> pFieldFalse;
@@ -349,7 +349,7 @@ namespace mongo {
public ValueIterator {
public:
// virtuals from ValueIterator
- virtual ~vi();
+ virtual ~vi();
virtual bool more() const;
virtual intrusive_ptr<const Value> next();
@@ -361,7 +361,7 @@ namespace mongo {
size_t size;
size_t nextIndex;
const vector<intrusive_ptr<const Value> > *pvpValue;
- }; /* class vi */
+ }; /* class vi */
};
@@ -371,8 +371,8 @@ namespace mongo {
Useful for unordered_map<>, etc.
*/
inline bool operator==(const intrusive_ptr<const Value> &v1,
- const intrusive_ptr<const Value> &v2) {
- return (Value::compare(v1, v2) == 0);
+ const intrusive_ptr<const Value> &v2) {
+ return (Value::compare(v1, v2) == 0);
}
/*
@@ -390,15 +390,15 @@ namespace mongo {
class ValueStatic :
public Value {
public:
- // virtuals from IntrusiveCounterUnsigned
- virtual void addRef() const;
- virtual void release() const;
-
- // constructors
- ValueStatic();
- ValueStatic(BSONType type);
- ValueStatic(bool boolValue);
- ValueStatic(int intValue);
+ // virtuals from IntrusiveCounterUnsigned
+ virtual void addRef() const;
+ virtual void release() const;
+
+ // constructors
+ ValueStatic();
+ ValueStatic(BSONType type);
+ ValueStatic(bool boolValue);
+ ValueStatic(int intValue);
};
}
@@ -412,7 +412,7 @@ namespace mongo {
inline size_t Value::getArrayLength() const {
assert(getType() == Array);
- return vpValue.size();
+ return vpValue.size();
}
inline intrusive_ptr<const Value> Value::getUndefined() {
@@ -444,26 +444,26 @@ namespace mongo {
}
inline size_t Value::Hash::operator()(
- const intrusive_ptr<const Value> &rV) const {
- size_t seed = 0xf0afbeef;
- rV->hash_combine(seed);
- return seed;
+ const intrusive_ptr<const Value> &rV) const {
+ size_t seed = 0xf0afbeef;
+ rV->hash_combine(seed);
+ return seed;
}
inline ValueStatic::ValueStatic():
- Value() {
+ Value() {
}
inline ValueStatic::ValueStatic(BSONType type):
- Value(type) {
+ Value(type) {
}
inline ValueStatic::ValueStatic(bool boolValue):
- Value(boolValue) {
+ Value(boolValue) {
}
inline ValueStatic::ValueStatic(int intValue):
- Value(intValue) {
+ Value(intValue) {
}
};
diff --git a/src/mongo/s/commands_public.cpp b/src/mongo/s/commands_public.cpp
index b0c4e437cf2..04dbcb858e4 100644
--- a/src/mongo/s/commands_public.cpp
+++ b/src/mongo/s/commands_public.cpp
@@ -1361,133 +1361,133 @@ namespace mongo {
} compactCmd;
- /*
- Note these are in the pub_grid_cmds namespace, so they don't
- conflict with those in db/commands/pipeline_command.cpp.
- */
- class PipelineCommand :
- public PublicGridCommand {
- public:
- PipelineCommand();
-
- // virtuals from Command
- virtual bool run(const string &dbName , BSONObj &cmdObj,
- int options, string &errmsg,
- BSONObjBuilder &result, bool fromRepl);
-
- private:
-
- };
-
-
- /* -------------------- PipelineCommand ----------------------------- */
-
- static const PipelineCommand pipelineCommand;
-
- PipelineCommand::PipelineCommand():
- PublicGridCommand(Pipeline::commandName) {
- }
-
- bool PipelineCommand::run(const string &dbName , BSONObj &cmdObj,
- int options, string &errmsg,
- BSONObjBuilder &result, bool fromRepl) {
- //const string shardedOutputCollection = getTmpName( collection );
-
- intrusive_ptr<ExpressionContext> pCtx(
- ExpressionContext::create());
- pCtx->setInRouter(true);
-
- /* parse the pipeline specification */
- boost::shared_ptr<Pipeline> pPipeline(
- Pipeline::parseCommand(errmsg, cmdObj, pCtx));
- if (!pPipeline.get())
- return false; // there was some parsing error
-
- string fullns(dbName + "." + pPipeline->getCollectionName());
-
- /*
- If the system isn't running sharded, or the target collection
- isn't sharded, pass this on to a mongod.
- */
- DBConfigPtr conf(grid.getDBConfig(dbName , false));
- if (!conf || !conf->isShardingEnabled() || !conf->isSharded(fullns))
- return passthrough(conf, cmdObj, result);
-
- /* split the pipeline into pieces for mongods and this mongos */
- boost::shared_ptr<Pipeline> pShardPipeline(
- pPipeline->splitForSharded());
-
- /* create the command for the shards */
- BSONObjBuilder commandBuilder;
- pShardPipeline->toBson(&commandBuilder);
- BSONObj shardedCommand(commandBuilder.done());
-
- BSONObjBuilder shardQueryBuilder;
+ /*
+ Note these are in the pub_grid_cmds namespace, so they don't
+ conflict with those in db/commands/pipeline_command.cpp.
+ */
+ class PipelineCommand :
+ public PublicGridCommand {
+ public:
+ PipelineCommand();
+
+ // virtuals from Command
+ virtual bool run(const string &dbName , BSONObj &cmdObj,
+ int options, string &errmsg,
+ BSONObjBuilder &result, bool fromRepl);
+
+ private:
+
+ };
+
+
+ /* -------------------- PipelineCommand ----------------------------- */
+
+ static const PipelineCommand pipelineCommand;
+
+ PipelineCommand::PipelineCommand():
+ PublicGridCommand(Pipeline::commandName) {
+ }
+
+ bool PipelineCommand::run(const string &dbName , BSONObj &cmdObj,
+ int options, string &errmsg,
+ BSONObjBuilder &result, bool fromRepl) {
+ //const string shardedOutputCollection = getTmpName( collection );
+
+ intrusive_ptr<ExpressionContext> pCtx(
+ ExpressionContext::create());
+ pCtx->setInRouter(true);
+
+ /* parse the pipeline specification */
+ boost::shared_ptr<Pipeline> pPipeline(
+ Pipeline::parseCommand(errmsg, cmdObj, pCtx));
+ if (!pPipeline.get())
+ return false; // there was some parsing error
+
+ string fullns(dbName + "." + pPipeline->getCollectionName());
+
+ /*
+ If the system isn't running sharded, or the target collection
+ isn't sharded, pass this on to a mongod.
+ */
+ DBConfigPtr conf(grid.getDBConfig(dbName , false));
+ if (!conf || !conf->isShardingEnabled() || !conf->isSharded(fullns))
+ return passthrough(conf, cmdObj, result);
+
+ /* split the pipeline into pieces for mongods and this mongos */
+ boost::shared_ptr<Pipeline> pShardPipeline(
+ pPipeline->splitForSharded());
+
+ /* create the command for the shards */
+ BSONObjBuilder commandBuilder;
+ pShardPipeline->toBson(&commandBuilder);
+ BSONObj shardedCommand(commandBuilder.done());
+
+ BSONObjBuilder shardQueryBuilder;
#ifdef NEVER
- BSONObjBuilder shardSortBuilder;
- pShardPipeline->getCursorMods(
- &shardQueryBuilder, &shardSortBuilder);
- BSONObj shardSort(shardSortBuilder.done());
+ BSONObjBuilder shardSortBuilder;
+ pShardPipeline->getCursorMods(
+ &shardQueryBuilder, &shardSortBuilder);
+ BSONObj shardSort(shardSortBuilder.done());
#endif /* NEVER */
- pShardPipeline->getInitialQuery(&shardQueryBuilder);
- BSONObj shardQuery(shardQueryBuilder.done());
-
- ChunkManagerPtr cm(conf->getChunkManager(fullns));
- set<Shard> shards;
- cm->getShardsForQuery(shards, shardQuery);
-
- /*
- From MRCmd::Run: "we need to use our connections to the shard
- so filtering is done correctly for un-owned docs so we allocate
- them in our thread and hand off"
- */
- vector<boost::shared_ptr<ShardConnection> > shardConns;
- list<boost::shared_ptr<Future::CommandResult> > futures;
- for (set<Shard>::iterator i=shards.begin(), end=shards.end();
- i != end; i++) {
- boost::shared_ptr<ShardConnection> temp(
- new ShardConnection(i->getConnString(), fullns));
- assert(temp->get());
- futures.push_back(
- Future::spawnCommand(i->getConnString(), dbName,
- shardedCommand , 0, temp->get()));
- shardConns.push_back(temp);
- }
+ pShardPipeline->getInitialQuery(&shardQueryBuilder);
+ BSONObj shardQuery(shardQueryBuilder.done());
+
+ ChunkManagerPtr cm(conf->getChunkManager(fullns));
+ set<Shard> shards;
+ cm->getShardsForQuery(shards, shardQuery);
+
+ /*
+ From MRCmd::Run: "we need to use our connections to the shard
+ so filtering is done correctly for un-owned docs so we allocate
+ them in our thread and hand off"
+ */
+ vector<boost::shared_ptr<ShardConnection> > shardConns;
+ list<boost::shared_ptr<Future::CommandResult> > futures;
+ for (set<Shard>::iterator i=shards.begin(), end=shards.end();
+ i != end; i++) {
+ boost::shared_ptr<ShardConnection> temp(
+ new ShardConnection(i->getConnString(), fullns));
+ assert(temp->get());
+ futures.push_back(
+ Future::spawnCommand(i->getConnString(), dbName,
+ shardedCommand , 0, temp->get()));
+ shardConns.push_back(temp);
+ }
- /* wrap the list of futures with a source */
- intrusive_ptr<DocumentSourceCommandFutures> pSource(
- DocumentSourceCommandFutures::create(errmsg, &futures));
+ /* wrap the list of futures with a source */
+ intrusive_ptr<DocumentSourceCommandFutures> pSource(
+ DocumentSourceCommandFutures::create(errmsg, &futures));
- /* run the pipeline */
- bool failed = pPipeline->run(result, errmsg, pSource);
+ /* run the pipeline */
+ bool failed = pPipeline->run(result, errmsg, pSource);
/*
- BSONObjBuilder shardresults;
- for (list<boost::shared_ptr<Future::CommandResult> >::iterator i(
- futures.begin()); i!=futures.end(); ++i) {
- boost::shared_ptr<Future::CommandResult> res(*i);
- if (!res->join()) {
- error() << "sharded pipeline failed on shard: " <<
- res->getServer() << " error: " << res->result() << endl;
- result.append( "cause" , res->result() );
- errmsg = "mongod pipeline failed: ";
- errmsg += res->result().toString();
- failed = true;
- continue;
- }
-
- shardresults.append( res->getServer() , res->result() );
- }
+ BSONObjBuilder shardresults;
+ for (list<boost::shared_ptr<Future::CommandResult> >::iterator i(
+ futures.begin()); i!=futures.end(); ++i) {
+ boost::shared_ptr<Future::CommandResult> res(*i);
+ if (!res->join()) {
+ error() << "sharded pipeline failed on shard: " <<
+ res->getServer() << " error: " << res->result() << endl;
+ result.append( "cause" , res->result() );
+ errmsg = "mongod pipeline failed: ";
+ errmsg += res->result().toString();
+ failed = true;
+ continue;
+ }
+
+ shardresults.append( res->getServer() , res->result() );
+ }
*/
- for(unsigned i = 0; i < shardConns.size(); ++i)
- shardConns[i]->done();
+ for(unsigned i = 0; i < shardConns.size(); ++i)
+ shardConns[i]->done();
- if (failed && (errmsg.length() > 0))
- return false;
+ if (failed && (errmsg.length() > 0))
+ return false;
- return true;
- }
+ return true;
+ }
} // namespace pub_grid_cmds