Diffstat (limited to 'src/mongo')
22 files changed, 188 insertions, 285 deletions
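The pattern this commit applies across the pipeline layer: instead of each DocumentSource writing itself into a caller-supplied BSONObjBuilder (sourceToBson / addToBsonArray), each stage now returns an immutable Value from serialize(), and the default serializeToArray() appends that Value to a shared vector<Value> only when it is not missing. A minimal self-contained sketch of that shape follows; it uses toy stand-ins (std::optional<std::string> in place of Value, plain strings in place of Documents) and is not MongoDB's actual API:

#include <iostream>
#include <memory>
#include <optional>
#include <string>
#include <vector>

struct Stage {
    virtual ~Stage() = default;

    // New style: return a self-describing value; nullopt plays the role of a
    // missing Value and means "contribute no entry to the serialized pipeline".
    virtual std::optional<std::string> serialize(bool explain) const = 0;

    // Default collection hook, mirroring DocumentSource::serializeToArray();
    // a stage like $sort can override this to push more than one entry.
    virtual void serializeToArray(std::vector<std::string>& array, bool explain) const {
        if (std::optional<std::string> entry = serialize(explain))
            array.push_back(*entry);
    }
};

struct Limit : Stage {
    long long limit;
    explicit Limit(long long n) : limit(n) {}
    std::optional<std::string> serialize(bool) const override {
        return "{ $limit: " + std::to_string(limit) + " }";
    }
};

// Analogous to DocumentSourceBsonArray: serialized only under explain.
struct BsonArraySource : Stage {
    std::optional<std::string> serialize(bool explain) const override {
        if (explain)
            return std::string("{ bsonArray: {} }");
        return std::nullopt;  // missing => skipped entirely
    }
};

int main() {
    std::vector<std::unique_ptr<Stage>> pipeline;
    pipeline.push_back(std::make_unique<BsonArraySource>());
    pipeline.push_back(std::make_unique<Limit>(5));

    std::vector<std::string> serialized;
    for (const auto& stage : pipeline)
        stage->serializeToArray(serialized, /*explain=*/false);

    for (const auto& entry : serialized)
        std::cout << entry << '\n';  // prints only "{ $limit: 5 }"
}

Returning a value rather than threading a builder through every stage is what lets Pipeline::serialize() in pipeline.cpp hand back a Document that callers can either transmit (via toBson()) or log, and it gives explain-only sources a clean way to opt out of command serialization.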
diff --git a/src/mongo/db/commands/pipeline_command.cpp b/src/mongo/db/commands/pipeline_command.cpp index b69226e4a08..280e6f9796b 100644 --- a/src/mongo/db/commands/pipeline_command.cpp +++ b/src/mongo/db/commands/pipeline_command.cpp @@ -234,9 +234,7 @@ namespace mongo { // important because sharded aggregations rely on this ability. // Skipping when inShard because this has already been through the // transformation (and this unsets pCtx->inShard). - BSONObjBuilder bb; - pPipeline->toBson(&bb); - parsed = bb.obj(); + parsed = pPipeline->serialize().toBson(); pPipeline = Pipeline::parseCommand(errmsg, parsed, pCtx); verify(pPipeline); } @@ -295,31 +293,20 @@ namespace mongo { */ intrusive_ptr<Pipeline> pShardSplit = pPipeline->splitForSharded(); - /* - Write the split pipeline as we would in order to transmit it to - the shard servers. - */ - BSONObjBuilder shardBuilder; - pShardSplit->toBson(&shardBuilder); - BSONObj shardBson(shardBuilder.done()); - - DEV (log() << "\n---- shardBson\n" << - shardBson.jsonString(Strict, 1) << "\n----\n"); - - /* for debugging purposes, show what the pipeline now looks like */ - DEV { - BSONObjBuilder pipelineBuilder; - pPipeline->toBson(&pipelineBuilder); - BSONObj pipelineBson(pipelineBuilder.done()); - (log() << "\n---- pipelineBson\n" << - pipelineBson.jsonString(Strict, 1) << "\n----\n"); - } + // Write the split pipeline as we would in order to transmit it to the shard servers. + Document shardCmd = pShardSplit->serialize(); + + DEV log() << "\n---- shardDescription\n" << shardCmd.toString() << "\n----\n"; + + // for debugging purposes, show what the pipeline now looks like + DEV log() << "\n---- pipelineDescription\n" << pPipeline->serialize() << "\n----\n"; /* on the shard servers, create the local pipeline */ intrusive_ptr<ExpressionContext> pShardCtx = new ExpressionContext(InterruptStatusMongod::status, NamespaceString(ns)); + BSONObj shardObj = shardCmd.toBson(); intrusive_ptr<Pipeline> pShardPipeline( - Pipeline::parseCommand(errmsg, shardBson, pShardCtx)); + Pipeline::parseCommand(errmsg, shardObj, pShardCtx)); if (!pShardPipeline.get()) { return false; } diff --git a/src/mongo/db/pipeline/document_source.cpp b/src/mongo/db/pipeline/document_source.cpp index 1cfb2d8c689..e91dc62e102 100644 --- a/src/mongo/db/pipeline/document_source.cpp +++ b/src/mongo/db/pipeline/document_source.cpp @@ -18,6 +18,7 @@ #include "mongo/db/pipeline/document_source.h" #include "mongo/db/pipeline/expression_context.h" +#include "mongo/db/pipeline/value.h" namespace mongo { @@ -55,18 +56,11 @@ namespace mongo { } } - void DocumentSource::addToBsonArray( - BSONArrayBuilder *pBuilder, bool explain) const { - BSONObjBuilder insides; - sourceToBson(&insides, explain); - -/* No statistics at this time - if (explain) { - insides.append("nOut", nOut); + void DocumentSource::serializeToArray(vector<Value>& array, bool explain) const { + Value entry = serialize(explain); + if (!entry.missing()) { + array.push_back(entry); } -*/ - - pBuilder->append(insides.done()); } BSONObj DocumentSource::depsToProjection(const set<string>& deps) { diff --git a/src/mongo/db/pipeline/document_source.h b/src/mongo/db/pipeline/document_source.h index 99ae26b0b82..8ce6e3b57bc 100644 --- a/src/mongo/db/pipeline/document_source.h +++ b/src/mongo/db/pipeline/document_source.h @@ -159,19 +159,13 @@ namespace mongo { static Document documentFromBsonWithDeps(const BSONObj& object, const ParsedDeps& deps); /** - Add the DocumentSource to the array builder. 
- - The default implementation calls sourceToBson() in order to - convert the inner part of the object which will be added to the - array being built here. - - A subclass may choose to overwrite this rather than addToBsonArray - if it should output multiple stages. - - @param pBuilder the array builder to add the operation to. - @param explain create explain output + * In the default case, serializes the DocumentSource and adds it to the vector<Value>. + * + * A subclass may choose to override this, rather than serialize, + * if it should output multiple stages (e.g., $sort sometimes also outputs a $limit). */ - virtual void addToBsonArray(BSONArrayBuilder *pBuilder, bool explain=false) const; + + virtual void serializeToArray(vector<Value>& array, bool explain = false) const; /// Returns true if doesn't require an input source (most DocumentSources do). virtual bool isValidInitialSource() const { return false; } @@ -182,18 +176,6 @@ namespace mongo { */ DocumentSource(const intrusive_ptr<ExpressionContext> &pExpCtx); - /** - Create an object that represents the document source. The object - will have a single field whose name is the source's name. This - will be used by the default implementation of addToBsonArray() - to add this object to a pipeline being represented in BSON. - - @param pBuilder a blank object builder to write to - @param explain create explain output - */ - virtual void sourceToBson(BSONObjBuilder *pBuilder, - bool explain) const = 0; - /* Most DocumentSources have an underlying source they get their data from. This is a convenience for them. @@ -219,6 +201,16 @@ namespace mongo { This is *not* unsigned so it can be passed to BSONObjBuilder.append(). */ long long nRowsOut; + + private: + /** + * Create a Value that represents the document source. + * + * This is used by the default implementation of serializeToArray() to add this object + * to a pipeline being serialized. Returning a missing() Value results in no entry + * being added to the array for this stage (DocumentSource).
+ */ + virtual Value serialize(bool explain = false) const = 0; }; /** This class marks DocumentSources that should be split between the router and the shards @@ -281,6 +273,7 @@ namespace mongo { public: // virtuals from DocumentSource virtual boost::optional<Document> getNext(); + virtual Value serialize(bool explain = false) const; virtual void setSource(DocumentSource *pSource); virtual bool isValidInitialSource() const { return true; } @@ -302,10 +295,6 @@ namespace mongo { BSONElement *pBsonElement, const intrusive_ptr<ExpressionContext> &pExpCtx); - protected: - // virtuals from DocumentSource - virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const; - private: DocumentSourceBsonArray(BSONElement *pBsonElement, const intrusive_ptr<ExpressionContext> &pExpCtx); @@ -320,6 +309,7 @@ namespace mongo { public: // virtuals from DocumentSource virtual boost::optional<Document> getNext(); + virtual Value serialize(bool explain = false) const; virtual void setSource(DocumentSource *pSource); virtual bool isValidInitialSource() const { return true; } @@ -337,10 +327,6 @@ namespace mongo { const ShardOutput& shardOutput, const intrusive_ptr<ExpressionContext>& pExpCtx); - protected: - // virtuals from DocumentSource - virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const; - private: DocumentSourceCommandShards(const ShardOutput& shardOutput, const intrusive_ptr<ExpressionContext>& pExpCtx); @@ -374,6 +360,7 @@ namespace mongo { // virtuals from DocumentSource virtual ~DocumentSourceCursor(); virtual boost::optional<Document> getNext(); + virtual Value serialize(bool explain = false) const; virtual void setSource(DocumentSource *pSource); virtual bool coalesce(const intrusive_ptr<DocumentSource>& nextSource); virtual bool isValidInitialSource() const { return true; } @@ -440,9 +427,6 @@ namespace mongo { /// returns -1 for no limit long long getLimit() const; - protected: - // virtuals from DocumentSource - virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const; private: DocumentSourceCursor( @@ -531,6 +515,7 @@ namespace mongo { virtual const char *getSourceName() const; virtual GetDepsReturn getDependencies(set<string>& deps) const; virtual void dispose(); + virtual Value serialize(bool explain = false) const; /** Create a new grouping DocumentSource. @@ -592,10 +577,6 @@ namespace mongo { static const char groupName[]; - protected: - // virtuals from DocumentSource - virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const; - private: DocumentSourceGroup(const intrusive_ptr<ExpressionContext> &pExpCtx); @@ -660,6 +641,7 @@ namespace mongo { public: // virtuals from DocumentSource virtual const char *getSourceName() const; + virtual Value serialize(bool explain = false) const; /** Create a filter. 
@@ -686,10 +668,6 @@ namespace mongo { static const char matchName[]; - protected: - // virtuals from DocumentSource - virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const; - // virtuals from DocumentSourceFilterBase virtual bool accept(const Document& pDocument) const; @@ -710,6 +688,7 @@ namespace mongo { virtual void setSource(DocumentSource *pSource); virtual const char *getSourceName() const; virtual void dispose(); + virtual Value serialize(bool explain = false) const; virtual bool isValidInitialSource() const { return true; } static intrusive_ptr<DocumentSource> createFromBson( @@ -721,9 +700,6 @@ namespace mongo { const intrusive_ptr<ExpressionContext> &pExpCtx); static const char name[]; - protected: - // virtuals from DocumentSource - virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const; private: @@ -757,6 +733,7 @@ namespace mongo { virtual ~DocumentSourceOut(); virtual boost::optional<Document> getNext(); virtual const char *getSourceName() const; + virtual Value serialize(bool explain = false) const; // Virtuals for SplittableDocumentSource virtual intrusive_ptr<DocumentSource> getShardSource() { return NULL; } @@ -780,10 +757,6 @@ namespace mongo { static const char outName[]; - protected: - // virtuals from DocumentSource - virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const; - private: DocumentSourceOut(const NamespaceString& outputNs, const intrusive_ptr<ExpressionContext> &pExpCtx); @@ -807,6 +780,7 @@ namespace mongo { virtual boost::optional<Document> getNext(); virtual const char *getSourceName() const; virtual void optimize(); + virtual Value serialize(bool explain = false) const; virtual GetDepsReturn getDependencies(set<string>& deps) const; @@ -829,10 +803,6 @@ namespace mongo { /** projection as specified by the user */ BSONObj getRaw() const { return _raw; } - protected: - // virtuals from DocumentSource - virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const; - private: DocumentSourceProject(const intrusive_ptr<ExpressionContext>& pExpCtx, const intrusive_ptr<ExpressionObject>& exprObj); @@ -859,8 +829,7 @@ namespace mongo { static intrusive_ptr<DocumentSource> createFromBson(BSONElement* bsonElement, const intrusive_ptr<ExpressionContext>& expCtx); - protected: - virtual void sourceToBson(BSONObjBuilder* pBuilder, bool explain) const; + virtual Value serialize(bool explain = false) const; private: DocumentSourceRedact(const intrusive_ptr<ExpressionContext>& expCtx, @@ -877,7 +846,7 @@ namespace mongo { // virtuals from DocumentSource virtual boost::optional<Document> getNext(); virtual const char *getSourceName() const; - virtual void addToBsonArray(BSONArrayBuilder *pBuilder, bool explain=false) const; + virtual void serializeToArray(vector<Value>& array, bool explain = false) const; virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource); virtual void dispose(); @@ -902,13 +871,8 @@ namespace mongo { */ void addKey(const string &fieldPath, bool ascending); - /** - Write out an object whose contents are the sort key. - - @param pBuilder initialized object builder. - @param fieldPrefix specify whether or not to include the field prefix - */ - void sortKeyToBson(BSONObjBuilder *pBuilder, bool usePrefix) const; + /// Write out a Document whose contents are the sort key. + Document serializeSortKey() const; /** Create a sorting DocumentSource from BSON. 
@@ -937,15 +901,14 @@ namespace mongo { intrusive_ptr<DocumentSourceLimit> getLimitSrc() const { return limitSrc; } static const char sortName[]; - protected: - // virtuals from DocumentSource - virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const { - verify(false); // should call addToBsonArray instead - } private: DocumentSourceSort(const intrusive_ptr<ExpressionContext> &pExpCtx); + virtual Value serialize(bool explain = false) const { + verify(false); // should call serializeToArray instead + } + /* Before returning anything, this source must fetch everything from the underlying source and group it. populate() is used to do that @@ -992,6 +955,7 @@ namespace mongo { virtual boost::optional<Document> getNext(); virtual const char *getSourceName() const; virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource); + virtual Value serialize(bool explain = false) const; virtual GetDepsReturn getDependencies(set<string>& deps) const { return SEE_NEXT; // This doesn't affect needed fields @@ -1032,10 +996,6 @@ namespace mongo { static const char limitName[]; - protected: - // virtuals from DocumentSource - virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const; - private: DocumentSourceLimit(const intrusive_ptr<ExpressionContext> &pExpCtx, long long limit); @@ -1051,6 +1011,7 @@ namespace mongo { virtual boost::optional<Document> getNext(); virtual const char *getSourceName() const; virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource); + virtual Value serialize(bool explain = false) const; virtual GetDepsReturn getDependencies(set<string>& deps) const { return SEE_NEXT; // This doesn't affect needed fields @@ -1090,10 +1051,6 @@ namespace mongo { static const char skipName[]; - protected: - // virtuals from DocumentSource - virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const; - private: DocumentSourceSkip(const intrusive_ptr<ExpressionContext> &pExpCtx); @@ -1108,6 +1065,7 @@ namespace mongo { // virtuals from DocumentSource virtual boost::optional<Document> getNext(); virtual const char *getSourceName() const; + virtual Value serialize(bool explain = false) const; virtual GetDepsReturn getDependencies(set<string>& deps) const; @@ -1127,10 +1085,6 @@ namespace mongo { static const char unwindName[]; - protected: - // virtuals from DocumentSource - virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const; - private: DocumentSourceUnwind(const intrusive_ptr<ExpressionContext> &pExpCtx); @@ -1154,6 +1108,7 @@ namespace mongo { virtual void setSource(DocumentSource *pSource); // errors out since this must be first virtual bool coalesce(const intrusive_ptr<DocumentSource> &pNextSource); virtual bool isValidInitialSource() const { return true; } + virtual Value serialize(bool explain = false) const; // Virtuals for SplittableDocumentSource virtual intrusive_ptr<DocumentSource> getShardSource(); @@ -1171,10 +1126,6 @@ namespace mongo { static intrusive_ptr<DocumentSourceGeoNear> create( const intrusive_ptr<ExpressionContext> &pCtx); - protected: - // virtuals from DocumentSource - virtual void sourceToBson(BSONObjBuilder *pBuilder, bool explain) const; - private: DocumentSourceGeoNear(const intrusive_ptr<ExpressionContext> &pExpCtx); diff --git a/src/mongo/db/pipeline/document_source_bson_array.cpp b/src/mongo/db/pipeline/document_source_bson_array.cpp index 0f13d3ab4c8..fc40915859e 100644 --- a/src/mongo/db/pipeline/document_source_bson_array.cpp +++
b/src/mongo/db/pipeline/document_source_bson_array.cpp @@ -55,13 +55,10 @@ namespace mongo { return pSource; } - void DocumentSourceBsonArray::sourceToBson( - BSONObjBuilder *pBuilder, bool explain) const { - + Value DocumentSourceBsonArray::serialize(bool explain) const { if (explain) { - BSONObj empty; - - pBuilder->append("bsonArray", empty); + return Value(DOC("bsonArray" << Document())); } + return Value(); } } diff --git a/src/mongo/db/pipeline/document_source_command_shards.cpp b/src/mongo/db/pipeline/document_source_command_shards.cpp index f11978468bf..4537c37b2fc 100644 --- a/src/mongo/db/pipeline/document_source_command_shards.cpp +++ b/src/mongo/db/pipeline/document_source_command_shards.cpp @@ -26,9 +26,8 @@ namespace mongo { verify(false); } - void DocumentSourceCommandShards::sourceToBson( - BSONObjBuilder *pBuilder, bool explain) const { - /* this has no BSON equivalent */ + Value DocumentSourceCommandShards::serialize(bool explain) const { + // this has no BSON equivalent verify(false); } diff --git a/src/mongo/db/pipeline/document_source_cursor.cpp b/src/mongo/db/pipeline/document_source_cursor.cpp index 50fbc876ef3..c16238d096a 100644 --- a/src/mongo/db/pipeline/document_source_cursor.cpp +++ b/src/mongo/db/pipeline/document_source_cursor.cpp @@ -190,28 +190,25 @@ namespace mongo { return false; } - void DocumentSourceCursor::sourceToBson( - BSONObjBuilder *pBuilder, bool explain) const { - - /* this has no analog in the BSON world, so only allow it for explain */ - if (explain) - { - BSONObj bsonObj; + Value DocumentSourceCursor::serialize(bool explain) const { + // we never parse a documentSourceCursor, so we do not serialize it + if (explain) { + MutableDocument result; - pBuilder->append("query", _query); + result.setField("query", Value(_query)); if (!_sort.isEmpty()) { - pBuilder->append("sort", _sort); + result.setField("sort", Value(_sort)); } if (_limit) { - pBuilder->append("limit", _limit->getLimit()); + result.setField("limit", Value(_limit->getLimit())); } BSONObj projectionSpec; if (_projection) { projectionSpec = _projection->getSpec(); - pBuilder->append("projection", projectionSpec); + result.setField("projection", Value(projectionSpec)); } // construct query for explain @@ -227,8 +224,10 @@ namespace mongo { ? 
&projectionSpec : NULL)); - pBuilder->append("cursor", explainResult); + result.setField("cursor", Value(explainResult)); + return result.freezeToValue(); } + return Value(); } DocumentSourceCursor::DocumentSourceCursor(const string& ns, diff --git a/src/mongo/db/pipeline/document_source_geo_near.cpp b/src/mongo/db/pipeline/document_source_geo_near.cpp index f6f8305c271..7c2476985e8 100644 --- a/src/mongo/db/pipeline/document_source_geo_near.cpp +++ b/src/mongo/db/pipeline/document_source_geo_near.cpp @@ -67,32 +67,34 @@ namespace mongo { limit); } - void DocumentSourceGeoNear::sourceToBson(BSONObjBuilder *pBuilder, bool explain) const { - BSONObjBuilder geoNear (pBuilder->subobjStart("$geoNear")); + Value DocumentSourceGeoNear::serialize(bool explain) const { + MutableDocument result; if (coordsIsArray) { - geoNear.appendArray("near", coords); + result.setField("near", Value(BSONArray(coords))); } else { - geoNear.append("near", coords); + result.setField("near", Value(coords)); } - geoNear.append("distanceField", distanceField->getPath(false)); // not in buildGeoNearCmd - geoNear.append("limit", limit); + // not in buildGeoNearCmd + result.setField("distanceField", Value(distanceField->getPath(false))); + + result.setField("limit", Value(limit)); if (maxDistance > 0) - geoNear.append("maxDistance", maxDistance); + result.setField("maxDistance", Value(maxDistance)); - geoNear.append("query", query); - geoNear.append("spherical", spherical); - geoNear.append("distanceMultiplier", distanceMultiplier); + result.setField("query", Value(query)); + result.setField("spherical", Value(spherical)); + result.setField("distanceMultiplier", Value(distanceMultiplier)); if (includeLocs) - geoNear.append("includeLocs", includeLocs->getPath(false)); + result.setField("includeLocs", Value(includeLocs->getPath(false))); - geoNear.append("uniqueDocs", uniqueDocs); + result.setField("uniqueDocs", Value(uniqueDocs)); - geoNear.doneFast(); + return Value(DOC(getSourceName() << result.freeze())); } BSONObj DocumentSourceGeoNear::buildGeoNearCmd() const { diff --git a/src/mongo/db/pipeline/document_source_group.cpp b/src/mongo/db/pipeline/document_source_group.cpp index 486af593028..2ece0f36d98 100644 --- a/src/mongo/db/pipeline/document_source_group.cpp +++ b/src/mongo/db/pipeline/document_source_group.cpp @@ -109,18 +109,17 @@ namespace mongo { pSource->dispose(); } - void DocumentSourceGroup::sourceToBson(BSONObjBuilder* pBuilder, bool explain) const { + Value DocumentSourceGroup::serialize(bool explain) const { MutableDocument insides; - /* add the _id */ + // add the _id insides["_id"] = pIdExpression->serialize(); - /* add the remaining fields */ + // add the remaining fields const size_t n = vFieldName.size(); for(size_t i = 0; i < n; ++i) { intrusive_ptr<Accumulator> accum = vpAccumulatorFactory[i](); - insides[vFieldName[i]] = Value( - DOC(accum->getOpName() << vpExpression[i]->serialize())); + insides[vFieldName[i]] = Value(DOC(accum->getOpName() << vpExpression[i]->serialize())); } if (_doingMerge) { @@ -130,7 +129,7 @@ namespace mongo { insides["$doingMerge"] = Value(true); } - *pBuilder << groupName << insides.freeze(); + return Value(DOC(getSourceName() << insides.freeze())); } DocumentSource::GetDepsReturn DocumentSourceGroup::getDependencies(set<string>& deps) const { diff --git a/src/mongo/db/pipeline/document_source_limit.cpp b/src/mongo/db/pipeline/document_source_limit.cpp index f19da5171c0..cad0893c382 100644 --- a/src/mongo/db/pipeline/document_source_limit.cpp +++ 
b/src/mongo/db/pipeline/document_source_limit.cpp @@ -63,9 +63,8 @@ namespace mongo { return pSource->getNext(); } - void DocumentSourceLimit::sourceToBson( - BSONObjBuilder *pBuilder, bool explain) const { - pBuilder->append("$limit", limit); + Value DocumentSourceLimit::serialize(bool explain) const { + return Value(DOC(getSourceName() << limit)); } intrusive_ptr<DocumentSourceLimit> DocumentSourceLimit::create( diff --git a/src/mongo/db/pipeline/document_source_match.cpp b/src/mongo/db/pipeline/document_source_match.cpp index 476abbf946e..d1e502bc7d8 100644 --- a/src/mongo/db/pipeline/document_source_match.cpp +++ b/src/mongo/db/pipeline/document_source_match.cpp @@ -30,10 +30,8 @@ namespace mongo { return matchName; } - void DocumentSourceMatch::sourceToBson( - BSONObjBuilder *pBuilder, bool explain) const { - const BSONObj *pQuery = matcher.getQuery(); - pBuilder->append(matchName, *pQuery); + Value DocumentSourceMatch::serialize(bool explain) const { + return Value(DOC(getSourceName() << Document(*matcher.getQuery()))); } bool DocumentSourceMatch::accept(const Document& input) const { diff --git a/src/mongo/db/pipeline/document_source_merge_cursors.cpp b/src/mongo/db/pipeline/document_source_merge_cursors.cpp index e45716999be..1d4ed9347b3 100644 --- a/src/mongo/db/pipeline/document_source_merge_cursors.cpp +++ b/src/mongo/db/pipeline/document_source_merge_cursors.cpp @@ -67,12 +67,13 @@ namespace mongo { return new DocumentSourceMergeCursors(cursorIds, pExpCtx); } - void DocumentSourceMergeCursors::sourceToBson(BSONObjBuilder* builder, bool explain) const { - BSONArrayBuilder cursors(builder->subarrayStart("$mergeCursors")); + Value DocumentSourceMergeCursors::serialize(bool explain) const { + vector<Value> cursors; for (size_t i = 0; i < _cursorIds.size(); i++) { - cursors.append(BSON("host" << _cursorIds[i].first.toString() - << "id" << _cursorIds[i].second)); + cursors.push_back(Value(DOC("host" << Value(_cursorIds[i].first.toString()) + << "id" << _cursorIds[i].second))); } + return Value(DOC(getSourceName() << Value(cursors))); } DocumentSourceMergeCursors::CursorAndConnection::CursorAndConnection( diff --git a/src/mongo/db/pipeline/document_source_out.cpp b/src/mongo/db/pipeline/document_source_out.cpp index f76a6d74a3a..bd04811779d 100644 --- a/src/mongo/db/pipeline/document_source_out.cpp +++ b/src/mongo/db/pipeline/document_source_out.cpp @@ -157,10 +157,10 @@ namespace mongo { return new DocumentSourceOut(outputNs, pExpCtx); } - void DocumentSourceOut::sourceToBson(BSONObjBuilder *pBuilder, bool explain) const { + Value DocumentSourceOut::serialize(bool explain) const { massert(17000, "$out shouldn't have different db than input", _outputNs.db() == pExpCtx->ns.db()); - pBuilder->append("$out", _outputNs.coll()); + return Value(DOC(getSourceName() << _outputNs.coll())); } } diff --git a/src/mongo/db/pipeline/document_source_project.cpp b/src/mongo/db/pipeline/document_source_project.cpp index d751eb1c51d..5db8afde329 100644 --- a/src/mongo/db/pipeline/document_source_project.cpp +++ b/src/mongo/db/pipeline/document_source_project.cpp @@ -82,8 +82,8 @@ namespace mongo { pEO = dynamic_pointer_cast<ExpressionObject>(pE); } - void DocumentSourceProject::sourceToBson(BSONObjBuilder* pBuilder, bool explain) const { - *pBuilder << projectName << pEO->serialize(); + Value DocumentSourceProject::serialize(bool explain) const { + return Value(DOC(getSourceName() << pEO->serialize())); } intrusive_ptr<DocumentSource> DocumentSourceProject::createFromBson( diff --git 
a/src/mongo/db/pipeline/document_source_redact.cpp b/src/mongo/db/pipeline/document_source_redact.cpp index 04d2b4f644b..21f5aed1b00 100644 --- a/src/mongo/db/pipeline/document_source_redact.cpp +++ b/src/mongo/db/pipeline/document_source_redact.cpp @@ -124,8 +124,8 @@ namespace mongo { _expression = _expression->optimize(); } - void DocumentSourceRedact::sourceToBson(BSONObjBuilder* pBuilder, bool explain) const { - *pBuilder << redactName << _expression.get()->serialize(); + Value DocumentSourceRedact::serialize(bool explain) const { + return Value(DOC(getSourceName() << _expression.get()->serialize())); } intrusive_ptr<DocumentSource> DocumentSourceRedact::createFromBson( diff --git a/src/mongo/db/pipeline/document_source_skip.cpp b/src/mongo/db/pipeline/document_source_skip.cpp index 8758ff743db..3d85085f521 100644 --- a/src/mongo/db/pipeline/document_source_skip.cpp +++ b/src/mongo/db/pipeline/document_source_skip.cpp @@ -65,9 +65,8 @@ namespace mongo { return pSource->getNext(); } - void DocumentSourceSkip::sourceToBson( - BSONObjBuilder *pBuilder, bool explain) const { - pBuilder->append("$skip", _skip); + Value DocumentSourceSkip::serialize(bool explain) const { + return Value(DOC(getSourceName() << _skip)); } intrusive_ptr<DocumentSourceSkip> DocumentSourceSkip::create( diff --git a/src/mongo/db/pipeline/document_source_sort.cpp b/src/mongo/db/pipeline/document_source_sort.cpp index cc01796b600..fd6ddd81d73 100644 --- a/src/mongo/db/pipeline/document_source_sort.cpp +++ b/src/mongo/db/pipeline/document_source_sort.cpp @@ -43,31 +43,17 @@ namespace mongo { return _output->next().second; } - void DocumentSourceSort::addToBsonArray(BSONArrayBuilder *pBuilder, bool explain) const { - if (explain) { // always one obj for combined $sort + $limit - BSONObjBuilder sortObj (pBuilder->subobjStart()); - BSONObjBuilder insides (sortObj.subobjStart(sortName)); - BSONObjBuilder sortKey (insides.subobjStart("sortKey")); - sortKeyToBson(&sortKey, false); - sortKey.doneFast(); - - if (explain && limitSrc) { - insides.appendNumber("limit", limitSrc->getLimit()); - } - insides.doneFast(); - sortObj.doneFast(); + void DocumentSourceSort::serializeToArray(vector<Value>& array, bool explain) const { + if (explain) { // always one Value for combined $sort + $limit + array.push_back(Value( + DOC(getSourceName() << DOC("sortKey" << serializeSortKey() + << "limit" << (limitSrc ? 
Value(limitSrc->getLimit()) + : Value()))))); } - else { // one obj for $sort + maybe one obj for $limit - { - BSONObjBuilder sortObj (pBuilder->subobjStart()); - BSONObjBuilder insides (sortObj.subobjStart(sortName)); - sortKeyToBson(&insides, false); - insides.doneFast(); - sortObj.doneFast(); - } - + else { // one Value for $sort and maybe a Value for $limit + array.push_back(Value(DOC(getSourceName() << serializeSortKey()))); if (limitSrc) { - limitSrc->addToBsonArray(pBuilder, explain); + limitSrc->serializeToArray(array); } } } @@ -101,8 +87,9 @@ namespace mongo { vAscending.push_back(ascending); } - void DocumentSourceSort::sortKeyToBson(BSONObjBuilder* pBuilder, bool usePrefix) const { - /* add the key fields */ + Document DocumentSourceSort::serializeSortKey() const { + MutableDocument keyObj; + // add the key fields const size_t n = vSortKey.size(); for(size_t i = 0; i < n; ++i) { // get the field name out of each ExpressionFieldPath @@ -111,10 +98,12 @@ namespace mongo { verify(withVariable.getFieldName(0) == "ROOT"); const string fieldPath = withVariable.tail().getPath(false); - /* append a named integer based on the sort order */ - pBuilder->append(fieldPath, (vAscending[i] ? 1 : -1)); + // append a named integer based on the sort order + keyObj.setField(fieldPath, Value(vAscending[i] ? 1 : -1)); } + return keyObj.freeze(); } + DocumentSource::GetDepsReturn DocumentSourceSort::getDependencies(set<string>& deps) const { for(size_t i = 0; i < vSortKey.size(); ++i) { vSortKey[i]->addDependencies(deps); diff --git a/src/mongo/db/pipeline/document_source_unwind.cpp b/src/mongo/db/pipeline/document_source_unwind.cpp index eeb046aa0a4..a3ff87591d7 100644 --- a/src/mongo/db/pipeline/document_source_unwind.cpp +++ b/src/mongo/db/pipeline/document_source_unwind.cpp @@ -126,10 +126,9 @@ namespace mongo { return out; } - void DocumentSourceUnwind::sourceToBson( - BSONObjBuilder *pBuilder, bool explain) const { + Value DocumentSourceUnwind::serialize(bool explain) const { verify(_unwindPath); - pBuilder->append(unwindName, _unwindPath->getPath(true)); + return Value(DOC(getSourceName() << _unwindPath->getPath(true))); } DocumentSource::GetDepsReturn DocumentSourceUnwind::getDependencies(set<string>& deps) const { diff --git a/src/mongo/db/pipeline/pipeline.cpp b/src/mongo/db/pipeline/pipeline.cpp index 9d3dd259e13..d22f2be3633 100644 --- a/src/mongo/db/pipeline/pipeline.cpp +++ b/src/mongo/db/pipeline/pipeline.cpp @@ -401,33 +401,36 @@ namespace mongo { return true; } - void Pipeline::toBson(BSONObjBuilder *pBuilder) const { - /* create an array out of the pipeline operations */ - BSONArrayBuilder arrayBuilder; + Document Pipeline::serialize() const { + MutableDocument serialized; + // create an array out of the pipeline operations + vector<Value> array; for(SourceContainer::const_iterator iter(sources.begin()), listEnd(sources.end()); iter != listEnd; ++iter) { intrusive_ptr<DocumentSource> pSource(*iter); - pSource->addToBsonArray(&arrayBuilder); + pSource->serializeToArray(array); } - /* add the top-level items to the command */ - pBuilder->append(commandName, getCollectionName()); - pBuilder->append(pipelineName, arrayBuilder.arr()); + // add the top-level items to the command + serialized.setField(commandName, Value(getCollectionName())); + serialized.setField(pipelineName, Value(array)); if (explain) { - pBuilder->append(explainName, explain); + serialized.setField(explainName, Value(explain)); } if (pCtx->extSortAllowed) { - pBuilder->append("allowDiskUsage", true); + 
serialized.setField("allowDiskUsage", Value(true)); } bool btemp; if ((btemp = getSplitMongodPipeline())) { - pBuilder->append(splitMongodPipelineName, btemp); + serialized.setField(splitMongodPipelineName, Value(btemp)); } + + return serialized.freeze(); } void Pipeline::stitch() { @@ -482,7 +485,8 @@ namespace mongo { } } - void Pipeline::writeExplainOps(BSONArrayBuilder *pArrayBuilder) const { + vector<Value> Pipeline::writeExplainOps() const { + vector<Value> array; for(SourceContainer::const_iterator iter(sources.begin()), listEnd(sources.end()); iter != listEnd; @@ -493,17 +497,13 @@ namespace mongo { if (dynamic_cast<DocumentSourceBsonArray*>(pSource.get())) continue; - pSource->addToBsonArray(pArrayBuilder, true); + pSource->serializeToArray(array, true); } + return array; } void Pipeline::writeExplainShard(BSONObjBuilder &result) const { - BSONArrayBuilder opArray; // where we'll put the pipeline ops - - // next, add the pipeline operators - writeExplainOps(&opArray); - - result.appendArray(serverPipelineName, opArray.arr()); + result << serverPipelineName << Value(writeExplainOps()); } void Pipeline::writeExplainMongos(BSONObjBuilder &result) const { @@ -525,12 +525,9 @@ namespace mongo { opBuilder.doneFast(); } - BSONArrayBuilder mongosOpArray; // where we'll put the pipeline ops - writeExplainOps(&mongosOpArray); - // now we combine the shard pipelines with the one here result.append(serverPipelineName, shardOpArray.arr()); - result.append(mongosPipelineName, mongosOpArray.arr()); + result << mongosPipelineName << Value(writeExplainOps()); } void Pipeline::addInitialSource(intrusive_ptr<DocumentSource> source) { diff --git a/src/mongo/db/pipeline/pipeline.h b/src/mongo/db/pipeline/pipeline.h index b7333e59686..cb8bc6ae5f2 100644 --- a/src/mongo/db/pipeline/pipeline.h +++ b/src/mongo/db/pipeline/pipeline.h @@ -20,6 +20,7 @@ #include "mongo/pch.h" +#include "mongo/db/pipeline/value.h" #include "mongo/util/intrusive_counter.h" #include "mongo/util/timer.h" @@ -98,7 +99,7 @@ namespace mongo { @param the builder to write the command to */ - void toBson(BSONObjBuilder *pBuilder) const; + Document serialize() const; /** Stitch together the source pointers (by calling setSource) for each source in sources. * Must be called after optimize and addInitialSource but before trying to get results. @@ -167,13 +168,11 @@ namespace mongo { Pipeline(const intrusive_ptr<ExpressionContext> &pCtx); - /* - Write the pipeline's operators to the given array, with the - explain flag true (for DocumentSource::addToBsonArray()). - - @param pArrayBuilder where to write the ops to + /** + * Write the pipeline's operators to a vector<Value>, with the + * explain flag true (for DocumentSource::serializeToArray()). */ - void writeExplainOps(BSONArrayBuilder *pArrayBuilder) const; + vector<Value> writeExplainOps() const; /* Write the pipeline's operators to the given result document, diff --git a/src/mongo/db/pipeline/pipeline_d.cpp b/src/mongo/db/pipeline/pipeline_d.cpp index 8a8c2f88ba9..e6119fb01ec 100644 --- a/src/mongo/db/pipeline/pipeline_d.cpp +++ b/src/mongo/db/pipeline/pipeline_d.cpp @@ -119,24 +119,21 @@ namespace { index scan. 
*/ intrusive_ptr<DocumentSourceSort> pSort; - BSONObjBuilder sortBuilder; + BSONObj sortObj; if (!sources.empty()) { const intrusive_ptr<DocumentSource> &pSC = sources.front(); pSort = dynamic_cast<DocumentSourceSort *>(pSC.get()); if (pSort) { - /* build the sort key */ - pSort->sortKeyToBson(&sortBuilder, false); + // build the sort key + sortObj = pSort->serializeSortKey().toBson(); } } - /* Create the sort object; see comments on the query object above */ - BSONObj sortObj = sortBuilder.obj(); - - /* get the full "namespace" name */ + // get the full "namespace" name string fullName(dbName + "." + pPipeline->getCollectionName()); - /* for debugging purposes, show what the query and sort are */ + // for debugging purposes, show what the query and sort are DEV { (log() << "\n---- query BSON\n" << queryObj.jsonString(Strict, 1) << "\n----\n"); diff --git a/src/mongo/dbtests/documentsourcetests.cpp b/src/mongo/dbtests/documentsourcetests.cpp index 5f31e859427..fb556068deb 100644 --- a/src/mongo/dbtests/documentsourcetests.cpp +++ b/src/mongo/dbtests/documentsourcetests.cpp @@ -31,9 +31,10 @@ namespace DocumentSourceTests { static DBDirectClient client; BSONObj toBson( const intrusive_ptr<DocumentSource>& source ) { - BSONArrayBuilder bab; - source->addToBsonArray( &bab ); - return bab.arr()[ 0 ].Obj().getOwned(); + vector<Value> arr; + source->serializeToArray(arr); + ASSERT_EQUALS(arr.size(), 1UL); + return arr[0].getDocument().toBson(); } class CollectionBase { @@ -428,7 +429,7 @@ namespace DocumentSourceTests { void assertRoundTrips( const intrusive_ptr<DocumentSource>& group ) { // We don't check against the spec that generated 'group' originally, because // $const operators may be introduced in the first serialization. - BSONObj spec = toBson( group ); + BSONObj spec = toBson(group); BSONElement specElement = spec.firstElement(); intrusive_ptr<DocumentSource> generated = DocumentSourceGroup::createFromBson( &specElement, ctx() ); @@ -905,9 +906,9 @@ namespace DocumentSourceTests { * created with. */ void checkBsonRepresentation( const BSONObj& spec ) { - BSONArrayBuilder bab; - _project->addToBsonArray( &bab, false ); - BSONObj generatedSpec = bab.arr()[ 0 ].Obj().getOwned(); + vector<Value> arr; + _project->serializeToArray(arr); + BSONObj generatedSpec = arr[0].getDocument().toBson(); ASSERT_EQUALS( spec, generatedSpec ); } private: @@ -1052,9 +1053,9 @@ namespace DocumentSourceTests { * created with. 
*/ void checkBsonRepresentation( const BSONObj& spec ) { - BSONArrayBuilder bab; - _sort->addToBsonArray( &bab, false ); - BSONObj generatedSpec = bab.arr()[ 0 ].Obj().getOwned(); + vector<Value> arr; + _sort->serializeToArray(arr); + BSONObj generatedSpec = arr[0].getDocument().toBson(); ASSERT_EQUALS( spec, generatedSpec ); } intrusive_ptr<DocumentSource> _sort; @@ -1067,9 +1068,9 @@ namespace DocumentSourceTests { ASSERT_EQUALS(sort()->getLimit(), -1); { // pre-limit checks - BSONArrayBuilder arr; - sort()->addToBsonArray(&arr, false); - ASSERT_EQUALS(arr.arr(), BSON_ARRAY(BSON("$sort" << BSON("a" << 1)))); + vector<Value> arr; + sort()->serializeToArray(arr); + ASSERT_EQUALS(arr[0].getDocument().toBson(), BSON("$sort" << BSON("a" << 1))); ASSERT(sort()->getShardSource() == NULL); ASSERT(sort()->getRouterSource() != NULL); @@ -1082,10 +1083,10 @@ namespace DocumentSourceTests { ASSERT_TRUE(sort()->coalesce(mkLimit(5))); ASSERT_EQUALS(sort()->getLimit(), 5); // reduced - BSONArrayBuilder arr; - sort()->addToBsonArray(&arr, false); - ASSERT_EQUALS(arr.arr(), BSON_ARRAY(BSON("$sort" << BSON("a" << 1)) - << BSON("$limit" << sort()->getLimit()))); + vector<Value> arr; + sort()->serializeToArray(arr); + ASSERT_EQUALS(Value(arr), DOC_ARRAY(DOC("$sort" << DOC("a" << 1)) + << DOC("$limit" << sort()->getLimit()))); ASSERT(sort()->getShardSource() != NULL); ASSERT(sort()->getRouterSource() != NULL); @@ -1384,9 +1385,9 @@ namespace DocumentSourceTests { * created with. */ void checkBsonRepresentation( const BSONObj& spec ) { - BSONArrayBuilder bab; - _unwind->addToBsonArray( &bab, false ); - BSONObj generatedSpec = bab.arr()[ 0 ].Obj().getOwned(); + vector<Value> arr; + _unwind->serializeToArray(arr); + BSONObj generatedSpec = Value(arr[0]).getDocument().toBson(); ASSERT_EQUALS( spec, generatedSpec ); } intrusive_ptr<DocumentSource> _unwind; diff --git a/src/mongo/s/commands_public.cpp b/src/mongo/s/commands_public.cpp index dabe0df0d12..d0e908a5ab5 100644 --- a/src/mongo/s/commands_public.cpp +++ b/src/mongo/s/commands_public.cpp @@ -1860,21 +1860,19 @@ namespace mongo { return aggPassthrough(conf, cmdObj, result); /* split the pipeline into pieces for mongods and this mongos */ - intrusive_ptr<Pipeline> pShardPipeline( - pPipeline->splitForSharded()); + intrusive_ptr<Pipeline> pShardPipeline(pPipeline->splitForSharded()); - /* create the command for the shards */ - BSONObjBuilder commandBuilder; - pShardPipeline->toBson(&commandBuilder); - commandBuilder.append("fromRouter", true); // this means produce output to be merged + // create the command for the shards + MutableDocument commandBuilder(pShardPipeline->serialize()); + commandBuilder.setField("fromRouter", Value(true)); // this means produce output to be merged if (cmdObj.hasField("$queryOptions")) { - commandBuilder.append(cmdObj["$queryOptions"]); + commandBuilder.setField("$queryOptions", Value(cmdObj["$queryOptions"])); } - commandBuilder.append("cursor", BSON("batchSize" << 0)); - BSONObj shardedCommand(commandBuilder.done()); + commandBuilder.setField("cursor", Value(DOC("batchSize" << 0))); + BSONObj shardedCommand = commandBuilder.freeze().toBson(); BSONObjBuilder shardQueryBuilder; pShardPipeline->getInitialQuery(&shardQueryBuilder); BSONObj shardQuery(shardQueryBuilder.done()); @@ -1894,13 +1892,12 @@ namespace mongo { DocumentSourceMergeCursors::CursorIds cursorIds = parseCursors(shardResults, fullns); pPipeline->addInitialSource(DocumentSourceMergeCursors::create(cursorIds, pExpCtx)); - BSONObjBuilder mergeCmd; - 
pPipeline->toBson(&mergeCmd); + MutableDocument mergeCmd(pPipeline->serialize()); if (cmdObj.hasField("cursor")) - mergeCmd.append(cmdObj["cursor"]); + mergeCmd["cursor"] = Value(cmdObj["cursor"]); if (cmdObj.hasField("$queryOptions")) - mergeCmd.append(cmdObj["$queryOptions"]); + mergeCmd["$queryOptions"] = Value(cmdObj["$queryOptions"]); string outputNsOrEmpty; if (DocumentSourceOut* out = dynamic_cast<DocumentSourceOut*>(pPipeline->output())) { @@ -1911,7 +1908,7 @@ namespace mongo { // that the merging mongod is sent the config servers on connection init. const string mergeServer = conf->getPrimary().getConnString(); ShardConnection conn(mergeServer, outputNsOrEmpty); - BSONObj mergedResults = aggRunCommand(conn.get(), dbName, mergeCmd.obj()); + BSONObj mergedResults = aggRunCommand(conn.get(), dbName, mergeCmd.freeze().toBson()); bool ok = mergedResults["ok"].trueValue(); conn.done(); @@ -1950,14 +1947,13 @@ namespace mongo { BSONObjBuilder& result) { uassertCanMergeInMongos(mergePipeline, cmdObj); - BSONObjBuilder commandBuilder; - shardPipeline->toBson(&commandBuilder); - commandBuilder.append("fromRouter", true); + MutableDocument commandBuilder(shardPipeline->serialize()); + commandBuilder["fromRouter"] = Value(true); if (cmdObj.hasField("$queryOptions")) { - commandBuilder.append(cmdObj["$queryOptions"]); + commandBuilder["$queryOptions"] = Value(cmdObj["$queryOptions"]); } - BSONObj shardedCommand(commandBuilder.done()); + BSONObj shardedCommand = commandBuilder.freeze().toBson(); BSONObjBuilder shardQueryBuilder; shardPipeline->getInitialQuery(&shardQueryBuilder); |
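DocumentSourceSort is the one stage that keeps overriding serializeToArray() itself: under explain it emits a single combined entry carrying both the sort key and any coalesced limit, while for plain serialization it emits $sort and a separate, re-parseable $limit. A rough sketch of that branching, again with hypothetical toy types rather than the real Value/Document machinery:

#include <iostream>
#include <string>
#include <vector>

// Toy model of DocumentSourceSort::serializeToArray()'s explain/command split.
struct ToySort {
    std::string sortKey = "{ a: 1 }";
    long long limit = 0;  // 0 means no coalesced $limit

    void serializeToArray(std::vector<std::string>& array, bool explain) const {
        if (explain) {
            // always one entry for the combined $sort + $limit
            std::string entry = "{ $sort: { sortKey: " + sortKey;
            if (limit)
                entry += ", limit: " + std::to_string(limit);
            array.push_back(entry + " } }");
        } else {
            // one entry for $sort, and maybe a separate one for $limit
            array.push_back("{ $sort: " + sortKey + " }");
            if (limit)
                array.push_back("{ $limit: " + std::to_string(limit) + " }");
        }
    }
};

int main() {
    ToySort sort;
    sort.limit = 5;  // as if a downstream $limit had been coalesced into the sort
    for (bool explain : {true, false}) {
        std::vector<std::string> array;
        sort.serializeToArray(array, explain);
        std::cout << (explain ? "explain form:" : "command form:") << '\n';
        for (const std::string& entry : array)
            std::cout << "  " << entry << '\n';
    }
}

The non-explain form matters because the shard command produced by Pipeline::serialize() is fed back through Pipeline::parseCommand() (as pipeline_command.cpp and commands_public.cpp above show), which only understands real stage names.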