author     Svilen Mihaylov <svilen.mihaylov@mongodb.com>     2020-02-18 17:27:16 -0500
committer  Evergreen Agent <no-reply@evergreen.mongodb.com>  2020-02-20 00:27:50 +0000
commit     03792669b22c6d0e2785a823e5081a6125c2d37c (patch)
tree       ebe9584edf68e4d9d4d40b9cb8caca19efff04f1 /src
parent     beaac1def11ef1a70ed940567aa8f444a1b95d3e (diff)
download   mongo-03792669b22c6d0e2785a823e5081a6125c2d37c.tar.gz
SERVER-45216 Rename Document::size() to Document::computeSize
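
The rename does not change what is computed; it makes the linear cost of counting a document's fields visible at every call site, so callers tend to hoist the call instead of re-evaluating it. Below is a minimal caller-side sketch of that convention using toy standard-library types; the computeSize here is a hypothetical stand-in, not MongoDB's Document::computeSize().

#include <cstddef>
#include <forward_list>
#include <iostream>
#include <iterator>

// O(n): a forward_list keeps no cached element count, just as DocumentStorage
// keeps no count that excludes removed fields. The compute* name flags the walk.
std::size_t computeSize(const std::forward_list<int>& fields) {
    return static_cast<std::size_t>(std::distance(fields.begin(), fields.end()));
}

int main() {
    std::forward_list<int> fields{1, 2, 3};
    const std::size_t n = computeSize(fields);  // computed once, reused below
    std::cout << "fields: " << n << "\n";       // prints 3
    return 0;
}
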
Diffstat (limited to 'src')
-rw-r--r--  src/mongo/db/exec/document_value/document.cpp                       |  4
-rw-r--r--  src/mongo/db/exec/document_value/document.h                         |  6
-rw-r--r--  src/mongo/db/exec/document_value/document_internal.h                |  3
-rw-r--r--  src/mongo/db/exec/document_value/document_value_test.cpp            | 26
-rw-r--r--  src/mongo/db/exec/exclusion_projection_executor_test.cpp            |  6
-rw-r--r--  src/mongo/db/pipeline/accumulator_js_reduce.cpp                     |  2
-rw-r--r--  src/mongo/db/pipeline/document_source_bucket_auto_test.cpp          |  2
-rw-r--r--  src/mongo/db/pipeline/document_source_change_stream_transform.cpp   |  2
-rw-r--r--  src/mongo/db/pipeline/document_source_check_resume_token.cpp        |  3
-rw-r--r--  src/mongo/db/pipeline/document_source_facet_test.cpp                |  8
-rw-r--r--  src/mongo/db/pipeline/document_source_graph_lookup_test.cpp         |  6
-rw-r--r--  src/mongo/db/pipeline/document_source_lookup_test.cpp               |  2
-rw-r--r--  src/mongo/db/pipeline/expression.cpp                                |  4
13 files changed, 38 insertions(+), 36 deletions(-)
diff --git a/src/mongo/db/exec/document_value/document.cpp b/src/mongo/db/exec/document_value/document.cpp
index d3b170368bc..2e384442fcb 100644
--- a/src/mongo/db/exec/document_value/document.cpp
+++ b/src/mongo/db/exec/document_value/document.cpp
@@ -687,8 +687,8 @@ string Document::toString() const {
}
void Document::serializeForSorter(BufBuilder& buf) const {
- const int numElems = size();
- buf.appendNum(numElems);
+ const size_t numElems = computeSize();
+ buf.appendNum(static_cast<int>(numElems));
for (DocumentStorageIterator it = storage().iterator(); !it.atEnd(); it.advance()) {
buf.appendStr(it->nameSD(), /*NUL byte*/ true);
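
Since computeSize() returns size_t, the serializer narrows the count back to the int that the sorter format stores; the static_cast above would truncate silently if a count ever exceeded INT_MAX. A hedged sketch of a checked alternative follows (checkedNarrow is a hypothetical helper, not part of this change or the MongoDB tree).

#include <cassert>
#include <cstddef>
#include <iostream>
#include <limits>

// Hypothetical helper: narrow a size_t field count to the int the on-disk
// format stores, asserting rather than silently truncating.
inline int checkedNarrow(std::size_t n) {
    assert(n <= static_cast<std::size_t>(std::numeric_limits<int>::max()));
    return static_cast<int>(n);
}

int main() {
    std::cout << checkedNarrow(42) << "\n";  // prints 42
    return 0;
}
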
diff --git a/src/mongo/db/exec/document_value/document.h b/src/mongo/db/exec/document_value/document.h
index a4be77a09e8..cb3b7421f05 100644
--- a/src/mongo/db/exec/document_value/document.h
+++ b/src/mongo/db/exec/document_value/document.h
@@ -172,9 +172,9 @@ public:
const Value getNestedField(const FieldPath& path,
std::vector<Position>* positions = nullptr) const;
- /// Number of fields in this document. O(n)
- size_t size() const {
- return storage().size();
+ // Number of fields in this document. Exp. runtime O(n).
+ size_t computeSize() const {
+ return storage().computeSize();
}
/// True if this document has no fields.
diff --git a/src/mongo/db/exec/document_value/document_internal.h b/src/mongo/db/exec/document_value/document_internal.h
index fd1f7915caa..5cd5d4692c4 100644
--- a/src/mongo/db/exec/document_value/document_internal.h
+++ b/src/mongo/db/exec/document_value/document_internal.h
@@ -302,7 +302,8 @@ public:
return kEmptyDoc;
}
- size_t size() const {
+ // The function adds up all iterator counts. Exp. runtime is O(N).
+ size_t computeSize() const {
// can't use _numFields because it includes removed Fields
size_t count = 0;
for (DocumentStorageIterator it = iterator(); !it.atEnd(); it.advance())
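
The comment explains why a walk is required: the stored _numFields still counts fields that were removed (tombstoned), so it would over-count and the live fields must be tallied by iteration. A standalone sketch of that model, with a toy FieldMap in place of DocumentStorage and its iterator (not MongoDB code):

#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// Toy stand-in: removals only tombstone an entry, so counting live fields is O(n).
class FieldMap {
public:
    void add(std::string name) {
        _fields.push_back({std::move(name), /*removed=*/false});
    }
    void remove(std::size_t i) {
        _fields[i].removed = true;  // tombstone; the slot is not erased
    }
    // Like Document::computeSize(): walk all slots and skip tombstones.
    std::size_t computeSize() const {
        std::size_t count = 0;
        for (const auto& f : _fields)
            if (!f.removed)
                ++count;
        return count;
    }

private:
    struct Field {
        std::string name;
        bool removed;
    };
    std::vector<Field> _fields;
};

int main() {
    FieldMap doc;
    doc.add("a");
    doc.add("b");
    doc.remove(0);
    std::cout << "live fields: " << doc.computeSize() << "\n";  // prints 1
    return 0;
}
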
diff --git a/src/mongo/db/exec/document_value/document_value_test.cpp b/src/mongo/db/exec/document_value/document_value_test.cpp
index 2b7797affe6..d6039cba5a4 100644
--- a/src/mongo/db/exec/document_value/document_value_test.cpp
+++ b/src/mongo/db/exec/document_value/document_value_test.cpp
@@ -81,20 +81,20 @@ void assertRoundTrips(const Document& document1) {
TEST(DocumentConstruction, Default) {
Document document;
- ASSERT_EQUALS(0U, document.size());
+ ASSERT_EQUALS(0ULL, document.computeSize());
assertRoundTrips(document);
}
TEST(DocumentConstruction, FromEmptyBson) {
Document document = fromBson(BSONObj());
- ASSERT_EQUALS(0U, document.size());
+ ASSERT_EQUALS(0ULL, document.computeSize());
assertRoundTrips(document);
}
TEST(DocumentConstruction, FromNonEmptyBson) {
Document document = fromBson(BSON("a" << 1 << "b"
<< "q"));
- ASSERT_EQUALS(2U, document.size());
+ ASSERT_EQUALS(2ULL, document.computeSize());
ASSERT_EQUALS("a", getNthField(document, 0).first.toString());
ASSERT_EQUALS(1, getNthField(document, 0).second.getInt());
ASSERT_EQUALS("b", getNthField(document, 1).first.toString());
@@ -103,7 +103,7 @@ TEST(DocumentConstruction, FromNonEmptyBson) {
TEST(DocumentConstruction, FromInitializerList) {
auto document = Document{{"a", 1}, {"b", "q"_sd}};
- ASSERT_EQUALS(2U, document.size());
+ ASSERT_EQUALS(2ULL, document.computeSize());
ASSERT_EQUALS("a", getNthField(document, 0).first.toString());
ASSERT_EQUALS(1, getNthField(document, 0).second.getInt());
ASSERT_EQUALS("b", getNthField(document, 1).first.toString());
@@ -112,7 +112,7 @@ TEST(DocumentConstruction, FromInitializerList) {
TEST(DocumentConstruction, FromEmptyDocumentClone) {
Document document;
- ASSERT_EQUALS(0U, document.size());
+ ASSERT_EQUALS(0ULL, document.computeSize());
// Prior to SERVER-26462, cloning an empty document would cause a segmentation fault.
Document documentClone = document.clone();
ASSERT_DOCUMENT_EQ(document, documentClone);
@@ -182,16 +182,16 @@ public:
void run() {
MutableDocument md;
md.addField("foo", Value(1));
- ASSERT_EQUALS(1U, md.peek().size());
+ ASSERT_EQUALS(1ULL, md.peek().computeSize());
ASSERT_EQUALS(1, md.peek()["foo"].getInt());
md.addField("bar", Value(99));
- ASSERT_EQUALS(2U, md.peek().size());
+ ASSERT_EQUALS(2ULL, md.peek().computeSize());
ASSERT_EQUALS(99, md.peek()["bar"].getInt());
// No assertion is triggered by a duplicate field name.
md.addField("a", Value(5));
Document final = md.freeze();
- ASSERT_EQUALS(3U, final.size());
+ ASSERT_EQUALS(3ULL, final.computeSize());
assertRoundTrips(final);
}
};
@@ -227,13 +227,13 @@ public:
// Set the first field.
md.setField("a", Value("foo"_sd));
- ASSERT_EQUALS(3U, md.peek().size());
+ ASSERT_EQUALS(3ULL, md.peek().computeSize());
ASSERT_EQUALS("foo", md.peek()["a"].getString());
ASSERT_EQUALS("foo", getNthField(md.peek(), 0).second.getString());
assertRoundTrips(md.peek());
// Set the second field.
md["b"] = Value("bar"_sd);
- ASSERT_EQUALS(3U, md.peek().size());
+ ASSERT_EQUALS(3ULL, md.peek().computeSize());
ASSERT_EQUALS("bar", md.peek()["b"].getString());
ASSERT_EQUALS("bar", getNthField(md.peek(), 1).second.getString());
assertRoundTrips(md.peek());
@@ -241,7 +241,7 @@ public:
// Remove the second field.
md.setField("b", Value());
LOGV2(20585, "{md_peek}", "md_peek"_attr = md.peek().toString());
- ASSERT_EQUALS(2U, md.peek().size());
+ ASSERT_EQUALS(2ULL, md.peek().computeSize());
ASSERT(md.peek()["b"].missing());
ASSERT_EQUALS("a", getNthField(md.peek(), 0).first.toString());
ASSERT_EQUALS("c", getNthField(md.peek(), 1).first.toString());
@@ -250,7 +250,7 @@ public:
// Remove the first field.
md["a"] = Value();
- ASSERT_EQUALS(1U, md.peek().size());
+ ASSERT_EQUALS(1ULL, md.peek().computeSize());
ASSERT(md.peek()["a"].missing());
ASSERT_EQUALS("c", getNthField(md.peek(), 0).first.toString());
ASSERT_EQUALS(99, md.peek()["c"].getInt());
@@ -259,7 +259,7 @@ public:
// Remove the final field. Verify document is empty.
md.remove("c");
ASSERT(md.peek().empty());
- ASSERT_EQUALS(0U, md.peek().size());
+ ASSERT_EQUALS(0ULL, md.peek().computeSize());
ASSERT_DOCUMENT_EQ(md.peek(), Document());
ASSERT(!FieldIterator(md.peek()).more());
ASSERT(md.peek()["c"].missing());
diff --git a/src/mongo/db/exec/exclusion_projection_executor_test.cpp b/src/mongo/db/exec/exclusion_projection_executor_test.cpp
index 881ec1e17eb..cdf7a305bf8 100644
--- a/src/mongo/db/exec/exclusion_projection_executor_test.cpp
+++ b/src/mongo/db/exec/exclusion_projection_executor_test.cpp
@@ -92,17 +92,17 @@ TEST(ExclusionProjectionExecutionTest, ShouldSerializeToEquivalentProjection) {
// Converts numbers to bools, converts dotted paths to nested documents. Note order of excluded
// fields is subject to change.
auto serialization = exclusion->serializeTransformation(boost::none);
- ASSERT_EQ(serialization.size(), 4UL);
+ ASSERT_EQ(serialization.computeSize(), 4ULL);
ASSERT_VALUE_EQ(serialization["a"], Value(false));
ASSERT_VALUE_EQ(serialization["_id"], Value(false));
ASSERT_EQ(serialization["b"].getType(), BSONType::Object);
- ASSERT_EQ(serialization["b"].getDocument().size(), 2UL);
+ ASSERT_EQ(serialization["b"].getDocument().computeSize(), 2ULL);
ASSERT_VALUE_EQ(serialization["b"].getDocument()["c"], Value(false));
ASSERT_VALUE_EQ(serialization["b"].getDocument()["d"], Value(false));
ASSERT_EQ(serialization["x"].getType(), BSONType::Object);
- ASSERT_EQ(serialization["x"].getDocument().size(), 1UL);
+ ASSERT_EQ(serialization["x"].getDocument().computeSize(), 1ULL);
ASSERT_VALUE_EQ(serialization["x"].getDocument()["y"], Value(false));
}
diff --git a/src/mongo/db/pipeline/accumulator_js_reduce.cpp b/src/mongo/db/pipeline/accumulator_js_reduce.cpp
index b3fb1df3bb0..877ac1ca41e 100644
--- a/src/mongo/db/pipeline/accumulator_js_reduce.cpp
+++ b/src/mongo/db/pipeline/accumulator_js_reduce.cpp
@@ -102,7 +102,7 @@ void AccumulatorInternalJsReduce::processInternal(const Value& input, bool mergi
str::stream() << kAccumulatorName
<< " requires the 'data' argument to have a 'k' and 'v' field. Instead found"
<< data.toString(),
- data.size() == 2ull && !data["k"].missing() && !data["v"].missing());
+ data.computeSize() == 2ull && !data["k"].missing() && !data["v"].missing());
_key = data["k"];
diff --git a/src/mongo/db/pipeline/document_source_bucket_auto_test.cpp b/src/mongo/db/pipeline/document_source_bucket_auto_test.cpp
index abe9633b5aa..b7b50b6527d 100644
--- a/src/mongo/db/pipeline/document_source_bucket_auto_test.cpp
+++ b/src/mongo/db/pipeline/document_source_bucket_auto_test.cpp
@@ -515,7 +515,7 @@ TEST_F(BucketAutoTests, ShouldBeAbleToReParseSerializedStage) {
ASSERT_EQUALS(serialization.size(), 1UL);
ASSERT_EQUALS(serialization[0].getType(), BSONType::Object);
- ASSERT_EQUALS(serialization[0].getDocument().size(), 1UL);
+ ASSERT_EQUALS(serialization[0].getDocument().computeSize(), 1ULL);
ASSERT_EQUALS(serialization[0].getDocument()["$bucketAuto"].getType(), BSONType::Object);
auto serializedBson = serialization[0].getDocument().toBson();
diff --git a/src/mongo/db/pipeline/document_source_change_stream_transform.cpp b/src/mongo/db/pipeline/document_source_change_stream_transform.cpp
index 5c875c06741..bb8447b1137 100644
--- a/src/mongo/db/pipeline/document_source_change_stream_transform.cpp
+++ b/src/mongo/db/pipeline/document_source_change_stream_transform.cpp
@@ -116,7 +116,7 @@ DocumentSourceChangeStreamTransform::DocumentSourceChangeStreamTransform(
// If the document key from the resume token has more than one field, that means it
// includes the shard key and thus should never change.
- auto isFinal = docKey.size() > 1;
+ const bool isFinal = docKeyFields.size() > 1;
_documentKeyCache[tokenData.uuid.get()] =
DocumentKeyCacheEntry({docKeyFields, isFinal});
diff --git a/src/mongo/db/pipeline/document_source_check_resume_token.cpp b/src/mongo/db/pipeline/document_source_check_resume_token.cpp
index 3305b2b108b..1aa71c0c454 100644
--- a/src/mongo/db/pipeline/document_source_check_resume_token.cpp
+++ b/src/mongo/db/pipeline/document_source_check_resume_token.cpp
@@ -154,7 +154,8 @@ ResumeStatus compareAgainstClientResumeToken(const intrusive_ptr<ExpressionConte
// In order for the relaxed comparison to be applicable, the client token must have a single _id
// field, and the resumed stream token must have additional fields beyond _id.
- if (!(documentKeyFromClient.size() == 1 && documentKeyFromResumedStream.size() > 1)) {
+ if (!(documentKeyFromClient.computeSize() == 1 &&
+ documentKeyFromResumedStream.computeSize() > 1)) {
return defaultResumeStatus;
}
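
For context, the guard above only lets the relaxed comparison apply when the client resumed with an _id-only document key while the resumed stream's key also carries shard-key fields. A standalone illustration with simplified stand-in types (std::map here, not MongoDB's Document):

#include <iostream>
#include <map>
#include <string>

using DocKey = std::map<std::string, int>;  // stand-in for a document key

bool relaxedComparisonApplies(const DocKey& fromClient, const DocKey& fromResumedStream) {
    // Mirrors the guard above: client key is _id-only, stream key has more fields.
    return fromClient.size() == 1 && fromResumedStream.size() > 1;
}

int main() {
    const DocKey client{{"_id", 1}};
    const DocKey resumed{{"_id", 1}, {"shard", 7}};
    std::cout << std::boolalpha
              << relaxedComparisonApplies(client, resumed) << "\n";  // true
    return 0;
}
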
diff --git a/src/mongo/db/pipeline/document_source_facet_test.cpp b/src/mongo/db/pipeline/document_source_facet_test.cpp
index da7718fa5c6..6cb58d29279 100644
--- a/src/mongo/db/pipeline/document_source_facet_test.cpp
+++ b/src/mongo/db/pipeline/document_source_facet_test.cpp
@@ -362,7 +362,7 @@ TEST_F(DocumentSourceFacetTest, MultipleFacetsShouldSeeTheSameDocuments) {
expectedOutputs.emplace_back(input.releaseDocument());
}
ASSERT(output.isAdvanced());
- ASSERT_EQ(output.getDocument().size(), 2UL);
+ ASSERT_EQ(output.getDocument().computeSize(), 2ULL);
ASSERT_VALUE_EQ(output.getDocument()["first"], Value(expectedOutputs));
ASSERT_VALUE_EQ(output.getDocument()["second"], Value(expectedOutputs));
@@ -401,7 +401,7 @@ TEST_F(DocumentSourceFacetTest,
// The output fields are in no guaranteed order.
ASSERT(output.isAdvanced());
- ASSERT_EQ(output.getDocument().size(), 2UL);
+ ASSERT_EQ(output.getDocument().computeSize(), 2ULL);
ASSERT_VALUE_EQ(output.getDocument()["all"], Value(expectedPassthroughOutput));
ASSERT_VALUE_EQ(output.getDocument()["first"],
Value(vector<Value>{Value(expectedPassthroughOutput.front())}));
@@ -504,12 +504,12 @@ TEST_F(DocumentSourceFacetTest, ShouldBeAbleToReParseSerializedStage) {
ASSERT_EQ(serialization[0].getType(), BSONType::Object);
// The fields are in no guaranteed order, so we can't make a simple Document comparison.
- ASSERT_EQ(serialization[0].getDocument().size(), 1UL);
+ ASSERT_EQ(serialization[0].getDocument().computeSize(), 1ULL);
ASSERT_EQ(serialization[0].getDocument()["$facet"].getType(), BSONType::Object);
// Should have two fields: "skippedOne" and "skippedTwo".
auto serializedStage = serialization[0].getDocument()["$facet"].getDocument();
- ASSERT_EQ(serializedStage.size(), 2UL);
+ ASSERT_EQ(serializedStage.computeSize(), 2ULL);
ASSERT_VALUE_EQ(serializedStage["skippedOne"],
Value(vector<Value>{Value(Document{{"$skip", 1}})}));
ASSERT_VALUE_EQ(serializedStage["skippedTwo"],
diff --git a/src/mongo/db/pipeline/document_source_graph_lookup_test.cpp b/src/mongo/db/pipeline/document_source_graph_lookup_test.cpp
index 084b71ac99b..b99283a4831 100644
--- a/src/mongo/db/pipeline/document_source_graph_lookup_test.cpp
+++ b/src/mongo/db/pipeline/document_source_graph_lookup_test.cpp
@@ -207,7 +207,7 @@ TEST_F(DocumentSourceGraphLookUpTest,
auto next = graphLookupStage->getNext();
ASSERT_TRUE(next.isAdvanced());
- ASSERT_EQ(2U, next.getDocument().size());
+ ASSERT_EQ(2ULL, next.getDocument().computeSize());
ASSERT_VALUE_EQ(Value(0), next.getDocument().getField("_id"));
auto resultsValue = next.getDocument().getField("results");
@@ -525,7 +525,7 @@ TEST_F(DocumentSourceGraphLookUpTest, ShouldExpandArraysAtEndOfConnectFromField)
auto next = graphLookupStage->getNext();
ASSERT_TRUE(next.isAdvanced());
- ASSERT_EQ(3U, next.getDocument().size());
+ ASSERT_EQ(3ULL, next.getDocument().computeSize());
ASSERT_VALUE_EQ(Value(0), next.getDocument().getField("_id"));
auto resultsValue = next.getDocument().getField("results");
@@ -598,7 +598,7 @@ TEST_F(DocumentSourceGraphLookUpTest, ShouldNotExpandArraysWithinArraysAtEndOfCo
auto next = graphLookupStage->getNext();
ASSERT_TRUE(next.isAdvanced());
- ASSERT_EQ(3U, next.getDocument().size());
+ ASSERT_EQ(3ULL, next.getDocument().computeSize());
ASSERT_VALUE_EQ(Value(0), next.getDocument().getField("_id"));
auto resultsValue = next.getDocument().getField("results");
diff --git a/src/mongo/db/pipeline/document_source_lookup_test.cpp b/src/mongo/db/pipeline/document_source_lookup_test.cpp
index e6c4e9e93f5..abe14a3fcb6 100644
--- a/src/mongo/db/pipeline/document_source_lookup_test.cpp
+++ b/src/mongo/db/pipeline/document_source_lookup_test.cpp
@@ -402,7 +402,7 @@ TEST_F(DocumentSourceLookUpTest, ShouldBeAbleToReParseSerializedStage) {
ASSERT_EQ(serializedDoc["$lookup"].getType(), BSONType::Object);
auto serializedStage = serializedDoc["$lookup"].getDocument();
- ASSERT_EQ(serializedStage.size(), 4UL);
+ ASSERT_EQ(serializedStage.computeSize(), 4ULL);
ASSERT_VALUE_EQ(serializedStage["from"], Value(std::string("coll")));
ASSERT_VALUE_EQ(serializedStage["as"], Value(std::string("as")));
diff --git a/src/mongo/db/pipeline/expression.cpp b/src/mongo/db/pipeline/expression.cpp
index c33900f7549..60ec5b0d711 100644
--- a/src/mongo/db/pipeline/expression.cpp
+++ b/src/mongo/db/pipeline/expression.cpp
@@ -686,8 +686,8 @@ Value ExpressionArrayToObject::evaluate(const Document& root, Variables* variabl
uassert(40392,
str::stream() << "$arrayToObject requires an object keys of 'k' and 'v'. "
"Found incorrect number of keys:"
- << elem.getDocument().size(),
- (elem.getDocument().size() == 2));
+ << elem.getDocument().computeSize(),
+ (elem.getDocument().computeSize() == 2));
Value key = elem.getDocument().getField("k");
Value value = elem.getDocument().getField("v");