path: root/src/mongo/db/query
author     clang-format-7.0.1 <adam.martin@10gen.com>        2019-07-26 18:20:35 -0400
committer  ADAM David Alan Martin <adam.martin@10gen.com>    2019-07-27 11:02:23 -0400
commit     134a4083953270e8a11430395357fb70a29047ad (patch)
tree       dd428e1230e31d92b20b393dfdc17ffe7fa79cb6 /src/mongo/db/query
parent     1e46b5049003f427047e723ea5fab15b5a9253ca (diff)
download   mongo-134a4083953270e8a11430395357fb70a29047ad.tar.gz
SERVER-41772 Apply clang-format 7.0.1 to the codebase
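The hunks below are formatting-only: comment blocks are re-aligned, stream-insertion chains are re-wrapped, and namespace-closing comments are added. As a rough, hypothetical sketch of how a tree-wide pass like this can be reproduced, the following driver assumes a clang-format 7.0.1 binary named "clang-format" on PATH and a .clang-format file somewhere above the target directory; it is not the tooling actually used for this commit.

#!/usr/bin/env python3
"""Hedged sketch: rerun a tree-wide clang-format pass over the query sources.
The binary name, -style=file, and the file-selection logic are illustrative
assumptions, not taken from this commit."""
import subprocess
from pathlib import Path

def format_tree(root: str = "src/mongo/db/query") -> None:
    # Collect the C++ sources and headers under the target directory.
    files = [str(p) for p in Path(root).rglob("*") if p.suffix in (".cpp", ".h")]
    if not files:
        return
    # -i rewrites the files in place; -style=file picks up the nearest .clang-format.
    subprocess.run(["clang-format", "-i", "-style=file", *files], check=True)

if __name__ == "__main__":
    format_tree()

With -style=file, clang-format searches upward from each input file for the nearest .clang-format, so a script like this can target a subdirectory such as src/mongo/db/query without further configuration.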
Diffstat (limited to 'src/mongo/db/query')
-rw-r--r--  src/mongo/db/query/canonical_query_encoder.cpp | 20
-rw-r--r--  src/mongo/db/query/canonical_query_encoder.h | 4
-rw-r--r--  src/mongo/db/query/collation/collation_index_key.cpp | 6
-rw-r--r--  src/mongo/db/query/collation/collation_index_key_test.cpp | 3
-rw-r--r--  src/mongo/db/query/collation/collation_spec_test.cpp | 144
-rw-r--r--  src/mongo/db/query/collation/collator_factory_icu.cpp | 155
-rw-r--r--  src/mongo/db/query/collation/collator_factory_icu_decoration.cpp | 4
-rw-r--r--  src/mongo/db/query/collation/collator_factory_icu_test.cpp | 176
-rw-r--r--  src/mongo/db/query/collation/collator_interface_mock_test.cpp | 10
-rw-r--r--  src/mongo/db/query/count_command_test.cpp | 79
-rw-r--r--  src/mongo/db/query/cursor_response.cpp | 18
-rw-r--r--  src/mongo/db/query/cursor_response_test.cpp | 150
-rw-r--r--  src/mongo/db/query/datetime/date_time_support.cpp | 10
-rw-r--r--  src/mongo/db/query/datetime/date_time_support.h | 3
-rw-r--r--  src/mongo/db/query/datetime/init_timezone_data.cpp | 3
-rw-r--r--  src/mongo/db/query/explain.h | 2
-rw-r--r--  src/mongo/db/query/explain_options.cpp | 11
-rw-r--r--  src/mongo/db/query/find.cpp | 3
-rw-r--r--  src/mongo/db/query/find_and_modify_request.cpp | 18
-rw-r--r--  src/mongo/db/query/find_and_modify_request.h | 14
-rw-r--r--  src/mongo/db/query/get_executor.cpp | 14
-rw-r--r--  src/mongo/db/query/get_executor_test.cpp | 15
-rw-r--r--  src/mongo/db/query/getmore_request.cpp | 11
-rw-r--r--  src/mongo/db/query/getmore_request_test.cpp | 45
-rw-r--r--  src/mongo/db/query/killcursors_request.cpp | 4
-rw-r--r--  src/mongo/db/query/killcursors_request_test.cpp | 21
-rw-r--r--  src/mongo/db/query/killcursors_response.cpp | 4
-rw-r--r--  src/mongo/db/query/killcursors_response_test.cpp | 42
-rw-r--r--  src/mongo/db/query/parsed_distinct.cpp | 18
-rw-r--r--  src/mongo/db/query/parsed_distinct_test.cpp | 50
-rw-r--r--  src/mongo/db/query/parsed_projection.cpp | 8
-rw-r--r--  src/mongo/db/query/parsed_projection_test.cpp | 5
-rw-r--r--  src/mongo/db/query/plan_cache_indexability.cpp | 2
-rw-r--r--  src/mongo/db/query/plan_cache_indexability_test.cpp | 4
-rw-r--r--  src/mongo/db/query/plan_cache_test.cpp | 3
-rw-r--r--  src/mongo/db/query/plan_enumerator.cpp | 14
-rw-r--r--  src/mongo/db/query/planner_analysis.cpp | 2
-rw-r--r--  src/mongo/db/query/planner_ixselect.cpp | 15
-rw-r--r--  src/mongo/db/query/planner_ixselect_test.cpp | 10
-rw-r--r--  src/mongo/db/query/query_planner.cpp | 15
-rw-r--r--  src/mongo/db/query/query_planner_geo_test.cpp | 80
-rw-r--r--  src/mongo/db/query/query_planner_test.cpp | 17
-rw-r--r--  src/mongo/db/query/query_planner_test_fixture.cpp | 4
-rw-r--r--  src/mongo/db/query/query_planner_text_test.cpp | 113
-rw-r--r--  src/mongo/db/query/query_planner_wildcard_index_test.cpp | 3
-rw-r--r--  src/mongo/db/query/query_request.cpp | 20
-rw-r--r--  src/mongo/db/query/query_request_test.cpp | 2
-rw-r--r--  src/mongo/db/query/query_settings_test.cpp | 4
-rw-r--r--  src/mongo/db/query/query_solution.cpp | 2
-rw-r--r--  src/mongo/db/query/query_solution_test.cpp | 3
-rw-r--r--  src/mongo/db/query/stage_builder.cpp | 7
51 files changed, 487 insertions, 903 deletions
diff --git a/src/mongo/db/query/canonical_query_encoder.cpp b/src/mongo/db/query/canonical_query_encoder.cpp
index 91982bff80a..6698e56766c 100644
--- a/src/mongo/db/query/canonical_query_encoder.cpp
+++ b/src/mongo/db/query/canonical_query_encoder.cpp
@@ -427,10 +427,10 @@ void encodeKeyForMatch(const MatchExpression* tree, StringBuilder* keyBuilder) {
}
/**
-* Encodes sort order into cache key.
-* Sort order is normalized because it provided by
-* QueryRequest.
-*/
+ * Encodes sort order into cache key.
+ * Sort order is normalized because it provided by
+ * QueryRequest.
+ */
void encodeKeyForSort(const BSONObj& sortObj, StringBuilder* keyBuilder) {
if (sortObj.isEmpty()) {
return;
@@ -463,12 +463,12 @@ void encodeKeyForSort(const BSONObj& sortObj, StringBuilder* keyBuilder) {
}
/**
-* Encodes parsed projection into cache key.
-* Does a simple toString() on each projected field
-* in the BSON object.
-* Orders the encoded elements in the projection by field name.
-* This handles all the special projection types ($meta, $elemMatch, etc.)
-*/
+ * Encodes parsed projection into cache key.
+ * Does a simple toString() on each projected field
+ * in the BSON object.
+ * Orders the encoded elements in the projection by field name.
+ * This handles all the special projection types ($meta, $elemMatch, etc.)
+ */
void encodeKeyForProj(const BSONObj& projObj, StringBuilder* keyBuilder) {
// Sorts the BSON elements by field name using a map.
std::map<StringData, BSONElement> elements;
diff --git a/src/mongo/db/query/canonical_query_encoder.h b/src/mongo/db/query/canonical_query_encoder.h
index d0019ba08c9..73c0eff5fa7 100644
--- a/src/mongo/db/query/canonical_query_encoder.h
+++ b/src/mongo/db/query/canonical_query_encoder.h
@@ -45,5 +45,5 @@ CanonicalQuery::QueryShapeString encode(const CanonicalQuery& cq);
* Returns a hash of the given key (produced from either a QueryShapeString or a PlanCacheKey).
*/
uint32_t computeHash(StringData key);
-}
-}
+} // namespace canonical_query_encoder
+} // namespace mongo
diff --git a/src/mongo/db/query/collation/collation_index_key.cpp b/src/mongo/db/query/collation/collation_index_key.cpp
index 48d971d3f7e..44b647044ca 100644
--- a/src/mongo/db/query/collation/collation_index_key.cpp
+++ b/src/mongo/db/query/collation/collation_index_key.cpp
@@ -114,9 +114,7 @@ void translateElement(StringData fieldName,
uasserted(ErrorCodes::CannotBuildIndexKeys,
str::stream()
<< "Cannot index type Symbol with a collation. Failed to index element: "
- << element
- << ". Index collation: "
- << collator->getSpec().toBSON());
+ << element << ". Index collation: " << collator->getSpec().toBSON());
}
default:
out->appendAs(element, fieldName);
@@ -144,7 +142,7 @@ void translate(BSONObj obj, const CollatorInterface* collator, BufBuilder* out)
element.fieldNameStringData(), element, collator, &ctx.getBuilder(), &ctxStack);
}
}
-}
+} // namespace
void CollationIndexKey::collationAwareIndexKeyAppend(BSONElement elt,
const CollatorInterface* collator,
diff --git a/src/mongo/db/query/collation/collation_index_key_test.cpp b/src/mongo/db/query/collation/collation_index_key_test.cpp
index 7696561060a..20a788d7df4 100644
--- a/src/mongo/db/query/collation/collation_index_key_test.cpp
+++ b/src/mongo/db/query/collation/collation_index_key_test.cpp
@@ -171,8 +171,7 @@ TEST(CollationIndexKeyTest, CollationAwareAppendThrowsIfSymbolInsideObject) {
CollatorInterfaceMock collator(CollatorInterfaceMock::MockType::kReverseString);
BSONObj dataObj = BSON("" << BSON("a"
<< "foo"
- << "b"
- << BSONSymbol("mySymbol")));
+ << "b" << BSONSymbol("mySymbol")));
BSONObjBuilder out;
ASSERT_THROWS_CODE(
CollationIndexKey::collationAwareIndexKeyAppend(dataObj.firstElement(), &collator, &out),
diff --git a/src/mongo/db/query/collation/collation_spec_test.cpp b/src/mongo/db/query/collation/collation_spec_test.cpp
index 8036e463a54..c255476292e 100644
--- a/src/mongo/db/query/collation/collation_spec_test.cpp
+++ b/src/mongo/db/query/collation/collation_spec_test.cpp
@@ -185,23 +185,13 @@ TEST(CollationSpecTest, ToBSONCorrectlySerializesDefaults) {
BSONObj expectedObj = BSON("locale"
<< "myLocale"
- << "caseLevel"
- << false
- << "caseFirst"
+ << "caseLevel" << false << "caseFirst"
<< "off"
- << "strength"
- << 3
- << "numericOrdering"
- << false
- << "alternate"
+ << "strength" << 3 << "numericOrdering" << false << "alternate"
<< "non-ignorable"
<< "maxVariable"
<< "punct"
- << "normalization"
- << false
- << "backwards"
- << false
- << "version"
+ << "normalization" << false << "backwards" << false << "version"
<< "myVersion");
ASSERT_BSONOBJ_EQ(expectedObj, collationSpec.toBSON());
@@ -215,23 +205,13 @@ TEST(CollationSpecTest, ToBSONCorrectlySerializesCaseFirstUpper) {
BSONObj expectedObj = BSON("locale"
<< "myLocale"
- << "caseLevel"
- << false
- << "caseFirst"
+ << "caseLevel" << false << "caseFirst"
<< "upper"
- << "strength"
- << 3
- << "numericOrdering"
- << false
- << "alternate"
+ << "strength" << 3 << "numericOrdering" << false << "alternate"
<< "non-ignorable"
<< "maxVariable"
<< "punct"
- << "normalization"
- << false
- << "backwards"
- << false
- << "version"
+ << "normalization" << false << "backwards" << false << "version"
<< "myVersion");
ASSERT_BSONOBJ_EQ(expectedObj, collationSpec.toBSON());
@@ -245,23 +225,13 @@ TEST(CollationSpecTest, ToBSONCorrectlySerializesCaseFirstLower) {
BSONObj expectedObj = BSON("locale"
<< "myLocale"
- << "caseLevel"
- << false
- << "caseFirst"
+ << "caseLevel" << false << "caseFirst"
<< "lower"
- << "strength"
- << 3
- << "numericOrdering"
- << false
- << "alternate"
+ << "strength" << 3 << "numericOrdering" << false << "alternate"
<< "non-ignorable"
<< "maxVariable"
<< "punct"
- << "normalization"
- << false
- << "backwards"
- << false
- << "version"
+ << "normalization" << false << "backwards" << false << "version"
<< "myVersion");
ASSERT_BSONOBJ_EQ(expectedObj, collationSpec.toBSON());
@@ -275,23 +245,13 @@ TEST(CollationSpecTest, ToBSONCorrectlySerializesPrimaryStrength) {
BSONObj expectedObj = BSON("locale"
<< "myLocale"
- << "caseLevel"
- << false
- << "caseFirst"
+ << "caseLevel" << false << "caseFirst"
<< "off"
- << "strength"
- << 1
- << "numericOrdering"
- << false
- << "alternate"
+ << "strength" << 1 << "numericOrdering" << false << "alternate"
<< "non-ignorable"
<< "maxVariable"
<< "punct"
- << "normalization"
- << false
- << "backwards"
- << false
- << "version"
+ << "normalization" << false << "backwards" << false << "version"
<< "myVersion");
ASSERT_BSONOBJ_EQ(expectedObj, collationSpec.toBSON());
@@ -305,23 +265,13 @@ TEST(CollationSpecTest, ToBSONCorrectlySerializesSecondaryStrength) {
BSONObj expectedObj = BSON("locale"
<< "myLocale"
- << "caseLevel"
- << false
- << "caseFirst"
+ << "caseLevel" << false << "caseFirst"
<< "off"
- << "strength"
- << 2
- << "numericOrdering"
- << false
- << "alternate"
+ << "strength" << 2 << "numericOrdering" << false << "alternate"
<< "non-ignorable"
<< "maxVariable"
<< "punct"
- << "normalization"
- << false
- << "backwards"
- << false
- << "version"
+ << "normalization" << false << "backwards" << false << "version"
<< "myVersion");
ASSERT_BSONOBJ_EQ(expectedObj, collationSpec.toBSON());
@@ -335,23 +285,13 @@ TEST(CollationSpecTest, ToBSONCorrectlySerializesQuaternaryStrength) {
BSONObj expectedObj = BSON("locale"
<< "myLocale"
- << "caseLevel"
- << false
- << "caseFirst"
+ << "caseLevel" << false << "caseFirst"
<< "off"
- << "strength"
- << 4
- << "numericOrdering"
- << false
- << "alternate"
+ << "strength" << 4 << "numericOrdering" << false << "alternate"
<< "non-ignorable"
<< "maxVariable"
<< "punct"
- << "normalization"
- << false
- << "backwards"
- << false
- << "version"
+ << "normalization" << false << "backwards" << false << "version"
<< "myVersion");
ASSERT_BSONOBJ_EQ(expectedObj, collationSpec.toBSON());
@@ -365,23 +305,13 @@ TEST(CollationSpecTest, ToBSONCorrectlySerializesIdenticalStrength) {
BSONObj expectedObj = BSON("locale"
<< "myLocale"
- << "caseLevel"
- << false
- << "caseFirst"
+ << "caseLevel" << false << "caseFirst"
<< "off"
- << "strength"
- << 5
- << "numericOrdering"
- << false
- << "alternate"
+ << "strength" << 5 << "numericOrdering" << false << "alternate"
<< "non-ignorable"
<< "maxVariable"
<< "punct"
- << "normalization"
- << false
- << "backwards"
- << false
- << "version"
+ << "normalization" << false << "backwards" << false << "version"
<< "myVersion");
ASSERT_BSONOBJ_EQ(expectedObj, collationSpec.toBSON());
@@ -395,23 +325,13 @@ TEST(CollationSpecTest, ToBSONCorrectlySerializesAlternateShifted) {
BSONObj expectedObj = BSON("locale"
<< "myLocale"
- << "caseLevel"
- << false
- << "caseFirst"
+ << "caseLevel" << false << "caseFirst"
<< "off"
- << "strength"
- << 3
- << "numericOrdering"
- << false
- << "alternate"
+ << "strength" << 3 << "numericOrdering" << false << "alternate"
<< "shifted"
<< "maxVariable"
<< "punct"
- << "normalization"
- << false
- << "backwards"
- << false
- << "version"
+ << "normalization" << false << "backwards" << false << "version"
<< "myVersion");
ASSERT_BSONOBJ_EQ(expectedObj, collationSpec.toBSON());
@@ -425,23 +345,13 @@ TEST(CollationSpecTest, ToBSONCorrectlySerializesMaxVariableSpace) {
BSONObj expectedObj = BSON("locale"
<< "myLocale"
- << "caseLevel"
- << false
- << "caseFirst"
+ << "caseLevel" << false << "caseFirst"
<< "off"
- << "strength"
- << 3
- << "numericOrdering"
- << false
- << "alternate"
+ << "strength" << 3 << "numericOrdering" << false << "alternate"
<< "non-ignorable"
<< "maxVariable"
<< "space"
- << "normalization"
- << false
- << "backwards"
- << false
- << "version"
+ << "normalization" << false << "backwards" << false << "version"
<< "myVersion");
ASSERT_BSONOBJ_EQ(expectedObj, collationSpec.toBSON());
diff --git a/src/mongo/db/query/collation/collator_factory_icu.cpp b/src/mongo/db/query/collation/collator_factory_icu.cpp
index f9662b0a22a..7f612265b71 100644
--- a/src/mongo/db/query/collation/collator_factory_icu.cpp
+++ b/src/mongo/db/query/collation/collator_factory_icu.cpp
@@ -186,13 +186,9 @@ StatusWith<CollationSpec::CaseFirstType> stringToCaseFirstType(const std::string
} else {
return {ErrorCodes::FailedToParse,
str::stream() << "Field '" << CollationSpec::kCaseFirstField << "' must be '"
- << CollationSpec::kCaseFirstUpper
- << "', '"
- << CollationSpec::kCaseFirstLower
- << "', or '"
- << CollationSpec::kCaseFirstOff
- << "'. Got: "
- << caseFirst};
+ << CollationSpec::kCaseFirstUpper << "', '"
+ << CollationSpec::kCaseFirstLower << "', or '"
+ << CollationSpec::kCaseFirstOff << "'. Got: " << caseFirst};
}
}
@@ -211,8 +207,7 @@ StatusWith<CollationSpec::StrengthType> integerToStrengthType(long long strength
}
return {ErrorCodes::FailedToParse,
str::stream() << "Field '" << CollationSpec::kStrengthField
- << "' must be an integer 1 through 5. Got: "
- << strength};
+ << "' must be an integer 1 through 5. Got: " << strength};
}
StatusWith<CollationSpec::AlternateType> stringToAlternateType(const std::string& alternate) {
@@ -223,11 +218,8 @@ StatusWith<CollationSpec::AlternateType> stringToAlternateType(const std::string
} else {
return {ErrorCodes::FailedToParse,
str::stream() << "Field '" << CollationSpec::kAlternateField << "' must be '"
- << CollationSpec::kAlternateNonIgnorable
- << "' or '"
- << CollationSpec::kAlternateShifted
- << "'. Got: "
- << alternate};
+ << CollationSpec::kAlternateNonIgnorable << "' or '"
+ << CollationSpec::kAlternateShifted << "'. Got: " << alternate};
}
}
@@ -239,11 +231,8 @@ StatusWith<CollationSpec::MaxVariableType> stringToMaxVariableType(const std::st
} else {
return {ErrorCodes::FailedToParse,
str::stream() << "Field '" << CollationSpec::kMaxVariableField << "' must be '"
- << CollationSpec::kMaxVariablePunct
- << "' or '"
- << CollationSpec::kMaxVariableSpace
- << "'. Got: "
- << maxVariable};
+ << CollationSpec::kMaxVariablePunct << "' or '"
+ << CollationSpec::kMaxVariableSpace << "'. Got: " << maxVariable};
}
}
@@ -273,10 +262,8 @@ StatusWith<CollationSpec> parseToCollationSpec(const BSONObj& spec,
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to get '" << CollationSpec::kCaseLevelField
- << "' attribute from icu::Collator: "
- << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << "' attribute from icu::Collator: " << icuError.errorName()
+ << ". Collation spec: " << spec};
}
parsedSpec.caseLevel = attributeToBool(caseLevelAttribute);
} else if (!parseStatus.isOK()) {
@@ -290,10 +277,8 @@ StatusWith<CollationSpec> parseToCollationSpec(const BSONObj& spec,
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to set '" << CollationSpec::kCaseLevelField
- << "' attribute: "
- << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << "' attribute: " << icuError.errorName()
+ << ". Collation spec: " << spec};
}
}
@@ -308,10 +293,8 @@ StatusWith<CollationSpec> parseToCollationSpec(const BSONObj& spec,
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to get '" << CollationSpec::kCaseFirstField
- << "' attribute from icu::Collator: "
- << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << "' attribute from icu::Collator: " << icuError.errorName()
+ << ". Collation spec: " << spec};
}
parsedSpec.caseFirst = getCaseFirstFromAttribute(caseFirstAttribute);
} else if (!parseStatus.isOK()) {
@@ -333,10 +316,8 @@ StatusWith<CollationSpec> parseToCollationSpec(const BSONObj& spec,
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to set '" << CollationSpec::kCaseFirstField
- << "' attribute: "
- << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << "' attribute: " << icuError.errorName()
+ << ". Collation spec: " << spec};
}
}
@@ -351,10 +332,8 @@ StatusWith<CollationSpec> parseToCollationSpec(const BSONObj& spec,
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to get '" << CollationSpec::kStrengthField
- << "' attribute from icu::Collator: "
- << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << "' attribute from icu::Collator: " << icuError.errorName()
+ << ". Collation spec: " << spec};
}
parsedSpec.strength = getStrengthFromAttribute(strengthAttribute);
} else if (!parseStatus.isOK()) {
@@ -375,10 +354,8 @@ StatusWith<CollationSpec> parseToCollationSpec(const BSONObj& spec,
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to set '" << CollationSpec::kStrengthField
- << "' attribute: "
- << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << "' attribute: " << icuError.errorName()
+ << ". Collation spec: " << spec};
}
}
@@ -394,10 +371,8 @@ StatusWith<CollationSpec> parseToCollationSpec(const BSONObj& spec,
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to get '" << CollationSpec::kNumericOrderingField
- << "' attribute from icu::Collator: "
- << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << "' attribute from icu::Collator: " << icuError.errorName()
+ << ". Collation spec: " << spec};
}
parsedSpec.numericOrdering = attributeToBool(numericOrderingAttribute);
} else if (!parseStatus.isOK()) {
@@ -412,10 +387,8 @@ StatusWith<CollationSpec> parseToCollationSpec(const BSONObj& spec,
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to set '" << CollationSpec::kNumericOrderingField
- << "' attribute: "
- << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << "' attribute: " << icuError.errorName()
+ << ". Collation spec: " << spec};
}
}
@@ -431,10 +404,8 @@ StatusWith<CollationSpec> parseToCollationSpec(const BSONObj& spec,
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to get '" << CollationSpec::kAlternateField
- << "' attribute from icu::Collator: "
- << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << "' attribute from icu::Collator: " << icuError.errorName()
+ << ". Collation spec: " << spec};
}
parsedSpec.alternate = getAlternateFromAttribute(alternateAttribute);
} else if (!parseStatus.isOK()) {
@@ -456,10 +427,8 @@ StatusWith<CollationSpec> parseToCollationSpec(const BSONObj& spec,
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to set '" << CollationSpec::kAlternateField
- << "' attribute: "
- << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << "' attribute: " << icuError.errorName()
+ << ". Collation spec: " << spec};
}
}
@@ -486,10 +455,8 @@ StatusWith<CollationSpec> parseToCollationSpec(const BSONObj& spec,
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to set '" << CollationSpec::kMaxVariableField
- << "' attribute: "
- << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << "' attribute: " << icuError.errorName()
+ << ". Collation spec: " << spec};
}
}
@@ -505,10 +472,8 @@ StatusWith<CollationSpec> parseToCollationSpec(const BSONObj& spec,
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to get '" << CollationSpec::kNormalizationField
- << "' attribute from icu::Collator: "
- << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << "' attribute from icu::Collator: " << icuError.errorName()
+ << ". Collation spec: " << spec};
}
parsedSpec.normalization = attributeToBool(normalizationAttribute);
} else if (!parseStatus.isOK()) {
@@ -523,10 +488,8 @@ StatusWith<CollationSpec> parseToCollationSpec(const BSONObj& spec,
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to set '" << CollationSpec::kNormalizationField
- << "' attribute: "
- << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << "' attribute: " << icuError.errorName()
+ << ". Collation spec: " << spec};
}
}
@@ -542,10 +505,8 @@ StatusWith<CollationSpec> parseToCollationSpec(const BSONObj& spec,
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to get '" << CollationSpec::kBackwardsField
- << "' attribute from icu::Collator: "
- << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << "' attribute from icu::Collator: " << icuError.errorName()
+ << ". Collation spec: " << spec};
}
parsedSpec.backwards = attributeToBool(backwardsAttribute);
} else if (!parseStatus.isOK()) {
@@ -560,10 +521,8 @@ StatusWith<CollationSpec> parseToCollationSpec(const BSONObj& spec,
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to set '" << CollationSpec::kBackwardsField
- << "' attribute: "
- << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << "' attribute: " << icuError.errorName()
+ << ". Collation spec: " << spec};
}
}
@@ -585,9 +544,7 @@ StatusWith<CollationSpec> parseToCollationSpec(const BSONObj& spec,
return {ErrorCodes::IncompatibleCollationVersion,
str::stream() << "Requested collation version " << specVersionStr
<< " but the only available collator version was "
- << parsedSpec.version
- << ". Requested collation spec: "
- << spec};
+ << parsedSpec.version << ". Requested collation spec: " << spec};
}
++parsedFields;
@@ -613,8 +570,7 @@ StatusWith<std::string> parseLocaleID(const BSONObj& spec) {
if (localeID.find('\0') != std::string::npos) {
return {ErrorCodes::BadValue,
str::stream() << "Field '" << CollationSpec::kLocaleField
- << "' cannot contain null byte. Collation spec: "
- << spec};
+ << "' cannot contain null byte. Collation spec: " << spec};
}
return localeID;
}
@@ -630,15 +586,13 @@ Status validateLocaleID(const BSONObj& spec,
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to get locale from icu::Collator: " << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << ". Collation spec: " << spec};
}
if (originalID.empty()) {
return {ErrorCodes::BadValue,
str::stream() << "Field '" << CollationSpec::kLocaleField
- << "' cannot be the empty string in: "
- << spec};
+ << "' cannot be the empty string in: " << spec};
}
// Check that each component of the locale ID is recognized by ICU. If ICU 1) cannot parse the
@@ -669,11 +623,9 @@ Status validateCollationSpec(const CollationSpec& spec) {
if (spec.backwards && spec.strength == CollationSpec::StrengthType::kPrimary) {
return {ErrorCodes::BadValue,
str::stream() << "'" << CollationSpec::kBackwardsField << "' is invalid with '"
- << CollationSpec::kStrengthField
- << "' of "
+ << CollationSpec::kStrengthField << "' of "
<< static_cast<int>(CollationSpec::StrengthType::kPrimary)
- << " in: "
- << spec.toBSON()};
+ << " in: " << spec.toBSON()};
}
// The caseFirst option only affects tertiary level or caseLevel comparisons. It will have no
@@ -683,13 +635,10 @@ Status validateCollationSpec(const CollationSpec& spec) {
spec.strength == CollationSpec::StrengthType::kSecondary)) {
return {ErrorCodes::BadValue,
str::stream() << "'" << CollationSpec::kCaseFirstField << "' is invalid unless '"
- << CollationSpec::kCaseLevelField
- << "' is on or '"
- << CollationSpec::kStrengthField
- << "' is greater than "
+ << CollationSpec::kCaseLevelField << "' is on or '"
+ << CollationSpec::kStrengthField << "' is greater than "
<< static_cast<int>(CollationSpec::StrengthType::kSecondary)
- << " in: "
- << spec.toBSON()};
+ << " in: " << spec.toBSON()};
}
return Status::OK();
@@ -712,8 +661,7 @@ StatusWith<std::unique_ptr<CollatorInterface>> CollatorFactoryICU::makeFromBSON(
return {ErrorCodes::FailedToParse,
str::stream() << "If " << CollationSpec::kLocaleField << "="
<< CollationSpec::kSimpleBinaryComparison
- << ", no other fields should be present in: "
- << spec};
+ << ", no other fields should be present in: " << spec};
}
return {nullptr};
}
@@ -722,8 +670,8 @@ StatusWith<std::unique_ptr<CollatorInterface>> CollatorFactoryICU::makeFromBSON(
auto userLocale = icu::Locale::createFromName(parsedLocaleID.getValue().c_str());
if (userLocale.isBogus()) {
return {ErrorCodes::BadValue,
- str::stream() << "Field '" << CollationSpec::kLocaleField << "' is not valid in: "
- << spec};
+ str::stream() << "Field '" << CollationSpec::kLocaleField
+ << "' is not valid in: " << spec};
}
// Construct an icu::Collator.
@@ -734,8 +682,7 @@ StatusWith<std::unique_ptr<CollatorInterface>> CollatorFactoryICU::makeFromBSON(
icuError.set(status);
return {ErrorCodes::OperationFailed,
str::stream() << "Failed to create collator: " << icuError.errorName()
- << ". Collation spec: "
- << spec};
+ << ". Collation spec: " << spec};
}
Status localeValidationStatus = validateLocaleID(spec, parsedLocaleID.getValue(), *icuCollator);
diff --git a/src/mongo/db/query/collation/collator_factory_icu_decoration.cpp b/src/mongo/db/query/collation/collator_factory_icu_decoration.cpp
index 654f4f4c7b3..6eacae4c5a1 100644
--- a/src/mongo/db/query/collation/collator_factory_icu_decoration.cpp
+++ b/src/mongo/db/query/collation/collator_factory_icu_decoration.cpp
@@ -40,9 +40,7 @@ namespace mongo {
namespace {
ServiceContext::ConstructorActionRegisterer registerIcuCollator{
- "CreateCollatorFactory",
- {"LoadICUData"},
- [](ServiceContext* service) {
+ "CreateCollatorFactory", {"LoadICUData"}, [](ServiceContext* service) {
CollatorFactoryInterface::set(service, std::make_unique<CollatorFactoryICU>());
}};
} // namespace
diff --git a/src/mongo/db/query/collation/collator_factory_icu_test.cpp b/src/mongo/db/query/collation/collator_factory_icu_test.cpp
index 7024303dffd..d7d0edce54b 100644
--- a/src/mongo/db/query/collation/collator_factory_icu_test.cpp
+++ b/src/mongo/db/query/collation/collator_factory_icu_test.cpp
@@ -61,8 +61,7 @@ TEST(CollatorFactoryICUTest, SimpleLocaleWithOtherFieldsFailsToParse) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "simple"
- << "caseLevel"
- << true));
+ << "caseLevel" << true));
ASSERT_NOT_OK(collator.getStatus());
ASSERT_EQ(collator.getStatus(), ErrorCodes::FailedToParse);
}
@@ -444,8 +443,7 @@ TEST(CollatorFactoryICUTest, CaseLevelFalseParsesSuccessfully) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "caseLevel"
- << false));
+ << "caseLevel" << false));
ASSERT_OK(collator.getStatus());
ASSERT_FALSE(collator.getValue()->getSpec().caseLevel);
}
@@ -454,8 +452,7 @@ TEST(CollatorFactoryICUTest, CaseLevelTrueParsesSuccessfully) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "caseLevel"
- << true));
+ << "caseLevel" << true));
ASSERT_OK(collator.getStatus());
ASSERT_TRUE(collator.getValue()->getSpec().caseLevel);
}
@@ -497,8 +494,7 @@ TEST(CollatorFactoryICUTest, PrimaryStrengthParsesSuccessfully) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 1));
+ << "strength" << 1));
ASSERT_OK(collator.getStatus());
ASSERT_EQ(static_cast<int>(CollationSpec::StrengthType::kPrimary),
static_cast<int>(collator.getValue()->getSpec().strength));
@@ -508,8 +504,7 @@ TEST(CollatorFactoryICUTest, SecondaryStrengthParsesSuccessfully) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 2));
+ << "strength" << 2));
ASSERT_OK(collator.getStatus());
ASSERT_EQ(static_cast<int>(CollationSpec::StrengthType::kSecondary),
static_cast<int>(collator.getValue()->getSpec().strength));
@@ -519,8 +514,7 @@ TEST(CollatorFactoryICUTest, TertiaryStrengthParsesSuccessfully) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 3));
+ << "strength" << 3));
ASSERT_OK(collator.getStatus());
ASSERT_EQ(static_cast<int>(CollationSpec::StrengthType::kTertiary),
static_cast<int>(collator.getValue()->getSpec().strength));
@@ -530,8 +524,7 @@ TEST(CollatorFactoryICUTest, QuaternaryStrengthParsesSuccessfully) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 4));
+ << "strength" << 4));
ASSERT_OK(collator.getStatus());
ASSERT_EQ(static_cast<int>(CollationSpec::StrengthType::kQuaternary),
static_cast<int>(collator.getValue()->getSpec().strength));
@@ -541,8 +534,7 @@ TEST(CollatorFactoryICUTest, IdenticalStrengthParsesSuccessfully) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 5));
+ << "strength" << 5));
ASSERT_OK(collator.getStatus());
ASSERT_EQ(static_cast<int>(CollationSpec::StrengthType::kIdentical),
static_cast<int>(collator.getValue()->getSpec().strength));
@@ -552,8 +544,7 @@ TEST(CollatorFactoryICUTest, NumericOrderingFalseParsesSuccessfully) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "numericOrdering"
- << false));
+ << "numericOrdering" << false));
ASSERT_OK(collator.getStatus());
ASSERT_FALSE(collator.getValue()->getSpec().numericOrdering);
}
@@ -562,8 +553,7 @@ TEST(CollatorFactoryICUTest, NumericOrderingTrueParsesSuccessfully) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "numericOrdering"
- << true));
+ << "numericOrdering" << true));
ASSERT_OK(collator.getStatus());
ASSERT_TRUE(collator.getValue()->getSpec().numericOrdering);
}
@@ -616,8 +606,7 @@ TEST(CollatorFactoryICUTest, NormalizationFalseParsesSuccessfully) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "normalization"
- << false));
+ << "normalization" << false));
ASSERT_OK(collator.getStatus());
ASSERT_FALSE(collator.getValue()->getSpec().normalization);
}
@@ -626,8 +615,7 @@ TEST(CollatorFactoryICUTest, NormalizationTrueParsesSuccessfully) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "normalization"
- << true));
+ << "normalization" << true));
ASSERT_OK(collator.getStatus());
ASSERT_TRUE(collator.getValue()->getSpec().normalization);
}
@@ -636,8 +624,7 @@ TEST(CollatorFactoryICUTest, BackwardsFalseParsesSuccessfully) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "backwards"
- << false));
+ << "backwards" << false));
ASSERT_OK(collator.getStatus());
ASSERT_FALSE(collator.getValue()->getSpec().backwards);
}
@@ -646,8 +633,7 @@ TEST(CollatorFactoryICUTest, BackwardsTrueParsesSuccessfully) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "backwards"
- << true));
+ << "backwards" << true));
ASSERT_OK(collator.getStatus());
ASSERT_TRUE(collator.getValue()->getSpec().backwards);
}
@@ -656,8 +642,7 @@ TEST(CollatorFactoryICUTest, LongStrengthFieldParsesSuccessfully) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 1LL));
+ << "strength" << 1LL));
ASSERT_OK(collator.getStatus());
ASSERT_EQ(static_cast<int>(CollationSpec::StrengthType::kPrimary),
static_cast<int>(collator.getValue()->getSpec().strength));
@@ -667,8 +652,7 @@ TEST(CollatorFactoryICUTest, DoubleStrengthFieldParsesSuccessfully) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 1.0));
+ << "strength" << 1.0));
ASSERT_OK(collator.getStatus());
ASSERT_EQ(static_cast<int>(CollationSpec::StrengthType::kPrimary),
static_cast<int>(collator.getValue()->getSpec().strength));
@@ -688,8 +672,7 @@ TEST(CollatorFactoryICUTest, NonStringCaseFirstFieldFailsToParse) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "caseFirst"
- << 1));
+ << "caseFirst" << 1));
ASSERT_NOT_OK(collator.getStatus());
ASSERT_EQ(collator.getStatus(), ErrorCodes::TypeMismatch);
}
@@ -718,8 +701,7 @@ TEST(CollatorFactoryICUTest, TooLargeStrengthFieldFailsToParse) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 2147483648LL));
+ << "strength" << 2147483648LL));
ASSERT_NOT_OK(collator.getStatus());
ASSERT_EQ(collator.getStatus(), ErrorCodes::FailedToParse);
}
@@ -728,8 +710,7 @@ TEST(CollatorFactoryICUTest, FractionalStrengthFieldFailsToParse) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 0.5));
+ << "strength" << 0.5));
ASSERT_NOT_OK(collator.getStatus());
ASSERT_EQ(collator.getStatus(), ErrorCodes::BadValue);
}
@@ -738,8 +719,7 @@ TEST(CollatorFactoryICUTest, NegativeStrengthFieldFailsToParse) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << -1));
+ << "strength" << -1));
ASSERT_NOT_OK(collator.getStatus());
ASSERT_EQ(collator.getStatus(), ErrorCodes::FailedToParse);
}
@@ -748,8 +728,7 @@ TEST(CollatorFactoryICUTest, InvalidIntegerStrengthFieldFailsToParse) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 6));
+ << "strength" << 6));
ASSERT_NOT_OK(collator.getStatus());
ASSERT_EQ(collator.getStatus(), ErrorCodes::FailedToParse);
}
@@ -768,8 +747,7 @@ TEST(CollatorFactoryICUTest, NonStringAlternateFieldFailsToParse) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "alternate"
- << 1));
+ << "alternate" << 1));
ASSERT_NOT_OK(collator.getStatus());
ASSERT_EQ(collator.getStatus(), ErrorCodes::TypeMismatch);
}
@@ -788,8 +766,7 @@ TEST(CollatorFactoryICUTest, NonStringMaxVariableFieldFailsToParse) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "maxVariable"
- << 1));
+ << "maxVariable" << 1));
ASSERT_NOT_OK(collator.getStatus());
ASSERT_EQ(collator.getStatus(), ErrorCodes::TypeMismatch);
}
@@ -846,8 +823,7 @@ TEST(CollatorFactoryICUTest, NonStringVersionFieldFailsToParse) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "version"
- << 3));
+ << "version" << 3));
ASSERT_NOT_OK(collator.getStatus());
ASSERT_EQ(collator.getStatus(), ErrorCodes::TypeMismatch);
}
@@ -879,8 +855,7 @@ TEST(CollatorFactoryICUTest, PrimaryStrengthCollatorIgnoresCaseAndAccents) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 1));
+ << "strength" << 1));
ASSERT_OK(collator.getStatus());
// u8"\u00E1" is latin small letter a with acute.
@@ -892,8 +867,7 @@ TEST(CollatorFactoryICUTest, SecondaryStrengthCollatorsIgnoresCaseButNotAccents)
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 2));
+ << "strength" << 2));
ASSERT_OK(collator.getStatus());
// u8"\u00E1" is latin small letter a with acute.
@@ -905,8 +879,7 @@ TEST(CollatorFactoryICUTest, TertiaryStrengthCollatorConsidersCaseAndAccents) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 3));
+ << "strength" << 3));
ASSERT_OK(collator.getStatus());
// u8"\u00E1" is latin small letter a with acute.
@@ -918,10 +891,7 @@ TEST(CollatorFactoryICUTest, PrimaryStrengthCaseLevelTrue) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 1
- << "caseLevel"
- << true));
+ << "strength" << 1 << "caseLevel" << true));
ASSERT_OK(collator.getStatus());
// u8"\u00E1" is latin small letter a with acute.
@@ -931,14 +901,11 @@ TEST(CollatorFactoryICUTest, PrimaryStrengthCaseLevelTrue) {
TEST(CollatorFactoryICUTest, PrimaryStrengthCaseLevelTrueCaseFirstUpper) {
CollatorFactoryICU factory;
- auto collator = factory.makeFromBSON(BSON("locale"
- << "en_US"
- << "strength"
- << 1
- << "caseLevel"
- << true
- << "caseFirst"
- << "upper"));
+ auto collator =
+ factory.makeFromBSON(BSON("locale"
+ << "en_US"
+ << "strength" << 1 << "caseLevel" << true << "caseFirst"
+ << "upper"));
ASSERT_OK(collator.getStatus());
// u8"\u00E1" is latin small letter a with acute.
@@ -948,14 +915,11 @@ TEST(CollatorFactoryICUTest, PrimaryStrengthCaseLevelTrueCaseFirstUpper) {
TEST(CollatorFactoryICUTest, TertiaryStrengthCaseLevelTrueCaseFirstUpper) {
CollatorFactoryICU factory;
- auto collator = factory.makeFromBSON(BSON("locale"
- << "en_US"
- << "strength"
- << 3
- << "caseLevel"
- << true
- << "caseFirst"
- << "upper"));
+ auto collator =
+ factory.makeFromBSON(BSON("locale"
+ << "en_US"
+ << "strength" << 3 << "caseLevel" << true << "caseFirst"
+ << "upper"));
ASSERT_OK(collator.getStatus());
ASSERT_LT(collator.getValue()->compare("A", "a"), 0);
}
@@ -972,8 +936,7 @@ TEST(CollatorFactoryICUTest, NumericOrderingTrue) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "numericOrdering"
- << true));
+ << "numericOrdering" << true));
ASSERT_OK(collator.getStatus());
ASSERT_LT(collator.getValue()->compare("2", "10"), 0);
}
@@ -982,9 +945,7 @@ TEST(CollatorFactoryICUTest, PrimaryStrengthAlternateShifted) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 1
- << "alternate"
+ << "strength" << 1 << "alternate"
<< "shifted"));
ASSERT_OK(collator.getStatus());
ASSERT_EQ(collator.getValue()->compare("a b", "ab"), 0);
@@ -995,9 +956,7 @@ TEST(CollatorFactoryICUTest, QuaternaryStrengthAlternateShifted) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 4
- << "alternate"
+ << "strength" << 4 << "alternate"
<< "shifted"));
ASSERT_OK(collator.getStatus());
ASSERT_LT(collator.getValue()->compare("a b", "ab"), 0);
@@ -1008,9 +967,7 @@ TEST(CollatorFactoryICUTest, PrimaryStrengthAlternateShiftedMaxVariableSpace) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 1
- << "alternate"
+ << "strength" << 1 << "alternate"
<< "shifted"
<< "maxVariable"
<< "space"));
@@ -1023,8 +980,7 @@ TEST(CollatorFactoryICUTest, SecondaryStrengthBackwardsFalse) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 2));
+ << "strength" << 2));
ASSERT_OK(collator.getStatus());
// u8"\u00E1" is latin small letter a with acute.
@@ -1035,10 +991,7 @@ TEST(CollatorFactoryICUTest, SecondaryStrengthBackwardsTrue) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "strength"
- << 2
- << "backwards"
- << true));
+ << "strength" << 2 << "backwards" << true));
ASSERT_OK(collator.getStatus());
// u8"\u00E1" is latin small letter a with acute.
@@ -1069,10 +1022,7 @@ TEST(CollatorFactoryICUTest, BackwardsTrueWithStrengthOneFails) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "backwards"
- << true
- << "strength"
- << 1));
+ << "backwards" << true << "strength" << 1));
ASSERT_NOT_OK(collator.getStatus());
}
@@ -1080,10 +1030,7 @@ TEST(CollatorFactoryICUTest, BackwardsTrueWithStrengthTwoSucceeds) {
CollatorFactoryICU factory;
auto collator = factory.makeFromBSON(BSON("locale"
<< "en_US"
- << "backwards"
- << true
- << "strength"
- << 2));
+ << "backwards" << true << "strength" << 2));
ASSERT_OK(collator.getStatus());
}
@@ -1093,8 +1040,7 @@ TEST(CollatorFactoryICUTest, CaseFirstLowerWithStrengthThreeSucceeds) {
<< "en_US"
<< "caseFirst"
<< "lower"
- << "strength"
- << 3));
+ << "strength" << 3));
ASSERT_OK(collator.getStatus());
}
@@ -1104,8 +1050,7 @@ TEST(CollatorFactoryICUTest, CaseFirstUpperWithStrengthThreeSucceeds) {
<< "en_US"
<< "caseFirst"
<< "upper"
- << "strength"
- << 3));
+ << "strength" << 3));
ASSERT_OK(collator.getStatus());
}
@@ -1115,10 +1060,7 @@ TEST(CollatorFactoryICUTest, CaseFirstLowerWithCaseLevelSucceeds) {
<< "en_US"
<< "caseFirst"
<< "lower"
- << "caseLevel"
- << true
- << "strength"
- << 1));
+ << "caseLevel" << true << "strength" << 1));
ASSERT_OK(collator.getStatus());
}
@@ -1128,10 +1070,7 @@ TEST(CollatorFactoryICUTest, CaseFirstUpperWithCaseLevelSucceeds) {
<< "en_US"
<< "caseFirst"
<< "upper"
- << "caseLevel"
- << true
- << "strength"
- << 1));
+ << "caseLevel" << true << "strength" << 1));
ASSERT_OK(collator.getStatus());
}
@@ -1141,8 +1080,7 @@ TEST(CollatorFactoryICUTest, CaseFirstOffWithStrengthOneSucceeds) {
<< "en_US"
<< "caseFirst"
<< "off"
- << "strength"
- << 1));
+ << "strength" << 1));
ASSERT_OK(collator.getStatus());
}
@@ -1152,8 +1090,7 @@ TEST(CollatorFactoryICUTest, CaseFirstLowerWithStrengthOneFails) {
<< "en_US"
<< "caseFirst"
<< "lower"
- << "strength"
- << 1));
+ << "strength" << 1));
ASSERT_NOT_OK(collator.getStatus());
}
@@ -1163,8 +1100,7 @@ TEST(CollatorFactoryICUTest, CaseFirstLowerWithStrengthTwoFails) {
<< "en_US"
<< "caseFirst"
<< "lower"
- << "strength"
- << 2));
+ << "strength" << 2));
ASSERT_NOT_OK(collator.getStatus());
}
@@ -1174,8 +1110,7 @@ TEST(CollatorFactoryICUTest, CaseFirstUpperWithStrengthOneFails) {
<< "en_US"
<< "caseFirst"
<< "upper"
- << "strength"
- << 1));
+ << "strength" << 1));
ASSERT_NOT_OK(collator.getStatus());
}
@@ -1185,8 +1120,7 @@ TEST(CollatorFactoryICUTest, CaseFirstUpperWithStrengthTwoFails) {
<< "en_US"
<< "caseFirst"
<< "upper"
- << "strength"
- << 2));
+ << "strength" << 2));
ASSERT_NOT_OK(collator.getStatus());
}
diff --git a/src/mongo/db/query/collation/collator_interface_mock_test.cpp b/src/mongo/db/query/collation/collator_interface_mock_test.cpp
index d792d95c2a1..340e9690ef6 100644
--- a/src/mongo/db/query/collation/collator_interface_mock_test.cpp
+++ b/src/mongo/db/query/collation/collator_interface_mock_test.cpp
@@ -242,10 +242,12 @@ TEST(CollatorInterfaceMockSelfTest, BSONObjsEqualUnderCollatorHashEquallyNested)
SimpleBSONObjComparator bsonCmpConsiderCase;
BSONObjComparator bsonCmpIgnoreCase(
BSONObj(), BSONObjComparator::FieldNamesMode::kConsider, &toLowerCollator);
- BSONObj obj1 = BSON("a" << 1 << "b" << BSON("c"
- << "foo"));
- BSONObj obj2 = BSON("a" << 1 << "b" << BSON("c"
- << "FOO"));
+ BSONObj obj1 = BSON("a" << 1 << "b"
+ << BSON("c"
+ << "foo"));
+ BSONObj obj2 = BSON("a" << 1 << "b"
+ << BSON("c"
+ << "FOO"));
ASSERT_NE(bsonCmpConsiderCase.hash(obj1), bsonCmpConsiderCase.hash(obj2));
ASSERT_EQ(bsonCmpIgnoreCase.hash(obj1), bsonCmpIgnoreCase.hash(obj2));
}
diff --git a/src/mongo/db/query/count_command_test.cpp b/src/mongo/db/query/count_command_test.cpp
index c660bc6adec..b7ea431f678 100644
--- a/src/mongo/db/query/count_command_test.cpp
+++ b/src/mongo/db/query/count_command_test.cpp
@@ -50,8 +50,7 @@ TEST(CountCommandTest, ParserDealsWithMissingFieldsCorrectly) {
<< "TestColl"
<< "$db"
<< "TestDB"
- << "query"
- << BSON("a" << BSON("$lte" << 10)));
+ << "query" << BSON("a" << BSON("$lte" << 10)));
auto countCmd = CountCommand::parse(ctxt, commandObj);
ASSERT_BSONOBJ_EQ(countCmd.getQuery(), fromjson("{ a : { '$lte' : 10 } }"));
@@ -70,15 +69,8 @@ TEST(CountCommandTest, ParserParsesCommandWithAllFieldsCorrectly) {
<< "TestColl"
<< "$db"
<< "TestDB"
- << "query"
- << BSON("a" << BSON("$gte" << 11))
- << "limit"
- << 100
- << "skip"
- << 1000
- << "hint"
- << BSON("b" << 5)
- << "collation"
+ << "query" << BSON("a" << BSON("$gte" << 11)) << "limit" << 100 << "skip"
+ << 1000 << "hint" << BSON("b" << 5) << "collation"
<< BSON("locale"
<< "en_US")
<< "readConcern"
@@ -89,8 +81,7 @@ TEST(CountCommandTest, ParserParsesCommandWithAllFieldsCorrectly) {
<< "secondary")
<< "comment"
<< "aComment"
- << "maxTimeMS"
- << 10000);
+ << "maxTimeMS" << 10000);
const auto countCmd = CountCommand::parse(ctxt, commandObj);
ASSERT_BSONOBJ_EQ(countCmd.getQuery(), fromjson("{ a : { '$gte' : 11 } }"));
@@ -110,8 +101,7 @@ TEST(CountCommandTest, ParsingNegativeLimitGivesPositiveLimit) {
<< "TestColl"
<< "$db"
<< "TestDB"
- << "limit"
- << -100);
+ << "limit" << -100);
const auto countCmd = CountCommand::parse(ctxt, commandObj);
ASSERT_EQ(countCmd.getLimit().get(), 100);
@@ -122,9 +112,7 @@ TEST(CountCommandTest, LimitCannotBeMinLong) {
<< "TestColl"
<< "$db"
<< "TestDB"
- << "query"
- << BSON("a" << BSON("$gte" << 11))
- << "limit"
+ << "query" << BSON("a" << BSON("$gte" << 11)) << "limit"
<< std::numeric_limits<long long>::min());
ASSERT_THROWS_CODE(
@@ -132,31 +120,28 @@ TEST(CountCommandTest, LimitCannotBeMinLong) {
}
TEST(CountCommandTest, FailParseBadSkipValue) {
- ASSERT_THROWS_CODE(CountCommand::parse(ctxt,
- BSON("count"
- << "TestColl"
- << "$db"
- << "TestDB"
- << "query"
- << BSON("a" << BSON("$gte" << 11))
- << "skip"
- << -1000)),
- AssertionException,
- ErrorCodes::FailedToParse);
+ ASSERT_THROWS_CODE(
+ CountCommand::parse(ctxt,
+ BSON("count"
+ << "TestColl"
+ << "$db"
+ << "TestDB"
+ << "query" << BSON("a" << BSON("$gte" << 11)) << "skip" << -1000)),
+ AssertionException,
+ ErrorCodes::FailedToParse);
}
TEST(CountCommandTest, FailParseBadCollationType) {
- ASSERT_THROWS_CODE(CountCommand::parse(ctxt,
- BSON("count"
- << "TestColl"
- << "$db"
- << "TestDB"
- << "query"
- << BSON("a" << BSON("$gte" << 11))
- << "collation"
- << "en_US")),
- AssertionException,
- ErrorCodes::TypeMismatch);
+ ASSERT_THROWS_CODE(
+ CountCommand::parse(ctxt,
+ BSON("count"
+ << "TestColl"
+ << "$db"
+ << "TestDB"
+ << "query" << BSON("a" << BSON("$gte" << 11)) << "collation"
+ << "en_US")),
+ AssertionException,
+ ErrorCodes::TypeMismatch);
}
TEST(CountCommandTest, FailParseUnknownField) {
@@ -176,8 +161,7 @@ TEST(CountCommandTest, ConvertToAggregationWithHint) {
<< "TestColl"
<< "$db"
<< "TestDB"
- << "hint"
- << BSON("x" << 1));
+ << "hint" << BSON("x" << 1));
auto countCmd = CountCommand::parse(ctxt, commandObj);
auto agg = uassertStatusOK(countCommandAsAggregationCommand(countCmd, testns));
@@ -198,12 +182,7 @@ TEST(CountCommandTest, ConvertToAggregationWithQueryAndFilterAndLimit) {
<< "TestColl"
<< "$db"
<< "TestDB"
- << "limit"
- << 200
- << "skip"
- << 300
- << "query"
- << BSON("x" << 7));
+ << "limit" << 200 << "skip" << 300 << "query" << BSON("x" << 7));
auto countCmd = CountCommand::parse(ctxt, commandObj);
auto agg = uassertStatusOK(countCommandAsAggregationCommand(countCmd, testns));
@@ -227,9 +206,7 @@ TEST(CountCommandTest, ConvertToAggregationWithMaxTimeMS) {
auto countCmd = CountCommand::parse(ctxt,
BSON("count"
<< "TestColl"
- << "maxTimeMS"
- << 100
- << "$db"
+ << "maxTimeMS" << 100 << "$db"
<< "TestDB"));
auto agg = uassertStatusOK(countCommandAsAggregationCommand(countCmd, testns));
diff --git a/src/mongo/db/query/cursor_response.cpp b/src/mongo/db/query/cursor_response.cpp
index 39234429572..8cb8a063e1f 100644
--- a/src/mongo/db/query/cursor_response.cpp
+++ b/src/mongo/db/query/cursor_response.cpp
@@ -169,24 +169,24 @@ StatusWith<CursorResponse> CursorResponse::parseFromBSON(const BSONObj& cmdRespo
BSONElement cursorElt = cmdResponse[kCursorField];
if (cursorElt.type() != BSONType::Object) {
return {ErrorCodes::TypeMismatch,
- str::stream() << "Field '" << kCursorField << "' must be a nested object in: "
- << cmdResponse};
+ str::stream() << "Field '" << kCursorField
+ << "' must be a nested object in: " << cmdResponse};
}
BSONObj cursorObj = cursorElt.Obj();
BSONElement idElt = cursorObj[kIdField];
if (idElt.type() != BSONType::NumberLong) {
- return {
- ErrorCodes::TypeMismatch,
- str::stream() << "Field '" << kIdField << "' must be of type long in: " << cmdResponse};
+ return {ErrorCodes::TypeMismatch,
+ str::stream() << "Field '" << kIdField
+ << "' must be of type long in: " << cmdResponse};
}
cursorId = idElt.Long();
BSONElement nsElt = cursorObj[kNsField];
if (nsElt.type() != BSONType::String) {
return {ErrorCodes::TypeMismatch,
- str::stream() << "Field '" << kNsField << "' must be of type string in: "
- << cmdResponse};
+ str::stream() << "Field '" << kNsField
+ << "' must be of type string in: " << cmdResponse};
}
fullns = nsElt.String();
@@ -198,9 +198,7 @@ StatusWith<CursorResponse> CursorResponse::parseFromBSON(const BSONObj& cmdRespo
if (batchElt.type() != BSONType::Array) {
return {ErrorCodes::TypeMismatch,
str::stream() << "Must have array field '" << kBatchFieldInitial << "' or '"
- << kBatchField
- << "' in: "
- << cmdResponse};
+ << kBatchField << "' in: " << cmdResponse};
}
batchObj = batchElt.Obj();
diff --git a/src/mongo/db/query/cursor_response_test.cpp b/src/mongo/db/query/cursor_response_test.cpp
index 3ddee43748b..ecb5d7570b6 100644
--- a/src/mongo/db/query/cursor_response_test.cpp
+++ b/src/mongo/db/query/cursor_response_test.cpp
@@ -41,13 +41,11 @@ namespace mongo {
namespace {
TEST(CursorResponseTest, parseFromBSONFirstBatch) {
- StatusWith<CursorResponse> result = CursorResponse::parseFromBSON(
- BSON("cursor" << BSON("id" << CursorId(123) << "ns"
- << "db.coll"
- << "firstBatch"
- << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
- << "ok"
- << 1));
+ StatusWith<CursorResponse> result = CursorResponse::parseFromBSON(BSON(
+ "cursor" << BSON("id" << CursorId(123) << "ns"
+ << "db.coll"
+ << "firstBatch" << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
+ << "ok" << 1));
ASSERT_OK(result.getStatus());
CursorResponse response = std::move(result.getValue());
@@ -59,13 +57,11 @@ TEST(CursorResponseTest, parseFromBSONFirstBatch) {
}
TEST(CursorResponseTest, parseFromBSONNextBatch) {
- StatusWith<CursorResponse> result = CursorResponse::parseFromBSON(
- BSON("cursor" << BSON("id" << CursorId(123) << "ns"
- << "db.coll"
- << "nextBatch"
- << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
- << "ok"
- << 1));
+ StatusWith<CursorResponse> result = CursorResponse::parseFromBSON(BSON(
+ "cursor" << BSON("id" << CursorId(123) << "ns"
+ << "db.coll"
+ << "nextBatch" << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
+ << "ok" << 1));
ASSERT_OK(result.getStatus());
CursorResponse response = std::move(result.getValue());
@@ -77,13 +73,11 @@ TEST(CursorResponseTest, parseFromBSONNextBatch) {
}
TEST(CursorResponseTest, parseFromBSONCursorIdZero) {
- StatusWith<CursorResponse> result = CursorResponse::parseFromBSON(
- BSON("cursor" << BSON("id" << CursorId(0) << "ns"
- << "db.coll"
- << "nextBatch"
- << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
- << "ok"
- << 1));
+ StatusWith<CursorResponse> result = CursorResponse::parseFromBSON(BSON(
+ "cursor" << BSON("id" << CursorId(0) << "ns"
+ << "db.coll"
+ << "nextBatch" << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
+ << "ok" << 1));
ASSERT_OK(result.getStatus());
CursorResponse response = std::move(result.getValue());
@@ -95,13 +89,11 @@ TEST(CursorResponseTest, parseFromBSONCursorIdZero) {
}
TEST(CursorResponseTest, parseFromBSONEmptyBatch) {
- StatusWith<CursorResponse> result =
- CursorResponse::parseFromBSON(BSON("cursor" << BSON("id" << CursorId(123) << "ns"
- << "db.coll"
- << "nextBatch"
- << BSONArrayBuilder().arr())
- << "ok"
- << 1));
+ StatusWith<CursorResponse> result = CursorResponse::parseFromBSON(
+ BSON("cursor" << BSON("id" << CursorId(123) << "ns"
+ << "db.coll"
+ << "nextBatch" << BSONArrayBuilder().arr())
+ << "ok" << 1));
ASSERT_OK(result.getStatus());
CursorResponse response = std::move(result.getValue());
@@ -125,8 +117,7 @@ TEST(CursorResponseTest, parseFromBSONNsFieldMissing) {
StatusWith<CursorResponse> result = CursorResponse::parseFromBSON(
BSON("cursor" << BSON("id" << CursorId(123) << "firstBatch"
<< BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
- << "ok"
- << 1));
+ << "ok" << 1));
ASSERT_NOT_OK(result.getStatus());
}
@@ -134,8 +125,7 @@ TEST(CursorResponseTest, parseFromBSONNsFieldWrongType) {
StatusWith<CursorResponse> result = CursorResponse::parseFromBSON(
BSON("cursor" << BSON("id" << CursorId(123) << "ns" << 456 << "firstBatch"
<< BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
- << "ok"
- << 1));
+ << "ok" << 1));
ASSERT_NOT_OK(result.getStatus());
}
@@ -143,10 +133,8 @@ TEST(CursorResponseTest, parseFromBSONIdFieldMissing) {
StatusWith<CursorResponse> result = CursorResponse::parseFromBSON(
BSON("cursor" << BSON("ns"
<< "db.coll"
- << "nextBatch"
- << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
- << "ok"
- << 1));
+ << "nextBatch" << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
+ << "ok" << 1));
ASSERT_NOT_OK(result.getStatus());
}
@@ -156,10 +144,8 @@ TEST(CursorResponseTest, parseFromBSONIdFieldWrongType) {
<< "123"
<< "ns"
<< "db.coll"
- << "nextBatch"
- << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
- << "ok"
- << 1));
+ << "nextBatch" << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
+ << "ok" << 1));
ASSERT_NOT_OK(result.getStatus());
}
@@ -167,19 +153,16 @@ TEST(CursorResponseTest, parseFromBSONBatchFieldMissing) {
StatusWith<CursorResponse> result =
CursorResponse::parseFromBSON(BSON("cursor" << BSON("id" << CursorId(123) << "ns"
<< "db.coll")
- << "ok"
- << 1));
+ << "ok" << 1));
ASSERT_NOT_OK(result.getStatus());
}
TEST(CursorResponseTest, parseFromBSONFirstBatchFieldWrongType) {
- StatusWith<CursorResponse> result =
- CursorResponse::parseFromBSON(BSON("cursor" << BSON("id" << CursorId(123) << "ns"
- << "db.coll"
- << "firstBatch"
- << BSON("_id" << 1))
- << "ok"
- << 1));
+ StatusWith<CursorResponse> result = CursorResponse::parseFromBSON(
+ BSON("cursor" << BSON("id" << CursorId(123) << "ns"
+ << "db.coll"
+ << "firstBatch" << BSON("_id" << 1))
+ << "ok" << 1));
ASSERT_NOT_OK(result.getStatus());
}
@@ -187,19 +170,16 @@ TEST(CursorResponseTest, parseFromBSONNextBatchFieldWrongType) {
StatusWith<CursorResponse> result =
CursorResponse::parseFromBSON(BSON("cursor" << BSON("id" << CursorId(123) << "ns"
<< "db.coll"
- << "nextBatch"
- << BSON("_id" << 1))
- << "ok"
- << 1));
+ << "nextBatch" << BSON("_id" << 1))
+ << "ok" << 1));
ASSERT_NOT_OK(result.getStatus());
}
TEST(CursorResponseTest, parseFromBSONOkFieldMissing) {
- StatusWith<CursorResponse> result = CursorResponse::parseFromBSON(
- BSON("cursor" << BSON("id" << CursorId(123) << "ns"
- << "db.coll"
- << "nextBatch"
- << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))));
+ StatusWith<CursorResponse> result = CursorResponse::parseFromBSON(BSON(
+ "cursor" << BSON("id" << CursorId(123) << "ns"
+ << "db.coll"
+ << "nextBatch" << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))));
ASSERT_NOT_OK(result.getStatus());
}
@@ -216,13 +196,11 @@ TEST(CursorResponseTest, toBSONInitialResponse) {
std::vector<BSONObj> batch = {BSON("_id" << 1), BSON("_id" << 2)};
CursorResponse response(NamespaceString("testdb.testcoll"), CursorId(123), batch);
BSONObj responseObj = response.toBSON(CursorResponse::ResponseType::InitialResponse);
- BSONObj expectedResponse =
- BSON("cursor" << BSON("id" << CursorId(123) << "ns"
- << "testdb.testcoll"
- << "firstBatch"
- << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
- << "ok"
- << 1.0);
+ BSONObj expectedResponse = BSON(
+ "cursor" << BSON("id" << CursorId(123) << "ns"
+ << "testdb.testcoll"
+ << "firstBatch" << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
+ << "ok" << 1.0);
ASSERT_BSONOBJ_EQ(responseObj, expectedResponse);
}
@@ -230,13 +208,11 @@ TEST(CursorResponseTest, toBSONSubsequentResponse) {
std::vector<BSONObj> batch = {BSON("_id" << 1), BSON("_id" << 2)};
CursorResponse response(NamespaceString("testdb.testcoll"), CursorId(123), batch);
BSONObj responseObj = response.toBSON(CursorResponse::ResponseType::SubsequentResponse);
- BSONObj expectedResponse =
- BSON("cursor" << BSON("id" << CursorId(123) << "ns"
- << "testdb.testcoll"
- << "nextBatch"
- << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
- << "ok"
- << 1.0);
+ BSONObj expectedResponse = BSON(
+ "cursor" << BSON("id" << CursorId(123) << "ns"
+ << "testdb.testcoll"
+ << "nextBatch" << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
+ << "ok" << 1.0);
ASSERT_BSONOBJ_EQ(responseObj, expectedResponse);
}
@@ -248,13 +224,11 @@ TEST(CursorResponseTest, addToBSONInitialResponse) {
response.addToBSON(CursorResponse::ResponseType::InitialResponse, &builder);
BSONObj responseObj = builder.obj();
- BSONObj expectedResponse =
- BSON("cursor" << BSON("id" << CursorId(123) << "ns"
- << "testdb.testcoll"
- << "firstBatch"
- << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
- << "ok"
- << 1.0);
+ BSONObj expectedResponse = BSON(
+ "cursor" << BSON("id" << CursorId(123) << "ns"
+ << "testdb.testcoll"
+ << "firstBatch" << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
+ << "ok" << 1.0);
ASSERT_BSONOBJ_EQ(responseObj, expectedResponse);
}
@@ -266,13 +240,11 @@ TEST(CursorResponseTest, addToBSONSubsequentResponse) {
response.addToBSON(CursorResponse::ResponseType::SubsequentResponse, &builder);
BSONObj responseObj = builder.obj();
- BSONObj expectedResponse =
- BSON("cursor" << BSON("id" << CursorId(123) << "ns"
- << "testdb.testcoll"
- << "nextBatch"
- << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
- << "ok"
- << 1.0);
+ BSONObj expectedResponse = BSON(
+ "cursor" << BSON("id" << CursorId(123) << "ns"
+ << "testdb.testcoll"
+ << "nextBatch" << BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2)))
+ << "ok" << 1.0);
ASSERT_BSONOBJ_EQ(responseObj, expectedResponse);
}
@@ -288,10 +260,8 @@ TEST(CursorResponseTest, serializePostBatchResumeToken) {
<< "db.coll"
<< "nextBatch"
<< BSON_ARRAY(BSON("_id" << 1) << BSON("_id" << 2))
- << "postBatchResumeToken"
- << postBatchResumeToken)
- << "ok"
- << 1));
+ << "postBatchResumeToken" << postBatchResumeToken)
+ << "ok" << 1));
auto reparsed = CursorResponse::parseFromBSON(serialized);
ASSERT_OK(reparsed.getStatus());
CursorResponse reparsedResponse = std::move(reparsed.getValue());
diff --git a/src/mongo/db/query/datetime/date_time_support.cpp b/src/mongo/db/query/datetime/date_time_support.cpp
index 6ab2a93c976..1705bb600b2 100644
--- a/src/mongo/db/query/datetime/date_time_support.cpp
+++ b/src/mongo/db/query/datetime/date_time_support.cpp
@@ -179,9 +179,7 @@ void TimeZoneDatabase::loadTimeZoneInfo(
40475,
{ErrorCodes::FailedToParse,
str::stream() << "failed to parse time zone file for time zone identifier \""
- << entry.id
- << "\": "
- << timelib_get_error_message(errorCode)});
+ << entry.id << "\": " << timelib_get_error_message(errorCode)});
}
invariant(errorCode == TIMELIB_ERROR_NO_ERROR);
@@ -275,8 +273,7 @@ Date_t TimeZoneDatabase::fromString(StringData dateString,
uasserted(ErrorCodes::ConversionFailure,
str::stream()
<< "an incomplete date/time string has been found, with elements missing: \""
- << dateString
- << "\"");
+ << dateString << "\"");
}
if (!tz.isUtcZone()) {
@@ -294,8 +291,7 @@ Date_t TimeZoneDatabase::fromString(StringData dateString,
ErrorCodes::ConversionFailure,
str::stream()
<< "you cannot pass in a date/time string with time zone information ('"
- << parsedTime.get()->tz_abbr
- << "') together with a timezone argument");
+ << parsedTime.get()->tz_abbr << "') together with a timezone argument");
break;
default: // should technically not be possible to reach
uasserted(ErrorCodes::ConversionFailure,
diff --git a/src/mongo/db/query/datetime/date_time_support.h b/src/mongo/db/query/datetime/date_time_support.h
index 94ac4c4d08e..f5efdcb8fc3 100644
--- a/src/mongo/db/query/datetime/date_time_support.h
+++ b/src/mongo/db/query/datetime/date_time_support.h
@@ -295,8 +295,7 @@ private:
uassert(18537,
str::stream() << "Could not convert date to string: date component was outside "
- << "the supported range of 0-9999: "
- << number,
+ << "the supported range of 0-9999: " << number,
(number >= 0) && (number <= 9999));
int digits = 1;
diff --git a/src/mongo/db/query/datetime/init_timezone_data.cpp b/src/mongo/db/query/datetime/init_timezone_data.cpp
index 970e6db7de4..a9f8fe97ec8 100644
--- a/src/mongo/db/query/datetime/init_timezone_data.cpp
+++ b/src/mongo/db/query/datetime/init_timezone_data.cpp
@@ -50,8 +50,7 @@ ServiceContext::ConstructorActionRegisterer loadTimeZoneDB{
if (!timeZoneDatabase) {
uasserted(ErrorCodes::FailedToParse,
str::stream() << "failed to load time zone database from path \""
- << serverGlobalParams.timeZoneInfoPath
- << "\"");
+ << serverGlobalParams.timeZoneInfoPath << "\"");
}
TimeZoneDatabase::set(service,
std::make_unique<TimeZoneDatabase>(std::move(timeZoneDatabase)));
diff --git a/src/mongo/db/query/explain.h b/src/mongo/db/query/explain.h
index 8317fc50cfc..e6ad7cc0c5c 100644
--- a/src/mongo/db/query/explain.h
+++ b/src/mongo/db/query/explain.h
@@ -246,4 +246,4 @@ private:
static void generateServerInfo(BSONObjBuilder* out);
};
-} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/query/explain_options.cpp b/src/mongo/db/query/explain_options.cpp
index b9c771de18e..581252ffdfc 100644
--- a/src/mongo/db/query/explain_options.cpp
+++ b/src/mongo/db/query/explain_options.cpp
@@ -72,13 +72,10 @@ StatusWith<ExplainOptions::Verbosity> ExplainOptions::parseCmdBSON(const BSONObj
verbosity = Verbosity::kExecStats;
} else if (verbStr != kAllPlansExecutionVerbosityStr) {
return Status(ErrorCodes::FailedToParse,
- str::stream() << "verbosity string must be one of {'"
- << kQueryPlannerVerbosityStr
- << "', '"
- << kExecStatsVerbosityStr
- << "', '"
- << kAllPlansExecutionVerbosityStr
- << "'}");
+ str::stream()
+ << "verbosity string must be one of {'" << kQueryPlannerVerbosityStr
+ << "', '" << kExecStatsVerbosityStr << "', '"
+ << kAllPlansExecutionVerbosityStr << "'}");
}
}
diff --git a/src/mongo/db/query/find.cpp b/src/mongo/db/query/find.cpp
index f9de0152b5c..97b0640289c 100644
--- a/src/mongo/db/query/find.cpp
+++ b/src/mongo/db/query/find.cpp
@@ -337,8 +337,7 @@ Message getMore(OperationContext* opCtx,
// cursor.
uassert(ErrorCodes::Unauthorized,
str::stream() << "Requested getMore on namespace " << ns << ", but cursor " << cursorid
- << " belongs to namespace "
- << cursorPin->nss().ns(),
+ << " belongs to namespace " << cursorPin->nss().ns(),
nss == cursorPin->nss());
// A user can only call getMore on their own cursor. If there were multiple users authenticated
diff --git a/src/mongo/db/query/find_and_modify_request.cpp b/src/mongo/db/query/find_and_modify_request.cpp
index 20f62d2a407..9bf40a1f456 100644
--- a/src/mongo/db/query/find_and_modify_request.cpp
+++ b/src/mongo/db/query/find_and_modify_request.cpp
@@ -171,18 +171,18 @@ StatusWith<FindAndModifyRequest> FindAndModifyRequest::parseFromBSON(NamespaceSt
auto queryElement = cmdObj[kQueryField];
if (queryElement.type() != Object) {
return {ErrorCodes::Error(31160),
- str::stream() << "'" << kQueryField
- << "' parameter must be an object, found "
- << queryElement.type()};
+ str::stream()
+ << "'" << kQueryField << "' parameter must be an object, found "
+ << queryElement.type()};
}
query = queryElement.embeddedObject();
} else if (field == kSortField) {
auto sortElement = cmdObj[kSortField];
if (sortElement.type() != Object) {
return {ErrorCodes::Error(31174),
- str::stream() << "'" << kSortField
- << "' parameter must be an object, found "
- << sortElement.type()};
+ str::stream()
+ << "'" << kSortField << "' parameter must be an object, found "
+ << sortElement.type()};
}
sort = sortElement.embeddedObject();
} else if (field == kRemoveField) {
@@ -195,9 +195,9 @@ StatusWith<FindAndModifyRequest> FindAndModifyRequest::parseFromBSON(NamespaceSt
auto projectionElement = cmdObj[kFieldProjectionField];
if (projectionElement.type() != Object) {
return {ErrorCodes::Error(31175),
- str::stream() << "'" << kFieldProjectionField
- << "' parameter must be an object, found "
- << projectionElement.type()};
+ str::stream()
+ << "'" << kFieldProjectionField
+ << "' parameter must be an object, found " << projectionElement.type()};
}
fields = projectionElement.embeddedObject();
} else if (field == kUpsertField) {
diff --git a/src/mongo/db/query/find_and_modify_request.h b/src/mongo/db/query/find_and_modify_request.h
index a8b350e691f..a5212570755 100644
--- a/src/mongo/db/query/find_and_modify_request.h
+++ b/src/mongo/db/query/find_and_modify_request.h
@@ -117,13 +117,13 @@ public:
//
/**
- * Sets the filter to find a document.
- */
+ * Sets the filter to find a document.
+ */
void setQuery(BSONObj query);
/**
- * Sets the update object that specifies how a document gets updated.
- */
+ * Sets the update object that specifies how a document gets updated.
+ */
void setUpdateObj(BSONObj updateObj);
/**
@@ -134,8 +134,8 @@ public:
void setShouldReturnNew(bool shouldReturnNew);
/**
- * Sets a flag whether the statement performs an upsert.
- */
+ * Sets a flag whether the statement performs an upsert.
+ */
void setUpsert(bool upsert);
//
@@ -210,4 +210,4 @@ private:
// Holds value when performing an update request and none when a remove request.
boost::optional<write_ops::UpdateModification> _update;
};
-}
+} // namespace mongo
diff --git a/src/mongo/db/query/get_executor.cpp b/src/mongo/db/query/get_executor.cpp
index e0f7041eb37..7b20d39a033 100644
--- a/src/mongo/db/query/get_executor.cpp
+++ b/src/mongo/db/query/get_executor.cpp
@@ -631,8 +631,9 @@ StatusWith<unique_ptr<PlanExecutor, PlanExecutor::Deleter>> getExecutorFind(
bool permitYield,
size_t plannerOptions) {
const auto& readConcernArgs = repl::ReadConcernArgs::get(opCtx);
- auto yieldPolicy = (permitYield && (readConcernArgs.getLevel() !=
- repl::ReadConcernLevel::kSnapshotReadConcern))
+ auto yieldPolicy =
+ (permitYield &&
+ (readConcernArgs.getLevel() != repl::ReadConcernLevel::kSnapshotReadConcern))
? PlanExecutor::YIELD_AUTO
: PlanExecutor::INTERRUPT_ONLY;
return _getExecutorFind(
@@ -1370,10 +1371,11 @@ QueryPlannerParams fillOutPlannerParamsForDistinct(OperationContext* opCtx,
const IndexCatalogEntry* ice = ii->next();
const IndexDescriptor* desc = ice->descriptor();
if (desc->keyPattern().hasField(parsedDistinct.getKey())) {
- if (!mayUnwindArrays && isAnyComponentOfPathMultikey(desc->keyPattern(),
- desc->isMultikey(opCtx),
- desc->getMultikeyPaths(opCtx),
- parsedDistinct.getKey())) {
+ if (!mayUnwindArrays &&
+ isAnyComponentOfPathMultikey(desc->keyPattern(),
+ desc->isMultikey(opCtx),
+ desc->getMultikeyPaths(opCtx),
+ parsedDistinct.getKey())) {
// If the caller requested "strict" distinct that does not "pre-unwind" arrays,
// then an index which is multikey on the distinct field may not be used. This is
// because when indexing an array each element gets inserted individually. Any plan
diff --git a/src/mongo/db/query/get_executor_test.cpp b/src/mongo/db/query/get_executor_test.cpp
index 6281cedc01a..d54080debef 100644
--- a/src/mongo/db/query/get_executor_test.cpp
+++ b/src/mongo/db/query/get_executor_test.cpp
@@ -189,14 +189,13 @@ TEST(GetExecutorTest, GetAllowedIndicesDescendingOrder) {
}
TEST(GetExecutorTest, GetAllowedIndicesMatchesByName) {
- testAllowedIndices(
- {buildSimpleIndexEntry(fromjson("{a: 1}"), "a_1"),
- buildSimpleIndexEntry(fromjson("{a: 1}"), "a_1:en")},
- // BSONObjSet default constructor is explicit, so we cannot copy-list-initialize until
- // C++14.
- SimpleBSONObjComparator::kInstance.makeBSONObjSet(),
- {"a_1"},
- {"a_1"});
+ testAllowedIndices({buildSimpleIndexEntry(fromjson("{a: 1}"), "a_1"),
+ buildSimpleIndexEntry(fromjson("{a: 1}"), "a_1:en")},
+ // BSONObjSet default constructor is explicit, so we cannot
+ // copy-list-initialize until C++14.
+ SimpleBSONObjComparator::kInstance.makeBSONObjSet(),
+ {"a_1"},
+ {"a_1"});
}
TEST(GetExecutorTest, GetAllowedIndicesMatchesMultipleIndexesByKey) {
diff --git a/src/mongo/db/query/getmore_request.cpp b/src/mongo/db/query/getmore_request.cpp
index e577671f2fd..e78f6e4e37c 100644
--- a/src/mongo/db/query/getmore_request.cpp
+++ b/src/mongo/db/query/getmore_request.cpp
@@ -84,8 +84,7 @@ Status GetMoreRequest::isValid() const {
if (batchSize && *batchSize <= 0) {
return Status(ErrorCodes::BadValue,
str::stream() << "Batch size for getMore must be positive, "
- << "but received: "
- << *batchSize);
+ << "but received: " << *batchSize);
}
return Status::OK();
@@ -116,8 +115,8 @@ StatusWith<GetMoreRequest> GetMoreRequest::parseFromBSON(const std::string& dbna
} else if (fieldName == kCollectionField) {
if (el.type() != BSONType::String) {
return {ErrorCodes::TypeMismatch,
- str::stream() << "Field 'collection' must be of type string in: "
- << cmdObj};
+ str::stream()
+ << "Field 'collection' must be of type string in: " << cmdObj};
}
BSONElement collElt = cmdObj["collection"];
@@ -155,9 +154,7 @@ StatusWith<GetMoreRequest> GetMoreRequest::parseFromBSON(const std::string& dbna
} else if (!isGenericArgument(fieldName)) {
return {ErrorCodes::FailedToParse,
str::stream() << "Failed to parse: " << cmdObj << ". "
- << "Unrecognized field '"
- << fieldName
- << "'."};
+ << "Unrecognized field '" << fieldName << "'."};
}
}
diff --git a/src/mongo/db/query/getmore_request_test.cpp b/src/mongo/db/query/getmore_request_test.cpp
index f9fe0627cbe..78b235153f8 100644
--- a/src/mongo/db/query/getmore_request_test.cpp
+++ b/src/mongo/db/query/getmore_request_test.cpp
@@ -61,8 +61,7 @@ TEST(GetMoreRequestTest, parseFromBSONCursorIdNotLongLong) {
StatusWith<GetMoreRequest> result = GetMoreRequest::parseFromBSON("db",
BSON("getMore"
<< "not a number"
- << "collection"
- << 123));
+ << "collection" << 123));
ASSERT_NOT_OK(result.getStatus());
ASSERT_EQUALS(ErrorCodes::TypeMismatch, result.getStatus().code());
}
@@ -117,8 +116,7 @@ TEST(GetMoreRequestTest, parseFromBSONUnrecognizedFieldName) {
GetMoreRequest::parseFromBSON("db",
BSON("getMore" << CursorId(123) << "collection"
<< "coll"
- << "unknown_field"
- << 1));
+ << "unknown_field" << 1));
ASSERT_NOT_OK(result.getStatus());
ASSERT_EQUALS(ErrorCodes::FailedToParse, result.getStatus().code());
}
@@ -128,8 +126,7 @@ TEST(GetMoreRequestTest, parseFromBSONInvalidBatchSize) {
GetMoreRequest::parseFromBSON("db",
BSON("getMore" << CursorId(123) << "collection"
<< "coll"
- << "batchSize"
- << -1));
+ << "batchSize" << -1));
ASSERT_NOT_OK(result.getStatus());
ASSERT_EQUALS(ErrorCodes::BadValue, result.getStatus().code());
}
@@ -139,8 +136,7 @@ TEST(GetMoreRequestTest, parseFromBSONInvalidBatchSizeOfZero) {
GetMoreRequest::parseFromBSON("db",
BSON("getMore" << CursorId(123) << "collection"
<< "coll"
- << "batchSize"
- << 0));
+ << "batchSize" << 0));
ASSERT_NOT_OK(result.getStatus());
ASSERT_EQUALS(ErrorCodes::BadValue, result.getStatus().code());
}
@@ -161,8 +157,7 @@ TEST(GetMoreRequestTest, parseFromBSONBatchSizeProvided) {
GetMoreRequest::parseFromBSON("db",
BSON("getMore" << CursorId(123) << "collection"
<< "coll"
- << "batchSize"
- << 200));
+ << "batchSize" << 200));
ASSERT_EQUALS("db.coll", result.getValue().nss.toString());
ASSERT_EQUALS(CursorId(123), result.getValue().cursorid);
ASSERT(result.getValue().batchSize);
@@ -186,8 +181,7 @@ TEST(GetMoreRequestTest, parseFromBSONHasMaxTimeMS) {
GetMoreRequest::parseFromBSON("db",
BSON("getMore" << CursorId(123) << "collection"
<< "coll"
- << "maxTimeMS"
- << 100));
+ << "maxTimeMS" << 100));
ASSERT_OK(result.getStatus());
ASSERT_EQUALS("db.coll", result.getValue().nss.toString());
ASSERT(result.getValue().awaitDataTimeout);
@@ -200,8 +194,7 @@ TEST(GetMoreRequestTest, parseFromBSONHasMaxTimeMSOfZero) {
GetMoreRequest::parseFromBSON("db",
BSON("getMore" << CursorId(123) << "collection"
<< "coll"
- << "maxTimeMS"
- << 0));
+ << "maxTimeMS" << 0));
ASSERT_OK(result.getStatus());
ASSERT_EQUALS("db.coll", result.getValue().nss.toString());
ASSERT_EQUALS(CursorId(123), result.getValue().cursorid);
@@ -216,8 +209,7 @@ TEST(GetMoreRequestTest, toBSONHasBatchSize) {
BSONObj requestObj = request.toBSON();
BSONObj expectedRequest = BSON("getMore" << CursorId(123) << "collection"
<< "testcoll"
- << "batchSize"
- << 99);
+ << "batchSize" << 99);
ASSERT_BSONOBJ_EQ(requestObj, expectedRequest);
}
@@ -240,10 +232,7 @@ TEST(GetMoreRequestTest, toBSONHasTerm) {
BSONObj requestObj = request.toBSON();
BSONObj expectedRequest = BSON("getMore" << CursorId(123) << "collection"
<< "testcoll"
- << "batchSize"
- << 99
- << "term"
- << 1);
+ << "batchSize" << 99 << "term" << 1);
ASSERT_BSONOBJ_EQ(requestObj, expectedRequest);
}
@@ -255,14 +244,11 @@ TEST(GetMoreRequestTest, toBSONHasCommitLevel) {
1,
repl::OpTime(Timestamp(0, 10), 2));
BSONObj requestObj = request.toBSON();
- BSONObj expectedRequest = BSON("getMore" << CursorId(123) << "collection"
- << "testcoll"
- << "batchSize"
- << 99
- << "term"
- << 1
- << "lastKnownCommittedOpTime"
- << BSON("ts" << Timestamp(0, 10) << "t" << 2LL));
+ BSONObj expectedRequest =
+ BSON("getMore" << CursorId(123) << "collection"
+ << "testcoll"
+ << "batchSize" << 99 << "term" << 1 << "lastKnownCommittedOpTime"
+ << BSON("ts" << Timestamp(0, 10) << "t" << 2LL));
ASSERT_BSONOBJ_EQ(requestObj, expectedRequest);
}
@@ -276,8 +262,7 @@ TEST(GetMoreRequestTest, toBSONHasMaxTimeMS) {
BSONObj requestObj = request.toBSON();
BSONObj expectedRequest = BSON("getMore" << CursorId(123) << "collection"
<< "testcoll"
- << "maxTimeMS"
- << 789);
+ << "maxTimeMS" << 789);
ASSERT_BSONOBJ_EQ(requestObj, expectedRequest);
}
diff --git a/src/mongo/db/query/killcursors_request.cpp b/src/mongo/db/query/killcursors_request.cpp
index df44d73043d..5f21b82d489 100644
--- a/src/mongo/db/query/killcursors_request.cpp
+++ b/src/mongo/db/query/killcursors_request.cpp
@@ -67,8 +67,8 @@ StatusWith<KillCursorsRequest> KillCursorsRequest::parseFromBSON(const std::stri
if (cmdObj[kCursorsField].type() != BSONType::Array) {
return {ErrorCodes::FailedToParse,
- str::stream() << "Field '" << kCursorsField << "' must be of type array in: "
- << cmdObj};
+ str::stream() << "Field '" << kCursorsField
+ << "' must be of type array in: " << cmdObj};
}
std::vector<CursorId> cursorIds;
diff --git a/src/mongo/db/query/killcursors_request_test.cpp b/src/mongo/db/query/killcursors_request_test.cpp
index fef544d0b42..d1cdb1f4650 100644
--- a/src/mongo/db/query/killcursors_request_test.cpp
+++ b/src/mongo/db/query/killcursors_request_test.cpp
@@ -95,8 +95,7 @@ TEST(KillCursorsRequestTest, parseFromBSONCursorFieldNotArray) {
KillCursorsRequest::parseFromBSON("db",
BSON("killCursors"
<< "coll"
- << "cursors"
- << CursorId(123)));
+ << "cursors" << CursorId(123)));
ASSERT_NOT_OK(result.getStatus());
ASSERT_EQ(result.getStatus().code(), ErrorCodes::FailedToParse);
}
@@ -106,21 +105,18 @@ TEST(KillCursorsRequestTest, parseFromBSONCursorFieldEmptyArray) {
KillCursorsRequest::parseFromBSON("db",
BSON("killCursors"
<< "coll"
- << "cursors"
- << BSONArrayBuilder().arr()));
+ << "cursors" << BSONArrayBuilder().arr()));
ASSERT_NOT_OK(result.getStatus());
ASSERT_EQ(result.getStatus().code(), ErrorCodes::BadValue);
}
TEST(KillCursorsRequestTest, parseFromBSONCursorFieldContainsEltOfWrongType) {
- StatusWith<KillCursorsRequest> result =
- KillCursorsRequest::parseFromBSON("db",
- BSON("killCursors"
- << "coll"
- << "cursors"
- << BSON_ARRAY(CursorId(123) << "foo"
- << CursorId(456))));
+ StatusWith<KillCursorsRequest> result = KillCursorsRequest::parseFromBSON(
+ "db",
+ BSON("killCursors"
+ << "coll"
+ << "cursors" << BSON_ARRAY(CursorId(123) << "foo" << CursorId(456))));
ASSERT_NOT_OK(result.getStatus());
ASSERT_EQ(result.getStatus().code(), ErrorCodes::FailedToParse);
}
@@ -132,8 +128,7 @@ TEST(KillCursorsRequestTest, toBSON) {
BSONObj requestObj = request.toBSON();
BSONObj expectedObj = BSON("killCursors"
<< "coll"
- << "cursors"
- << BSON_ARRAY(CursorId(123) << CursorId(456)));
+ << "cursors" << BSON_ARRAY(CursorId(123) << CursorId(456)));
ASSERT_BSONOBJ_EQ(requestObj, expectedObj);
}
diff --git a/src/mongo/db/query/killcursors_response.cpp b/src/mongo/db/query/killcursors_response.cpp
index 798b2bf8cb0..8b482772b59 100644
--- a/src/mongo/db/query/killcursors_response.cpp
+++ b/src/mongo/db/query/killcursors_response.cpp
@@ -51,8 +51,8 @@ Status fillOutCursorArray(const BSONObj& cmdResponse,
if (elt.type() != BSONType::Array) {
return {ErrorCodes::FailedToParse,
- str::stream() << "Field '" << fieldName << "' must be of type array in: "
- << cmdResponse};
+ str::stream() << "Field '" << fieldName
+ << "' must be of type array in: " << cmdResponse};
}
for (BSONElement cursorElt : elt.Obj()) {
diff --git a/src/mongo/db/query/killcursors_response_test.cpp b/src/mongo/db/query/killcursors_response_test.cpp
index c0c5da3f278..8f091635bb4 100644
--- a/src/mongo/db/query/killcursors_response_test.cpp
+++ b/src/mongo/db/query/killcursors_response_test.cpp
@@ -41,13 +41,9 @@ namespace {
TEST(KillCursorsResponseTest, parseFromBSONSuccess) {
StatusWith<KillCursorsResponse> result = KillCursorsResponse::parseFromBSON(
BSON("cursorsKilled" << BSON_ARRAY(CursorId(123)) << "cursorsNotFound"
- << BSON_ARRAY(CursorId(456) << CursorId(6))
- << "cursorsAlive"
+ << BSON_ARRAY(CursorId(456) << CursorId(6)) << "cursorsAlive"
<< BSON_ARRAY(CursorId(7) << CursorId(8) << CursorId(9))
- << "cursorsUnknown"
- << BSONArray()
- << "ok"
- << 1.0));
+ << "cursorsUnknown" << BSONArray() << "ok" << 1.0));
ASSERT_OK(result.getStatus());
KillCursorsResponse response = result.getValue();
ASSERT_EQ(response.cursorsKilled.size(), 1U);
@@ -65,11 +61,8 @@ TEST(KillCursorsResponseTest, parseFromBSONSuccess) {
TEST(KillCursorsResponseTest, parseFromBSONSuccessOmitCursorsAlive) {
StatusWith<KillCursorsResponse> result = KillCursorsResponse::parseFromBSON(
BSON("cursorsKilled" << BSON_ARRAY(CursorId(123)) << "cursorsNotFound"
- << BSON_ARRAY(CursorId(456) << CursorId(6))
- << "cursorsUnknown"
- << BSON_ARRAY(CursorId(789))
- << "ok"
- << 1.0));
+ << BSON_ARRAY(CursorId(456) << CursorId(6)) << "cursorsUnknown"
+ << BSON_ARRAY(CursorId(789)) << "ok" << 1.0));
ASSERT_NOT_OK(result.getStatus());
ASSERT_EQ(result.getStatus().code(), ErrorCodes::FailedToParse);
}
@@ -84,13 +77,11 @@ TEST(KillCursorsResponseTest, parseFromBSONCommandNotOk) {
}
TEST(KillCursorsResponseTest, parseFromBSONFieldNotArray) {
- StatusWith<KillCursorsResponse> result = KillCursorsResponse::parseFromBSON(
- BSON("cursorsKilled" << BSON_ARRAY(CursorId(123)) << "cursorsNotFound"
- << "foobar"
- << "cursorsAlive"
- << BSON_ARRAY(CursorId(7) << CursorId(8) << CursorId(9))
- << "ok"
- << 1.0));
+ StatusWith<KillCursorsResponse> result = KillCursorsResponse::parseFromBSON(BSON(
+ "cursorsKilled" << BSON_ARRAY(CursorId(123)) << "cursorsNotFound"
+ << "foobar"
+ << "cursorsAlive" << BSON_ARRAY(CursorId(7) << CursorId(8) << CursorId(9))
+ << "ok" << 1.0));
ASSERT_NOT_OK(result.getStatus());
ASSERT_EQ(result.getStatus().code(), ErrorCodes::FailedToParse);
}
@@ -98,11 +89,8 @@ TEST(KillCursorsResponseTest, parseFromBSONFieldNotArray) {
TEST(KillCursorsResponseTest, parseFromBSONArrayContainsInvalidElement) {
StatusWith<KillCursorsResponse> result = KillCursorsResponse::parseFromBSON(
BSON("cursorsKilled" << BSON_ARRAY(CursorId(123)) << "cursorsNotFound"
- << BSON_ARRAY(CursorId(456) << CursorId(6))
- << "cursorsAlive"
- << BSON_ARRAY(CursorId(7) << "foobar" << CursorId(9))
- << "ok"
- << 1.0));
+ << BSON_ARRAY(CursorId(456) << CursorId(6)) << "cursorsAlive"
+ << BSON_ARRAY(CursorId(7) << "foobar" << CursorId(9)) << "ok" << 1.0));
ASSERT_NOT_OK(result.getStatus());
ASSERT_EQ(result.getStatus().code(), ErrorCodes::FailedToParse);
}
@@ -116,13 +104,9 @@ TEST(KillCursorsResponseTest, toBSON) {
BSONObj responseObj = response.toBSON();
BSONObj expectedResponse =
BSON("cursorsKilled" << BSON_ARRAY(CursorId(123)) << "cursorsNotFound"
- << BSON_ARRAY(CursorId(456) << CursorId(6))
- << "cursorsAlive"
+ << BSON_ARRAY(CursorId(456) << CursorId(6)) << "cursorsAlive"
<< BSON_ARRAY(CursorId(7) << CursorId(8) << CursorId(9))
- << "cursorsUnknown"
- << BSONArray()
- << "ok"
- << 1.0);
+ << "cursorsUnknown" << BSONArray() << "ok" << 1.0);
ASSERT_BSONOBJ_EQ(responseObj, expectedResponse);
}
diff --git a/src/mongo/db/query/parsed_distinct.cpp b/src/mongo/db/query/parsed_distinct.cpp
index ca72257d6d3..226754acba4 100644
--- a/src/mongo/db/query/parsed_distinct.cpp
+++ b/src/mongo/db/query/parsed_distinct.cpp
@@ -293,11 +293,10 @@ StatusWith<ParsedDistinct> ParsedDistinct::parse(OperationContext* opCtx,
if (auto readConcernElt = cmdObj[repl::ReadConcernArgs::kReadConcernFieldName]) {
if (readConcernElt.type() != BSONType::Object) {
return Status(ErrorCodes::TypeMismatch,
- str::stream() << "\"" << repl::ReadConcernArgs::kReadConcernFieldName
- << "\" had the wrong type. Expected "
- << typeName(BSONType::Object)
- << ", found "
- << typeName(readConcernElt.type()));
+ str::stream()
+ << "\"" << repl::ReadConcernArgs::kReadConcernFieldName
+ << "\" had the wrong type. Expected " << typeName(BSONType::Object)
+ << ", found " << typeName(readConcernElt.type()));
}
qr->setReadConcern(readConcernElt.embeddedObject());
}
@@ -305,11 +304,10 @@ StatusWith<ParsedDistinct> ParsedDistinct::parse(OperationContext* opCtx,
if (auto queryOptionsElt = cmdObj[QueryRequest::kUnwrappedReadPrefField]) {
if (queryOptionsElt.type() != BSONType::Object) {
return Status(ErrorCodes::TypeMismatch,
- str::stream() << "\"" << QueryRequest::kUnwrappedReadPrefField
- << "\" had the wrong type. Expected "
- << typeName(BSONType::Object)
- << ", found "
- << typeName(queryOptionsElt.type()));
+ str::stream()
+ << "\"" << QueryRequest::kUnwrappedReadPrefField
+ << "\" had the wrong type. Expected " << typeName(BSONType::Object)
+ << ", found " << typeName(queryOptionsElt.type()));
}
qr->setUnwrappedReadPref(queryOptionsElt.embeddedObject());
}
diff --git a/src/mongo/db/query/parsed_distinct_test.cpp b/src/mongo/db/query/parsed_distinct_test.cpp
index bf48d19439e..dd6e501ed24 100644
--- a/src/mongo/db/query/parsed_distinct_test.cpp
+++ b/src/mongo/db/query/parsed_distinct_test.cpp
@@ -73,10 +73,10 @@ TEST(ParsedDistinctTest, ConvertToAggregationNoQuery) {
std::vector<BSONObj> expectedPipeline{
BSON("$unwind" << BSON("path"
<< "$x"
- << "preserveNullAndEmptyArrays"
- << true)),
- BSON("$group" << BSON("_id" << BSONNULL << "distinct" << BSON("$addToSet"
- << "$x")))};
+ << "preserveNullAndEmptyArrays" << true)),
+ BSON("$group" << BSON("_id" << BSONNULL << "distinct"
+ << BSON("$addToSet"
+ << "$x")))};
ASSERT(std::equal(expectedPipeline.begin(),
expectedPipeline.end(),
ar.getValue().getPipeline().begin(),
@@ -113,23 +113,21 @@ TEST(ParsedDistinctTest, ConvertToAggregationDottedPathNoQuery) {
std::vector<BSONObj> expectedPipeline{
BSON("$unwind" << BSON("path"
<< "$x"
- << "preserveNullAndEmptyArrays"
- << true)),
+ << "preserveNullAndEmptyArrays" << true)),
BSON("$unwind" << BSON("path"
<< "$x.y"
- << "preserveNullAndEmptyArrays"
- << true)),
+ << "preserveNullAndEmptyArrays" << true)),
BSON("$unwind" << BSON("path"
<< "$x.y.z"
- << "preserveNullAndEmptyArrays"
- << true)),
+ << "preserveNullAndEmptyArrays" << true)),
BSON("$match" << BSON("x" << BSON("$_internalSchemaType"
<< "object")
<< "x.y"
<< BSON("$_internalSchemaType"
<< "object"))),
- BSON("$group" << BSON("_id" << BSONNULL << "distinct" << BSON("$addToSet"
- << "$x.y.z")))};
+ BSON("$group" << BSON("_id" << BSONNULL << "distinct"
+ << BSON("$addToSet"
+ << "$x.y.z")))};
ASSERT(std::equal(expectedPipeline.begin(),
expectedPipeline.end(),
ar.getValue().getPipeline().begin(),
@@ -159,9 +157,7 @@ TEST(ParsedDistinctTest, ConvertToAggregationWithAllOptions) {
<< "secondary")
<< "comment"
<< "aComment"
- << "maxTimeMS"
- << 100
- << "$db"
+ << "maxTimeMS" << 100 << "$db"
<< "testdb"),
ExtensionsCallbackNoop(),
!isExplain);
@@ -190,10 +186,10 @@ TEST(ParsedDistinctTest, ConvertToAggregationWithAllOptions) {
std::vector<BSONObj> expectedPipeline{
BSON("$unwind" << BSON("path"
<< "$x"
- << "preserveNullAndEmptyArrays"
- << true)),
- BSON("$group" << BSON("_id" << BSONNULL << "distinct" << BSON("$addToSet"
- << "$x")))};
+ << "preserveNullAndEmptyArrays" << true)),
+ BSON("$group" << BSON("_id" << BSONNULL << "distinct"
+ << BSON("$addToSet"
+ << "$x")))};
ASSERT(std::equal(expectedPipeline.begin(),
expectedPipeline.end(),
ar.getValue().getPipeline().begin(),
@@ -232,10 +228,10 @@ TEST(ParsedDistinctTest, ConvertToAggregationWithQuery) {
BSON("$match" << BSON("z" << 7)),
BSON("$unwind" << BSON("path"
<< "$y"
- << "preserveNullAndEmptyArrays"
- << true)),
- BSON("$group" << BSON("_id" << BSONNULL << "distinct" << BSON("$addToSet"
- << "$y")))};
+ << "preserveNullAndEmptyArrays" << true)),
+ BSON("$group" << BSON("_id" << BSONNULL << "distinct"
+ << BSON("$addToSet"
+ << "$y")))};
ASSERT(std::equal(expectedPipeline.begin(),
expectedPipeline.end(),
ar.getValue().getPipeline().begin(),
@@ -269,10 +265,10 @@ TEST(ParsedDistinctTest, ExplainNotIncludedWhenConvertingToAggregationCommand) {
std::vector<BSONObj> expectedPipeline{
BSON("$unwind" << BSON("path"
<< "$x"
- << "preserveNullAndEmptyArrays"
- << true)),
- BSON("$group" << BSON("_id" << BSONNULL << "distinct" << BSON("$addToSet"
- << "$x")))};
+ << "preserveNullAndEmptyArrays" << true)),
+ BSON("$group" << BSON("_id" << BSONNULL << "distinct"
+ << BSON("$addToSet"
+ << "$x")))};
ASSERT(std::equal(expectedPipeline.begin(),
expectedPipeline.end(),
ar.getValue().getPipeline().begin(),
diff --git a/src/mongo/db/query/parsed_projection.cpp b/src/mongo/db/query/parsed_projection.cpp
index aaa3bd36f3d..359ad5c23d8 100644
--- a/src/mongo/db/query/parsed_projection.cpp
+++ b/src/mongo/db/query/parsed_projection.cpp
@@ -34,8 +34,8 @@
namespace mongo {
-using std::unique_ptr;
using std::string;
+using std::unique_ptr;
/**
* Parses the projection 'spec' and checks its validity with respect to the query 'query'.
@@ -297,9 +297,9 @@ Status ParsedProjection::make(OperationContext* opCtx,
// $meta sortKey should not be checked as a part of _requiredFields, since it can
// potentially produce a covered projection as long as the sort key is covered.
if (BSONType::Object == elt.type()) {
- dassert(
- SimpleBSONObjComparator::kInstance.evaluate(elt.Obj() == BSON("$meta"
- << "sortKey")));
+ dassert(SimpleBSONObjComparator::kInstance.evaluate(elt.Obj() ==
+ BSON("$meta"
+ << "sortKey")));
continue;
}
if (elt.trueValue()) {
diff --git a/src/mongo/db/query/parsed_projection_test.cpp b/src/mongo/db/query/parsed_projection_test.cpp
index 075858687fd..990b665d6ed 100644
--- a/src/mongo/db/query/parsed_projection_test.cpp
+++ b/src/mongo/db/query/parsed_projection_test.cpp
@@ -38,8 +38,8 @@
namespace {
-using std::unique_ptr;
using std::string;
+using std::unique_ptr;
using std::vector;
using namespace mongo;
@@ -62,8 +62,7 @@ unique_ptr<ParsedProjection> createParsedProjection(const BSONObj& query, const
Status status = ParsedProjection::make(opCtx.get(), projObj, queryMatchExpr.get(), &out);
if (!status.isOK()) {
FAIL(str::stream() << "failed to parse projection " << projObj << " (query: " << query
- << "): "
- << status.toString());
+ << "): " << status.toString());
}
ASSERT(out);
return unique_ptr<ParsedProjection>(out);
diff --git a/src/mongo/db/query/plan_cache_indexability.cpp b/src/mongo/db/query/plan_cache_indexability.cpp
index 9e0d9f717c6..71d1fa456ce 100644
--- a/src/mongo/db/query/plan_cache_indexability.cpp
+++ b/src/mongo/db/query/plan_cache_indexability.cpp
@@ -92,7 +92,7 @@ bool nodeIsConservativelySupportedBySparseIndex(const MatchExpression* me) {
const bool inElemMatch = false;
return QueryPlannerIXSelect::nodeIsSupportedBySparseIndex(me, inElemMatch);
}
-}
+} // namespace
void PlanCacheIndexabilityState::processSparseIndex(const std::string& indexName,
const BSONObj& keyPattern) {
diff --git a/src/mongo/db/query/plan_cache_indexability_test.cpp b/src/mongo/db/query/plan_cache_indexability_test.cpp
index d4d91dfe7f9..48116f58416 100644
--- a/src/mongo/db/query/plan_cache_indexability_test.cpp
+++ b/src/mongo/db/query/plan_cache_indexability_test.cpp
@@ -47,8 +47,8 @@ std::unique_ptr<MatchExpression> parseMatchExpression(const BSONObj& obj,
expCtx->setCollator(collator);
StatusWithMatchExpression status = MatchExpressionParser::parse(obj, std::move(expCtx));
if (!status.isOK()) {
- FAIL(str::stream() << "failed to parse query: " << obj.toString() << ". Reason: "
- << status.getStatus().toString());
+ FAIL(str::stream() << "failed to parse query: " << obj.toString()
+ << ". Reason: " << status.getStatus().toString());
}
return std::move(status.getValue());
}
diff --git a/src/mongo/db/query/plan_cache_test.cpp b/src/mongo/db/query/plan_cache_test.cpp
index a2ab4e1f475..d5f63f37f24 100644
--- a/src/mongo/db/query/plan_cache_test.cpp
+++ b/src/mongo/db/query/plan_cache_test.cpp
@@ -1337,8 +1337,7 @@ TEST_F(CachePlanSelectionTest, Or2DSphereNonNear) {
TEST_F(CachePlanSelectionTest, AndWithinPolygonWithinCenterSphere) {
addIndex(BSON("a"
<< "2dsphere"
- << "b"
- << 1),
+ << "b" << 1),
"a_2dsphere_b_2dsphere");
BSONObj query = fromjson(
diff --git a/src/mongo/db/query/plan_enumerator.cpp b/src/mongo/db/query/plan_enumerator.cpp
index 7163d69e474..f213e98c6c9 100644
--- a/src/mongo/db/query/plan_enumerator.cpp
+++ b/src/mongo/db/query/plan_enumerator.cpp
@@ -41,10 +41,10 @@
namespace {
using namespace mongo;
-using std::unique_ptr;
using std::endl;
using std::set;
using std::string;
+using std::unique_ptr;
using std::vector;
std::string getPathPrefix(std::string path) {
@@ -668,9 +668,9 @@ bool PlanEnumerator::enumerateMandatoryIndex(const IndexToPredMap& idxToFirst,
// multikey information.
invariant(INDEX_2DSPHERE == thisIndex.type);
- if (predsOverLeadingField.end() != std::find(predsOverLeadingField.begin(),
- predsOverLeadingField.end(),
- mandatoryPred)) {
+ if (predsOverLeadingField.end() !=
+ std::find(
+ predsOverLeadingField.begin(), predsOverLeadingField.end(), mandatoryPred)) {
// The mandatory predicate is on the leading field of 'thisIndex'. We assign it to
// 'thisIndex' and skip assigning any other predicates on the leading field to
// 'thisIndex' because no additional predicate on the leading field will generate a
@@ -722,9 +722,9 @@ bool PlanEnumerator::enumerateMandatoryIndex(const IndexToPredMap& idxToFirst,
}
} else if (thisIndex.multikey) {
// Special handling for multikey mandatory indices.
- if (predsOverLeadingField.end() != std::find(predsOverLeadingField.begin(),
- predsOverLeadingField.end(),
- mandatoryPred)) {
+ if (predsOverLeadingField.end() !=
+ std::find(
+ predsOverLeadingField.begin(), predsOverLeadingField.end(), mandatoryPred)) {
// The mandatory predicate is over the first field of the index. Assign
// it now.
indexAssign.preds.push_back(mandatoryPred);
diff --git a/src/mongo/db/query/planner_analysis.cpp b/src/mongo/db/query/planner_analysis.cpp
index 7720824f7f7..3487e955675 100644
--- a/src/mongo/db/query/planner_analysis.cpp
+++ b/src/mongo/db/query/planner_analysis.cpp
@@ -46,9 +46,9 @@
namespace mongo {
-using std::unique_ptr;
using std::endl;
using std::string;
+using std::unique_ptr;
using std::vector;
namespace dps = ::mongo::dotted_path_support;
diff --git a/src/mongo/db/query/planner_ixselect.cpp b/src/mongo/db/query/planner_ixselect.cpp
index 84b7616a24f..1e2adddbd3f 100644
--- a/src/mongo/db/query/planner_ixselect.cpp
+++ b/src/mongo/db/query/planner_ixselect.cpp
@@ -682,13 +682,14 @@ void QueryPlannerIXSelect::_rateIndices(MatchExpression* node,
const IndexEntry& index = indices[i];
std::size_t keyPatternIndex = 0;
for (auto&& keyPatternElt : index.keyPattern) {
- if (keyPatternElt.fieldNameStringData() == fullPath && _compatible(keyPatternElt,
- index,
- keyPatternIndex,
- node,
- fullPath,
- collator,
- elemMatchCtx)) {
+ if (keyPatternElt.fieldNameStringData() == fullPath &&
+ _compatible(keyPatternElt,
+ index,
+ keyPatternIndex,
+ node,
+ fullPath,
+ collator,
+ elemMatchCtx)) {
if (keyPatternIndex == 0) {
rt->first.push_back(i);
} else {
diff --git a/src/mongo/db/query/planner_ixselect_test.cpp b/src/mongo/db/query/planner_ixselect_test.cpp
index e80eddd187b..e1018a87944 100644
--- a/src/mongo/db/query/planner_ixselect_test.cpp
+++ b/src/mongo/db/query/planner_ixselect_test.cpp
@@ -51,8 +51,8 @@ namespace {
constexpr CollatorInterface* kSimpleCollator = nullptr;
-using std::unique_ptr;
using std::string;
+using std::unique_ptr;
using std::vector;
/**
@@ -1131,8 +1131,7 @@ TEST(QueryPlannerIXSelectTest, InternalExprEqCanUseHashedIndex) {
TEST(QueryPlannerIXSelectTest, InternalExprEqCannotUseTextIndexPrefix) {
auto entry = buildSimpleIndexEntry(BSON("a" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
std::vector<IndexEntry> indices;
indices.push_back(entry);
std::set<size_t> expectedIndices;
@@ -1143,10 +1142,7 @@ TEST(QueryPlannerIXSelectTest, InternalExprEqCannotUseTextIndexPrefix) {
TEST(QueryPlannerIXSelectTest, InternalExprEqCanUseTextIndexSuffix) {
auto entry = buildSimpleIndexEntry(BSON("_fts"
<< "text"
- << "_ftsx"
- << 1
- << "a"
- << 1));
+ << "_ftsx" << 1 << "a" << 1));
std::vector<IndexEntry> indices;
indices.push_back(entry);
std::set<size_t> expectedIndices = {0};
diff --git a/src/mongo/db/query/query_planner.cpp b/src/mongo/db/query/query_planner.cpp
index 9735dbade0e..e02cae41ec2 100644
--- a/src/mongo/db/query/query_planner.cpp
+++ b/src/mongo/db/query/query_planner.cpp
@@ -58,8 +58,8 @@
namespace mongo {
-using std::unique_ptr;
using std::numeric_limits;
+using std::unique_ptr;
namespace dps = ::mongo::dotted_path_support;
@@ -520,8 +520,8 @@ StatusWith<std::unique_ptr<QuerySolution>> QueryPlanner::planFromCache(
auto soln = QueryPlannerAnalysis::analyzeDataAccess(query, params, std::move(solnRoot));
if (!soln) {
return Status(ErrorCodes::BadValue,
- str::stream() << "Failed to analyze plan from cache. Query: "
- << query.toStringShort());
+ str::stream()
+ << "Failed to analyze plan from cache. Query: " << query.toStringShort());
}
LOG(5) << "Planner: solution constructed from the cache:\n" << redact(soln->toString());
@@ -610,11 +610,10 @@ StatusWith<std::vector<std::unique_ptr<QuerySolution>>> QueryPlanner::plan(
}
if (fullIndexList.size() > 1) {
return Status(ErrorCodes::IndexNotFound,
- str::stream() << "Hint matched multiple indexes, "
- << "must hint by index name. Matched: "
- << fullIndexList[0].toString()
- << " and "
- << fullIndexList[1].toString());
+ str::stream()
+ << "Hint matched multiple indexes, "
+ << "must hint by index name. Matched: " << fullIndexList[0].toString()
+ << " and " << fullIndexList[1].toString());
}
hintedIndexEntry.emplace(fullIndexList.front());
diff --git a/src/mongo/db/query/query_planner_geo_test.cpp b/src/mongo/db/query/query_planner_geo_test.cpp
index c70ec258481..b23c40a64fe 100644
--- a/src/mongo/db/query/query_planner_geo_test.cpp
+++ b/src/mongo/db/query/query_planner_geo_test.cpp
@@ -89,8 +89,7 @@ TEST_F(QueryPlannerTest, Basic2DSphereCompound) {
TEST_F(QueryPlannerTest, Basic2DCompound) {
addIndex(BSON("loc"
<< "2d"
- << "a"
- << 1));
+ << "a" << 1));
runQuery(
fromjson("{ loc: { $geoWithin: { $box : [[0, 0],[10, 10]] } },"
@@ -247,8 +246,7 @@ TEST_F(QueryPlannerTest, Multikey2DSphereGeoNearReverseCompound) {
TEST_F(QueryPlannerTest, 2DNonNearContainedOr) {
addIndex(BSON("a"
<< "2d"
- << "x"
- << 1));
+ << "x" << 1));
addIndex(BSON("y" << 1));
runQuery(
fromjson("{$and: [{x: 1}, {$or: [{a: {$within: {$polygon: [[0, 0], [0, 1], [1, 0], [0, "
@@ -649,10 +647,7 @@ TEST_F(QueryPlannerTest, CompoundMultikey2DSphereNearCompoundTest) {
// true means multikey
addIndex(BSON("a" << 1 << "b"
<< "2dsphere"
- << "c"
- << 1
- << "d"
- << 1),
+ << "c" << 1 << "d" << 1),
true);
runQuery(
fromjson("{a: {$gte: 0}, c: {$gte: 0, $lt: 4}, d: {$gt: 1, $lt: 5},"
@@ -671,8 +666,7 @@ TEST_F(QueryPlannerTest, CompoundMultikey2DNear) {
// true means multikey
addIndex(BSON("a"
<< "2d"
- << "b"
- << 1),
+ << "b" << 1),
true);
runQuery(fromjson("{a: {$near: [0, 0]}, b: {$gte: 0}}"));
@@ -1163,10 +1157,7 @@ TEST_F(QueryPlannerGeo2dsphereTest,
MultikeyPaths multikeyPaths{{1U}, {1U}, {1U}};
addIndex(BSON("a.geo"
<< "2dsphere"
- << "a.b"
- << 1
- << "a.c"
- << 1),
+ << "a.b" << 1 << "a.c" << 1),
multikeyPaths);
runQuery(fromjson("{'a.geo': {$nearSphere: [0, 0]}, 'a.b': 2, 'a.c': 3}"));
@@ -1196,10 +1187,7 @@ TEST_F(QueryPlannerGeo2dsphereTest,
MultikeyPaths multikeyPaths{{0U}, {0U}, {0U}};
addIndex(BSON("a.geo"
<< "2dsphere"
- << "a.b"
- << 1
- << "a.c"
- << 1),
+ << "a.b" << 1 << "a.c" << 1),
multikeyPaths);
runQuery(fromjson("{'a.geo': {$nearSphere: [0, 0]}, 'a.b': 2, 'a.c': 3}"));
@@ -1230,10 +1218,7 @@ TEST_F(QueryPlannerGeo2dsphereTest,
MultikeyPaths multikeyPaths{{0U}, {0U}, {0U}};
addIndex(BSON("a.geo"
<< "2dsphere"
- << "a.b"
- << 1
- << "a.c"
- << 1),
+ << "a.b" << 1 << "a.c" << 1),
multikeyPaths);
runQuery(fromjson("{'a.geo': {$nearSphere: [0, 0]}, a: {$elemMatch: {b: 2, c: 3}}}"));
@@ -1265,10 +1250,7 @@ TEST_F(QueryPlannerGeo2dsphereTest,
MultikeyPaths multikeyPaths{{0U, 1U}, {0U, 1U}, {0U, 1U}};
addIndex(BSON("a.b.geo"
<< "2dsphere"
- << "a.b.c"
- << 1
- << "a.b.d"
- << 1),
+ << "a.b.c" << 1 << "a.b.d" << 1),
multikeyPaths);
runQuery(fromjson("{'a.b.geo': {$nearSphere: [0, 0]}, a: {$elemMatch: {'b.c': 2, 'b.d': 3}}}"));
@@ -1432,8 +1414,7 @@ TEST_F(QueryPlanner2dsphereVersionTest, TwoDNearCompound) {
std::vector<int> versions{2, 3};
std::vector<BSONObj> keyPatterns = {BSON("geo"
<< "2dsphere"
- << "nongeo"
- << 1)};
+ << "nongeo" << 1)};
BSONObj predicate = fromjson("{geo: {$nearSphere: [-71.34895, 42.46037]}}");
testMultiple2dsphereIndexVersions(versions, keyPatterns, predicate, 1U);
}
@@ -1444,16 +1425,10 @@ TEST_F(QueryPlanner2dsphereVersionTest, TwoDSphereSparseBelowOr) {
std::vector<int> versions{2, 3};
std::vector<BSONObj> keyPatterns = {BSON("geo1"
<< "2dsphere"
- << "a"
- << 1
- << "b"
- << 1),
+ << "a" << 1 << "b" << 1),
BSON("geo2"
<< "2dsphere"
- << "a"
- << 1
- << "b"
- << 1)};
+ << "a" << 1 << "b" << 1)};
BSONObj predicate = fromjson(
"{a: 4, b: 5, $or: ["
@@ -1475,8 +1450,7 @@ TEST_F(QueryPlanner2dsphereVersionTest, TwoDSphereSparseBelowElemMatch) {
std::vector<int> versions{2, 3};
std::vector<BSONObj> keyPatterns = {BSON("a.b"
<< "2dsphere"
- << "a.c"
- << 1)};
+ << "a.c" << 1)};
BSONObj predicate = fromjson(
"{a: {$elemMatch: {b: {$geoWithin: {$centerSphere: [[10,20], 0.01]}},"
@@ -1600,8 +1574,7 @@ TEST_F(QueryPlannerTest, 2dInexactFetchPredicateOverTrailingFieldHandledCorrectl
addIndex(BSON("a"
<< "2d"
- << "b"
- << 1));
+ << "b" << 1));
runQuery(fromjson("{a: {$geoWithin: {$center: [[0, 0], 1]}}, b: {$exists: true}}"));
assertNumSolutions(1U);
@@ -1616,8 +1589,7 @@ TEST_F(QueryPlannerTest, 2dInexactFetchPredicateOverTrailingFieldHandledCorrectl
const bool multikey = true;
addIndex(BSON("a"
<< "2d"
- << "b"
- << 1),
+ << "b" << 1),
multikey);
runQuery(fromjson("{a: {$geoWithin: {$center: [[0, 0], 1]}}, b: {$exists: true}}"));
@@ -1632,8 +1604,7 @@ TEST_F(QueryPlannerTest, 2dNearInexactFetchPredicateOverTrailingFieldHandledCorr
addIndex(BSON("a"
<< "2d"
- << "b"
- << 1));
+ << "b" << 1));
runQuery(fromjson("{a: {$near: [0, 0]}, b: {$exists: true}}"));
assertNumSolutions(1U);
@@ -1647,8 +1618,7 @@ TEST_F(QueryPlannerTest, 2dNearInexactFetchPredicateOverTrailingFieldMultikey) {
const bool multikey = true;
addIndex(BSON("a"
<< "2d"
- << "b"
- << 1),
+ << "b" << 1),
multikey);
runQuery(fromjson("{a: {$near: [0, 0]}, b: {$exists: true}}"));
@@ -1661,8 +1631,7 @@ TEST_F(QueryPlannerTest, 2dNearWithInternalExprEqOverTrailingField) {
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("a"
<< "2d"
- << "b"
- << 1));
+ << "b" << 1));
runQuery(fromjson("{a: {$near: [0, 0]}, b: {$_internalExprEq: 1}}"));
assertNumSolutions(1U);
@@ -1673,8 +1642,7 @@ TEST_F(QueryPlannerTest, 2dNearWithInternalExprEqOverTrailingFieldMultikey) {
const bool multikey = true;
addIndex(BSON("a"
<< "2d"
- << "b"
- << 1),
+ << "b" << 1),
multikey);
runQuery(fromjson("{a: {$near: [0, 0]}, b: {$_internalExprEq: 1}}"));
@@ -1687,8 +1655,7 @@ TEST_F(QueryPlannerTest, 2dGeoWithinWithInternalExprEqOverTrailingField) {
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("a"
<< "2d"
- << "b"
- << 1));
+ << "b" << 1));
runQuery(
fromjson("{a: {$within: {$polygon: [[0,0], [2,0], [4,0]]}}, b: {$_internalExprEq: 2}}"));
@@ -1745,8 +1712,7 @@ TEST_F(QueryPlannerTest, 2dsphereNonNearWithInternalExprEqOverTrailingField) {
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("a"
<< "2dsphere"
- << "b"
- << 1));
+ << "b" << 1));
runQuery(
fromjson("{b: {$_internalExprEq: 0}, a: {$geoWithin: {$centerSphere: [[0, 0], 10]}}}"));
@@ -1767,8 +1733,7 @@ TEST_F(QueryPlannerTest, 2dsphereNonNearWithInternalExprEqOverTrailingFieldMulti
const bool multikey = true;
addIndex(BSON("a"
<< "2dsphere"
- << "b"
- << 1),
+ << "b" << 1),
multikey);
runQuery(
@@ -1791,8 +1756,7 @@ TEST_F(QueryPlannerTest, 2dWithinPredicateOverTrailingFieldElemMatchMultikey) {
const bool multikey = true;
addIndex(BSON("a"
<< "2d"
- << "b"
- << 1),
+ << "b" << 1),
multikey);
runQuery(fromjson("{a: {$geoWithin: {$center: [[0, 0], 1]}}, b: {$elemMatch: {c: 1}}}"));
diff --git a/src/mongo/db/query/query_planner_test.cpp b/src/mongo/db/query/query_planner_test.cpp
index 552241ae9ea..3e0cf497f86 100644
--- a/src/mongo/db/query/query_planner_test.cpp
+++ b/src/mongo/db/query/query_planner_test.cpp
@@ -434,7 +434,7 @@ TEST_F(QueryPlannerTest, NotEqualsNullSparseIndex) {
addIndex(BSON("x" << 1),
false, // multikey
true // sparse
- );
+ );
runQuery(fromjson("{x: {$ne: null}}"));
@@ -449,7 +449,7 @@ TEST_F(QueryPlannerTest, NotEqualsNullSparseMultiKeyIndex) {
addIndex(BSON("x" << 1),
true, // multikey
true // sparse
- );
+ );
runQuery(fromjson("{x: {$ne: null}}"));
@@ -462,7 +462,7 @@ TEST_F(QueryPlannerTest, NotEqualsNullInElemMatchValueSparseMultiKeyIndex) {
addIndex(BSON("x" << 1),
true, // multikey
true // sparse
- );
+ );
runQuery(fromjson("{'x': {$elemMatch: {$ne: null}}}"));
@@ -1674,8 +1674,7 @@ TEST_F(QueryPlannerTest, CantUseHashedIndexToProvideSortWithIndexablePred) {
TEST_F(QueryPlannerTest, CantUseTextIndexToProvideSort) {
addIndex(BSON("x" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuerySortProj(BSONObj(), BSON("x" << 1), BSONObj());
ASSERT_EQUALS(getNumSolutions(), 1U);
@@ -2744,7 +2743,7 @@ TEST_F(QueryPlannerTest, NegationCannotUseSparseIndex) {
addIndex(fromjson("{a: 1}"),
false, // multikey
true // sparse
- );
+ );
runQuery(fromjson("{a: {$ne: 5}}"));
assertHasOnlyCollscan();
@@ -2758,7 +2757,7 @@ TEST_F(QueryPlannerTest, NegationInElemMatchDoesNotUseSparseIndex) {
addIndex(fromjson("{a: 1}"),
true, // multikey
true // sparse
- );
+ );
runQuery(fromjson("{a: {$elemMatch: {$ne: 5}}}"));
assertHasOnlyCollscan();
@@ -2770,7 +2769,7 @@ TEST_F(QueryPlannerTest, SparseIndexCannotSupportEqualsNull) {
addIndex(BSON("i" << 1),
false, // multikey
true // sparse
- );
+ );
runQuery(fromjson("{i: {$eq: null}}"));
assertHasOnlyCollscan();
@@ -2784,7 +2783,7 @@ TEST_F(QueryPlannerTest, SparseIndexCanSupportGTEOrLTENull) {
addIndex(BSON("i" << 1),
false, // multikey
true // sparse
- );
+ );
runQuery(fromjson("{i: {$gte: null}}"));
assertNumSolutions(1U);
diff --git a/src/mongo/db/query/query_planner_test_fixture.cpp b/src/mongo/db/query/query_planner_test_fixture.cpp
index d96e3e822f0..ff4aef1309e 100644
--- a/src/mongo/db/query/query_planner_test_fixture.cpp
+++ b/src/mongo/db/query/query_planner_test_fixture.cpp
@@ -548,8 +548,8 @@ std::unique_ptr<MatchExpression> QueryPlannerTest::parseMatchExpression(
expCtx->setCollator(collator);
StatusWithMatchExpression status = MatchExpressionParser::parse(obj, std::move(expCtx));
if (!status.isOK()) {
- FAIL(str::stream() << "failed to parse query: " << obj.toString() << ". Reason: "
- << status.getStatus().toString());
+ FAIL(str::stream() << "failed to parse query: " << obj.toString()
+ << ". Reason: " << status.getStatus().toString());
}
return std::move(status.getValue());
}
diff --git a/src/mongo/db/query/query_planner_text_test.cpp b/src/mongo/db/query/query_planner_text_test.cpp
index d0b148349ca..ed4b1e45247 100644
--- a/src/mongo/db/query/query_planner_text_test.cpp
+++ b/src/mongo/db/query/query_planner_text_test.cpp
@@ -52,8 +52,7 @@ using namespace mongo;
TEST_F(QueryPlannerTest, SimpleText) {
addIndex(BSON("_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuery(fromjson("{$text: {$search: 'blah'}}"));
assertNumSolutions(1);
@@ -65,8 +64,7 @@ TEST_F(QueryPlannerTest, CantUseTextUnlessHaveTextPred) {
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("a" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuery(fromjson("{a:1}"));
// No table scans allowed so there is no solution.
@@ -79,8 +77,7 @@ TEST_F(QueryPlannerTest, HaveOKPrefixOnTextIndex) {
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("a" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuery(fromjson("{a:1, $text:{$search: 'blah'}}"));
assertNumSolutions(1);
@@ -99,8 +96,7 @@ TEST_F(QueryPlannerTest, HaveBadPrefixOnTextIndex) {
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("a" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runInvalidQuery(fromjson("{a:{$gt: 1}, $text:{$search: 'blah'}}"));
runInvalidQuery(fromjson("{$text: {$search: 'blah'}}"));
@@ -113,8 +109,7 @@ TEST_F(QueryPlannerTest, PrefixOnTextIndexIsOutsidePred) {
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("a" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
addIndex(BSON("b" << 1));
runInvalidQuery(fromjson("{$and: [{a: 5}, {$or: [{$text: {$search: 'blah'}}, {b: 6}]}]}"));
}
@@ -124,8 +119,7 @@ TEST_F(QueryPlannerTest, ManyPrefixTextIndex) {
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("a" << 1 << "b" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
// Both points.
runQuery(fromjson("{a:1, b:1, $text:{$search: 'blah'}}"));
@@ -150,10 +144,7 @@ TEST_F(QueryPlannerTest, SuffixOptional) {
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("a" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1
- << "b"
- << 1));
+ << "_ftsx" << 1 << "b" << 1));
runQuery(fromjson("{a:1, $text:{$search: 'blah'}}"));
assertNumSolutions(1);
@@ -168,10 +159,7 @@ TEST_F(QueryPlannerTest, RemoveFromSubtree) {
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("a" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1
- << "b"
- << 1));
+ << "_ftsx" << 1 << "b" << 1));
runQuery(fromjson("{a:1, $or: [{a:1}, {b:7}], $text:{$search: 'blah'}}"));
assertNumSolutions(1);
@@ -187,8 +175,7 @@ TEST_F(QueryPlannerTest, CompoundPrefixEvenIfMultikey) {
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("a" << 1 << "b" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1),
+ << "_ftsx" << 1),
true);
// Both points.
@@ -201,10 +188,7 @@ TEST_F(QueryPlannerTest, IndexOnOwnFieldButNotLeafPrefix) {
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("a" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1
- << "b"
- << 1));
+ << "_ftsx" << 1 << "b" << 1));
// 'a' is not an EQ so it doesn't compound w/the text pred. We also shouldn't use the text
// index to satisfy it w/o the text query.
@@ -215,10 +199,7 @@ TEST_F(QueryPlannerTest, IndexOnOwnFieldButNotLeafSuffixNoPrefix) {
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("_fts"
<< "text"
- << "_ftsx"
- << 1
- << "b"
- << 1));
+ << "_ftsx" << 1 << "b" << 1));
runQuery(fromjson("{b:{$elemMatch:{$gt: 0, $lt: 2}}, $text:{$search: 'blah'}}"));
assertNumSolutions(1);
@@ -228,8 +209,7 @@ TEST_F(QueryPlannerTest, TextInsideAndWithCompoundIndex) {
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("a" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuery(fromjson("{$and: [{a: 3}, {$text: {$search: 'foo'}}], a: 3}"));
assertNumSolutions(1U);
@@ -242,8 +222,7 @@ TEST_F(QueryPlannerTest, TextInsideAndWithCompoundIndexAndMultiplePredsOnIndexPr
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("a" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuery(fromjson("{$and: [{a: 1}, {a: 2}, {$text: {$search: 'foo'}}]}"));
assertNumSolutions(1U);
@@ -257,8 +236,7 @@ TEST_F(QueryPlannerTest, TextInsideOrBasic) {
addIndex(BSON("a" << 1));
addIndex(BSON("_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuery(fromjson("{a: 0, $or: [{_id: 1}, {$text: {$search: 'foo'}}]}"));
assertNumSolutions(1U);
@@ -274,8 +252,7 @@ TEST_F(QueryPlannerTest, TextInsideOrWithAnotherOr) {
addIndex(BSON("a" << 1));
addIndex(BSON("_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuery(
fromjson("{$and: [{$or: [{a: 3}, {a: 4}]}, "
"{$or: [{$text: {$search: 'foo'}}, {a: 5}]}]}"));
@@ -294,8 +271,7 @@ TEST_F(QueryPlannerTest, TextInsideOrOfAnd) {
addIndex(BSON("a" << 1));
addIndex(BSON("_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuery(
fromjson("{$or: [{a: {$gt: 1, $gt: 2}}, "
"{a: {$gt: 3}, $text: {$search: 'foo'}}]}"));
@@ -316,8 +292,7 @@ TEST_F(QueryPlannerTest, TextInsideAndOrAnd) {
addIndex(BSON("b" << 1));
addIndex(BSON("_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuery(
fromjson("{a: 1, $or: [{a:2}, {b:2}, "
"{a: 1, $text: {$search: 'foo'}}]}"));
@@ -336,8 +311,7 @@ TEST_F(QueryPlannerTest, TextInsideAndOrAndOr) {
addIndex(BSON("a" << 1));
addIndex(BSON("_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuery(
fromjson("{$or: [{a: {$gt: 1, $gt: 2}}, "
"{a: {$gt: 3}, $or: [{$text: {$search: 'foo'}}, "
@@ -360,8 +334,7 @@ TEST_F(QueryPlannerTest, TextInsideOrOneBranchNotIndexed) {
addIndex(BSON("a" << 1));
addIndex(BSON("_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuery(fromjson("{a: 1, $or: [{b: 2}, {$text: {$search: 'foo'}}]}"));
assertNumSolutions(0);
@@ -374,8 +347,7 @@ TEST_F(QueryPlannerTest, TextInsideOrWithAnotherUnindexableOr) {
addIndex(BSON("a" << 1));
addIndex(BSON("_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuery(
fromjson("{$and: [{$or: [{a: 1}, {b: 1}]}, "
"{$or: [{a: 2}, {$text: {$search: 'foo'}}]}]}"));
@@ -390,8 +362,7 @@ TEST_F(QueryPlannerTest, TextInsideOrWithAnotherUnindexableOr) {
TEST_F(QueryPlannerTest, AndTextWithGeoNonNear) {
addIndex(BSON("_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuery(
fromjson("{$text: {$search: 'foo'}, a: {$geoIntersects: {$geometry: "
"{type: 'Point', coordinates: [3.0, 1.0]}}}}"));
@@ -405,8 +376,7 @@ TEST_F(QueryPlannerTest, AndTextWithGeoNonNear) {
TEST_F(QueryPlannerTest, OrTextExact) {
addIndex(BSON("pre" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
addIndex(BSON("other" << 1));
runQuery(fromjson("{$or: [{$text: {$search: 'dave'}, pre: 3}, {other: 2}]}"));
@@ -421,8 +391,7 @@ TEST_F(QueryPlannerTest, OrTextExact) {
TEST_F(QueryPlannerTest, OrTextInexactCovered) {
addIndex(BSON("pre" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
addIndex(BSON("other" << 1));
runQuery(fromjson("{$or: [{$text: {$search: 'dave'}, pre: 3}, {other: /bar/}]}"));
@@ -437,8 +406,7 @@ TEST_F(QueryPlannerTest, OrTextInexactCovered) {
TEST_F(QueryPlannerTest, TextCaseSensitive) {
addIndex(BSON("_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuery(fromjson("{$text: {$search: 'blah', $caseSensitive: true}}"));
assertNumSolutions(1);
@@ -448,8 +416,7 @@ TEST_F(QueryPlannerTest, TextCaseSensitive) {
TEST_F(QueryPlannerTest, TextDiacriticSensitive) {
addIndex(BSON("_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuery(fromjson("{$text: {$search: 'blah', $diacriticSensitive: true}}"));
assertNumSolutions(1);
@@ -459,8 +426,7 @@ TEST_F(QueryPlannerTest, TextDiacriticSensitive) {
TEST_F(QueryPlannerTest, SortKeyMetaProjectionWithTextScoreMetaSort) {
addIndex(BSON("_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuerySortProj(fromjson("{$text: {$search: 'foo'}}"),
fromjson("{a: {$meta: 'textScore'}}"),
@@ -477,8 +443,7 @@ TEST_F(QueryPlannerTest, PredicatesOverLeadingFieldsWithSharedPathPrefixHandledC
const bool multikey = true;
addIndex(BSON("a.x" << 1 << "a.y" << 1 << "b.x" << 1 << "b.y" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1),
+ << "_ftsx" << 1),
multikey);
runQuery(fromjson("{'a.x': 1, 'a.y': 2, 'b.x': 3, 'b.y': 4, $text: {$search: 'foo'}}"));
@@ -491,8 +456,7 @@ TEST_F(QueryPlannerTest, PredicatesOverLeadingFieldsWithSharedPathPrefixHandledC
TEST_F(QueryPlannerTest, EqualityToArrayOverLeadingFieldHandledCorrectly) {
addIndex(BSON("a" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runQuery(fromjson("{a: [1, 2, 3], $text: {$search: 'foo'}}"));
@@ -504,8 +468,7 @@ TEST_F(QueryPlannerTest, EqualityToArrayOverLeadingFieldHandledCorrectlyWithMult
const bool multikey = true;
addIndex(BSON("a" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1),
+ << "_ftsx" << 1),
multikey);
runQuery(fromjson("{a: [1, 2, 3], $text: {$search: 'foo'}}"));
@@ -517,10 +480,7 @@ TEST_F(QueryPlannerTest, EqualityToArrayOverLeadingFieldHandledCorrectlyWithMult
TEST_F(QueryPlannerTest, InexactFetchPredicateOverTrailingFieldHandledCorrectly) {
addIndex(BSON("a" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1
- << "b"
- << 1));
+ << "_ftsx" << 1 << "b" << 1));
runQuery(fromjson("{a: 3, $text: {$search: 'foo'}, b: {$exists: true}}"));
@@ -533,10 +493,7 @@ TEST_F(QueryPlannerTest, InexactFetchPredicateOverTrailingFieldHandledCorrectlyM
const bool multikey = true;
addIndex(BSON("a" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1
- << "b"
- << 1),
+ << "_ftsx" << 1 << "b" << 1),
multikey);
runQuery(fromjson("{a: 3, $text: {$search: 'foo'}, b: {$exists: true}}"));
@@ -550,8 +507,7 @@ TEST_F(QueryPlannerTest, ExprEqCannotUsePrefixOfTextIndex) {
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("a" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
runInvalidQuery(fromjson("{a: {$_internalExprEq: 3}, $text: {$search: 'blah'}}"));
}
@@ -560,10 +516,7 @@ TEST_F(QueryPlannerTest, ExprEqCanUseSuffixOfTextIndex) {
params.options = QueryPlannerParams::NO_TABLE_SCAN;
addIndex(BSON("_fts"
<< "text"
- << "_ftsx"
- << 1
- << "a"
- << 1));
+ << "_ftsx" << 1 << "a" << 1));
runQuery(fromjson("{a: {$_internalExprEq: 3}, $text: {$search: 'blah'}}"));
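Every query_planner_text_test.cpp hunk above is the same mechanical change: clang-format 7.0.1 packs the operands of a BSON() << chain onto shared lines up to the column limit, keeping a break only where two string literals would otherwise sit side by side, instead of placing one operand per line. A standalone sketch of the new layout for a compound text index key pattern follows; the includes and main() are mine, and the exact header that provides the BSON() stream macro is an assumption, not taken from this diff.

#include <iostream>

#include "mongo/bson/bsonmisc.h"        // assumed home of the BSON() stream macro
#include "mongo/bson/bsonobjbuilder.h"  // mongo::BSONObjBuilder

int main() {
    // Compound text index key pattern in the post-clang-format-7.0.1 layout:
    // "a" << 1 << "_fts" share a line, and the remaining breaks fall between
    // adjacent string literals, matching the +lines in the hunks above.
    mongo::BSONObj keyPattern = BSON("a" << 1 << "_fts"
                                         << "text"
                                         << "_ftsx" << 1);
    std::cout << keyPattern.jsonString() << std::endl;
    return 0;
}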
diff --git a/src/mongo/db/query/query_planner_wildcard_index_test.cpp b/src/mongo/db/query/query_planner_wildcard_index_test.cpp
index d0fd0def30e..eba458736af 100644
--- a/src/mongo/db/query/query_planner_wildcard_index_test.cpp
+++ b/src/mongo/db/query/query_planner_wildcard_index_test.cpp
@@ -901,8 +901,7 @@ TEST_F(QueryPlannerWildcardTest, WildcardIndexDoesNotSupplyCandidatePlanForTextS
addWildcardIndex(BSON("$**" << 1));
addIndex(BSON("a" << 1 << "_fts"
<< "text"
- << "_ftsx"
- << 1));
+ << "_ftsx" << 1));
// Confirm that the wildcard index generates candidate plans for queries which do not include a
// $text predicate.
diff --git a/src/mongo/db/query/query_request.cpp b/src/mongo/db/query/query_request.cpp
index 4fc31cd4965..b3c87b40ab8 100644
--- a/src/mongo/db/query/query_request.cpp
+++ b/src/mongo/db/query/query_request.cpp
@@ -413,9 +413,7 @@ StatusWith<unique_ptr<QueryRequest>> QueryRequest::parseFromFindCommand(unique_p
} else if (!isGenericArgument(fieldName)) {
return Status(ErrorCodes::FailedToParse,
str::stream() << "Failed to parse: " << cmdObj.toString() << ". "
- << "Unrecognized field '"
- << fieldName
- << "'.");
+ << "Unrecognized field '" << fieldName << "'.");
}
}
@@ -663,26 +661,26 @@ Status QueryRequest::validate() const {
if (_limit && *_limit < 0) {
return Status(ErrorCodes::BadValue,
- str::stream() << "Limit value must be non-negative, but received: "
- << *_limit);
+ str::stream()
+ << "Limit value must be non-negative, but received: " << *_limit);
}
if (_batchSize && *_batchSize < 0) {
return Status(ErrorCodes::BadValue,
- str::stream() << "BatchSize value must be non-negative, but received: "
- << *_batchSize);
+ str::stream()
+ << "BatchSize value must be non-negative, but received: " << *_batchSize);
}
if (_ntoreturn && *_ntoreturn < 0) {
return Status(ErrorCodes::BadValue,
- str::stream() << "NToReturn value must be non-negative, but received: "
- << *_ntoreturn);
+ str::stream()
+ << "NToReturn value must be non-negative, but received: " << *_ntoreturn);
}
if (_maxTimeMS < 0) {
return Status(ErrorCodes::BadValue,
- str::stream() << "MaxTimeMS value must be non-negative, but received: "
- << _maxTimeMS);
+ str::stream()
+ << "MaxTimeMS value must be non-negative, but received: " << _maxTimeMS);
}
if (_tailableMode != TailableModeEnum::kNormal) {
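The validate() hunks show the other recurring rewrite in this commit: when a streamed error message no longer fits after the opening parenthesis, clang-format 7 breaks before the whole str::stream() expression and keeps the message text and the streamed value together on the next line. A minimal sketch of that pattern, assuming the standard Status/ErrorCodes/str headers; the helper itself is illustrative and is not code from query_request.cpp.

#include "mongo/base/error_codes.h"
#include "mongo/base/status.h"
#include "mongo/util/str.h"  // str::stream (header location on this branch is an assumption)

namespace mongo {
namespace {

// Illustrative helper only: it mirrors the reformatted validate() checks,
// where the break now comes before str::stream() rather than between the
// message text and the streamed value.
Status checkNonNegative(long long value) {
    if (value < 0) {
        return Status(ErrorCodes::BadValue,
                      str::stream()
                          << "Value must be non-negative, but received: " << value);
    }
    return Status::OK();
}

}  // namespace
}  // namespace mongo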
diff --git a/src/mongo/db/query/query_request_test.cpp b/src/mongo/db/query/query_request_test.cpp
index 7ee502140f3..e4f9989b44f 100644
--- a/src/mongo/db/query/query_request_test.cpp
+++ b/src/mongo/db/query/query_request_test.cpp
@@ -1571,5 +1571,5 @@ TEST_F(QueryRequestTest, ParseFromUUID) {
ASSERT_EQ(nss, qr.nss());
}
-} // namespace mongo
} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/query/query_settings_test.cpp b/src/mongo/db/query/query_settings_test.cpp
index 41cb1cc0c3d..6a6d0dce66f 100644
--- a/src/mongo/db/query/query_settings_test.cpp
+++ b/src/mongo/db/query/query_settings_test.cpp
@@ -42,9 +42,9 @@
using mongo::AllowedIndicesFilter;
using mongo::BSONObj;
+using mongo::fromjson;
using mongo::IndexEntry;
using mongo::SimpleBSONObjComparator;
-using mongo::fromjson;
namespace {
TEST(QuerySettingsTest, AllowedIndicesFilterAllowsIndexesByName) {
@@ -113,4 +113,4 @@ TEST(QuerySettingsTest, AllowedIndicesFilterAllowsIndexesByKeyPattern) {
ASSERT_TRUE(filter.allows(a_idx));
ASSERT_FALSE(filter.allows(ab_idx));
}
-}
+} // namespace
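The query_settings_test.cpp hunks capture the two remaining mechanical edits in this commit: the using declarations are re-sorted (fromjson now precedes IndexEntry, so the ordering effectively ignores case), and every closing namespace brace gains a // namespace comment, as it does again in query_solution.cpp just below. A throwaway sketch of both conventions, with all names invented:

#include <string>

namespace example {

class Widget {};

std::string greet() {
    return "hello";
}

// clang-format 7 adds (or fixes) the comment on every closing namespace brace,
// which is where the bare "+}  // namespace" lines above come from.
}  // namespace example

// The using declarations stay in an order that ignores case, mirroring the
// fromjson/IndexEntry reorder above: greet sorts before Widget.
using example::greet;
using example::Widget;

int main() {
    Widget w;
    (void)w;  // silence unused-variable warnings in this throwaway sketch
    return greet().empty() ? 1 : 0;
}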
diff --git a/src/mongo/db/query/query_solution.cpp b/src/mongo/db/query/query_solution.cpp
index 331e94875dd..ddbe87074a8 100644
--- a/src/mongo/db/query/query_solution.cpp
+++ b/src/mongo/db/query/query_solution.cpp
@@ -154,7 +154,7 @@ void addEqualityFieldSorts(const BSONObj& sortPattern,
sortsOut->insert(prefixBob.obj());
}
}
-}
+} // namespace
string QuerySolutionNode::toString() const {
str::stream ss;
diff --git a/src/mongo/db/query/query_solution_test.cpp b/src/mongo/db/query/query_solution_test.cpp
index 5a143a5a5c5..420c9b0efd0 100644
--- a/src/mongo/db/query/query_solution_test.cpp
+++ b/src/mongo/db/query/query_solution_test.cpp
@@ -728,8 +728,7 @@ auto createMatchExprAndParsedProjection(const BSONObj& query, const BSONObj& pro
ParsedProjection::make(opCtx.get(), projObj, queryMatchExpr.getValue().get(), &out);
if (!status.isOK()) {
FAIL(str::stream() << "failed to parse projection " << projObj << " (query: " << query
- << "): "
- << status.toString());
+ << "): " << status.toString());
}
ASSERT(out);
return std::make_pair(std::move(queryMatchExpr.getValue()),
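The query_solution_test.cpp hunk is the same reflow applied to an assertion message: the "): " label and status.toString() now share a line instead of taking one line each. A compact, invented test showing that layout with mongo's unittest FAIL macro and str::stream; the suite, names, and the unittest header path are assumptions for illustration only.

#include <string>

#include "mongo/unittest/unittest.h"  // TEST/FAIL/ASSERT macros (assumed header path)
#include "mongo/util/str.h"

namespace mongo {
namespace {

// Invented test; it exists only to show the post-format message layout, where
// each label literal and the value streamed after it stay on the same line.
TEST(ClangFormatLayoutExample, StreamedFailureMessage) {
    const std::string projObj = "{_id: 0}";
    const std::string query = "{a: 1}";
    const std::string parseError;  // empty means "parse succeeded" in this sketch
    if (!parseError.empty()) {
        FAIL(str::stream() << "failed to parse projection " << projObj
                           << " (query: " << query << "): " << parseError);
    }
    ASSERT_TRUE(parseError.empty());
}

}  // namespace
}  // namespace mongo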
diff --git a/src/mongo/db/query/stage_builder.cpp b/src/mongo/db/query/stage_builder.cpp
index 2e73e2509ef..dcba367378b 100644
--- a/src/mongo/db/query/stage_builder.cpp
+++ b/src/mongo/db/query/stage_builder.cpp
@@ -98,10 +98,9 @@ PlanStage* buildStages(OperationContext* opCtx,
auto descriptor = collection->getIndexCatalog()->findIndexByName(
opCtx, ixn->index.identifier.catalogName);
invariant(descriptor,
- str::stream() << "Namespace: " << collection->ns() << ", CanonicalQuery: "
- << cq.toStringShort()
- << ", IndexEntry: "
- << ixn->index.toString());
+ str::stream() << "Namespace: " << collection->ns()
+ << ", CanonicalQuery: " << cq.toStringShort()
+ << ", IndexEntry: " << ixn->index.toString());
// We use the node's internal name, keyPattern and multikey details here. For $**
// indexes, these may differ from the information recorded in the index's descriptor.