summary refs log tree commit diff
path: root/jstests
diff options
context:
space:
mode:
author	galon1 <gil.alon@mongodb.com>	2022-09-20 20:57:33 +0000
committer	Evergreen Agent <no-reply@evergreen.mongodb.com>	2022-09-20 22:51:06 +0000
commit	4f0c417801b6aa0e478675220975d6dc92d45233 (patch)
tree	40f19613f920c9e817996ed024265213379ddc90 /jstests
parent	631a1b711803941cdab5d2b11d544eae5e9ed63a (diff)
download	mongo-4f0c417801b6aa0e478675220975d6dc92d45233.tar.gz
SERVER-63811 Add check to allow documents stage to run without a database existing
Diffstat (limited to 'jstests')
-rw-r--r--jstests/aggregation/sources/setWindowFields/output_overwrites_existing_data.js2
-rw-r--r--jstests/sharding/documents_db_not_exist.js48
-rw-r--r--jstests/sharding/documents_sharded.js160
3 files changed, 208 insertions, 2 deletions
diff --git a/jstests/aggregation/sources/setWindowFields/output_overwrites_existing_data.js b/jstests/aggregation/sources/setWindowFields/output_overwrites_existing_data.js
index 8301f78bbfc..3ec0290054f 100644
--- a/jstests/aggregation/sources/setWindowFields/output_overwrites_existing_data.js
+++ b/jstests/aggregation/sources/setWindowFields/output_overwrites_existing_data.js
@@ -9,8 +9,6 @@
(function() {
"use strict";
-// TODO SERVER-63811 Ensure the database exists so we get back non-empty results even in a sharded
-// cluster.
assert.commandWorked(db[jsTestName()].insert({dummy: 1}));
let windowResults = db.aggregate([
diff --git a/jstests/sharding/documents_db_not_exist.js b/jstests/sharding/documents_db_not_exist.js
new file mode 100644
index 00000000000..1742d149516
--- /dev/null
+++ b/jstests/sharding/documents_db_not_exist.js
@@ -0,0 +1,48 @@
+/**
+ * Tests that $documents stage continues even when the database does not exist
+ * @tags: [requires_fcv_62, multiversion_incompatible]
+ *
+ */
+
+(function() {
+"use strict";
+
+let st = new ShardingTest({shards: 3});
+
+function listDatabases(options) {
+    return assert.commandWorked(st.s.adminCommand(Object.assign({listDatabases: 1}, options)))
+        .databases;
+}
+
+function createAndDropDatabase(dbName) {
+    // Create the database.
+    let db = st.s.getDB(dbName);
+    assert.commandWorked(db.foo.insert({}));
+    // Confirm the database exists.
+    assert.eq(1, listDatabases({nameOnly: true, filter: {name: dbName}}).length);
+    // Drop the database.
+    assert.commandWorked(db.dropDatabase());
+    // Confirm the database is dropped.
+    assert.eq(0, listDatabases({nameOnly: true, filter: {name: dbName}}).length);
+    return db;
+}
+
+// $documents with a literal array of objects succeeds even though the database was dropped.
+let db = createAndDropDatabase("test");
+let documents = [];
+for (let i = 0; i < 50; i++) {
+    documents.push({_id: i});
+}
+let result = db.aggregate([{$documents: documents}]);
+assert(result.toArray().length == 50);
+
+// $documents feeding a later $match stage also succeeds against the dropped database.
+db = createAndDropDatabase("test2");
+result = db.aggregate([
+    {$documents: [{_id: 1, size: "medium"}, {_id: 2, size: "large"}]},
+    {$match: {size: "medium"}}
+]);
+assert(result.toArray().length == 1);
+
+st.stop();
+})();
diff --git a/jstests/sharding/documents_sharded.js b/jstests/sharding/documents_sharded.js
new file mode 100644
index 00000000000..22cddae48db
--- /dev/null
+++ b/jstests/sharding/documents_sharded.js
@@ -0,0 +1,160 @@
+/**
+ * This is the test for $documents stage in aggregation pipeline on a sharded collection.
+ * @tags: [ do_not_wrap_aggregations_in_facets ]
+ *
+ */
+
+(function() {
+"use strict";
+
+load("jstests/aggregation/extras/utils.js");  // For resultsEq.
+
+let st = new ShardingTest({shards: 2});
+const db = st.s.getDB(jsTestName());
+const dbName = db.getName();
+assert.commandWorked(db.adminCommand({enableSharding: dbName}));
+
+// Create sharded collections.
+const coll = db['shardedColl'];
+st.shardColl(coll, {x: 1}, {x: 1}, {x: 1}, dbName);
+
+const lookup_coll = db['lookupColl'];
+st.shardColl(lookup_coll, {id_name: 1}, {id_name: 1}, {id_name: 1}, dbName);
+for (let i = 0; i < 10; i++) {
+    assert.commandWorked(lookup_coll.insert({id_name: i, name: "name_" + i}));
+}
+
+// $documents given a literal array of objects.
+const docs = db.aggregate([{$documents: [{a1: 1}, {a1: 2}]}]).toArray();
+
+assert.eq(2, docs.length);
+assert.eq(docs[0], {a1: 1});
+assert.eq(docs[1], {a1: 2});
+
+// $documents given an expression ($map over $range) that evaluates to an array of objects.
+const docs1 =
+    db.aggregate([{$documents: {$map: {input: {$range: [0, 100]}, in : {x: "$$this"}}}}]).toArray();
+
+assert.eq(100, docs1.length);
+for (let i = 0; i < 100; i++) {
+    assert.eq(docs1[i], {x: i});
+}
+
+// Same expression form, but used as the inner pipeline of a $unionWith.
+const docsUnionWith =
+    coll.aggregate([
+            {
+                $unionWith: {
+                    pipeline: [{$documents: {$map: {input: {$range: [0, 5]}, in : {x: "$$this"}}}}]
+                }
+            },
+            {$group: {_id: "$x", x: {$first: "$x"}}},
+            {$project: {_id: 0}},
+        ])
+        .toArray();
+assert(resultsEq([{x: 0}, {x: 1}, {x: 2}, {x: 3}, {x: 4}], docsUnionWith));
+
+{  // $documents with const objects inside $unionWith.
+    const res = coll.aggregate([
+                        {$unionWith: {pipeline: [{$documents: [{x: 1}, {x: 2}]}]}},
+                        {$group: {_id: "$x", x: {$first: "$x"}}},
+                        {$project: {_id: 0}}
+                    ])
+                    .toArray();
+    assert(resultsEq([{x: 1}, {x: 2}], res));
+}
+
+{  // $documents with const objects inside $lookup (no "coll", explicit $match).
+    const res = lookup_coll.aggregate([
+            {
+                $lookup: {
+                    let: {"id_lookup": "$id_name"},
+                    pipeline: [
+                        {$documents: [{xx: 1}, {xx: 2}, {xx : 3}]},
+                        {
+                            $match:
+                                {
+                                    $expr:
+                                        {
+                                            $eq:
+                                                ["$$id_lookup", "$xx"]
+                                        }
+                                }
+                        }
+                    ],
+                    as: "names"
+                }
+            },
+            {$match: {"names": {"$ne": []}}},
+            {$project: {_id: 0}}
+        ]
+        )
+        .toArray();
+    assert(resultsEq(
+        [
+            {id_name: 1, name: "name_1", names: [{"xx": 1}]},
+            {id_name: 2, name: "name_2", names: [{"xx": 2}]},
+            {id_name: 3, name: "name_3", names: [{"xx": 3}]}
+        ],
+        res));
+}
+{  // $documents with const objects inside $lookup (no "coll", + localField/foreignField).
+    const res = lookup_coll.aggregate([
+            {
+                $lookup: {
+                    localField: "id_name",
+                    foreignField: "xx",
+                    pipeline: [
+                        {$documents: [{xx: 1}, {xx: 2}, {xx: 3}]}
+                    ],
+                    as: "names"
+                }
+            },
+            {$match: {"names": {"$ne": []}}},
+            {$project: {_id: 0}}
+        ])
+        .toArray();
+    assert(resultsEq(
+        [
+            {id_name: 1, name: "name_1", names: [{"xx": 1}]},
+            {id_name: 2, name: "name_2", names: [{"xx": 2}]},
+            {id_name: 3, name: "name_3", names: [{"xx": 3}]}
+        ],
+        res));
+}
+
+// Must fail when $documents appears in a top-level collection pipeline.
+assert.throwsWithCode(() => {
+    coll.aggregate([{$documents: {$map: {input: {$range: [0, 100]}, in : {x: "$$this"}}}}]);
+}, ErrorCodes.InvalidNamespace);
+
+// Must fail due to a misplaced $documents (not the first stage of the pipeline).
+assert.throwsWithCode(() => {
+    coll.aggregate([{$project: {x: [{xx: 1}, {xx: 2}]}}, {$documents: [{x: 1}]}]);
+}, 40602);
+
+// Test that $documents fails due to producing an array of non-objects.
+assert.throwsWithCode(() => {
+    db.aggregate([{$documents: [1, 2, 3]}]);
+}, 40228);
+
+// Now with one object and one scalar.
+assert.throwsWithCode(() => {
+    db.aggregate([{$documents: [{x: 1}, 2]}]);
+}, 40228);
+
+// Test that $documents fails when provided a non-array.
+assert.throwsWithCode(() => {
+    db.aggregate([{$documents: "string"}]);
+}, 5858203);
+
+// Test that $documents succeeds when given a singleton object.
+assert.eq(db.aggregate([{$documents: [{x: [1, 2, 3]}]}]).toArray(), [{x: [1, 2, 3]}]);
+
+// Must fail when $documents appears in a top-level collection pipeline (NOTE(review): this
+// repeats the identical check made before the error-case tests above).
+assert.throwsWithCode(() => {
+    coll.aggregate([{$documents: {$map: {input: {$range: [0, 100]}, in : {x: "$$this"}}}}]);
+}, ErrorCodes.InvalidNamespace);
+
+st.stop();
+})(); \ No newline at end of file