summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--buildscripts/vcxproj.header4
-rw-r--r--jstests/core/crud_api.js775
-rw-r--r--src/mongo/scripting/engine.cpp2
-rw-r--r--src/mongo/shell/SConscript6
-rw-r--r--src/mongo/shell/collection.js336
-rw-r--r--src/mongo/shell/crud_api.js758
-rw-r--r--src/mongo/shell/query.js128
7 files changed, 1889 insertions, 120 deletions
diff --git a/buildscripts/vcxproj.header b/buildscripts/vcxproj.header
index a7d041fa0e7..a179498c937 100644
--- a/buildscripts/vcxproj.header
+++ b/buildscripts/vcxproj.header
@@ -245,7 +245,7 @@
<Target Name="MongoShell"
BeforeTargets="ClCompile"
Outputs="$(MongoShell)\mongo.cpp"
- Inputs="$(MongoShell)\assert.js;$(MongoShell)\bulk_api.js;$(MongoShell)\collection.js;$(MongoShell)\db.js;$(MongoShell)\mongo.js;$(MongoShell)\mr.js;$(MongoShell)\query.js;$(MongoShell)\types.js;$(MongoShell)\upgrade_check.js;$(MongoShell)\utils.js;$(MongoShell)\utils_sh.js;$(MongoShell)\utils_auth.js" >
+ Inputs="$(MongoShell)\assert.js;$(MongoShell)\bulk_api.js;$(MongoShell)\collection.js;$(MongoShell)\crud_api.js;$(MongoShell)\db.js;$(MongoShell)\mongo.js;$(MongoShell)\mr.js;$(MongoShell)\query.js;$(MongoShell)\types.js;$(MongoShell)\upgrade_check.js;$(MongoShell)\utils.js;$(MongoShell)\utils_sh.js;$(MongoShell)\utils_auth.js" >
- <Exec Command="python site_scons\site_tools\jstoh.py $(MongoShell)\mongo.cpp $(MongoShell)\assert.js $(MongoShell)\bulk_api.js $(MongoShell)\collection.js $(MongoShell)\db.js $(MongoShell)\mongo.js $(MongoShell)\mr.js $(MongoShell)\query.js $(MongoShell)\types.js $(MongoShell)\upgrade_check.js $(MongoShell)\utils.js $(MongoShell)\utils_sh.js $(MongoShell)\utils_auth.js" />
+ <Exec Command="python site_scons\site_tools\jstoh.py $(MongoShell)\mongo.cpp $(MongoShell)\assert.js $(MongoShell)\bulk_api.js $(MongoShell)\collection.js $(MongoShell)\crud_api.js $(MongoShell)\db.js $(MongoShell)\mongo.js $(MongoShell)\mr.js $(MongoShell)\query.js $(MongoShell)\types.js $(MongoShell)\upgrade_check.js $(MongoShell)\utils.js $(MongoShell)\utils_sh.js $(MongoShell)\utils_auth.js" />
</Target>
diff --git a/jstests/core/crud_api.js b/jstests/core/crud_api.js
new file mode 100644
index 00000000000..7c3bce2f263
--- /dev/null
+++ b/jstests/core/crud_api.js
@@ -0,0 +1,775 @@
+(function() {
+ "use strict";
+
+ var crudAPISpecTests = function crudAPISpecTests() {
+ "use strict";
+
+ // Get the collection
+ var coll = db.crud_tests;
+
+ // Setup
+ function createTestExecutor(coll, method, verifyResult) {
+ return function(args) {
+ // Drop collection
+ coll.drop();
+ // Insert test data
+ var r = coll.insertMany(args.insert);
+ assert.eq(args.insert.length, r.insertedIds.length);
+
+ // Execute the method with arguments
+ r = coll[method].apply(coll, args.params);
+ verifyResult(args.result, r);
+
+ // Get all the results
+ var results = coll.find({}).sort({_id: 1}).toArray();
+
+ assert.docEq(args.expected, results);
+ }
+ }
+
+ function checkResultObject(first, second) {
+ // Only assert on the "modifiedCount" property when write commands are enabled
+ if (db.getMongo().writeMode() === 'commands') {
+ assert.docEq(first, second);
+ } else {
+ var overrideModifiedCount = {modifiedCount: undefined};
+ assert.docEq(Object.merge(first, overrideModifiedCount),
+ Object.merge(second, overrideModifiedCount));
+ }
+ }
+
+ // Setup executors
+ var deleteManyExecutor = createTestExecutor(coll, 'deleteMany', checkResultObject);
+ var deleteOneExecutor = createTestExecutor(coll, 'deleteOne', checkResultObject);
+ var bulkWriteExecutor = createTestExecutor(coll, 'bulkWrite', checkResultObject);
+ var findOneAndDeleteExecutor = createTestExecutor(coll, 'findOneAndDelete',
+ checkResultObject);
+ var findOneAndReplaceExecutor = createTestExecutor(coll, 'findOneAndReplace',
+ checkResultObject);
+ var findOneAndUpdateExecutor = createTestExecutor(coll, 'findOneAndUpdate',
+ checkResultObject);
+ var insertManyExecutor = createTestExecutor(coll, 'insertMany', checkResultObject);
+ var insertOneExecutor = createTestExecutor(coll, 'insertOne', checkResultObject);
+ var replaceOneExecutor = createTestExecutor(coll, 'replaceOne', checkResultObject);
+ var updateManyExecutor = createTestExecutor(coll, 'updateMany', checkResultObject);
+ var updateOneExecutor = createTestExecutor(coll, 'updateOne', checkResultObject);
+ var countExecutor = createTestExecutor(coll, 'count', assert.eq);
+ var distinctExecutor = createTestExecutor(coll, 'distinct', assert.eq);
+
+ //
+ // BulkWrite
+ //
+
+ bulkWriteExecutor({
+ insert: [{ _id: 1, c: 1 }, { _id: 2, c: 2 }, { _id: 3, c: 3 }],
+ params: [[
+ { insertOne: { document: {_id: 4, a: 1 } } }
+ , { updateOne: { filter: {_id: 5, a:2}, update: {$set: {a:2}}, upsert:true } }
+ , { updateMany: { filter: {_id: 6,a:3}, update: {$set: {a:3}}, upsert:true } }
+ , { deleteOne: { filter: {c:1} } }
+ , { insertOne: { document: {_id: 7, c: 2 } } }
+ , { deleteMany: { filter: {c:2} } }
+ , { replaceOne: { filter: {c:3}, replacement: {c:4}, upsert:true } }]],
+ result: {
+ acknowledged: true, insertedCount:2,
+ matchedCount:1, deletedCount: 3,
+ upsertedCount:2, insertedIds : {'0' : 4, '4' : 7 }, upsertedIds : { '1' : 5, '2' : 6 }
+ },
+ expected: [{ "_id" : 3, "c" : 4 }, { "_id" : 4, "a" : 1 }, { "_id" : 5, "a" : 2 }, { "_id" : 6, "a" : 3 }]
+ });
+
+ bulkWriteExecutor({
+ insert: [{ _id: 1, c: 1 }, { _id: 2, c: 2 }, { _id: 3, c: 3 }],
+ params: [[
+ { insertOne: { document: { _id: 4, a: 1 } } }
+ , { updateOne: { filter: {_id: 5, a:2}, update: {$set: {a:2}}, upsert:true } }
+ , { updateMany: { filter: {_id: 6, a:3}, update: {$set: {a:3}}, upsert:true } }
+ , { deleteOne: { filter: {c:1} } }
+ , { deleteMany: { filter: {c:2} } }
+ , { replaceOne: { filter: {c:3}, replacement: {c:4}, upsert:true } }], { ordered: false }],
+ result: {
+ acknowledged: true, insertedCount:1, matchedCount:1, deletedCount:2, upsertedCount:2, insertedIds : {'0' : 4 }, upsertedIds : { '1' : 5, '2' : 6 }
+ },
+ expected: [{ "_id" : 3, "c" : 4 }, { "_id" : 4, "a" : 1 }, { "_id" : 5, "a" : 2 }, { "_id" : 6, "a" : 3 }]
+ });
+
+ // DeleteMany
+ //
+
+ // DeleteMany when many documents match
+ deleteManyExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: { $gt: 1 } }
+ ],
+ result: {acknowledged: true, deletedCount:2},
+ expected: [{_id:1, x: 11}]
+ });
+ // DeleteMany when no document matches
+ deleteManyExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 4 }
+ ],
+ result: {acknowledged: true, deletedCount:0},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ });
+ // DeleteMany when many documents match, no write concern
+ deleteManyExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: { $gt: 1 } }, { w : 0 }
+ ],
+ result: {acknowledged: false},
+ expected: [{_id:1, x: 11}]
+ });
+
+ //
+ // DeleteOne
+ //
+
+ // DeleteOne when many documents match
+ deleteOneExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: { $gt: 1 } }
+ ],
+ result: {acknowledged: true, deletedCount:1},
+ expected: [{_id:1, x: 11}, {_id:3, x: 33}]
+ });
+ // DeleteOne when one document matches
+ deleteOneExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 2 }
+ ],
+ result: {acknowledged: true, deletedCount:1},
+ expected: [{_id:1, x: 11}, {_id:3, x: 33}]
+ });
+ // DeleteOne when no documents match
+ deleteOneExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 4 }
+ ],
+ result: {acknowledged: true, deletedCount:0},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ });
+ // DeleteOne when many documents match, no write concern
+ deleteOneExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: { $gt: 1 } }, {w:0}
+ ],
+ result: {acknowledged: false},
+ expected: [{_id:1, x: 11}, {_id:3, x: 33}]
+ });
+
+ //
+ // FindOneAndDelete
+ //
+
+ // FindOneAndDelete when one document matches
+ findOneAndDeleteExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: { $gt: 2 } }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
+ ],
+ result: {x:33},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}]
+ });
+ // FindOneAndDelete when one document matches
+ findOneAndDeleteExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 2 }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
+ ],
+ result: {x:22},
+ expected: [{_id:1, x: 11}, {_id:3, x: 33}]
+ });
+ // FindOneAndDelete when no documents match
+ findOneAndDeleteExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 4 }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
+ ],
+ result: null,
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ });
+
+ //
+ // FindOneAndReplace
+ //
+
+ // FindOneAndReplace when many documents match returning the document before modification
+ findOneAndReplaceExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: { $gt: 1 } }
+ , { x: 32 }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
+ ],
+ result: {x:22},
+ expected: [{_id:1, x: 11}, {_id:2, x: 32}, {_id:3, x: 33}]
+ });
+ // FindOneAndReplace when many documents match returning the document after modification
+ findOneAndReplaceExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: { $gt: 1 } }
+ , { x: 32 }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, returnNewDocument:true }
+ ],
+ result: {x:32},
+ expected: [{_id:1, x: 11}, {_id:2, x: 32}, {_id:3, x: 33}]
+ });
+ // FindOneAndReplace when one document matches returning the document before modification
+ findOneAndReplaceExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 2 }
+ , { x: 32 }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
+ ],
+ result: {x:22},
+ expected: [{_id:1, x: 11}, {_id:2, x: 32}, {_id:3, x: 33}]
+ });
+ // FindOneAndReplace when one document matches returning the document after modification
+ findOneAndReplaceExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 2 }
+ , { x: 32 }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, returnNewDocument:true }
+ ],
+ result: {x:32},
+ expected: [{_id:1, x: 11}, {_id:2, x: 32}, {_id:3, x: 33}]
+ });
+ // FindOneAndReplace when no documents match returning the document before modification
+ findOneAndReplaceExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 4 }
+ , { x: 44 }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
+ ],
+ result: null,
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ });
+ // FindOneAndReplace when no documents match with upsert returning the document before modification
+ findOneAndReplaceExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 4 }
+ , { x: 44 }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, upsert:true }
+ ],
+ result: null,
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x:44}]
+ });
+ // FindOneAndReplace when no documents match returning the document after modification
+ findOneAndReplaceExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 4 }
+ , { x: 44 }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, returnNewDocument:true }
+ ],
+ result: null,
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ });
+ // FindOneAndReplace when no documents match with upsert returning the document after modification
+ findOneAndReplaceExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 4 }
+ , { x: 44 }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, returnNewDocument:true, upsert:true }
+ ],
+ result: {x:44},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 44}]
+ });
+
+ assert.throws(function() {
+ coll.findOneAndReplace({a:1}, {$set:{b:1}});
+ });
+
+ //
+ // FindOneAndUpdate
+ //
+
+ // FindOneAndUpdate when many documents match returning the document before modification
+ findOneAndUpdateExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: { $gt: 1 } }
+ , { $inc: { x: 1 } }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
+ ],
+ result: {x:22},
+ expected: [{_id:1, x: 11}, {_id:2, x: 23}, {_id:3, x: 33}]
+ });
+ // FindOneAndUpdate when many documents match returning the document after modification
+ findOneAndUpdateExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: { $gt: 1 } }
+ , { $inc: { x: 1 } }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, returnNewDocument: true }
+ ],
+ result: {x:23},
+ expected: [{_id:1, x: 11}, {_id:2, x: 23}, {_id:3, x: 33}]
+ });
+ // FindOneAndUpdate when one document matches returning the document before modification
+ findOneAndUpdateExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 2 }
+ , { $inc: { x: 1 } }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
+ ],
+ result: {x:22},
+ expected: [{_id:1, x: 11}, {_id:2, x: 23}, {_id:3, x: 33}]
+ });
+ // FindOneAndUpdate when one document matches returning the document after modification
+ findOneAndUpdateExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 2 }
+ , { $inc: { x: 1 } }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, returnNewDocument: true }
+ ],
+ result: {x:23},
+ expected: [{_id:1, x: 11}, {_id:2, x: 23}, {_id:3, x: 33}]
+ });
+ // FindOneAndUpdate when no documents match returning the document before modification
+ findOneAndUpdateExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 4 }
+ , { $inc: { x: 1 } }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
+ ],
+ result: null,
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ });
+ // FindOneAndUpdate when no documents match with upsert returning the document before modification
+ findOneAndUpdateExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 4 }
+ , { $inc: { x: 1 } }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, upsert:true }
+ ],
+ result: null,
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 1}]
+ });
+ // FindOneAndUpdate when no documents match returning the document after modification
+ findOneAndUpdateExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 4 }
+ , { $inc: { x: 1 } }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, returnNewDocument:true }
+ ],
+ result: null,
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ });
+ // FindOneAndUpdate when no documents match with upsert returning the document after modification
+ findOneAndUpdateExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [
+ { _id: 4 }
+ , { $inc: { x: 1 } }
+ , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, returnNewDocument:true, upsert:true }
+ ],
+ result: {x:1},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 1}]
+ });
+
+ assert.throws(function() {
+ coll.findOneAndUpdate({a:1}, {});
+ });
+
+ assert.throws(function() {
+ coll.findOneAndUpdate({a:1}, {b:1});
+ });
+
+ //
+ // InsertMany
+ //
+
+ // InsertMany with non-existing documents
+ insertManyExecutor({
+ insert: [{ _id:1, x:11 }],
+ params: [
+ [{_id: 2, x: 22}, {_id:3, x:33}]
+ ],
+ result: {acknowledged: true, insertedIds: [2, 3]},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ });
+ // InsertMany with non-existing documents, no write concern
+ insertManyExecutor({
+ insert: [{ _id:1, x:11 }],
+ params: [
+ [{_id: 2, x: 22}, {_id:3, x:33}]
+ , {w:0}
+ ],
+ result: {acknowledged: false},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ });
+
+ //
+ // InsertOne
+ //
+
+ // InsertOne with non-existing documents
+ insertOneExecutor({
+ insert: [{ _id:1, x:11 }],
+ params: [
+ {_id: 2, x: 22}
+ ],
+ result: {acknowledged: true, insertedId: 2},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}]
+ });
+ // InsertOne with non-existing documents, no write concern
+ insertOneExecutor({
+ insert: [{ _id:1, x:11 }],
+ params: [
+ {_id: 2, x: 22}, {w:0}
+ ],
+ result: {acknowledged: false},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}]
+ });
+
+ //
+ // ReplaceOne
+ //
+
+ // ReplaceOne when many documents match
+ replaceOneExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: { $gt: 1 } }, { x: 111 }],
+ result: {acknowledged:true, matchedCount:1, modifiedCount:1},
+ expected: [{_id:1, x: 11}, {_id:2, x: 111}, {_id:3, x: 33}]
+ });
+ // ReplaceOne when one document matches
+ replaceOneExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: 1 }, { _id: 1, x: 111 }],
+ result: {acknowledged:true, matchedCount:1, modifiedCount:1},
+ expected: [{_id:1, x: 111}, {_id:2, x: 22}, {_id:3, x: 33}]
+ });
+ // ReplaceOne when no documents match
+ replaceOneExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: 4 }, { _id: 4, x: 1 }],
+ result: {acknowledged:true, matchedCount:0, modifiedCount:0},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ });
+ // ReplaceOne with upsert when no documents match without an id specified
+ replaceOneExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: 4 }, { x: 1 }, {upsert:true}],
+ result: {acknowledged:true, matchedCount:0, modifiedCount:0, upsertedId: 4},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 1}]
+ });
+ // ReplaceOne with upsert when no documents match with an id specified
+ replaceOneExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: 4 }, { _id: 4, x: 1 }, {upsert:true}],
+ result: {acknowledged:true, matchedCount:0, modifiedCount:0, upsertedId: 4},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 1}]
+ });
+ // ReplaceOne with upsert when no documents match with an id specified, no write concern
+ replaceOneExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: 4 }, { _id: 4, x: 1 }, {upsert:true, w:0}],
+ result: {acknowledged:false},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 1}]
+ });
+ // ReplaceOne with upsert when no documents match with an id specified, no write concern
+ replaceOneExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: 4 }, { _id: 4, x: 1 }, {upsert:true, writeConcern:{w:0}}],
+ result: {acknowledged:false},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 1}]
+ });
+
+ assert.throws(function() {
+ coll.replaceOne({a:1}, {$set:{b:1}});
+ });
+
+ //
+ // UpdateMany
+ //
+
+ // UpdateMany when many documents match
+ updateManyExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: { $gt: 1 } }, { $inc: { x: 1 } }],
+ result: {acknowledged:true, matchedCount:2, modifiedCount:2},
+ expected: [{_id:1, x: 11}, {_id:2, x: 23}, {_id:3, x: 34}]
+ });
+ // UpdateMany when one document matches
+ updateManyExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: 1 }, { $inc: { x: 1 } }],
+ result: {acknowledged:true, matchedCount:1, modifiedCount:1},
+ expected: [{_id:1, x: 12}, {_id:2, x: 22}, {_id:3, x: 33}]
+ });
+ // UpdateMany when no documents match
+ updateManyExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: 4 }, { $inc: { x: 1 } }],
+ result: {acknowledged:true, matchedCount:0, modifiedCount:0},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ });
+ // UpdateMany with upsert when no documents match
+ updateManyExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: 4 }, { $inc: { x: 1 } }, { upsert: true }],
+ result: {acknowledged:true, matchedCount:0, modifiedCount:0, upsertedId: 4},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 1}]
+ });
+ // UpdateMany with upsert when no documents match, no write concern
+ updateManyExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: 4 }, { $inc: { x: 1 } }, { upsert: true, w: 0 }],
+ result: {acknowledged:false},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 1}]
+ });
+
+ assert.throws(function() {
+ coll.updateMany({a:1}, {});
+ });
+
+ assert.throws(function() {
+ coll.updateMany({a:1}, {b:1});
+ });
+
+ //
+ // UpdateOne
+ //
+
+ // UpdateOne when many documents match
+ updateOneExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: { $gt: 1 } }, { $inc: { x: 1 } }],
+ result: {acknowledged:true, matchedCount:1, modifiedCount:1},
+ expected: [{_id:1, x: 11}, {_id:2, x: 23}, {_id:3, x: 33}]
+ });
+ // UpdateOne when one document matches
+ updateOneExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: 1 }, { $inc: { x: 1 } }],
+ result: {acknowledged:true, matchedCount:1, modifiedCount:1},
+ expected: [{_id:1, x: 12}, {_id:2, x: 22}, {_id:3, x: 33}]
+ });
+ // UpdateOne when no documents match
+ updateOneExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: 4 }, { $inc: { x: 1 } }],
+ result: {acknowledged:true, matchedCount:0, modifiedCount:0},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ });
+
+ // UpdateOne with upsert when no documents match
+ updateOneExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: 4 }, { $inc: { x: 1 } }, {upsert:true}],
+ result: {acknowledged:true, matchedCount:0, modifiedCount:0, upsertedId: 4},
+ expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id: 4, x: 1}]
+ });
+ // UpdateOne when many documents match, no write concern
+ updateOneExecutor({
+ insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
+ params: [{ _id: { $gt: 1 } }, { $inc: { x: 1 } }, {w:0}],
+ result: {acknowledged:false},
+ expected: [{_id:1, x: 11}, {_id:2, x: 23}, {_id:3, x: 33}]
+ });
+
+ assert.throws(function() {
+ coll.updateOne({a:1}, {});
+ });
+
+ assert.throws(function() {
+ coll.updateOne({a:1}, {b:1});
+ });
+
+ //
+ // Count
+ //
+
+ // Simple count of all elements
+ countExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [{}],
+ result: 3,
+ expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ });
+ // Simple count no arguments
+ countExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [],
+ result: 3,
+ expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ });
+ // Simple count filtered
+ countExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [{_id: {$gt: 1}}],
+ result: 2,
+ expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ });
+ // Simple count of all elements, applying limit
+ countExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [{}, {limit:1}],
+ result: 1,
+ expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ });
+ // Simple count of all elements, applying skip
+ countExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [{}, {skip:1}],
+ result: 2,
+ expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ });
+ // Simple count no arguments, applying hint
+ countExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: [{}, {hint: "_id"}],
+ result: 3,
+ expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ });
+
+ //
+ // Distinct
+ //
+
+ // Simple distinct of field x no filter
+ distinctExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: ['x'],
+ result: [11, 22, 33],
+ expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ });
+ // Simple distinct of field x
+ distinctExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: ['x', {}],
+ result: [11, 22, 33],
+ expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ });
+ // Simple distinct of field x filtered
+ distinctExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: ['x', {x: { $gt: 11 }}],
+ result: [22, 33],
+ expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ });
+ // Simple distinct of field x filtered with maxTimeMS
+ distinctExecutor({
+ insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
+ params: ['x', {x: { $gt: 11 }}, {maxTimeMS:100000}],
+ result: [22, 33],
+ expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ });
+
+ //
+ // Find
+ //
+
+ coll.deleteMany({});
+ // Insert all of them
+ coll.insertMany([{a:0, b:0}, {a:1, b:1}]);
+
+ // Simple projection
+ var result = coll.find({}).sort({a:1}).limit(1).skip(1).projection({_id:0, a:1}).toArray();
+ assert.docEq(result, [{a:1}]);
+
+ // Simple tailable cursor
+ var cursor = coll.find({}).sort({a:1}).tailable();
+ assert.eq(34, cursor._options);
+ var cursor = coll.find({}).sort({a:1}).tailable(false);
+ assert.eq(2, cursor._options);
+
+ // Check modifiers
+ var cursor = coll.find({}).modifiers({$hint:'a_1'});
+ assert.eq('a_1', cursor._query['$hint']);
+
+ // allowPartialResults
+ var cursor = coll.find({}).allowPartialResults();
+ assert.eq(128, cursor._options);
+
+ // noCursorTimeout
+ var cursor = coll.find({}).noCursorTimeout();
+ assert.eq(16, cursor._options);
+
+ // oplogReplay
+ var cursor = coll.find({}).oplogReplay();
+ assert.eq(8, cursor._options);
+
+ //
+ // Aggregation
+ //
+
+ coll.deleteMany({});
+ // Insert all of them
+ coll.insertMany([{a:0, b:0}, {a:1, b:1}]);
+
+ // Simple aggregation with useCursor
+ var result = coll.aggregate([{$match: {}}], {useCursor:true}).toArray();
+ assert.eq(2, result.length);
+
+ // Simple aggregation with batchSize
+ var result = coll.aggregate([{$match: {}}], {batchSize:2}).toArray();
+ assert.eq(2, result.length);
+
+ // Set the maxTimeMS and allowDiskUse on aggregation query
+ var result = coll.aggregate([{$match: {}}], {batchSize:2, maxTimeMS:100, allowDiskUse:true}).toArray();
+ assert.eq(2, result.length);
+
+ // Drop collection
+ coll.drop();
+ coll.ensureIndex({a:1}, {unique:true})
+
+ // Should throw duplicate key error
+ assert.throws(function() {
+ coll.insertMany([{a:0, b:0}, {a:0, b:1}])
+ });
+
+ assert(coll.findOne({a:0, b:0}) != null);
+ assert.throws(function() {
+ coll.insertOne({a:0, b:0})
+ });
+
+ assert.throws(function() {
+ coll.updateOne({b:2}, {$set: {a:0}}, {upsert:true});
+ });
+
+ assert.throws(function() {
+ coll.updateMany({b:2}, {$set: {a:0}}, {upsert:true});
+ });
+
+ assert.throws(function() {
+ coll.deleteOne({$invalidFieldName:{a:1}});
+ });
+
+ assert.throws(function() {
+ coll.deleteMany({$set:{a:1}});
+ });
+
+ assert.throws(function() {
+ coll.bulkWrite([
+ { insertOne: { document: { _id: 4, a: 0 } } }
+ ]);
+ });
+ }
+
+ crudAPISpecTests();
+})();
diff --git a/src/mongo/scripting/engine.cpp b/src/mongo/scripting/engine.cpp
index af72095c49b..39e43acae0b 100644
--- a/src/mongo/scripting/engine.cpp
+++ b/src/mongo/scripting/engine.cpp
@@ -271,6 +271,7 @@ ScriptingFunction Scope::createFunction(const char* code) {
namespace JSFiles {
extern const JSFile collection;
+extern const JSFile crud_api;
extern const JSFile db;
extern const JSFile explain_query;
extern const JSFile explainable;
@@ -294,6 +295,7 @@ void Scope::execCoreFiles() {
execSetup(JSFiles::query);
execSetup(JSFiles::bulk_api);
execSetup(JSFiles::collection);
+ execSetup(JSFiles::crud_api);
execSetup(JSFiles::explain_query);
execSetup(JSFiles::explainable);
execSetup(JSFiles::upgrade_check);
diff --git a/src/mongo/shell/SConscript b/src/mongo/shell/SConscript
index 0df703369f4..0541400a9e7 100644
--- a/src/mongo/shell/SConscript
+++ b/src/mongo/shell/SConscript
@@ -2,13 +2,14 @@
Import("env")
-# if you add a file here, you need to add it in scripting/engine.cpp and shell/createCPPfromJavaScriptFiles.js as well
+# Files added here need to be added in scripting/engine.cpp and buildscripts/vcxproj.header as well.
env.JSHeader(
target="mongo.cpp",
source=[
"assert.js",
"bulk_api.js",
"collection.js",
+ "crud_api.js",
"db.js",
"explain_query.js",
"explainable.js",
@@ -23,7 +24,8 @@ env.JSHeader(
],
)
-# if you add a file here, you need to add it in shell/shell_utils.cpp and shell/createCPPfromJavaScriptFiles.js as well
+# Files added here need to be added in shell/shell_utils.cpp and buildscripts/vcxproj.header as
+# well.
env.JSHeader(
target="mongo-server.cpp",
source=[
diff --git a/src/mongo/shell/collection.js b/src/mongo/shell/collection.js
index 1ef9ee3060c..53e2a63239f 100644
--- a/src/mongo/shell/collection.js
+++ b/src/mongo/shell/collection.js
@@ -32,12 +32,15 @@ DBCollection.prototype.help = function () {
var shortName = this.getName();
print("DBCollection help");
print("\tdb." + shortName + ".find().help() - show DBCursor help");
- print("\tdb." + shortName + ".count()");
+ print("\tdb." + shortName + ".bulkWrite( operations, <optional params> ) - bulk execute write operations, optional parameters are: w, wtimeout, j");
+ print("\tdb." + shortName + ".count( query = {}, <optional params> ) - count the number of documents that matches the query, optional parameters are: limit, skip, hint, maxTimeMS");
print("\tdb." + shortName + ".copyTo(newColl) - duplicates collection by copying all documents to newColl; no indexes are copied.");
print("\tdb." + shortName + ".convertToCapped(maxBytes) - calls {convertToCapped:'" + shortName + "', size:maxBytes}} command");
print("\tdb." + shortName + ".createIndex(keypattern[,options])");
print("\tdb." + shortName + ".dataSize()");
- print("\tdb." + shortName + ".distinct( key ) - e.g. db." + shortName + ".distinct( 'x' )");
+ print("\tdb." + shortName + ".deleteOne( filter, <optional params> ) - delete first matching document, optional parameters are: w, wtimeout, j");
+ print("\tdb." + shortName + ".deleteMany( filter, <optional params> ) - delete all matching documents, optional parameters are: w, wtimeout, j");
+ print("\tdb." + shortName + ".distinct( key, query, <optional params> ) - e.g. db." + shortName + ".distinct( 'x' ), optional parameters are: maxTimeMS");
print("\tdb." + shortName + ".drop() drop the collection");
print("\tdb." + shortName + ".dropIndex(index) - e.g. db." + shortName + ".dropIndex( \"indexName\" ) or db." + shortName + ".dropIndex( { \"indexKey\" : 1 } )");
print("\tdb." + shortName + ".dropIndexes()");
@@ -51,15 +54,20 @@ DBCollection.prototype.help = function () {
print("\tdb." + shortName + ".find(...).skip(n)");
print("\tdb." + shortName + ".find(...).sort(...)");
print("\tdb." + shortName + ".findOne([query])");
- print("\tdb." + shortName + ".findAndModify( { update : ... , remove : bool [, query: {}, sort: {}, 'new': false] } )");
+ print("\tdb." + shortName + ".findOneAndDelete( filter, <optional params> ) - delete first matching document, optional parameters are: projection, sort, maxTimeMS");
+ print("\tdb." + shortName + ".findOneAndReplace( filter, replacement, <optional params> ) - replace first matching document, optional parameters are: projection, sort, maxTimeMS, upsert, returnNewDocument");
+ print("\tdb." + shortName + ".findOneAndUpdate( filter, update, <optional params> ) - update first matching document, optional parameters are: projection, sort, maxTimeMS, upsert, returnNewDocument");
print("\tdb." + shortName + ".getDB() get DB object associated with collection");
print("\tdb." + shortName + ".getPlanCache() get query plan cache associated with collection");
print("\tdb." + shortName + ".getIndexes()");
print("\tdb." + shortName + ".group( { key : ..., initial: ..., reduce : ...[, cond: ...] } )");
print("\tdb." + shortName + ".insert(obj)");
+ print("\tdb." + shortName + ".insertOne( obj, <optional params> ) - insert a document, optional parameters are: w, wtimeout, j");
+ print("\tdb." + shortName + ".insertMany( [objects], <optional params> ) - insert multiple documents, optional parameters are: w, wtimeout, j");
print("\tdb." + shortName + ".mapReduce( mapFunction , reduceFunction , <optional params> )");
print("\tdb." + shortName + ".aggregate( [pipeline], <optional params> ) - performs an aggregation on a collection; returns a cursor");
print("\tdb." + shortName + ".remove(query)");
+ print("\tdb." + shortName + ".replaceOne( filter, replacement, <optional params> ) - replace the first matching document, optional parameters are: upsert, w, wtimeout, j");
print("\tdb." + shortName + ".renameCollection( newName , <dropTarget> ) renames the collection.");
print("\tdb." + shortName + ".runCommand( name , <options> ) runs a db command with the given name where the first param is the collection name");
print("\tdb." + shortName + ".save(obj)");
@@ -70,7 +78,9 @@ DBCollection.prototype.help = function () {
print("\tdb." + shortName + ".storageSize() - includes free space allocated to this collection");
print("\tdb." + shortName + ".totalIndexSize() - size in bytes of all the indexes");
print("\tdb." + shortName + ".totalSize() - storage allocated for all data and indexes");
- print("\tdb." + shortName + ".update(query, object[, upsert_bool, multi_bool]) - instead of two flags, you can pass an object with fields: upsert, multi");
+ print("\tdb." + shortName + ".update( query, object[, upsert_bool, multi_bool] ) - instead of two flags, you can pass an object with fields: upsert, multi");
+ print("\tdb." + shortName + ".updateOne( filter, update, <optional params> ) - update the first matching document, optional parameters are: upsert, w, wtimeout, j");
+ print("\tdb." + shortName + ".updateMany( filter, update, <optional params> ) - update all matching documents, optional parameters are: upsert, w, wtimeout, j");
print("\tdb." + shortName + ".validate( <full> ) - SLOW");;
print("\tdb." + shortName + ".getShardVersion() - only for use with sharding");
print("\tdb." + shortName + ".getShardDistribution() - prints statistics about data distribution in the cluster");
@@ -211,7 +221,7 @@ DBCollection.prototype.insert = function( obj , options, _allow_dot ){
throw Error( "no object passed to insert!" );
var flags = 0;
-
+
var wc = undefined;
var allowDottedFields = false;
if ( options === undefined ) {
@@ -223,7 +233,7 @@ DBCollection.prototype.insert = function( obj , options, _allow_dot ){
} else {
flags = options.ordered ? 0 : 1;
}
-
+
if (options.writeConcern)
wc = options.writeConcern;
if (options.allowdotted)
@@ -231,7 +241,7 @@ DBCollection.prototype.insert = function( obj , options, _allow_dot ){
} else {
flags = options;
}
-
+
// 1 = continueOnError, which is synonymous with unordered in the write commands/bulk-api
var ordered = ((flags & 1) == 0);
@@ -516,7 +526,7 @@ DBCollection.prototype._genIndexName = function( keys ){
var v = keys[k];
if ( typeof v == "function" )
continue;
-
+
if ( name.length > 0 )
name += "_";
name += k + "_";
@@ -656,8 +666,8 @@ DBCollection.prototype.findAndModify = function(args){
}
DBCollection.prototype.renameCollection = function( newName , dropTarget ){
- return this._db._adminCommand( { renameCollection : this._fullName ,
- to : this._db._name + "." + newName ,
+ return this._db._adminCommand( { renameCollection : this._fullName ,
+ to : this._db._name + "." + newName ,
dropTarget : dropTarget } )
}
@@ -934,10 +944,6 @@ DBCollection.prototype.getIndexKeys = function(){
}
-DBCollection.prototype.count = function( x ){
- return this.find( x ).count();
-}
-
DBCollection.prototype.hashAllDocs = function() {
var cmd = { dbhash : 1,
collections : [ this._shortName ] };
@@ -1113,36 +1119,48 @@ DBCollection.prototype.isCapped = function(){
return ( e && e.options && e.options.capped ) ? true : false;
}
-DBCollection.prototype._distinct = function( keyString , query ){
- return this._dbReadCommand( { distinct : this._shortName , key : keyString , query : query || {} } );
-}
-
-DBCollection.prototype.distinct = function( keyString , query ){
- keyStringType = typeof keyString;
- if (keyStringType != "string")
- throw Error("The first argument to the distinct command must be a string but was a " + keyStringType);
- queryType = typeof query;
- if (query != null && queryType != "object")
- throw Error("The query argument to the distinct command must be a document but was a " + queryType);
- var res = this._distinct( keyString , query );
- if ( ! res.ok )
- throw _getErrorWithCode(res, "distinct failed: " + tojson(res));
- return res.values;
-}
-
-
-DBCollection.prototype.aggregate = function(pipeline, extraOpts) {
+//
+// CRUD specification aggregation cursor extension
+//
+DBCollection.prototype.aggregate = function(pipeline, aggregateOptions) {
if (!(pipeline instanceof Array)) {
// support legacy varargs form. (Also handles db.foo.aggregate())
pipeline = argumentsToArray(arguments)
- extraOpts = {}
+ aggregateOptions = {}
+ } else if (aggregateOptions === undefined) {
+ aggregateOptions = {};
}
- else if (extraOpts === undefined) {
- extraOpts = {};
+
+ // Copy the aggregateOptions
+ var copy = Object.extend({}, aggregateOptions);
+
+    // Translate CRUD API options (batchSize, useCursor) into the command's cursor document
+ var keys = Object.keys(copy);
+
+ for (var i = 0; i < keys.length; i++) {
+ var name = keys[i];
+
+ if (name == 'batchSize') {
+ if (copy.cursor == null) {
+ copy.cursor = {};
+ }
+
+ copy.cursor.batchSize = copy['batchSize'];
+ delete copy['batchSize'];
+ } else if (name == 'useCursor') {
+ if (copy.cursor == null) {
+ copy.cursor = {};
+ }
+
+ delete copy['useCursor'];
+ }
}
+ // Assign the cleaned up options
+ aggregateOptions = copy;
+ // Create the initial command document
var cmd = {pipeline: pipeline};
- Object.extend(cmd, extraOpts);
+ Object.extend(cmd, aggregateOptions);
if (!('cursor' in cmd)) {
// implicitly use cursors
@@ -1163,26 +1181,27 @@ DBCollection.prototype.aggregate = function(pipeline, extraOpts) {
var res = doAgg(cmd);
if (!res.ok
- && (res.code == 17020 || res.errmsg == "unrecognized field \"cursor")
- && !("cursor" in extraOpts)) {
- // If the command failed because cursors aren't supported and the user didn't explicitly
- // request a cursor, try again without requesting a cursor.
- delete cmd.cursor;
-
- res = doAgg(cmd);
-
- if ('result' in res && !("cursor" in res)) {
- // convert old-style output to cursor-style output
- res.cursor = {ns: '', id: NumberLong(0)};
- res.cursor.firstBatch = res.result;
- delete res.result;
+ && (res.code == 17020 || res.errmsg == "unrecognized field \"cursor")
+ && !("cursor" in aggregateOptions)) {
+ // If the command failed because cursors aren't supported and the user didn't explicitly
+ // request a cursor, try again without requesting a cursor.
+ delete cmd.cursor;
+
+ res = doAgg(cmd);
+
+ if ('result' in res && !("cursor" in res)) {
+ // convert old-style output to cursor-style output
+ res.cursor = {ns: '', id: NumberLong(0)};
+ res.cursor.firstBatch = res.result;
+ delete res.result;
+ }
}
- }
assert.commandWorked(res, "aggregate failed");
- if ("cursor" in res)
+ if ("cursor" in res) {
return new DBCommandCursor(this._mongo, res);
+ }
return res;
}
@@ -1298,7 +1317,7 @@ DBCollection.autocomplete = function(obj){
// Sharding additions
-/*
+/*
Usage :
mongo <mongos>
@@ -1316,78 +1335,78 @@ true
> var splitter = collection.getSplitKeysForChunks() // by default, the chunks are not split, the keys are just
// found. A splitter function is returned which will actually
// do the splits.
-
+
> splitter() // ! Actually executes the splits on the cluster !
-
+
*/
DBCollection.prototype.getShardDistribution = function(){
var stats = this.stats()
-
+
if( ! stats.sharded ){
print( "Collection " + this + " is not sharded." )
return
}
-
+
var config = this.getMongo().getDB("config")
-
+
var numChunks = 0
-
+
for( var shard in stats.shards ){
-
+
var shardDoc = config.shards.findOne({ _id : shard })
-
- print( "\nShard " + shard + " at " + shardDoc.host )
-
+
+ print( "\nShard " + shard + " at " + shardDoc.host )
+
var shardStats = stats.shards[ shard ]
-
+
var chunks = config.chunks.find({ _id : sh._collRE( this ), shard : shard }).toArray()
-
+
numChunks += chunks.length
-
+
var estChunkData = shardStats.size / chunks.length
var estChunkCount = Math.floor( shardStats.count / chunks.length )
-
+
print( " data : " + sh._dataFormat( shardStats.size ) +
" docs : " + shardStats.count +
" chunks : " + chunks.length )
print( " estimated data per chunk : " + sh._dataFormat( estChunkData ) )
print( " estimated docs per chunk : " + estChunkCount )
-
+
}
-
+
print( "\nTotals" )
print( " data : " + sh._dataFormat( stats.size ) +
" docs : " + stats.count +
" chunks : " + numChunks )
for( var shard in stats.shards ){
-
+
var shardStats = stats.shards[ shard ]
-
+
var estDataPercent = Math.floor( shardStats.size / stats.size * 10000 ) / 100
var estDocPercent = Math.floor( shardStats.count / stats.count * 10000 ) / 100
-
+
print( " Shard " + shard + " contains " + estDataPercent + "% data, " + estDocPercent + "% docs in cluster, " +
"avg obj size on shard : " + sh._dataFormat( stats.shards[ shard ].avgObjSize ) )
}
-
+
print( "\n" )
-
+
}
DBCollection.prototype.getSplitKeysForChunks = function( chunkSize ){
-
+
var stats = this.stats()
-
+
if( ! stats.sharded ){
print( "Collection " + this + " is not sharded." )
return
}
-
+
var config = this.getMongo().getDB("config")
-
+
if( ! chunkSize ){
chunkSize = config.settings.findOne({ _id : "chunksize" }).value
print( "Chunk size not set, using default of " + chunkSize + "MB" )
@@ -1395,25 +1414,25 @@ DBCollection.prototype.getSplitKeysForChunks = function( chunkSize ){
else{
print( "Using chunk size of " + chunkSize + "MB" )
}
-
+
var shardDocs = config.shards.find().toArray()
-
+
var allSplitPoints = {}
- var numSplits = 0
-
+ var numSplits = 0
+
for( var i = 0; i < shardDocs.length; i++ ){
-
+
var shardDoc = shardDocs[i]
var shard = shardDoc._id
var host = shardDoc.host
var sconn = new Mongo( host )
-
+
var chunks = config.chunks.find({ _id : sh._collRE( this ), shard : shard }).toArray()
-
+
print( "\nGetting split points for chunks on shard " + shard + " at " + host )
-
+
var splitPoints = []
-
+
for( var j = 0; j < chunks.length; j++ ){
var chunk = chunks[j]
var result = sconn.getDB("admin").runCommand({ splitVector : this + "", min : chunk.min, max : chunk.max, maxChunkSize : chunkSize })
@@ -1423,39 +1442,39 @@ DBCollection.prototype.getSplitKeysForChunks = function( chunkSize ){
}
else{
splitPoints = splitPoints.concat( result.splitKeys )
-
+
if( result.splitKeys.length > 0 )
print( " Added " + result.splitKeys.length + " split points for chunk " + sh._pchunk( chunk ) )
}
}
-
+
print( "Total splits for shard " + shard + " : " + splitPoints.length )
-
+
numSplits += splitPoints.length
allSplitPoints[ shard ] = splitPoints
-
+
}
-
+
// Get most recent migration
var migration = config.changelog.find({ what : /^move.*/ }).sort({ time : -1 }).limit( 1 ).toArray()
- if( migration.length == 0 )
+ if( migration.length == 0 )
print( "\nNo migrations found in changelog." )
else {
migration = migration[0]
print( "\nMost recent migration activity was on " + migration.ns + " at " + migration.time )
}
-
- var admin = this.getMongo().getDB("admin")
+
+ var admin = this.getMongo().getDB("admin")
var coll = this
var splitFunction = function(){
-
+
// Turn off the balancer, just to be safe
print( "Turning off balancer..." )
config.settings.update({ _id : "balancer" }, { $set : { stopped : true } }, true )
print( "Sleeping for 30s to allow balancers to detect change. To be extra safe, check config.changelog" +
" for recent migrations." )
sleep( 30000 )
-
+
for( shard in allSplitPoints ){
for( var i = 0; i < allSplitPoints[ shard ].length; i++ ){
var splitKey = allSplitPoints[ shard ][i]
@@ -1463,21 +1482,21 @@ DBCollection.prototype.getSplitKeysForChunks = function( chunkSize ){
printjson( admin.runCommand({ split : coll + "", middle : splitKey }) )
}
}
-
+
print( "Turning the balancer back on." )
config.settings.update({ _id : "balancer" }, { $set : { stopped : false } } )
sleep( 1 )
}
-
+
splitFunction.getSplitPoints = function(){ return allSplitPoints; }
-
+
print( "\nGenerated " + numSplits + " split keys, run output function to perform splits.\n" +
- " ex : \n" +
+ " ex : \n" +
" > var splitter = <collection>.getSplitKeysForChunks()\n" +
" > splitter() // Execute splits on cluster !\n" )
-
+
return splitFunction
-
+
}
DBCollection.prototype.setSlaveOk = function( value ) {
@@ -1530,6 +1549,117 @@ DBCollection.prototype.unsetWriteConcern = function() {
delete this._writeConcern;
};
+//
+// CRUD specification read methods
+//
+
+/**
+* Count number of matching documents in the db to a query.
+*
+* @method
+* @param {object} query The query for the count.
+* @param {object} [options=null] Optional settings.
+* @param {number} [options.limit=null] The limit of documents to count.
+* @param {number} [options.skip=null] The number of documents to skip for the count.
+* @param {string|object} [options.hint=null] An index name hint or specification for the query.
+* @param {number} [options.maxTimeMS=null] The maximum amount of time to allow the query to run.
+* @return {number}
+*/
+DBCollection.prototype.count = function(query, options) {
+    var opts = Object.extend({}, options || {});
+
+    // Set parameters
+    var skip = (typeof opts.skip == 'number') ? opts.skip : null;
+    var limit = (typeof opts.limit == 'number') ? opts.limit : null;
+    var maxTimeMS = (typeof opts.maxTimeMS == 'number') ? opts.maxTimeMS : null;
+    var hint = opts.hint;
+
+    // Execute using command if we have passed in skip/limit or hint
+    if (skip != null || limit != null || hint != null || maxTimeMS != null) {
+        // Final query
+        var cmd = {
+            'count': this.getName(),
+            'query': query
+        };
+
+        // Add limit and skip if defined
+        if (typeof skip == 'number') {
+            cmd.skip = skip;
+        }
+
+        if (typeof limit == 'number') {
+            cmd.limit = limit;
+        }
+
+        // Pass the hint through to the count command (was opts.hint = hint,
+        // a no-op self-assignment that silently dropped the hint)
+        if (hint) {
+            cmd.hint = hint;
+        }
+
+        if (opts.maxTimeMS) {
+            cmd.maxTimeMS = opts.maxTimeMS;
+        }
+
+        // Run the command and return the result
+        var response = this.runReadCommand(cmd);
+        if (response.ok == 0) {
+            throw new Error("count failed: " + tojson(response));
+        }
+
+        return response.n;
+    }
+
+    // Return the result of the find
+    return this.find(query).count();
+}
+
+/**
+* The distinct command returns a list of distinct values for the given key across a collection.
+*
+* @method
+* @param {string} key Field of the document to find distinct values for.
+* @param {object} query The query for filtering the set of documents to which we apply the distinct filter.
+* @param {object} [options=null] Optional settings.
+* @param {number} [options.maxTimeMS=null] The maximum amount of time to allow the query to run.
+* @return {object}
+*/
+DBCollection.prototype.distinct = function(keyString, query, options){
+ var opts = Object.extend({}, options || {});
+ var keyStringType = typeof keyString;
+ var queryType = typeof query;
+
+ if (keyStringType != "string") {
+ throw new Error("The first argument to the distinct command must be a string but was a " + keyStringType);
+ }
+
+ if (query != null && queryType != "object") {
+ throw new Error("The query argument to the distinct command must be a document but was a " + queryType);
+ }
+
+ // Distinct command
+ var cmd = {
+ distinct : this.getName(),
+ key : keyString,
+ query : query || {}
+ };
+
+ // Set maxTimeMS if provided
+ if (opts.maxTimeMS) {
+ cmd.maxTimeMS = opts.maxTimeMS;
+ }
+
+ // Execute distinct command
+ var res = this.runReadCommand(cmd);
+ if (!res.ok) {
+ throw new Error("distinct failed: " + tojson(res));
+ }
+
+ return res.values;
+}
+
+DBCollection.prototype._distinct = function( keyString , query ){
+ return this._dbReadCommand( { distinct : this._shortName , key : keyString , query : query || {} } );
+}
+
/**
* PlanCache
* Holds a reference to the collection.
diff --git a/src/mongo/shell/crud_api.js b/src/mongo/shell/crud_api.js
new file mode 100644
index 00000000000..2e3e7b84be0
--- /dev/null
+++ b/src/mongo/shell/crud_api.js
@@ -0,0 +1,758 @@
+DBCollection.prototype._createWriteConcern = function(options) {
+ // If writeConcern set, use it, else get from collection (which will inherit from db/mongo)
+ var writeConcern = options.writeConcern || this.getWriteConcern();
+ var writeConcernOptions = ['w', 'wtimeout', 'j', 'fsync'];
+
+ if (writeConcern instanceof WriteConcern) {
+ writeConcern = writeConcern.toJSON();
+ }
+
+ // Only merge in write concern options if at least one is specified in options
+ if (options.w != null
+ || options.wtimeout != null
+ || options.j != null
+ || options.fsync != null) {
+ writeConcern = {};
+
+ writeConcernOptions.forEach(function(wc) {
+ if (options[wc] != null) {
+ writeConcern[wc] = options[wc];
+ }
+ });
+ }
+
+ return writeConcern;
+}
+
+/**
+ * @return {Object} a new document with an _id: ObjectId if _id is not present.
+ * Otherwise, returns the same object passed.
+ */
+DBCollection.prototype.addIdIfNeeded = function(obj) {
+ if ( typeof( obj._id ) == "undefined" && ! Array.isArray( obj ) ){
+ var tmp = obj; // don't want to modify input
+ obj = {_id: new ObjectId()};
+
+ for (var key in tmp){
+ obj[key] = tmp[key];
+ }
+ }
+
+ return obj;
+}
+
+/**
+* Perform a bulkWrite operation without a fluent API
+*
+* Legal operation types are
+*
+* { insertOne: { document: { a: 1 } } }
+*
+* { updateOne: { filter: {a:2}, update: {$set: {a:2}}, upsert:true } }
+*
+* { updateMany: { filter: {a:2}, update: {$set: {a:2}}, upsert:true } }
+*
+* { deleteOne: { filter: {c:1} } }
+*
+* { deleteMany: { filter: {c:1} } }
+*
+* { replaceOne: { filter: {c:3}, replacement: {c:4}, upsert:true}}
+*
+* @method
+* @param {object[]} operations Bulk operations to perform.
+* @param {object} [options=null] Optional settings.
+* @param {(number|string)} [options.w=null] The write concern.
+* @param {number} [options.wtimeout=null] The write concern timeout.
+* @param {boolean} [options.j=false] Specify a journal write concern.
+* @return {object}
+*/
+DBCollection.prototype.bulkWrite = function(operations, options) {
+ var opts = Object.extend({}, options || {});
+ opts.ordered = (typeof opts.ordered == 'boolean') ? opts.ordered : true;
+
+ // Get the write concern
+ var writeConcern = this._createWriteConcern(opts);
+
+ // Result
+ var result = {acknowledged: (writeConcern && writeConcern.w == 0) ? false: true};
+
+ // Use bulk operation API already in the shell
+ var bulkOp = opts.ordered
+ ? this.initializeOrderedBulkOp()
+ : this.initializeUnorderedBulkOp();
+
+ // Contains all inserted _ids
+ var insertedIds = {};
+
+ // For each of the operations we need to add the op to the bulk
+ operations.forEach(function(op, index) {
+ if(op.insertOne) {
+ if(!op.insertOne.document) {
+ throw new Error('insertOne bulkWrite operation expects the document field');
+ }
+
+ // Add _id ObjectId if needed
+ op.insertOne.document = this.addIdIfNeeded(op.insertOne.document);
+ // InsertedIds is a map of [originalInsertOrderIndex] = document._id
+ insertedIds[index] = op.insertOne.document._id;
+ // Translate operation to bulk operation
+ bulkOp.insert(op.insertOne.document);
+ } else if(op.updateOne) {
+ if(!op.updateOne.filter) {
+ throw new Error('updateOne bulkWrite operation expects the filter field');
+ }
+
+ if(!op.updateOne.update) {
+ throw new Error('updateOne bulkWrite operation expects the update field');
+ }
+
+ // Translate operation to bulk operation
+ var operation = bulkOp.find(op.updateOne.filter);
+ if(op.updateOne.upsert) {
+ operation = operation.upsert();
+ }
+
+ operation.updateOne(op.updateOne.update)
+ } else if(op.updateMany) {
+ if(!op.updateMany.filter) {
+ throw new Error('updateMany bulkWrite operation expects the filter field');
+ }
+
+ if(!op.updateMany.update) {
+ throw new Error('updateMany bulkWrite operation expects the update field');
+ }
+
+ // Translate operation to bulk operation
+ var operation = bulkOp.find(op.updateMany.filter);
+ if(op.updateMany.upsert) {
+ operation = operation.upsert();
+ }
+
+ operation.update(op.updateMany.update)
+ } else if(op.replaceOne) {
+ if(!op.replaceOne.filter) {
+ throw new Error('replaceOne bulkWrite operation expects the filter field');
+ }
+
+ if(!op.replaceOne.replacement) {
+ throw new Error('replaceOne bulkWrite operation expects the replacement field');
+ }
+
+ // Translate operation to bulkOp operation
+ var operation = bulkOp.find(op.replaceOne.filter);
+ if(op.replaceOne.upsert) {
+ operation = operation.upsert();
+ }
+
+ operation.replaceOne(op.replaceOne.replacement)
+ } else if(op.deleteOne) {
+ if(!op.deleteOne.filter) {
+ throw new Error('deleteOne bulkWrite operation expects the filter field');
+ }
+
+ // Translate operation to bulkOp operation
+ bulkOp.find(op.deleteOne.filter).removeOne();
+ } else if(op.deleteMany) {
+ if(!op.deleteMany.filter) {
+ throw new Error('deleteMany bulkWrite operation expects the filter field');
+ }
+
+ // Translate operation to bulkOp operation
+ bulkOp.find(op.deleteMany.filter).remove();
+ }
+ }, this);
+
+ // Execute bulkOp operation
+ var response = bulkOp.execute(writeConcern);
+ if(!result.acknowledged) {
+ return result;
+ }
+
+ result.deletedCount = response.nRemoved;
+ result.insertedCount = response.nInserted;
+ result.matchedCount = response.nMatched;
+ result.upsertedCount = response.nUpserted;
+ result.insertedIds = insertedIds;
+ result.upsertedIds = {};
+
+ // Iterate over all the upserts
+ var upserts = response.getUpsertedIds();
+ upserts.forEach(function(x) {
+ result.upsertedIds[x.index] = x._id;
+ });
+
+ // Return the result
+ return result;
+}
+
+/**
+* Inserts a single document into MongoDB.
+*
+* @method
+* @param {object} doc Document to insert.
+* @param {object} [options=null] Optional settings.
+* @param {(number|string)} [options.w=null] The write concern.
+* @param {number} [options.wtimeout=null] The write concern timeout.
+* @param {boolean} [options.j=false] Specify a journal write concern.
+* @return {object}
+*/
+DBCollection.prototype.insertOne = function(document, options) {
+ var opts = Object.extend({}, options || {});
+
+ // Add _id ObjectId if needed
+ document = this.addIdIfNeeded(document);
+
+ // Get the write concern
+ var writeConcern = this._createWriteConcern(opts);
+
+ // Result
+ var result = {acknowledged: (writeConcern && writeConcern.w == 0) ? false: true};
+
+ // Use bulk operation API already in the shell
+ var bulk = this.initializeOrderedBulkOp();
+ bulk.insert(document);
+
+ try {
+ // Execute insert
+ bulk.execute(writeConcern);
+ } catch (err) {
+ if(err.hasWriteErrors()) {
+ throw err.getWriteErrorAt(0);
+ }
+
+ if(err.hasWriteConcernError()) {
+ throw err.getWriteConcernError();
+ }
+
+ throw err;
+ }
+
+ if (!result.acknowledged) {
+ return result;
+ }
+
+ // Set the inserted id
+ result.insertedId = document._id;
+
+ // Return the result
+ return result;
+}
+
+/**
+* Inserts an array of documents into MongoDB.
+*
+* @method
+* @param {object[]} docs Documents to insert.
+* @param {object} [options=null] Optional settings.
+* @param {(number|string)} [options.w=null] The write concern.
+* @param {number} [options.wtimeout=null] The write concern timeout.
+* @param {boolean} [options.j=false] Specify a journal write concern.
+* @param {boolean} [options.ordered=true] Execute inserts in ordered or unordered fashion.
+* @return {object}
+*/
+DBCollection.prototype.insertMany = function(documents, options) {
+ var opts = Object.extend({}, options || {});
+ opts.ordered = (typeof opts.ordered == 'boolean') ? opts.ordered : true;
+
+ // Ensure all documents have an _id
+ documents = documents.map(function(x) {
+ return this.addIdIfNeeded(x);
+ }, this);
+
+ // Get the write concern
+ var writeConcern = this._createWriteConcern(opts);
+
+ // Result
+ var result = {acknowledged: (writeConcern && writeConcern.w == 0) ? false: true};
+
+ // Use bulk operation API already in the shell
+ var bulk = opts.ordered
+ ? this.initializeOrderedBulkOp()
+ : this.initializeUnorderedBulkOp();
+
+ // Add all operations to the bulk operation
+ documents.forEach(function(doc) {
+ bulk.insert(doc);
+ });
+
+ // Execute bulk write operation
+ bulk.execute(writeConcern);
+
+ if (!result.acknowledged) {
+ return result;
+ }
+
+ // Set all the created inserts
+ result.insertedIds = documents.map(function(x) {
+ return x._id;
+ });
+
+ // Return the result
+ return result;
+}
+
+/**
+* Delete a document on MongoDB
+*
+* @method
+* @param {object} filter The filter used to select the document to remove
+* @param {object} [options=null] Optional settings.
+* @param {(number|string)} [options.w=null] The write concern.
+* @param {number} [options.wtimeout=null] The write concern timeout.
+* @param {boolean} [options.j=false] Specify a journal write concern.
+* @return {object}
+*/
+DBCollection.prototype.deleteOne = function(filter, options) {
+ var opts = Object.extend({}, options || {});
+
+ // Get the write concern
+ var writeConcern = this._createWriteConcern(opts);
+
+ // Result
+ var result = {acknowledged: (writeConcern && writeConcern.w == 0) ? false: true};
+
+ // Use bulk operation API already in the shell
+ var bulk = this.initializeOrderedBulkOp();
+
+ // Add the deleteOne operation
+ bulk.find(filter).removeOne();
+
+ try {
+ // Remove the first document that matches the selector
+ var r = bulk.execute(writeConcern);
+ } catch (err) {
+ if(err.hasWriteErrors()) {
+ throw err.getWriteErrorAt(0);
+ }
+
+ if(err.hasWriteConcernError()) {
+ throw err.getWriteConcernError();
+ }
+
+ throw err;
+ }
+
+ if (!result.acknowledged) {
+ return result;
+ }
+
+ result.deletedCount = r.nRemoved;
+ return result;
+}
+
+/**
+* Delete multiple documents on MongoDB
+*
+* @method
+* @param {object} filter The Filter used to select the documents to remove
+* @param {object} [options=null] Optional settings.
+* @param {(number|string)} [options.w=null] The write concern.
+* @param {number} [options.wtimeout=null] The write concern timeout.
+* @param {boolean} [options.j=false] Specify a journal write concern.
+* @return {object}
+*/
+DBCollection.prototype.deleteMany = function(filter, options) {
+ var opts = Object.extend({}, options || {});
+
+ // Get the write concern
+ var writeConcern = this._createWriteConcern(opts);
+
+ // Result
+ var result = {acknowledged: (writeConcern && writeConcern.w == 0) ? false: true};
+
+ // Use bulk operation API already in the shell
+ var bulk = this.initializeOrderedBulkOp();
+
+    // Add the deleteMany operation
+ bulk.find(filter).remove();
+
+ try {
+        // Remove all documents that match the selector
+ var r = bulk.execute(writeConcern);
+ } catch (err) {
+ if(err.hasWriteErrors()) {
+ throw err.getWriteErrorAt(0);
+ }
+
+ if(err.hasWriteConcernError()) {
+ throw err.getWriteConcernError();
+ }
+
+ throw err;
+ }
+
+ if (!result.acknowledged) {
+ return result;
+ }
+
+ result.deletedCount = r.nRemoved;
+ return result;
+}
+
+/**
+* Replace a document on MongoDB
+*
+* @method
+* @param {object} filter The Filter used to select the document to update
+* @param {object} doc The Document that replaces the matching document
+* @param {object} [options=null] Optional settings.
+* @param {boolean} [options.upsert=false] Update operation is an upsert.
+* @param {(number|string)} [options.w=null] The write concern.
+* @param {number} [options.wtimeout=null] The write concern timeout.
+* @param {boolean} [options.j=false] Specify a journal write concern.
+* @return {object}
+*/
+DBCollection.prototype.replaceOne = function(filter, replacement, options) {
+ var opts = Object.extend({}, options || {});
+
+ // Check if first key in update statement contains a $
+ var keys = Object.keys(replacement);
+ // Check if first key does not have the $
+ if(keys.length > 0 && keys[0][0] == "$") {
+ throw new Error('the replace operation document must not contain atomic operators');
+ }
+
+ // Get the write concern
+ var writeConcern = this._createWriteConcern(opts);
+
+ // Result
+ var result = {acknowledged: (writeConcern && writeConcern.w == 0) ? false: true };
+
+ // Use bulk operation API already in the shell
+ var bulk = this.initializeOrderedBulkOp();
+
+    // Add the replaceOne operation
+ var op = bulk.find(filter);
+ if (opts.upsert) {
+ op = op.upsert();
+ }
+
+ op.replaceOne(replacement);
+
+ try {
+ // Replace the document
+ var r = bulk.execute(writeConcern);
+ } catch (err) {
+ if(err.hasWriteErrors()) {
+ throw err.getWriteErrorAt(0);
+ }
+
+ if(err.hasWriteConcernError()) {
+ throw err.getWriteConcernError();
+ }
+
+ throw err;
+ }
+
+ if (!result.acknowledged) {
+ return result;
+ }
+
+ result.matchedCount = r.nMatched;
+ result.modifiedCount = (r.nModified != null) ? r.nModified : r.n;
+
+ if (r.getUpsertedIds().length > 0) {
+ result.upsertedId = r.getUpsertedIdAt(0)._id;
+ }
+
+ return result;
+}
+
+/**
+* Update a single document on MongoDB
+*
+* @method
+* @param {object} filter The Filter used to select the document to update
+* @param {object} update The update operations to be applied to the document
+* @param {object} [options=null] Optional settings.
+* @param {boolean} [options.upsert=false] Update operation is an upsert.
+* @param {(number|string)} [options.w=null] The write concern.
+* @param {number} [options.wtimeout=null] The write concern timeout.
+* @param {boolean} [options.j=false] Specify a journal write concern.
+* @return {object}
+*/
+DBCollection.prototype.updateOne = function(filter, update, options) {
+ var opts = Object.extend({}, options || {});
+
+ // Check if first key in update statement contains a $
+ var keys = Object.keys(update);
+ if(keys.length == 0) {
+ throw new Error("the update operation document must contain at least one atomic operator");
+ }
+
+ // Check if first key does not have the $
+ if(keys[0][0] != "$") {
+ throw new Error('the update operation document must contain atomic operators');
+ }
+
+ // Get the write concern
+ var writeConcern = this._createWriteConcern(opts);
+
+ // Result
+ var result = {acknowledged: (writeConcern && writeConcern.w == 0) ? false: true};
+
+ // Use bulk operation API already in the shell
+ var bulk = this.initializeOrderedBulkOp();
+
+ // Add the updateOne operation
+ var op = bulk.find(filter);
+ if (opts.upsert) {
+ op = op.upsert();
+ }
+
+ op.updateOne(update);
+
+ try {
+ // Update the first document that matches the selector
+ var r = bulk.execute(writeConcern);
+ } catch (err) {
+ if(err.hasWriteErrors()) {
+ throw err.getWriteErrorAt(0);
+ }
+
+ if(err.hasWriteConcernError()) {
+ throw err.getWriteConcernError();
+ }
+
+ throw err;
+ }
+
+ if (!result.acknowledged) {
+ return result;
+ }
+
+ result.matchedCount = r.nMatched;
+ result.modifiedCount = (r.nModified != null) ? r.nModified : r.n;
+
+ if (r.getUpsertedIds().length > 0) {
+ result.upsertedId = r.getUpsertedIdAt(0)._id
+ }
+
+ return result;
+}
+
+/**
+* Update multiple documents on MongoDB
+*
+* @method
+* @param {object} filter The Filter used to select the document to update
+* @param {object} update The update operations to be applied to the document
+* @param {object} [options=null] Optional settings.
+* @param {boolean} [options.upsert=false] Update operation is an upsert.
+* @param {(number|string)} [options.w=null] The write concern.
+* @param {number} [options.wtimeout=null] The write concern timeout.
+* @param {boolean} [options.j=false] Specify a journal write concern.
+* @return {object}
+*/
+DBCollection.prototype.updateMany = function(filter, update, options) {
+ var opts = Object.extend({}, options || {});
+
+ // Check if first key in update statement contains a $
+ var keys = Object.keys(update);
+ if(keys.length == 0) {
+ throw new Error("the update operation document must contain at least one atomic operator");
+ }
+
+ // Check if first key does not have the $
+ if(keys[0][0] != "$") {
+ throw new Error('the update operation document must contain atomic operators');
+ }
+
+ // Get the write concern
+ var writeConcern = this._createWriteConcern(opts);
+
+ // Result
+ var result = {acknowledged: (writeConcern && writeConcern.w == 0) ? false: true};
+
+ // Use bulk operation API already in the shell
+ var bulk = this.initializeOrderedBulkOp();
+
+ // Add the updateMany operation
+ var op = bulk.find(filter);
+ if (opts.upsert) {
+ op = op.upsert();
+ }
+
+ op.update(update);
+
+ try {
+ // Update all documents that match the selector
+ var r = bulk.execute(writeConcern);
+ } catch (err) {
+ if(err.hasWriteErrors()) {
+ throw err.getWriteErrorAt(0);
+ }
+
+ if(err.hasWriteConcernError()) {
+ throw err.getWriteConcernError();
+ }
+
+ throw err;
+ }
+
+ if (!result.acknowledged) {
+ return result;
+ }
+
+ result.matchedCount = r.nMatched;
+ result.modifiedCount = (r.nModified != null) ? r.nModified : r.n;
+
+ if (r.getUpsertedIds().length > 0) {
+ result.upsertedId = r.getUpsertedIdAt(0)._id
+ }
+
+ return result;
+}
+
+/**
+* Find a document and delete it in one atomic operation,
+* requires a write lock for the duration of the operation.
+*
+* @method
+* @param {object} filter Document selection filter.
+* @param {object} [options=null] Optional settings.
+* @param {object} [options.projection=null] Limits the fields to return for all matching documents.
+* @param {object} [options.sort=null] Determines which document the operation modifies if the query selects multiple documents.
+* @param {number} [options.maxTimeMS=null] The maximum amount of time to allow the query to run.
+* @return {object}
+*/
+DBCollection.prototype.findOneAndDelete = function(filter, options) {
+ var opts = Object.extend({}, options || {});
+ // Set up the command
+ var cmd = {query: filter, remove: true};
+
+ if (opts.sort) {
+ cmd.sort = opts.sort;
+ }
+
+ if (opts.projection) {
+ cmd.fields = opts.projection;
+ }
+
+ if (opts.maxTimeMS) {
+ cmd.maxTimeMS = opts.maxTimeMS;
+ }
+
+ // Get the write concern
+ var writeConcern = this._createWriteConcern(opts);
+
+ // Setup the write concern
+ if (writeConcern) {
+ cmd.writeConcern = writeConcern;
+ }
+
+ // Execute findAndModify
+ return this.findAndModify(cmd);
+}
+
+/**
+* Find a document and replace it in one atomic operation, requires a write lock for the duration of the operation.
+*
+* @method
+* @param {object} filter Document selection filter.
+* @param {object} replacement Document replacing the matching document.
+* @param {object} [options=null] Optional settings.
+* @param {object} [options.projection=null] Limits the fields to return for all matching documents.
+* @param {object} [options.sort=null] Determines which document the operation modifies if the query selects multiple documents.
+* @param {number} [options.maxTimeMS=null] The maximum amount of time to allow the query to run.
+* @param {boolean} [options.upsert=false] Upsert the document if it does not exist.
+* @param {boolean} [options.returnNewDocument=false] When true, returns the updated document rather than the original. The default is false.
+* @return {object}
+*/
+DBCollection.prototype.findOneAndReplace = function(filter, replacement, options) {
+ var opts = Object.extend({}, options || {});
+
+ // Check if first key in update statement contains a $
+ var keys = Object.keys(replacement);
+ // Check if first key does not have the $
+ if(keys.length > 0 && keys[0][0] == "$") {
+ throw new Error("the replace operation document must not contain atomic operators");
+ }
+
+ // Set up the command
+ var cmd = {query: filter, update: replacement};
+ if (opts.sort) {
+ cmd.sort = opts.sort;
+ }
+
+ if (opts.projection) {
+ cmd.fields = opts.projection;
+ }
+
+ if (opts.maxTimeMS) {
+ cmd.maxTimeMS = opts.maxTimeMS;
+ }
+
+ // Set flags
+ cmd.upsert = (typeof opts.upsert == 'boolean') ? opts.upsert : false;
+ cmd.new = (typeof opts.returnNewDocument == 'boolean') ? opts.returnNewDocument : false;
+
+ // Get the write concern
+ var writeConcern = this._createWriteConcern(opts);
+
+ // Setup the write concern
+ if (writeConcern) {
+ cmd.writeConcern = writeConcern;
+ }
+
+ // Execute findAndModify
+ return this.findAndModify(cmd);
+}
+
+/**
+* Find a document and update it in one atomic operation, requires a write lock for the duration of the operation.
+*
+* @method
+* @param {object} filter Document selection filter.
+* @param {object} update Update operations to be performed on the document
+* @param {object} [options=null] Optional settings.
+* @param {object} [options.projection=null] Limits the fields to return for all matching documents.
+* @param {object} [options.sort=null] Determines which document the operation modifies if the query selects multiple documents.
+* @param {number} [options.maxTimeMS=null] The maximum amount of time to allow the query to run.
+* @param {boolean} [options.upsert=false] Upsert the document if it does not exist.
+* @param {boolean} [options.returnNewDocument=false] When true, returns the updated document rather than the original. The default is false.
+* @return {object}
+*/
+DBCollection.prototype.findOneAndUpdate = function(filter, update, options) {
+ var opts = Object.extend({}, options || {});
+
+ // Check if first key in update statement contains a $
+ var keys = Object.keys(update);
+ if(keys.length == 0) {
+ throw new Error("the update operation document must contain at least one atomic operator");
+ }
+
+ // Check if first key does not have the $
+ if(keys[0][0] != "$") {
+ throw new Error("the update operation document must contain atomic operators");
+ }
+
+ // Set up the command
+ var cmd = {query: filter, update: update};
+ if (opts.sort) {
+ cmd.sort = opts.sort;
+ }
+
+ if (opts.projection) {
+ cmd.fields = opts.projection;
+ }
+
+ if (opts.maxTimeMS) {
+ cmd.maxTimeMS = opts.maxTimeMS;
+ }
+
+ // Set flags
+ cmd.upsert = (typeof opts.upsert == 'boolean') ? opts.upsert : false;
+ cmd.new = (typeof opts.returnNewDocument == 'boolean') ? opts.returnNewDocument : false;
+
+ // Get the write concern
+ var writeConcern = this._createWriteConcern(opts);
+
+ // Setup the write concern
+ if (writeConcern) {
+ cmd.writeConcern = writeConcern;
+ }
+
+ // Execute findAndModify
+ return this.findAndModify(cmd);
+}
diff --git a/src/mongo/shell/query.js b/src/mongo/shell/query.js
index f23ceac8539..a28806351c2 100644
--- a/src/mongo/shell/query.js
+++ b/src/mongo/shell/query.js
@@ -2,12 +2,12 @@
if ( typeof DBQuery == "undefined" ){
DBQuery = function( mongo , db , collection , ns , query , fields , limit , skip , batchSize , options ){
-
+
this._mongo = mongo; // 0
this._db = db; // 1
this._collection = collection; // 2
this._ns = ns; // 3
-
+
this._query = query || {}; // 4
this._fields = fields; // 5
this._limit = limit || 0; // 6
@@ -44,7 +44,7 @@ DBQuery.prototype.help = function () {
print("\t.returnKey()")
print("\t.maxScan(n)")
print("\t.readPref(mode, tagset)")
-
+
print("\nCursor methods");
print("\t.toArray() - iterates through docs and returns an array of the results")
print("\t.forEach( func )")
@@ -59,8 +59,8 @@ DBQuery.prototype.help = function () {
}
DBQuery.prototype.clone = function(){
- var q = new DBQuery( this._mongo , this._db , this._collection , this._ns ,
- this._query , this._fields ,
+ var q = new DBQuery( this._mongo , this._db , this._collection , this._ns ,
+ this._query , this._fields ,
this._limit , this._skip , this._batchSize , this._options );
q._special = this._special;
return q;
@@ -69,7 +69,7 @@ DBQuery.prototype.clone = function(){
DBQuery.prototype._ensureSpecial = function(){
if ( this._special )
return;
-
+
var n = { query : this._query };
this._query = n;
this._special = true;
@@ -259,13 +259,13 @@ DBQuery.prototype.hasNext = function(){
DBQuery.prototype.next = function(){
this._exec();
-
+
var o = this._cursor.hasNext();
if ( o )
this._cursorSeen++;
else
throw Error( "error hasNext: " + o );
-
+
var ret = this._cursor.next();
if ( ret.$err ) {
throw _getErrorWithCode(ret, "error: " + tojson( ret ));
@@ -294,7 +294,7 @@ DBQuery.prototype.readOnly = function(){
DBQuery.prototype.toArray = function(){
if ( this._arr )
return this._arr;
-
+
var a = [];
while ( this.hasNext() )
a.push( this.next() );
@@ -351,7 +351,7 @@ DBQuery.prototype.countReturn = function(){
if ( this._limit > 0 && this._limit < c )
return this._limit;
-
+
return c;
}
@@ -419,11 +419,11 @@ DBQuery.prototype.maxTimeMS = function( maxTimeMS ) {
/**
* Sets the read preference for this cursor.
- *
+ *
* @param mode {string} read preference mode to use.
* @param tagSet {Array.<Object>} optional. The list of tags to use, order matters.
* Note that this object only keeps a shallow copy of this array.
- *
+ *
* @return this cursor
*/
DBQuery.prototype.readPref = function( mode, tagSet ) {
@@ -504,7 +504,7 @@ DBQuery.prototype.shellPrint = function(){
catch ( e ){
print( e );
}
-
+
}
/**
@@ -518,6 +518,108 @@ DBQuery.prototype.toString = function(){
return "DBQuery: " + this._ns + " -> " + tojson( this._query );
}
+//
+// CRUD specification find cursor extension
+//
+
+/**
+* Get partial results from a mongos if some shards are down (instead of throwing an error).
+*
+* @method
+* @see http://docs.mongodb.org/meta-driver/latest/legacy/mongodb-wire-protocol/#op-query
+* @return {DBQuery}
+*/
+DBQuery.prototype.allowPartialResults = function() {
+ this._checkModify();
+ this.addOption(DBQuery.Option.partial);
+ return this;
+}
+
+/**
+* The server normally times out idle cursors after an inactivity period (10 minutes)
+* to prevent excess memory use. Set this option to prevent that.
+*
+* @method
+* @see http://docs.mongodb.org/meta-driver/latest/legacy/mongodb-wire-protocol/#op-query
+* @return {DBQuery}
+*/
+DBQuery.prototype.noCursorTimeout = function() {
+ this._checkModify();
+ this.addOption(DBQuery.Option.noTimeout);
+ return this;
+}
+
+/**
+* Internal replication use only - driver should not set
+*
+* @method
+* @see http://docs.mongodb.org/meta-driver/latest/legacy/mongodb-wire-protocol/#op-query
+* @return {DBQuery}
+*/
+DBQuery.prototype.oplogReplay = function() {
+ this._checkModify();
+ this.addOption(DBQuery.Option.oplogReplay);
+ return this;
+}
+
+/**
+* Limits the fields to return for all matching documents.
+*
+* @method
+* @see http://docs.mongodb.org/manual/tutorial/project-fields-from-query-results/
+* @param {object} document Document specifying the projection of the resulting documents.
+* @return {DBQuery}
+*/
+DBQuery.prototype.projection = function(document) {
+ this._checkModify();
+ this._fields = document;
+ return this;
+}
+
+/**
+* Specify cursor as a tailable cursor, allowing to specify if it will use awaitData
+*
+* @method
+* @see http://docs.mongodb.org/manual/tutorial/create-tailable-cursor/
+* @param {boolean} [awaitData=true] cursor blocks for a few seconds to wait for data if no documents found.
+* @return {DBQuery}
+*/
+DBQuery.prototype.tailable = function(awaitData) {
+ this._checkModify();
+ this.addOption(DBQuery.Option.tailable);
+
+ // Set await data if either specifically set or not specified
+ if (awaitData || awaitData == null) {
+ this.addOption(DBQuery.Option.awaitData);
+ }
+
+ return this;
+}
+
+/**
+* Specify a document containing modifiers for the query.
+*
+* @method
+* @see http://docs.mongodb.org/manual/reference/operator/query-modifier/
+* @param {object} document A document containing modifers to apply to the cursor.
+* @return {DBQuery}
+*/
+DBQuery.prototype.modifiers = function(document) {
+ this._checkModify();
+
+ for(var name in document) {
+ if(name[0] != '$') {
+ throw new Error('All modifiers must start with a $ such as $maxScan or $returnKey');
+ }
+ }
+
+ for(var name in document) {
+ this._addSpecial(name, document[name]);
+ }
+
+ return this;
+}
+
DBQuery.shellBatchSize = 20;
/**