author     Eliot Horowitz <eliot@10gen.com>    2009-07-24 14:28:16 -0400
committer  Eliot Horowitz <eliot@10gen.com>    2009-07-24 14:28:16 -0400
commit     a62f1b40ba338ff2f0e3e3b751f1d1599bd27bcc (patch)
tree       68ecaa3c6d1bd90e0e817452d9b3f7ff0dd0b24f
parent     6e3a37e594f6ba07d305c3b71d891c3d11497469 (diff)
parent     de9d456c3577f6be96ccb458953a0c086343eaa2 (diff)
Merge branch 'master' of git@github.com:mongodb/mongo
-rw-r--r--  SConstruct                          |  26
-rw-r--r--  db/db.cpp                           |   6
-rw-r--r--  db/dbwebserver.cpp                  |   2
-rw-r--r--  db/query.cpp                        | 318
-rw-r--r--  dbtests/perf/perftest.cpp           |   1
-rw-r--r--  dbtests/repltests.cpp               | 102
-rw-r--r--  dbtests/updatetests.cpp             |  16
-rw-r--r--  jstests/basic3.js                   |   8
-rw-r--r--  jstests/basic9.js                   |  25
-rw-r--r--  jstests/basica.js                   |  33
-rw-r--r--  jstests/index_check3.js             |  19
-rw-r--r--  jstests/pull.js                     |  19
-rw-r--r--  jstests/pullall.js                  |  18
-rw-r--r--  jstests/pushall.js                  |  20
-rw-r--r--  jstests/regex.js                    |  16
-rw-r--r--  jstests/update5.js                  |  40
-rw-r--r--  jstests/update6.js                  |  46
-rw-r--r--  scripting/engine_spidermonkey.cpp   |  44
-rw-r--r--  scripting/engine_spidermonkey.h     |  13
-rw-r--r--  scripting/sm_db.cpp                 |  14
-rw-r--r--  shell/collection.js                 |  59
-rw-r--r--  shell/db.js                         |   2
-rw-r--r--  shell/dbshell.cpp                   |   4
-rw-r--r--  stdafx.h                            |   1
-rw-r--r--  util/top.h                          |   2

25 files changed, 672 insertions(+), 182 deletions(-)
diff --git a/SConstruct b/SConstruct
index 5e7676089a2..9ffab71cc56 100644
--- a/SConstruct
+++ b/SConstruct
@@ -152,6 +152,10 @@ AddOption( "--boost-compiler",
# --- environment setup ---
+def removeIfInList( lst , thing ):
+ if thing in lst:
+ lst.remove( thing )
+
def printLocalInfo():
import sys, SCons
print( "scons version: " + SCons.__version__ )
@@ -170,7 +174,7 @@ darwin = False
windows = False
freebsd = False
solaris = False
-force64 = not GetOption( "force64" ) is None
+force64 = not GetOption( "force64" ) is None
if not force64 and os.getcwd().endswith( "mongo-64" ):
force64 = True
print( "*** assuming you want a 64-bit build b/c of directory *** " )
@@ -218,7 +222,7 @@ if GetOption( "extrapath" ) is not None:
env.Append( CPPPATH=[ x + "/include" ] )
env.Append( LIBPATH=[ x + "/lib" ] )
release = True
-
+
# ------ SOURCE FILE SETUP -----------
commonFiles = Split( "stdafx.cpp buildinfo.cpp db/jsobj.cpp db/json.cpp db/commands.cpp db/lasterror.cpp db/nonce.cpp db/queryutil.cpp shell/mongo.cpp" )
@@ -342,7 +346,7 @@ elif "linux2" == os.sys.platform:
nixLibPrefix = "lib64"
env.Append( LIBPATH=["/usr/lib64" , "/lib64" ] )
env.Append( LIBS=["pthread"] )
-
+
if force64:
print( "error: force64 doesn't make sense on a 64-bit machine" )
Exit(1)
@@ -620,7 +624,8 @@ def doConfigure( myenv , needJava=True , needPcre=True , shell=False ):
myCheckLib( "pcrecpp" , True )
myCheckLib( "pcre" , True )
- myenv["_HAVEPCAP"] = myCheckLib( "pcap", staticOnly=release )
+ myenv["_HAVEPCAP"] = myCheckLib( "pcap" )
+ removeIfInList( myenv["LIBS"] , "pcap" )
# this is outside of usesm block so don't have to rebuild for java
if windows:
@@ -730,10 +735,6 @@ env.Append( BUILDERS={'JSHeader' : jshBuilder})
# --- targets ----
-def removeIfInList( lst , thing ):
- if thing in lst:
- lst.remove( thing )
-
clientEnv = env.Clone();
clientEnv.Append( CPPPATH=["../"] )
clientEnv.Prepend( LIBS=[ "mongoclient"] )
@@ -788,7 +789,9 @@ perftest = testEnv.Program( "perftest", "dbtests/perf/perftest.cpp" )
clientTests += [ clientEnv.Program( "clientTest" , [ "client/examples/clientTest.cpp" ] ) ]
# --- sniffer ---
+mongosniff_built = False
if darwin or clientEnv["_HAVEPCAP"]:
+ mongosniff_built = True
sniffEnv = clientEnv.Clone()
sniffEnv.Append( LIBS=[ "pcap" ] )
sniffEnv.Program( "mongosniff" , "tools/sniffer.cpp" )
@@ -1153,13 +1156,13 @@ def installBinary( e , name ):
name += ".exe"
inst = e.Install( installDir + "/bin" , name )
-
+
fullInstallName = installDir + "/bin/" + name
allBinaries += [ name ]
if solaris or linux:
e.AddPostAction( inst, e.Action( 'strip ' + fullInstallName ) )
-
+
if linux and len( COMMAND_LINE_TARGETS ) == 1 and str( COMMAND_LINE_TARGETS[0] ) == "s3dist":
e.AddPostAction( inst , checkGlibc )
@@ -1171,6 +1174,9 @@ installBinary( env , "mongoimportjson" )
installBinary( env , "mongofiles" )
+if mongosniff_built:
+ installBinary(env, "mongosniff")
+
installBinary( env , "mongod" )
installBinary( env , "mongos" )
diff --git a/db/db.cpp b/db/db.cpp
index d711f2d583c..021b66a9ed0 100644
--- a/db/db.cpp
+++ b/db/db.cpp
@@ -472,7 +472,7 @@ int main(int argc, char* argv[], char *envp[] )
#endif
( "mms-token" , po::value<string>() , "account token for mongo monitoring server" )
( "mms-name" , po::value<string>() , "server name mongo monitoring server" )
- ( "mms-interval" , po::value<int>()->default_value(30) , "ping interval for mongo monitoring server (defaut 30)" )
+ ( "mms-interval" , po::value<int>()->default_value(30) , "ping interval for mongo monitoring server (default 30)" )
;
replication_options.add_options()
@@ -566,7 +566,7 @@ int main(int argc, char* argv[], char *envp[] )
cout << visible_options << endl;
return 0;
}
-
+
if (params.count("help")) {
show_help_text(visible_options);
return 0;
@@ -732,7 +732,7 @@ int main(int argc, char* argv[], char *envp[] )
cout << visible_options << endl;
return 0;
}
-
+
#if defined(_WIN32)
if ( installService ) {
if ( !ServiceController::installService( L"MongoDB", L"Mongo DB", L"Mongo DB Server", argc, argv ) )
diff --git a/db/dbwebserver.cpp b/db/dbwebserver.cpp
index a29f97def4c..c0cc0d6599c 100644
--- a/db/dbwebserver.cpp
+++ b/db/dbwebserver.cpp
@@ -26,6 +26,8 @@
#include "security.h"
#include <pcrecpp.h>
+#include <boost/date_time/posix_time/posix_time.hpp>
+
namespace mongo {
diff --git a/db/query.cpp b/db/query.cpp
index a31f54b370a..c040bfdde61 100644
--- a/db/query.cpp
+++ b/db/query.cpp
@@ -1,20 +1,20 @@
// query.cpp
/**
-* Copyright (C) 2008 10gen Inc.
-*
-* This program is free software: you can redistribute it and/or modify
-* it under the terms of the GNU Affero General Public License, version 3,
-* as published by the Free Software Foundation.
-*
-* This program is distributed in the hope that it will be useful,
-* but WITHOUT ANY WARRANTY; without even the implied warranty of
-* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-* GNU Affero General Public License for more details.
-*
-* You should have received a copy of the GNU Affero General Public License
-* along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/
+ * Copyright (C) 2008 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
#include "stdafx.h"
#include "query.h"
@@ -42,7 +42,7 @@ namespace mongo {
*/
const int MaxBytesToReturnToClientAtOnce = 4 * 1024 * 1024;
-//ns->query->DiskLoc
+ //ns->query->DiskLoc
LRUishMap<BSONObj,DiskLoc,5> lrutest(123);
extern bool useCursors;
@@ -52,10 +52,10 @@ namespace mongo {
class DeleteOp : public QueryOp {
public:
DeleteOp( bool justOne, int& bestCount ) :
- justOne_( justOne ),
- count_(),
- bestCount_( bestCount ),
- nScanned_() {
+ justOne_( justOne ),
+ count_(),
+ bestCount_( bestCount ),
+ nScanned_() {
}
virtual void init() {
c_ = qp().newCursor();
@@ -229,7 +229,7 @@ namespace mongo {
};
struct Mod {
- enum Op { INC, SET, PUSH } op;
+ enum Op { INC, SET, PUSH, PUSH_ALL, PULL, PULL_ALL } op;
const char *fieldName;
double *ndouble;
int *nint;
@@ -267,8 +267,8 @@ namespace mongo {
}
bool mayAddEmbedded( map< string, BSONElement > &existing, string right ) {
for( string left = EmbeddedBuilder::splitDot( right );
- left.length() > 0 && left[ left.length() - 1 ] != '.';
- left += "." + EmbeddedBuilder::splitDot( right ) ) {
+ left.length() > 0 && left[ left.length() - 1 ] != '.';
+ left += "." + EmbeddedBuilder::splitDot( right ) ) {
if ( existing.count( left ) > 0 && existing[ left ].type() != Object )
return false;
if ( modForField( left.c_str() ) )
@@ -277,11 +277,11 @@ namespace mongo {
return true;
}
static Mod::Op opFromStr( const char *fn ) {
- const char *valid[] = { "$inc", "$set", "$push" };
- for( int i = 0; i < 3; ++i )
+ const char *valid[] = { "$inc", "$set", "$push", "$pushAll", "$pull", "$pullAll" };
+ for( int i = 0; i < 6; ++i )
if ( strcmp( fn, valid[ i ] ) == 0 )
return Mod::Op( i );
- uassert( "Invalid modifier specified", false );
+ uassert( "Invalid modifier specified " + string( fn ), false );
return Mod::INC;
}
public:
@@ -308,7 +308,7 @@ namespace mongo {
bool haveModForField( const char *fieldName ) const {
// Presumably the number of mods is small, so this loop isn't too expensive.
for( vector<Mod>::const_iterator i = mods_.begin(); i != mods_.end(); ++i ) {
- if ( strcmp( fieldName, i->fieldName ) == 0 )
+ if ( strlen( fieldName ) == strlen( i->fieldName ) && strcmp( fieldName, i->fieldName ) == 0 )
return true;
}
return false;
@@ -317,8 +317,8 @@ namespace mongo {
// Presumably the number of mods is small, so this loop isn't too expensive.
for( vector<Mod>::const_iterator i = mods_.begin(); i != mods_.end(); ++i ) {
const char *dot = strchr( i->fieldName, '.' );
- int len = dot ? dot - i->fieldName : strlen( i->fieldName );
- if ( strncmp( fieldName, i->fieldName, len ) == 0 )
+ size_t len = dot ? dot - i->fieldName : strlen( i->fieldName );
+ if ( len == strlen( fieldName ) && strncmp( fieldName, i->fieldName, len ) == 0 )
return true;
}
return false;
@@ -333,13 +333,13 @@ namespace mongo {
}
bool havePush() const {
for ( vector<Mod>::const_iterator i = mods_.begin(); i != mods_.end(); i++ )
- if ( i->op == Mod::PUSH )
+ if ( i->op == Mod::PUSH || i->op == Mod::PUSH_ALL )
return true;
return false;
}
void appendSizeSpecForPushes( BSONObjBuilder &b ) const {
for ( vector<Mod>::const_iterator i = mods_.begin(); i != mods_.end(); i++ ) {
- if ( i->op == Mod::PUSH ) {
+ if ( i->op == Mod::PUSH || i->op == Mod::PUSH_ALL ) {
if ( i->pushStartSize == -1 )
b.appendNull( i->fieldName );
else
@@ -360,20 +360,47 @@ namespace mongo {
inPlacePossible = false;
} else {
switch( m.op ) {
- case Mod::INC:
- uassert( "Cannot apply $inc modifier to non-number", e.isNumber() || e.eoo() );
- if ( !e.isNumber() )
- inPlacePossible = false;
- break;
- case Mod::SET:
- if ( !( e.isNumber() && m.elt.isNumber() ) &&
- m.elt.valuesize() != e.valuesize() )
- inPlacePossible = false;
- break;
- case Mod::PUSH:
- uassert( "Cannot apply $push modifier to non-array", e.type() == Array || e.eoo() );
+ case Mod::INC:
+ uassert( "Cannot apply $inc modifier to non-number", e.isNumber() || e.eoo() );
+ if ( !e.isNumber() )
inPlacePossible = false;
- break;
+ break;
+ case Mod::SET:
+ if ( !( e.isNumber() && m.elt.isNumber() ) &&
+ m.elt.valuesize() != e.valuesize() )
+ inPlacePossible = false;
+ break;
+ case Mod::PUSH:
+ case Mod::PUSH_ALL:
+ uassert( "Cannot apply $push/$pushAll modifier to non-array", e.type() == Array || e.eoo() );
+ inPlacePossible = false;
+ break;
+ case Mod::PULL:
+ case Mod::PULL_ALL: {
+ uassert( "Cannot apply $pull/$pullAll modifier to non-array", e.type() == Array || e.eoo() );
+ BSONObjIterator i( e.embeddedObject() );
+ while( inPlacePossible && i.moreWithEOO() ) {
+ BSONElement arrI = i.next();
+ if ( arrI.eoo() )
+ break;
+ if ( m.op == Mod::PULL ) {
+ if ( arrI.woCompare( m.elt, false ) == 0 ) {
+ inPlacePossible = false;
+ }
+ } else if ( m.op == Mod::PULL_ALL ) {
+ BSONObjIterator j( m.elt.embeddedObject() );
+ while( inPlacePossible && j.moreWithEOO() ) {
+ BSONElement arrJ = j.next();
+ if ( arrJ.eoo() )
+ break;
+ if ( arrI.woCompare( arrJ, false ) == 0 ) {
+ inPlacePossible = false;
+ }
+ }
+ }
+ }
+ break;
+ }
}
}
}
@@ -383,6 +410,8 @@ namespace mongo {
for ( vector<Mod>::const_iterator i = mods_.begin(); i != mods_.end(); ++i ) {
const Mod& m = *i;
BSONElement e = obj.getFieldDotted(m.fieldName);
+ if ( m.op == Mod::PULL || m.op == Mod::PULL_ALL )
+ continue;
if ( m.op == Mod::INC ) {
m.setn( e.number() + m.getn() );
BSONElementManipulator( e ).setNumber( m.getn() );
@@ -439,7 +468,7 @@ namespace mongo {
if ( cmp <= 0 )
uassert( "Modifier spec implies existence of an encapsulating object with a name that already represents a non-object,"
" or is referenced in another $set clause",
- mayAddEmbedded( existing, m->fieldName ) );
+ mayAddEmbedded( existing, m->fieldName ) );
if ( cmp == 0 ) {
BSONElement e = p->second;
if ( m->op == Mod::INC ) {
@@ -447,37 +476,84 @@ namespace mongo {
b2.appendAs( m->elt, m->fieldName );
} else if ( m->op == Mod::SET ) {
b2.appendAs( m->elt, m->fieldName );
- } else if ( m->op == Mod::PUSH ) {
+ } else if ( m->op == Mod::PUSH || m->op == Mod::PUSH_ALL ) {
BSONObjBuilder arr( b2.subarrayStartAs( m->fieldName ) );
BSONObjIterator i( e.embeddedObject() );
- int count = 0;
+ int startCount = 0;
while( i.moreWithEOO() ) {
BSONElement arrI = i.next();
if ( arrI.eoo() )
break;
arr.append( arrI );
- ++count;
+ ++startCount;
+ }
+ if ( m->op == Mod::PUSH ) {
+ stringstream ss;
+ ss << startCount;
+ string nextIndex = ss.str();
+ arr.appendAs( m->elt, nextIndex.c_str() );
+ } else {
+ BSONObjIterator i( m->elt.embeddedObject() );
+ int count = startCount;
+ while( i.moreWithEOO() ) {
+ BSONElement arrI = i.next();
+ if ( arrI.eoo() )
+ break;
+ stringstream ss;
+ ss << count++;
+ string nextIndex = ss.str();
+ arr.appendAs( arrI, nextIndex.c_str() );
+ }
+ }
+ arr.done();
+ m->pushStartSize = startCount;
+ } else if ( m->op == Mod::PULL || m->op == Mod::PULL_ALL ) {
+ BSONObjBuilder arr( b2.subarrayStartAs( m->fieldName ) );
+ BSONObjIterator i( e.embeddedObject() );
+ int count = 0;
+ while( i.moreWithEOO() ) {
+ BSONElement arrI = i.next();
+ if ( arrI.eoo() )
+ break;
+ bool allowed = true;
+ if ( m->op == Mod::PULL ) {
+ allowed = ( arrI.woCompare( m->elt, false ) != 0 );
+ } else {
+ BSONObjIterator j( m->elt.embeddedObject() );
+ while( allowed && j.moreWithEOO() ) {
+ BSONElement arrJ = j.next();
+ if ( arrJ.eoo() )
+ break;
+ allowed = ( arrI.woCompare( arrJ, false ) != 0 );
+ }
+ }
+ if ( allowed ) {
+ stringstream ss;
+ ss << count++;
+ string index = ss.str();
+ arr.appendAs( arrI, index.c_str() );
+ }
}
- stringstream ss;
- ss << count;
- string nextIndex = ss.str();
- arr.appendAs( m->elt, nextIndex.c_str() );
arr.done();
- m->pushStartSize = count;
}
++m;
++p;
} else if ( cmp < 0 ) {
+ // $ modifier applied to missing field -- create field from scratch
if ( m->op == Mod::PUSH ) {
BSONObjBuilder arr( b2.subarrayStartAs( m->fieldName ) );
arr.appendAs( m->elt, "0" );
arr.done();
m->pushStartSize = -1;
- } else {
+ } else if ( m->op == Mod::PUSH_ALL ) {
+ b2.appendAs( m->elt, m->fieldName );
+ m->pushStartSize = -1;
+ } else if ( m->op != Mod::PULL && m->op != Mod::PULL_ALL ) {
b2.appendAs( m->elt, m->fieldName );
}
++m;
} else if ( cmp > 0 ) {
+ // No $ modifier
if ( mayAddEmbedded( existing, p->first ) )
b2.appendAs( p->second, p->first );
++p;
@@ -491,6 +567,9 @@ namespace mongo {
{ $inc: { a:1, b:1 } }
{ $set: { a:77 } }
{ $push: { a:55 } }
+ { $pushAll: { a:[77,88] } }
+ { $pull: { a:66 } }
+ { $pullAll : { a:[99,1010] } }
NOTE: MODIFIES source from object!
*/
void ModSet::getMods(const BSONObj &from) {
@@ -500,7 +579,7 @@ namespace mongo {
if ( e.eoo() )
break;
const char *fn = e.fieldName();
- uassert( "Invalid modifier specified", e.type() == Object );
+ uassert( "Invalid modifier specified" + string( fn ), e.type() == Object );
BSONObj j = e.embeddedObject();
BSONObjIterator jt(j);
Mod::Op op = opFromStr( fn );
@@ -517,9 +596,10 @@ namespace mongo {
uassert( "Invalid mod field name, may not end in a period", m.fieldName[ strlen( m.fieldName ) - 1 ] != '.' );
for ( vector<Mod>::iterator i = mods_.begin(); i != mods_.end(); i++ ) {
uassert( "Field name duplication not allowed with modifiers",
- strcmp( m.fieldName, i->fieldName ) != 0 );
+ strcmp( m.fieldName, i->fieldName ) != 0 );
}
uassert( "Modifier $inc allowed for numbers only", f.isNumber() || op != Mod::INC );
+ uassert( "Modifier $pushAll/pullAll allowed for arrays only", f.type() == Array || ( op != Mod::PUSH_ALL && op != Mod::PULL_ALL ) );
m.elt = f;
if ( f.type() == NumberDouble ) {
m.ndouble = (double *) f.value();
@@ -614,15 +694,15 @@ namespace mongo {
}
/* note: we only update one row and quit. if you do multiple later,
- be careful or multikeys in arrays could break things badly. best
- to only allow updating a single row with a multikey lookup.
- */
+ be careful or multikeys in arrays could break things badly. best
+ to only allow updating a single row with a multikey lookup.
+ */
if ( profile )
ss << " nscanned:" << u->nscanned();
/* look for $inc etc. note as listed here, all fields to inc must be this type, you can't set some
- regular ones at the moment. */
+ regular ones at the moment. */
const char *firstField = updateobj.firstElement().fieldName();
if ( firstField[0] == '$' ) {
ModSet mods;
@@ -694,9 +774,9 @@ namespace mongo {
}
/* todo:
- _ smart requery find record immediately
- (clean return codes up later...)
- */
+ _ smart requery find record immediately
+ (clean return codes up later...)
+ */
int _updateObjects(const char *ns, BSONObj updateobj, BSONObj pattern, bool upsert, stringstream& ss, bool logop=false) {
return __updateObjects( ns, updateobj, pattern, upsert, ss, logop );
}
@@ -767,7 +847,7 @@ namespace mongo {
}
-//int dump = 0;
+ //int dump = 0;
/* empty result for error conditions */
QueryResult* emptyMoreResult(long long cursorid) {
@@ -828,23 +908,23 @@ namespace mongo {
else {
BSONObj js = c->current();
/* if ( cc->ids_.get() ) {
- BSONElement idRef = js.getField( "_id" );
- if ( !idRef.eoo() ) {
- BSONObjBuilder idBuilder;
- idBuilder.append( idRef );
- BSONObj id = idBuilder.obj();
- if ( cc->ids_->get( id ) ) {
- c->advance();
- continue;
- }
- cc->ids_->put( id );
- }
- }*/
+ BSONElement idRef = js.getField( "_id" );
+ if ( !idRef.eoo() ) {
+ BSONObjBuilder idBuilder;
+ idBuilder.append( idRef );
+ BSONObj id = idBuilder.obj();
+ if ( cc->ids_->get( id ) ) {
+ c->advance();
+ continue;
+ }
+ cc->ids_->put( id );
+ }
+ }*/
bool ok = fillQueryResultFromObj(b, cc->filter.get(), js);
if ( ok ) {
n++;
if ( (ntoreturn>0 && (n >= ntoreturn || b.len() > MaxBytesToReturnToClientAtOnce)) ||
- (ntoreturn==0 && b.len()>1*1024*1024) ) {
+ (ntoreturn==0 && b.len()>1*1024*1024) ) {
c->advance();
cc->pos += n;
//cc->updateLocation();
@@ -948,7 +1028,7 @@ namespace mongo {
};
/* { count: "collectionname"[, query: <query>] }
- returns -1 on ns does not exist error.
+ returns -1 on ns does not exist error.
*/
long long runCount( const char *ns, const BSONObj &cmd, string &err ) {
NamespaceDetails *d = nsdetails( ns );
@@ -977,22 +1057,22 @@ namespace mongo {
class DoQueryOp : public QueryOp {
public:
DoQueryOp( int ntoskip, int ntoreturn, const BSONObj &order, bool wantMore,
- bool explain, FieldMatcher *filter, int queryOptions ) :
- b_( 32768 ),
- ntoskip_( ntoskip ),
- ntoreturn_( ntoreturn ),
- order_( order ),
- wantMore_( wantMore ),
- explain_( explain ),
- filter_( filter ),
- ordering_(),
- nscanned_(),
- queryOptions_( queryOptions ),
- n_(),
- soSize_(),
- saveClientCursor_(),
- findingStart_( (queryOptions & Option_OplogReplay) != 0 ),
- findingStartCursor_()
+ bool explain, FieldMatcher *filter, int queryOptions ) :
+ b_( 32768 ),
+ ntoskip_( ntoskip ),
+ ntoreturn_( ntoreturn ),
+ order_( order ),
+ wantMore_( wantMore ),
+ explain_( explain ),
+ filter_( filter ),
+ ordering_(),
+ nscanned_(),
+ queryOptions_( queryOptions ),
+ n_(),
+ soSize_(),
+ saveClientCursor_(),
+ findingStart_( (queryOptions & Option_OplogReplay) != 0 ),
+ findingStartCursor_()
{}
virtual void init() {
@@ -1049,9 +1129,9 @@ namespace mongo {
}
bool mayCreateCursor1 = wantMore_ && ntoreturn_ != 1 && useCursors;
-/* if ( !ids_.get() && !c_->capped() && ( mayCreateCursor1 || mayCreateCursor2() ) ) {
- ids_.reset( new IdSet() );
- }*/
+ /* if ( !ids_.get() && !c_->capped() && ( mayCreateCursor1 || mayCreateCursor2() ) ) {
+ ids_.reset( new IdSet() );
+ }*/
if( 0 ) {
BSONObj js = c_->current();
@@ -1070,14 +1150,14 @@ namespace mongo {
// got a match.
assert( js.objsize() >= 0 ); //defensive for segfaults
/*if ( ids_.get() ) {
- BSONElement idRef = js.getField( "_id" );
- if ( !idRef.eoo() ) {
- BSONObjBuilder b;
- b.append( idRef );
- BSONObj id = b.obj();
- ids_->put( id );
- }
- }*/
+ BSONElement idRef = js.getField( "_id" );
+ if ( !idRef.eoo() ) {
+ BSONObjBuilder b;
+ b.append( idRef );
+ BSONObj id = b.obj();
+ ids_->put( id );
+ }
+ }*/
if ( ordering_ ) {
// note: no cursors for non-indexed, ordered results. results must be fairly small.
so_->add(js);
@@ -1098,14 +1178,14 @@ namespace mongo {
if ( ok ) n_++;
if ( ok ) {
if ( (ntoreturn_>0 && (n_ >= ntoreturn_ || b_.len() > MaxBytesToReturnToClientAtOnce)) ||
- (ntoreturn_==0 && (b_.len()>1*1024*1024 || n_>=101)) ) {
+ (ntoreturn_==0 && (b_.len()>1*1024*1024 || n_>=101)) ) {
/* if ntoreturn is zero, we return up to 101 objects. on the subsequent getmore, there
- is only a size limit. The idea is that on a find() where one doesn't use much results,
- we don't return much, but once getmore kicks in, we start pushing significant quantities.
+ is only a size limit. The idea is that on a find() where one doesn't use much results,
+ we don't return much, but once getmore kicks in, we start pushing significant quantities.
- The n limit (vs. size) is important when someone fetches only one small field from big
- objects, which causes massive scanning server-side.
- */
+ The n limit (vs. size) is important when someone fetches only one small field from big
+ objects, which causes massive scanning server-side.
+ */
/* if only 1 requested, no cursor saved for efficiency...we assume it is findOne() */
if ( mayCreateCursor1 ) {
c_->advance();
@@ -1147,7 +1227,7 @@ namespace mongo {
bool scanAndOrderRequired() const { return ordering_; }
auto_ptr< Cursor > cursor() { return c_; }
auto_ptr< KeyValJSMatcher > matcher() { return matcher_; }
-// auto_ptr< IdSet > ids() { return ids_; }
+ // auto_ptr< IdSet > ids() { return ids_; }
int n() const { return n_; }
long long nscanned() const { return nscanned_; }
bool saveClientCursor() const { return saveClientCursor_; }
@@ -1171,7 +1251,7 @@ namespace mongo {
auto_ptr< ScanAndOrder > so_;
bool findingStart_;
ClientCursor * findingStartCursor_;
-// auto_ptr< IdSet > ids_; /* for dedupping traversal of multikey indexes */
+ // auto_ptr< IdSet > ids_; /* for dedupping traversal of multikey indexes */
};
auto_ptr< QueryResult > runQuery(Message& m, stringstream& ss ) {
@@ -1192,9 +1272,9 @@ namespace mongo {
int ntoreturn = _ntoreturn;
if ( _ntoreturn < 0 ) {
/* _ntoreturn greater than zero is simply a hint on how many objects to send back per
- "cursor batch".
- A negative number indicates a hard limit.
- */
+ "cursor batch".
+ A negative number indicates a hard limit.
+ */
ntoreturn = -_ntoreturn;
wantMore = false;
}
@@ -1278,9 +1358,9 @@ namespace mongo {
}
/* The ElemIter will not be happy if this isn't really an object. So throw exception
- here when that is true.
- (Which may indicate bad data from client.)
- */
+ here when that is true.
+ (Which may indicate bad data from client.)
+ */
if ( query.objsize() == 0 ) {
out() << "Bad query object?\n jsobj:";
out() << jsobj.toString() << "\n query:";
@@ -1311,7 +1391,7 @@ namespace mongo {
cursorid = cc->cursorid;
DEV out() << " query has more, cursorid: " << cursorid << endl;
cc->matcher = dqo.matcher();
-// cc->ids_ = dqo.ids();
+ // cc->ids_ = dqo.ids();
cc->ns = ns;
cc->pos = n;
cc->filter = filter;
diff --git a/dbtests/perf/perftest.cpp b/dbtests/perf/perftest.cpp
index 21cf7d043e5..aa3d047a831 100644
--- a/dbtests/perf/perftest.cpp
+++ b/dbtests/perf/perftest.cpp
@@ -27,6 +27,7 @@
#include <unittest/Registry.hpp>
#include <unittest/UnitTest.hpp>
+#include <boost/date_time/posix_time/posix_time.hpp>
namespace mongo {
extern const char* dbpath;
diff --git a/dbtests/repltests.cpp b/dbtests/repltests.cpp
index 5881629c298..4629a7d7ac3 100644
--- a/dbtests/repltests.cpp
+++ b/dbtests/repltests.cpp
@@ -671,7 +671,103 @@ namespace ReplTests {
insert( fromjson( "{'_id':0}" ) );
}
};
+
+ class PushAll : public Base {
+ public:
+ void doIt() const {
+ client()->update( ns(), BSON( "_id" << 0 ), fromjson( "{$pushAll:{a:[5.0,6.0]}}" ) );
+ }
+ using ReplTests::Base::check;
+ void check() const {
+ ASSERT_EQUALS( 1, count() );
+ check( fromjson( "{'_id':0,a:[4,5,6]}" ), one( fromjson( "{'_id':0}" ) ) );
+ }
+ void reset() const {
+ deleteAll( ns() );
+ insert( fromjson( "{'_id':0,a:[4]}" ) );
+ }
+ };
+ class PushAllUpsert : public Base {
+ public:
+ void doIt() const {
+ client()->update( ns(), BSON( "_id" << 0 ), fromjson( "{$pushAll:{a:[5.0,6.0]}}" ), true );
+ }
+ using ReplTests::Base::check;
+ void check() const {
+ ASSERT_EQUALS( 1, count() );
+ check( fromjson( "{'_id':0,a:[4,5,6]}" ), one( fromjson( "{'_id':0}" ) ) );
+ }
+ void reset() const {
+ deleteAll( ns() );
+ insert( fromjson( "{'_id':0,a:[4]}" ) );
+ }
+ };
+
+ class EmptyPushAll : public Base {
+ public:
+ void doIt() const {
+ client()->update( ns(), BSON( "_id" << 0 ), fromjson( "{$pushAll:{a:[5.0,6.0]}}" ) );
+ }
+ using ReplTests::Base::check;
+ void check() const {
+ ASSERT_EQUALS( 1, count() );
+ check( fromjson( "{'_id':0,a:[5,6]}" ), one( fromjson( "{'_id':0}" ) ) );
+ }
+ void reset() const {
+ deleteAll( ns() );
+ insert( fromjson( "{'_id':0}" ) );
+ }
+ };
+
+ class Pull : public Base {
+ public:
+ void doIt() const {
+ client()->update( ns(), BSON( "_id" << 0 ), BSON( "$pull" << BSON( "a" << 4.0 ) ) );
+ }
+ using ReplTests::Base::check;
+ void check() const {
+ ASSERT_EQUALS( 1, count() );
+ check( fromjson( "{'_id':0,a:[5]}" ), one( fromjson( "{'_id':0}" ) ) );
+ }
+ void reset() const {
+ deleteAll( ns() );
+ insert( fromjson( "{'_id':0,a:[4,5]}" ) );
+ }
+ };
+
+ class PullNothing : public Base {
+ public:
+ void doIt() const {
+ client()->update( ns(), BSON( "_id" << 0 ), BSON( "$pull" << BSON( "a" << 6.0 ) ) );
+ }
+ using ReplTests::Base::check;
+ void check() const {
+ ASSERT_EQUALS( 1, count() );
+ check( fromjson( "{'_id':0,a:[4,5]}" ), one( fromjson( "{'_id':0}" ) ) );
+ }
+ void reset() const {
+ deleteAll( ns() );
+ insert( fromjson( "{'_id':0,a:[4,5]}" ) );
+ }
+ };
+
+ class PullAll : public Base {
+ public:
+ void doIt() const {
+ client()->update( ns(), BSON( "_id" << 0 ), fromjson( "{$pullAll:{a:[4,5]}}" ) );
+ }
+ using ReplTests::Base::check;
+ void check() const {
+ ASSERT_EQUALS( 1, count() );
+ check( fromjson( "{'_id':0,a:[6]}" ), one( fromjson( "{'_id':0}" ) ) );
+ }
+ void reset() const {
+ deleteAll( ns() );
+ insert( fromjson( "{'_id':0,a:[4,5,6]}" ) );
+ }
+ };
+
} // namespace Idempotence
class DeleteOpIsIdBased : public Base {
@@ -837,6 +933,12 @@ namespace ReplTests {
add< Idempotence::PushUpsert >();
add< Idempotence::MultiPush >();
add< Idempotence::EmptyPush >();
+ add< Idempotence::PushAll >();
+ add< Idempotence::PushAllUpsert >();
+ add< Idempotence::EmptyPushAll >();
+ add< Idempotence::Pull >();
+ add< Idempotence::PullNothing >();
+ add< Idempotence::PullAll >();
add< DeleteOpIsIdBased >();
add< DbIdsTest >();
add< MemIdsTest >();
diff --git a/dbtests/updatetests.cpp b/dbtests/updatetests.cpp
index 10beba793c0..325a031a454 100644
--- a/dbtests/updatetests.cpp
+++ b/dbtests/updatetests.cpp
@@ -106,6 +106,20 @@ namespace UpdateTests {
}
};
+ class PushAllNonArray : public Fail {
+ void doIt() {
+ insert( ns(), fromjson( "{a:[1]}" ) );
+ update( ns(), BSONObj(), fromjson( "{$pushAll:{a:'d'}}" ) );
+ }
+ };
+
+ class PullAllNonArray : public Fail {
+ void doIt() {
+ insert( ns(), fromjson( "{a:[1]}" ) );
+ update( ns(), BSONObj(), fromjson( "{$pullAll:{a:'d'}}" ) );
+ }
+ };
+
class IncTargetNonNumber : public Fail {
void doIt() {
insert( ns(), BSON( "a" << "a" ) );
@@ -474,6 +488,8 @@ namespace UpdateTests {
add< ModNotFirst >();
add< ModDuplicateFieldSpec >();
add< IncNonNumber >();
+ add< PushAllNonArray >();
+ add< PullAllNonArray >();
add< IncTargetNonNumber >();
add< SetNum >();
add< SetString >();
diff --git a/jstests/basic3.js b/jstests/basic3.js
index 5be2b71ad95..b1ebafd7bb5 100644
--- a/jstests/basic3.js
+++ b/jstests/basic3.js
@@ -14,3 +14,11 @@ catch ( e ){
}
assert( ok , ". in names aren't allowed doesn't work" );
+try{
+ t.save( { "x" : { "a.b" : 5 } } );
+ ok = false;
+}
+catch ( e ){
+ ok = true;
+}
+assert( ok , ". in embedded names aren't allowed doesn't work" );
diff --git a/jstests/basic9.js b/jstests/basic9.js
new file mode 100644
index 00000000000..59204188036
--- /dev/null
+++ b/jstests/basic9.js
@@ -0,0 +1,25 @@
+
+t = db.getCollection( "foo" );
+
+t.save( { "foo$bar" : 5 } );
+
+ok = false;
+
+try{
+ t.save( { "$foo" : 5 } );
+ ok = false;
+}
+catch ( e ){
+ ok = true;
+}
+assert( ok , "key names aren't allowed to start with $ doesn't work" );
+
+try{
+ t.save( { "x" : { "$foo" : 5 } } );
+ ok = false;
+}
+catch ( e ){
+ ok = true;
+}
+assert( ok , "embedded key names aren't allowed to start with $ doesn't work" );
+
diff --git a/jstests/basica.js b/jstests/basica.js
new file mode 100644
index 00000000000..0cc364beb42
--- /dev/null
+++ b/jstests/basica.js
@@ -0,0 +1,33 @@
+
+t = db.basica;
+
+
+t.drop();
+
+t.save( { a : 1 , b : [ { x : 2 , y : 2 } , { x : 3 , y : 3 } ] } );
+
+x = t.findOne();
+x.b["0"].x = 4;
+x.b["0"].z = 4;
+x.b[0].m = 9;
+x.b[0]["asd"] = 11;
+x.a = 2;
+x.z = 11;
+
+tojson( x );
+t.save( x );
+assert.eq( tojson( x ) , tojson( t.findOne() ) , "FIRST" );
+
+// -----
+
+t.drop();
+
+t.save( { a : 1 , b : [ { x : 2 , y : 2 } , { x : 3 , y : 3 } ] } );
+
+x = t.findOne();
+x.b["0"].z = 4;
+
+//printjson( x );
+t.save( x );
+assert.eq( tojson( x ) , tojson( t.findOne() ) , "SECOND" );
+
diff --git a/jstests/index_check3.js b/jstests/index_check3.js
index 62352e5d61c..1ecfe52654e 100644
--- a/jstests/index_check3.js
+++ b/jstests/index_check3.js
@@ -30,7 +30,22 @@ for ( var i=0; i<100; i++ ){
t.ensureIndex( { foo : 1 } );
printjson( t.find( { foo : { $lt : 50 } } ).explain() );
-assert.gt( 30 , t.find( { foo : { $lt : 50 } } ).explain().nscanned , "lt" )
+assert.gt( 30 , t.find( { foo : { $lt : 50 } } ).explain().nscanned , "lt" );
printjson( t.find( { foo : { $gt : 50 } } ).explain() );
-assert.gt( 30 , t.find( { foo : { $gt : 50 } } ).explain().nscanned , "gt" )
+assert.gt( 30 , t.find( { foo : { $gt : 50 } } ).explain().nscanned , "gt" );
+
+
+t.drop();
+t.save( {i:'a'} );
+for( var i=0; i < 10; ++i ) {
+ t.save( {} );
+}
+
+t.ensureIndex( { i : 1 } );
+
+printjson( t.find( { i : { $lte : 'a' } } ).explain() );
+assert.gt( 3 , t.find( { i : { $lte : 'a' } } ).explain().nscanned , "lte" );
+printjson( t.find( { i : { $gte : 'a' } } ).explain() );
+// bug SERVER-99
+// assert.gt( 3 , t.find( { i : { $gte : 'a' } } ).explain().nscanned , "gte" );
diff --git a/jstests/pull.js b/jstests/pull.js
new file mode 100644
index 00000000000..cf8147a456c
--- /dev/null
+++ b/jstests/pull.js
@@ -0,0 +1,19 @@
+t = db.jstests_pull;
+t.drop();
+
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pull: { a: 2 } } );
+t.update( {}, { $pull: { a: 6 } } );
+assert.eq( [ 1, 3 ], t.findOne().a );
+
+t.drop();
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pull: { a: 2 } } );
+t.update( {}, { $pull: { a: 2 } } );
+assert.eq( [ 1, 3 ], t.findOne().a );
+
+t.drop();
+t.save( { a: [ 2 ] } );
+t.update( {}, { $pull: { a: 2 } } );
+t.update( {}, { $pull: { a: 6 } } );
+assert.eq( [], t.findOne().a );
diff --git a/jstests/pullall.js b/jstests/pullall.js
new file mode 100644
index 00000000000..b720ce58204
--- /dev/null
+++ b/jstests/pullall.js
@@ -0,0 +1,18 @@
+t = db.jstests_pushall;
+t.drop();
+
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pullAll: { a: [ 3 ] } } );
+assert.eq( [ 1, 2 ], t.findOne().a );
+t.update( {}, { $pullAll: { a: [ 3 ] } } );
+assert.eq( [ 1, 2 ], t.findOne().a );
+
+t.drop();
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pullAll: { a: [ 2, 3 ] } } );
+assert.eq( [ 1 ], t.findOne().a );
+t.update( {}, { $pullAll: { a: [] } } );
+assert.eq( [ 1 ], t.findOne().a );
+t.update( {}, { $pullAll: { a: [ 1, 5 ] } } );
+assert.eq( [], t.findOne().a );
+
diff --git a/jstests/pushall.js b/jstests/pushall.js
new file mode 100644
index 00000000000..eda68203ed3
--- /dev/null
+++ b/jstests/pushall.js
@@ -0,0 +1,20 @@
+t = db.jstests_pushall;
+t.drop();
+
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pushAll: { a: [ 4 ] } } );
+assert.eq( [ 1, 2, 3, 4 ], t.findOne().a );
+t.update( {}, { $pushAll: { a: [ 4 ] } } );
+assert.eq( [ 1, 2, 3, 4, 4 ], t.findOne().a );
+
+t.drop();
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pushAll: { a: [ 4, 5 ] } } );
+assert.eq( [ 1, 2, 3, 4, 5 ], t.findOne().a );
+t.update( {}, { $pushAll: { a: [] } } );
+assert.eq( [ 1, 2, 3, 4, 5 ], t.findOne().a );
+
+t.drop();
+t.save( {} );
+t.update( {}, { $pushAll: { a: [ 1, 2 ] } } );
+assert.eq( [ 1, 2 ], t.findOne().a );
diff --git a/jstests/regex.js b/jstests/regex.js
index 4512da07c70..f431d506ea6 100644
--- a/jstests/regex.js
+++ b/jstests/regex.js
@@ -2,23 +2,23 @@ t = db.jstests_regex;
t.drop();
t.save( { a: "bcd" } );
-assert.eq( 1, t.count( { a: /b/ } ) );
-assert.eq( 1, t.count( { a: /bc/ } ) );
-assert.eq( 1, t.count( { a: /bcd/ } ) );
-assert.eq( 0, t.count( { a: /bcde/ } ) );
+assert.eq( 1, t.count( { a: /b/ } ) , "A" );
+assert.eq( 1, t.count( { a: /bc/ } ) , "B" );
+assert.eq( 1, t.count( { a: /bcd/ } ) , "C" );
+assert.eq( 0, t.count( { a: /bcde/ } ) , "D" );
t.drop();
t.save( { a: { b: "cde" } } );
-assert.eq( 1, t.count( { 'a.b': /de/ } ) );
+assert.eq( 1, t.count( { 'a.b': /de/ } ) , "E" );
t.drop();
t.save( { a: { b: [ "cde" ] } } );
-assert.eq( 1, t.count( { 'a.b': /de/ } ) );
+assert.eq( 1, t.count( { 'a.b': /de/ } ) , "F" );
t.drop();
t.save( { a: [ { b: "cde" } ] } );
-assert.eq( 1, t.count( { 'a.b': /de/ } ) );
+assert.eq( 1, t.count( { 'a.b': /de/ } ) , "G" );
t.drop();
t.save( { a: [ { b: [ "cde" ] } ] } );
-assert.eq( 1, t.count( { 'a.b': /de/ } ) );
+assert.eq( 1, t.count( { 'a.b': /de/ } ) , "H" );
diff --git a/jstests/update5.js b/jstests/update5.js
new file mode 100644
index 00000000000..4ff9f1b9d38
--- /dev/null
+++ b/jstests/update5.js
@@ -0,0 +1,40 @@
+
+t = db.update5;
+
+function go( key ){
+
+ t.drop();
+
+ function check( num , name ){
+ assert.eq( 1 , t.find().count() , tojson( key ) + " count " + name );
+ assert.eq( num , t.findOne().n , tojson( key ) + " value " + name );
+ }
+
+ t.update( key , { $inc : { n : 1 } } , true );
+ check( 1 , "A" );
+
+ t.update( key , { $inc : { n : 1 } } , true );
+ check( 2 , "B" );
+
+ t.update( key , { $inc : { n : 1 } } , true );
+ check( 3 , "C" );
+
+ var ik = {};
+ for ( k in key )
+ ik[k] = 1;
+ t.ensureIndex( ik );
+
+ t.update( key , { $inc : { n : 1 } } , true );
+ check( 4 , "D" );
+
+}
+
+go( { a : 5 } );
+go( { a : 5 } );
+
+go( { a : 5 , b : 7 } );
+go( { a : null , b : 7 } );
+
+go( { referer: 'blah' } );
+//go( { referer: 'blah', name: 'bar' } );
+//go( { date: null, referer: 'blah', name: 'bar' } );
diff --git a/jstests/update6.js b/jstests/update6.js
new file mode 100644
index 00000000000..ed8eaded508
--- /dev/null
+++ b/jstests/update6.js
@@ -0,0 +1,46 @@
+
+t = db.update6;
+t.drop();
+
+t.save( { a : 1 , b : { c : 1 , d : 1 } } );
+
+t.update( { a : 1 } , { $inc : { "b.c" : 1 } } );
+assert.eq( 2 , t.findOne().b.c , "A" );
+assert.eq( "c,d" , t.findOne().b.keySet().toString() , "B" );
+
+t.update( { a : 1 } , { $inc : { "b.0e" : 1 } } );
+assert.eq( 1 , t.findOne().b["0e"] , "C" );
+assert.eq( "0e,c,d" , t.findOne().b.keySet().toString() , "D" );
+
+// -----
+
+t.drop();
+
+t.save( {"_id" : 2 ,
+ "b3" : {"0720" : 5 , "0721" : 12 , "0722" : 11 , "0723" : 3 , "0721" : 12} ,
+ //"b323" : {"0720" : 1} ,
+ }
+ );
+
+
+assert.eq( 4 , t.find({_id:2},{b3:1})[0].b3.keySet().length , "test 1 : ks before" );
+t.update({_id:2},{$inc: { 'b3.0719' : 1}},true)
+assert.eq( 5 , t.find({_id:2},{b3:1})[0].b3.keySet().length , "test 1 : ks after" );
+
+
+// -----
+
+t.drop();
+
+t.save( {"_id" : 2 ,
+ "b3" : {"0720" : 5 , "0721" : 12 , "0722" : 11 , "0723" : 3 , "0721" : 12} ,
+ "b324" : {"0720" : 1} ,
+ }
+ );
+
+
+assert.eq( 4 , t.find({_id:2},{b3:1})[0].b3.keySet().length , "test 2 : ks before" );
+printjson( t.find({_id:2},{b3:1})[0].b3 )
+t.update({_id:2},{$inc: { 'b3.0719' : 1}} )
+printjson( t.find({_id:2},{b3:1})[0].b3 )
+assert.eq( 5 , t.find({_id:2},{b3:1})[0].b3.keySet().length , "test 2 : ks after" );
diff --git a/scripting/engine_spidermonkey.cpp b/scripting/engine_spidermonkey.cpp
index 0b1690721b8..72d0ca1fb06 100644
--- a/scripting/engine_spidermonkey.cpp
+++ b/scripting/engine_spidermonkey.cpp
@@ -4,6 +4,7 @@
#include "engine_spidermonkey.h"
#include "../client/dbclient.h"
+#include <boost/date_time/posix_time/posix_time.hpp>
namespace mongo {
@@ -42,15 +43,21 @@ namespace mongo {
BSONFieldIterator( BSONHolder * holder ){
+ set<string> added;
+
BSONObjIterator it( holder->_obj );
while ( it.more() ){
BSONElement e = it.next();
if ( holder->_removed.count( e.fieldName() ) )
continue;
_names.push_back( e.fieldName() );
+ added.insert( e.fieldName() );
}
- _names.merge( holder->_extra );
+ for ( list<string>::iterator i = holder->_extra.begin(); i != holder->_extra.end(); i++ ){
+ if ( ! added.count( *i ) )
+ _names.push_back( *i );
+ }
_it = _names.begin();
}
@@ -142,14 +149,15 @@ namespace mongo {
BSONObj orig;
if ( JS_InstanceOf( _context , o , &bson_class , 0 ) ){
BSONHolder * holder = GETHOLDER(_context,o);
- if ( ! holder->_modified )
+ if ( ! holder->_modified ){
return holder->_obj;
+ }
orig = holder->_obj;
}
BSONObjBuilder b;
- if ( ! appendSpecialDBObject( this , b , "value" , o ) ){
+ if ( ! appendSpecialDBObject( this , b , "value" , OBJECT_TO_JSVAL( o ) , o ) ){
jsval theid = getProperty( o , "_id" );
if ( ! JSVAL_IS_VOID( theid ) ){
@@ -193,6 +201,13 @@ namespace mongo {
uassert( "not a function" , JS_TypeOfValue( _context , v ) == JSTYPE_FUNCTION );
return getFunctionCode( JS_ValueToFunction( _context , v ) );
}
+
+ void appendRegex( BSONObjBuilder& b , const string& name , string s ){
+ assert( s[0] == '/' );
+ s = s.substr(1);
+ string::size_type end = s.rfind( '/' );
+ b.appendRegex( name.c_str() , s.substr( 0 , end ).c_str() , s.substr( end + 1 ).c_str() );
+ }
void append( BSONObjBuilder& b , string name , jsval val , BSONType oldType = EOO ){
//cout << "name: " << name << "\t" << typeString( val ) << " oldType: " << oldType << endl;
@@ -217,7 +232,7 @@ namespace mongo {
if ( ! o || o == JSVAL_NULL ){
b.appendNull( name.c_str() );
}
- else if ( ! appendSpecialDBObject( this , b , name , o ) ){
+ else if ( ! appendSpecialDBObject( this , b , name , val , o ) ){
BSONObj sub = toObject( o );
if ( JS_IsArrayObject( _context , o ) ){
b.appendArray( name.c_str() , sub );
@@ -232,9 +247,7 @@ namespace mongo {
case JSTYPE_FUNCTION: {
string s = toString(val);
if ( s[0] == '/' ){
- s = s.substr(1);
- string::size_type end = s.rfind( '/' );
- b.appendRegex( name.c_str() , s.substr( 0 , end ).c_str() , s.substr( end + 1 ).c_str() );
+ appendRegex( b , name , s );
}
else {
b.appendCode( name.c_str() , getFunctionCode( val ).c_str() );
@@ -581,8 +594,9 @@ namespace mongo {
if ( ! holder->_inResolve ){
Convertor c(cx);
string name = c.toString( idval );
- if ( holder->_obj[name].eoo() )
+ if ( holder->_obj[name].eoo() ){
holder->_extra.push_back( name );
+ }
holder->_modified = true;
}
return JS_TRUE;
@@ -701,6 +715,15 @@ namespace mongo {
holder->_inResolve = true;
assert( JS_SetProperty( cx , obj , s.c_str() , &val ) );
holder->_inResolve = false;
+
+ if ( val != JSVAL_NULL && val != JSVAL_VOID && JSVAL_IS_OBJECT( val ) ){
+ // TODO: this is a hack to get around sub objects being modified
+ JSObject * oo = JSVAL_TO_OBJECT( val );
+ if ( JS_InstanceOf( cx , oo , &bson_class , 0 ) ||
+ JS_IsArrayObject( cx , oo ) ){
+ holder->_modified = true;
+ }
+ }
*objp = obj;
JS_LeaveLocalRootScope( cx );
@@ -714,8 +737,13 @@ namespace mongo {
public:
SMEngine(){
+#ifdef SM18
+ JS_SetCStringsAreUTF8();
+#endif
+
_runtime = JS_NewRuntime(8L * 1024L * 1024L);
uassert( "JS_NewRuntime failed" , _runtime );
+
if ( ! utf8Ok() ){
cerr << "*** warning: spider monkey build without utf8 support. consider rebuilding with utf8 support" << endl;
}
diff --git a/scripting/engine_spidermonkey.h b/scripting/engine_spidermonkey.h
index 313b9fa24c5..f862bb62f25 100644
--- a/scripting/engine_spidermonkey.h
+++ b/scripting/engine_spidermonkey.h
@@ -4,12 +4,15 @@
#include "engine.h"
+// START inc hacking
+
#if defined( MOZJS )
#define MOZILLA_1_8_BRANCH
#include "mozjs/jsapi.h"
#include "mozjs/jsdate.h"
+#include "mozjs/jsregexp.h"
#warning if you are using an ubuntu version of spider monkey, we recommend installing spider monkey from source
@@ -25,14 +28,18 @@
#include "jsapi.h"
#include "jsdate.h"
+#include "jsregexp.h"
#else
#include "js/jsapi.h"
#include "js/jsdate.h"
+#include "js/jsregexp.h"
#endif
+// END inc hacking
+
// -- SM 1.6 hacks ---
#ifndef JSCLASS_GLOBAL_FLAGS
@@ -49,6 +56,10 @@ JSBool JS_CStringsAreUTF8(){
#endif
// -- END SM 1.6 hacks ---
+#ifdef JSVAL_IS_TRACEABLE
+#define SM18
+#endif
+
namespace mongo {
class SMScope;
@@ -75,7 +86,7 @@ namespace mongo {
// mongo
void initMongoJS( SMScope * scope , JSContext * cx , JSObject * global , bool local );
- bool appendSpecialDBObject( Convertor * c , BSONObjBuilder& b , const string& name , JSObject * o );
+ bool appendSpecialDBObject( Convertor * c , BSONObjBuilder& b , const string& name , jsval val , JSObject * o );
#define JSVAL_IS_OID(v) ( JSVAL_IS_OBJECT( v ) && JS_InstanceOf( cx , JSVAL_TO_OBJECT( v ) , &object_id_class , 0 ) )
diff --git a/scripting/sm_db.cpp b/scripting/sm_db.cpp
index 9dba3382d77..601a516116c 100644
--- a/scripting/sm_db.cpp
+++ b/scripting/sm_db.cpp
@@ -652,8 +652,8 @@ namespace mongo {
scope->exec( jsconcatcode );
}
- bool appendSpecialDBObject( Convertor * c , BSONObjBuilder& b , const string& name , JSObject * o ){
-
+ bool appendSpecialDBObject( Convertor * c , BSONObjBuilder& b , const string& name , jsval val , JSObject * o ){
+
if ( JS_InstanceOf( c->_context , o , &object_id_class , 0 ) ){
OID oid;
oid.init( c->getString( o , "str" ) );
@@ -709,11 +709,17 @@ namespace mongo {
if ( JS_InstanceOf( c->_context , o , &dbquery_class , 0 ) ||
JS_InstanceOf( c->_context , o , &mongo_class , 0 ) ||
JS_InstanceOf( c->_context , o , &db_collection_class , 0 ) ){
- b.append( name.c_str() , c->toString( OBJECT_TO_JSVAL(o) ) );
+ b.append( name.c_str() , c->toString( val ) );
return true;
}
-
+#ifdef SM18
+ if ( JS_InstanceOf( c->_context , o , &js_RegExpClass , 0 ) ){
+ c->appendRegex( b , name , c->toString( val ) );
+ return true;
+ }
+#endif
+
return false;
}
diff --git a/shell/collection.js b/shell/collection.js
index 1601bc0f640..f394c37f816 100644
--- a/shell/collection.js
+++ b/shell/collection.js
@@ -7,7 +7,7 @@ if ( ( typeof DBCollection ) == "undefined" ){
this._db = db;
this._shortName = shortName;
this._fullName = fullName;
-
+
this.verify();
}
}
@@ -16,7 +16,7 @@ DBCollection.prototype.verify = function(){
assert( this._fullName , "no fullName" );
assert( this._shortName , "no shortName" );
assert( this._db , "no db" );
-
+
assert.eq( this._fullName , this._db._name + "." + this._shortName , "name mismatch" );
assert( this._mongo , "no mongo in DBCollection" );
@@ -65,25 +65,25 @@ DBCollection.prototype._massageObject = function( q ){
return {};
var type = typeof q;
-
+
if ( type == "function" )
return { $where : q };
-
+
if ( q.isObjectId )
return { _id : q };
-
+
if ( type == "object" )
return q;
-
+
if ( type == "string" ){
if ( q.length == 24 )
return { _id : q };
-
+
return { $where : q };
}
throw "don't know how to massage : " + type;
-
+
}
@@ -95,14 +95,23 @@ DBCollection.prototype._validateObject = function( o ){
DBCollection.prototype._validateForStorage = function( o ){
this._validateObject( o );
for ( var k in o ){
- if ( k.indexOf( "." ) >= 0 )
+ if ( k.indexOf( "." ) >= 0 ) {
throw "can't have . in field names [" + k + "]" ;
+ }
+
+ if ( k.indexOf( "$" ) == 0 ) {
+ throw "field names cannot start with $ [" + k + "]" ;
+ }
+
+ if ( o[k] !== null && typeof( o[k] ) === "object" ) {
+ this._validateForStorage( o[k] );
+ }
}
-}
+};
DBCollection.prototype.find = function( query , fields , limit , skip ){
- return new DBQuery( this._mongo , this._db , this ,
+ return new DBQuery( this._mongo , this._db , this ,
this._fullName , this._massageObject( query ) , fields , limit , skip );
}
@@ -117,10 +126,12 @@ DBCollection.prototype.findOne = function( query , fields ){
return ret;
}
-DBCollection.prototype.insert = function( obj ){
+DBCollection.prototype.insert = function( obj , _allow_dot ){
if ( ! obj )
throw "no object!";
- this._validateForStorage( obj );
+ if ( ! _allow_dot ) {
+ this._validateForStorage( obj );
+ }
return this._mongo.insert( this._fullName , obj );
}
@@ -183,7 +194,7 @@ DBCollection.prototype._indexSpec = function( keys, options ) {
DBCollection.prototype.createIndex = function( keys , options ){
var o = this._indexSpec( keys, options );
- this._db.getCollection( "system.indexes" ).insert( o );
+ this._db.getCollection( "system.indexes" ).insert( o , true );
}
DBCollection.prototype.ensureIndex = function( keys , options ){
@@ -217,7 +228,7 @@ DBCollection.prototype.dropIndexes = function(){
assert( res , "no result from dropIndex result" );
if ( res.ok )
return res;
-
+
if ( res.errmsg.match( /not found/ ) )
return res;
@@ -232,7 +243,7 @@ DBCollection.prototype.drop = function(){
DBCollection.prototype.validate = function() {
var res = this._db.runCommand( { validate: this.getName() } );
-
+
res.valid = false;
if ( res.result ){
@@ -245,7 +256,7 @@ DBCollection.prototype.validate = function() {
res.lastExtentSize = Number( r[1] );
}
}
-
+
return res;
}
@@ -257,7 +268,7 @@ DBCollection.prototype.getIndices = DBCollection.prototype.getIndexes;
DBCollection.prototype.getIndexSpecs = DBCollection.prototype.getIndexes;
DBCollection.prototype.getIndexKeys = function(){
- return this.getIndexes().map(
+ return this.getIndexes().map(
function(i){
return i.key;
}
@@ -293,32 +304,32 @@ DBCollection.prototype.clean = function() {
*/
DBCollection.prototype.dropIndex = function(index) {
assert(index , "need to specify index to dropIndex" );
-
+
if ( ! isString( index ) && isObject( index ) )
index = this._genIndexName( index );
-
+
var res = this._dbCommand( { deleteIndexes: this.getName(), index: index } );
this.resetIndexCache();
return res;
}
DBCollection.prototype.copyTo = function( newName ){
- return this.getDB().eval(
+ return this.getDB().eval(
function( collName , newName ){
var from = db[collName];
var to = db[newName];
to.ensureIndex( { _id : 1 } );
var count = 0;
-
+
var cursor = from.find();
while ( cursor.hasNext() ){
var o = cursor.next();
count++;
to.save( o );
}
-
+
return count;
- } , this.getName() , newName
+ } , this.getName() , newName
);
}
diff --git a/shell/db.js b/shell/db.js
index 832f2105823..3a2e64b24ad 100644
--- a/shell/db.js
+++ b/shell/db.js
@@ -121,6 +121,8 @@ DB.prototype.createCollection = function(name, opt) {
* @return Object returned has member ok set to true if operation succeeds, false otherwise.
*/
DB.prototype.dropDatabase = function() {
+ if ( arguments.length )
+ throw "dropDatabase doesn't take arguments";
return this._dbCommand( { dropDatabase: 1 } );
}
diff --git a/shell/dbshell.cpp b/shell/dbshell.cpp
index 3368262d490..80c9dd1d0c6 100644
--- a/shell/dbshell.cpp
+++ b/shell/dbshell.cpp
@@ -182,7 +182,7 @@ int main(int argc, char* argv[]) {
strcmp(str, "-h" ) == 0 ) {
cout
- << "usage: " << argv[0] << " [options] [db address] [file names]\n"
+ << "usage: " << argv[0] << " [options] [db address] [file names (ending in .js)]\n"
<< "db address can be:\n"
<< " foo = foo database on local machine\n"
<< " 192.169.0.5/foo = foo database on 192.168.0.5 machine\n"
@@ -195,7 +195,7 @@ int main(int argc, char* argv[]) {
<< " --port <port> - port to connect to\n"
<< " --nodb don't connect to mongo program on startup. No 'db address' arg expected.\n"
<< " --eval <script> evaluate javascript.\n"
- << "file names: a list of files to run. will exit after unless --shell is specified\n"
+ << "file names: a list of files to run. files have to end in .js will exit after unless --shell is specified\n"
;
return 0;
diff --git a/stdafx.h b/stdafx.h
index 7fadc345646..05db8cd7ac9 100644
--- a/stdafx.h
+++ b/stdafx.h
@@ -63,7 +63,6 @@ using namespace std;
#include <boost/archive/iterators/base64_from_binary.hpp>
#include <boost/archive/iterators/binary_from_base64.hpp>
#include <boost/archive/iterators/transform_width.hpp>
-#include <boost/date_time/posix_time/posix_time.hpp>
#include <boost/filesystem/convenience.hpp>
#include <boost/filesystem/operations.hpp>
#include <boost/program_options.hpp>
diff --git a/util/top.h b/util/top.h
index 15862cf37bc..ed319ece997 100644
--- a/util/top.h
+++ b/util/top.h
@@ -17,6 +17,8 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
+#include <boost/date_time/posix_time/posix_time.hpp>
+
namespace mongo {
// Records per namespace utilization of the mongod process.