-rw-r--r--  src/mongo/SConscript                             |  14
-rw-r--r--  src/mongo/db/cloner.h                            |   1
-rw-r--r--  src/mongo/db/db.cpp                              |   4
-rw-r--r--  src/mongo/db/fts/SConscript                      |   1
-rw-r--r--  src/mongo/db/fts/fts_index.cpp                   |  96
-rw-r--r--  src/mongo/db/fts/fts_index.h                     |  64
-rw-r--r--  src/mongo/db/fts/fts_search.h                    |   1
-rw-r--r--  src/mongo/db/geo/2d.cpp                          | 458
-rw-r--r--  src/mongo/db/geo/2d.h                            |  23
-rw-r--r--  src/mongo/db/geo/haystack.cpp                    | 269
-rw-r--r--  src/mongo/db/geo/s2index.cpp                     | 278
-rw-r--r--  src/mongo/db/hashindex.cpp                       | 112
-rw-r--r--  src/mongo/db/hashindex.h                         | 105
-rw-r--r--  src/mongo/db/index.cpp                           |  39
-rw-r--r--  src/mongo/db/index/2d_index_cursor.cpp           |  10
-rw-r--r--  src/mongo/db/index/catalog_hack.h                |  54
-rw-r--r--  src/mongo/db/index/hash_access_method.cpp        |  19
-rw-r--r--  src/mongo/db/index/hash_access_method.h          |   7
-rw-r--r--  src/mongo/db/index/hash_index_cursor.cpp         |   4
-rw-r--r--  src/mongo/db/index/s2_access_method.cpp          |   2
-rw-r--r--  src/mongo/db/index/s2_near_cursor.cpp            |   3
-rw-r--r--  src/mongo/db/index/s2_near_cursor.h              |   2
-rw-r--r--  src/mongo/db/index/s2_simple_cursor.cpp          |   4
-rw-r--r--  src/mongo/db/index_names.h                       |  22
-rw-r--r--  src/mongo/db/index_selection.cpp                 | 121
-rw-r--r--  src/mongo/db/indexkey.cpp                        | 391
-rw-r--r--  src/mongo/db/indexkey.h                          | 152
-rw-r--r--  src/mongo/db/pdfile.cpp                          |  12
-rw-r--r--  src/mongo/db/query_optimizer_internal.cpp        |  13
-rw-r--r--  src/mongo/db/query_plan.cpp                      |  26
-rw-r--r--  src/mongo/db/query_plan.h                        |   5
-rw-r--r--  src/mongo/db/query_runner.cpp                    |   1
-rw-r--r--  src/mongo/db/queryutil.cpp                       |  42
-rw-r--r--  src/mongo/db/queryutil.h                         |  13
-rw-r--r--  src/mongo/db/scanandorder.cpp                    |   2
-rw-r--r--  src/mongo/db/scanandorder.h                      |   7
-rw-r--r--  src/mongo/dbtests/cursortests.cpp                |  14
-rw-r--r--  src/mongo/dbtests/namespacetests.cpp             | 115
-rw-r--r--  src/mongo/dbtests/queryoptimizercursortests.cpp  |   2
-rw-r--r--  src/mongo/dbtests/queryoptimizertests.cpp        |   1
-rw-r--r--  src/mongo/dbtests/queryoptimizertests2.cpp       |   2
-rw-r--r--  src/mongo/dbtests/queryutiltests.cpp             |  53
-rw-r--r--  src/mongo/s/d_split.cpp                          |  10
43 files changed, 354 insertions, 2220 deletions
diff --git a/src/mongo/SConscript b/src/mongo/SConscript
index cd36650e456..a7873303039 100644
--- a/src/mongo/SConscript
+++ b/src/mongo/SConscript
@@ -269,6 +269,7 @@ env.StaticLibrary("coredb", [
"db/dbcommands_generic.cpp",
"db/dbwebserver.cpp",
"db/index_names.cpp",
+ "db/index/btree_key_generator.cpp",
"db/keypattern.cpp",
"db/matcher.cpp",
"db/matcher/matcher.cpp",
@@ -319,8 +320,6 @@ env.StaticLibrary("coredb", [
coreServerFiles = [ "db/client_basic.cpp",
"util/net/miniwebserver.cpp",
- "db/indexkey.cpp",
- "db/index_selection.cpp",
"db/stats/counters.cpp",
"db/stats/service_stats.cpp",
]
@@ -376,7 +375,8 @@ if has_option( "asio" ):
env.StaticLibrary('index_set', [ 'db/index_set.cpp' ] )
-# mongod files - also files used in tools. present in dbtests, but not in mongos and not in client libs.
+# mongod files - also files used in tools. present in dbtests, but not in mongos and not in client
+# libs.
serverOnlyFiles = [ "db/curop.cpp",
"db/kill_current_op.cpp",
"db/memconcept.cpp",
@@ -431,13 +431,13 @@ serverOnlyFiles = [ "db/curop.cpp",
"db/prefetch.cpp",
"db/repl/write_concern.cpp",
"db/btreecursor.cpp",
+ "db/index_selection.cpp",
"db/index/2d_access_method.cpp",
"db/index/2d_index_cursor.cpp",
"db/index/btree_access_method.cpp",
"db/index/btree_based_builder.cpp",
"db/index/btree_index_cursor.cpp",
"db/index/btree_interface.cpp",
- "db/index/btree_key_generator.cpp",
"db/index/fts_access_method.cpp",
"db/index/hash_access_method.cpp",
"db/index/hash_index_cursor.cpp",
@@ -459,7 +459,6 @@ serverOnlyFiles = [ "db/curop.cpp",
"db/client.cpp",
"db/database.cpp",
"db/pdfile.cpp",
- "db/record.cpp",
"db/cursor.cpp",
"db/query_optimizer.cpp",
"db/query_optimizer_internal.cpp",
@@ -471,14 +470,12 @@ serverOnlyFiles = [ "db/curop.cpp",
"db/index_builder.cpp",
"db/index_update.cpp",
"db/index_rebuilder.cpp",
+ "db/record.cpp",
"db/scanandorder.cpp",
"db/explain.cpp",
- "db/geo/2d.cpp",
"db/geo/geonear.cpp",
"db/geo/haystack.cpp",
"db/geo/s2common.cpp",
- "db/geo/s2index.cpp",
- "db/hashindex.cpp",
"db/ops/count.cpp",
"db/ops/delete.cpp",
"db/ops/query.cpp",
@@ -489,6 +486,7 @@ serverOnlyFiles = [ "db/curop.cpp",
"db/dbcommands.cpp",
"db/compact.cpp",
"db/dbcommands_admin.cpp",
+ "db/indexkey.cpp",
# most commands are only for mongod
"db/commands/apply_ops.cpp",
diff --git a/src/mongo/db/cloner.h b/src/mongo/db/cloner.h
index 813725e38bb..400d8413753 100644
--- a/src/mongo/db/cloner.h
+++ b/src/mongo/db/cloner.h
@@ -23,7 +23,6 @@
namespace mongo {
struct CloneOptions;
- class IndexSpec;
class DBClientBase;
class DBClientCursor;
class Query;
diff --git a/src/mongo/db/db.cpp b/src/mongo/db/db.cpp
index 2c42fec7dea..1513796145b 100644
--- a/src/mongo/db/db.cpp
+++ b/src/mongo/db/db.cpp
@@ -368,8 +368,8 @@ namespace mongo {
for ( ; cursor && cursor->ok(); cursor->advance()) {
const BSONObj index = cursor->current();
const BSONObj key = index.getObjectField("key");
- const string plugin = IndexPlugin::findPluginName(key);
- if (IndexPlugin::existedBefore24(plugin))
+ const string plugin = KeyPattern::findPluginName(key);
+ if (IndexNames::existedBefore24(plugin))
continue;
log() << "Index " << index << " claims to be of type '" << plugin << "', "
diff --git a/src/mongo/db/fts/SConscript b/src/mongo/db/fts/SConscript
index 6534b73bdc1..7fc9298de9b 100644
--- a/src/mongo/db/fts/SConscript
+++ b/src/mongo/db/fts/SConscript
@@ -47,7 +47,6 @@ env.StaticLibrary( 'server_common', [
env.StaticLibrary('ftsmongod', [
'fts_command_mongod.cpp',
- 'fts_index.cpp',
'fts_search.cpp',
], LIBDEPS=["base","server_common"])
diff --git a/src/mongo/db/fts/fts_index.cpp b/src/mongo/db/fts/fts_index.cpp
deleted file mode 100644
index 0f6c3995fce..00000000000
--- a/src/mongo/db/fts/fts_index.cpp
+++ /dev/null
@@ -1,96 +0,0 @@
-// fts_index.cpp
-
-/**
-* Copyright (C) 2012 10gen Inc.
-*
-* This program is free software: you can redistribute it and/or modify
-* it under the terms of the GNU Affero General Public License, version 3,
-* as published by the Free Software Foundation.
-*
-* This program is distributed in the hope that it will be useful,
-* but WITHOUT ANY WARRANTY; without even the implied warranty of
-* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-* GNU Affero General Public License for more details.
-*
-* You should have received a copy of the GNU Affero General Public License
-* along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/
-
-#include "mongo/pch.h"
-
-#include "mongo/base/init.h"
-#include "mongo/db/client.h"
-#include "mongo/db/fts/fts_enabled.h"
-#include "mongo/db/fts/fts_index.h"
-#include "mongo/db/fts/fts_index_format.h"
-#include "mongo/util/mongoutils/str.h"
-#include "mongo/util/stringutils.h"
-#include "mongo/util/timer.h"
-#include "mongo/db/pdfile.h"
-
-namespace mongo {
-
- namespace fts {
-
- using namespace mongoutils;
-
- /*
- * extrapolates the weights vector
- * and extra information from the spec
- * @param plugin the index plugin for FTS
- * @param spec the index specification
- */
- FTSIndex::FTSIndex( const IndexPlugin* plugin, const IndexSpec* spec )
- : IndexType( plugin, spec ), _ftsSpec( spec->info ) {
- }
-
- void FTSIndex::getKeys( const BSONObj& obj, BSONObjSet& keys) const {
- FTSIndexFormat::getKeys( _ftsSpec, obj, &keys );
- }
-
-
- FTSIndexPlugin::FTSIndexPlugin() : IndexPlugin( INDEX_NAME ) {}
-
-
- /*
- * Adjusts spec by appending information relative to the
- * FTS Index (such as weights, index name, etc)
- * @param spec, specification object
- *
- */
- BSONObj FTSIndexPlugin::adjustIndexSpec( const BSONObj& spec ) const {
- StringData desc = cc().desc();
- if ( desc.find( "conn" ) == 0 ) {
- // this is to make sure we only complain for users
- // if you do get a text index created an a primary
- // want it to index on the secondary as well
- massert( 16633, "text search not enabled", isTextSearchEnabled() );
- }
- return FTSSpec::fixSpec( spec );
- }
-
- /*
- * Generates an FTSIndex with a spec and this plugin
- * @param spec, specification to be used
- */
- IndexType* FTSIndexPlugin::generate( const IndexSpec* spec ) const {
- return new FTSIndex( this, spec );
- }
-
- void FTSIndexPlugin::postBuildHook( const IndexSpec& spec ) const {
- string ns = spec.getDetails()->parentNS();
- NamespaceDetails* nsd = nsdetails( ns );
- if ( nsd->setUserFlag( NamespaceDetails::Flag_UsePowerOf2Sizes ) ) {
- nsd->syncUserFlags( ns );
- }
- }
-
- FTSIndexPlugin* ftsPlugin;
- MONGO_INITIALIZER(FTSIndexPlugin)(InitializerContext* context) {
- ftsPlugin = new FTSIndexPlugin();
- return Status::OK();
- }
-
- }
-
-}
diff --git a/src/mongo/db/fts/fts_index.h b/src/mongo/db/fts/fts_index.h
deleted file mode 100644
index a6a1378672f..00000000000
--- a/src/mongo/db/fts/fts_index.h
+++ /dev/null
@@ -1,64 +0,0 @@
-// fts_index.h
-
-/**
-* Copyright (C) 2012 10gen Inc.
-*
-* This program is free software: you can redistribute it and/or modify
-* it under the terms of the GNU Affero General Public License, version 3,
-* as published by the Free Software Foundation.
-*
-* This program is distributed in the hope that it will be useful,
-* but WITHOUT ANY WARRANTY; without even the implied warranty of
-* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-* GNU Affero General Public License for more details.
-*
-* You should have received a copy of the GNU Affero General Public License
-* along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/
-
-#pragma once
-
-#include <map>
-#include <vector>
-
-#include "mongo/db/fts/fts_spec.h"
-#include "mongo/db/fts/fts_util.h"
-#include "mongo/db/fts/stemmer.h"
-#include "mongo/db/fts/stop_words.h"
-#include "mongo/db/fts/tokenizer.h"
-#include "mongo/db/index.h"
-
-namespace mongo {
-
- namespace fts {
-
- class FTSIndex : public IndexType {
- public:
-
- // index constructor, called when user enters ensureIndex command with fts flag
- FTSIndex(const IndexPlugin *plugin, const IndexSpec* spec);
-
- void getKeys( const BSONObj& obj, BSONObjSet& keys) const;
-
- const FTSSpec& getFtsSpec() const { return _ftsSpec; }
-
- private:
-
- FTSSpec _ftsSpec;
- };
-
-
- class FTSIndexPlugin : public IndexPlugin {
- public:
- FTSIndexPlugin();
-
- IndexType* generate( const IndexSpec* spec ) const;
-
- BSONObj adjustIndexSpec( const BSONObj& spec ) const;
-
- void postBuildHook( const IndexSpec& spec ) const;
-
- };
-
- } //namespace fts
-} //namespace mongo
diff --git a/src/mongo/db/fts/fts_search.h b/src/mongo/db/fts/fts_search.h
index 6d6c4c09f5f..9bc2ede77c0 100644
--- a/src/mongo/db/fts/fts_search.h
+++ b/src/mongo/db/fts/fts_search.h
@@ -24,7 +24,6 @@
#include <queue>
#include "mongo/base/disallow_copying.h"
-#include "mongo/db/fts/fts_index.h"
#include "mongo/db/fts/fts_matcher.h"
#include "mongo/db/fts/fts_query.h"
#include "mongo/db/fts/fts_util.h"
diff --git a/src/mongo/db/geo/2d.cpp b/src/mongo/db/geo/2d.cpp
deleted file mode 100644
index 21ecfac92ae..00000000000
--- a/src/mongo/db/geo/2d.cpp
+++ /dev/null
@@ -1,458 +0,0 @@
-/**
-* Copyright (C) 2008 10gen Inc.
-*
-* This program is free software: you can redistribute it and/or modify
-* it under the terms of the GNU Affero General Public License, version 3,
-* as published by the Free Software Foundation.
-*
-* This program is distributed in the hope that it will be useful,
-* but WITHOUT ANY WARRANTY; without even the implied warranty of
-* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-* GNU Affero General Public License for more details.
-*
-* You should have received a copy of the GNU Affero General Public License
-* along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/
-
-#include "pch.h"
-
-#include <vector>
-
-#include "mongo/db/auth/action_set.h"
-#include "mongo/db/auth/action_type.h"
-#include "mongo/db/auth/privilege.h"
-#include "mongo/db/namespace-inl.h"
-#include "mongo/db/jsobj.h"
-#include "mongo/db/index.h"
-#include "mongo/util/startup_test.h"
-#include "mongo/db/commands.h"
-#include "mongo/db/pdfile.h"
-#include "mongo/db/btreecursor.h"
-#include "mongo/db/curop-inl.h"
-#include "mongo/db/matcher.h"
-#include "mongo/db/queryutil.h"
-#include "mongo/db/geo/core.h"
-#include "mongo/db/geo/geonear.h"
-#include "mongo/db/geo/hash.h"
-#include "mongo/db/geo/shapes.h"
-#include "mongo/util/timer.h"
-
-namespace mongo {
-
- enum GeoDistType {
- GEO_PLAIN,
- GEO_SPHERE
- };
-
- const string GEO2DNAME = "2d";
-
- class Geo2dType : public IndexType {
- public:
- virtual ~Geo2dType() { }
-
- Geo2dType(const IndexPlugin *plugin, const IndexSpec* spec) : IndexType(plugin, spec) {
- BSONObjIterator i(spec->keyPattern);
- while (i.more()) {
- BSONElement e = i.next();
- if (e.type() == String && GEO2DNAME == e.valuestr()) {
- uassert(13022, "can't have 2 geo field", _geo.size() == 0);
- uassert(13023, "2d has to be first in index", _other.size() == 0);
- _geo = e.fieldName();
- } else {
- int order = 1;
- if (e.isNumber()) {
- order = static_cast<int>(e.Number());
- }
- _other.push_back(make_pair(e.fieldName(), order));
- }
- }
- uassert(13024, "no geo field specified", _geo.size());
-
- double bits = configValueWithDefault(spec, "bits", 26); // for lat/long, ~ 1ft
- uassert(13028, "bits in geo index must be between 1 and 32", bits > 0 && bits <= 32);
-
- GeoHashConverter::Parameters params;
- params.bits = static_cast<unsigned>(bits);
- params.max = configValueWithDefault(spec, "max", 180.0);
- params.min = configValueWithDefault(spec, "min", -180.0);
- double numBuckets = (1024 * 1024 * 1024 * 4.0);
- params.scaling = numBuckets / (params.max - params.min);
-
- _geoHashConverter.reset(new GeoHashConverter(params));
- }
-
- // XXX: what does this do
- virtual BSONObj fixKey(const BSONObj& in) {
- if (in.firstElement().type() == BinData)
- return in;
-
- BSONObjBuilder b(in.objsize() + 16);
-
- if (in.firstElement().isABSONObj())
- _geoHashConverter->hash(in.firstElement().embeddedObject()).appendToBuilder(&b, "");
- else if (in.firstElement().type() == String)
- GeoHash(in.firstElement().valuestr()).appendToBuilder(&b, "");
- else if (in.firstElement().type() == RegEx)
- GeoHash(in.firstElement().regex()).appendToBuilder(&b, "");
- else
- return in;
-
- BSONObjIterator i(in);
- i.next();
- while (i.more())
- b.append(i.next());
- return b.obj();
- }
-
- /** Finds the key objects to put in an index */
- virtual void getKeys(const BSONObj& obj, BSONObjSet& keys) const {
- getKeys(obj, &keys, NULL);
- }
-
- /** Finds all locations in a geo-indexed object */
- // TODO: Can we just return references to the locs, if they won't change?
- void getKeys(const BSONObj& obj, vector<BSONObj>& locs) const {
- getKeys(obj, NULL, &locs);
- }
-
- /** Finds the key objects and/or locations for a geo-indexed object */
- void getKeys(const BSONObj &obj, BSONObjSet* keys, vector<BSONObj>* locs) const {
- BSONElementMSet bSet;
-
- // Get all the nested location fields, but don't return individual elements from
- // the last array, if it exists.
- obj.getFieldsDotted(_geo.c_str(), bSet, false);
-
- if (bSet.empty())
- return;
-
- for (BSONElementMSet::iterator setI = bSet.begin(); setI != bSet.end(); ++setI) {
- BSONElement geo = *setI;
-
- GEODEBUG("Element " << geo << " found for query " << _geo.c_str());
-
- if (geo.eoo() || !geo.isABSONObj())
- continue;
-
- //
- // Grammar for location lookup:
- // locs ::= [loc,loc,...,loc]|{<k>:loc,<k>:loc,...,<k>:loc}|loc
- // loc ::= { <k1> : #, <k2> : # }|[#, #]|{}
- //
- // Empty locations are ignored, preserving single-location semantics
- //
-
- BSONObj embed = geo.embeddedObject();
- if (embed.isEmpty())
- continue;
-
- // Differentiate between location arrays and locations
- // by seeing if the first element value is a number
- bool singleElement = embed.firstElement().isNumber();
-
- BSONObjIterator oi(embed);
-
- while (oi.more()) {
- BSONObj locObj;
-
- if (singleElement) {
- locObj = embed;
- } else {
- BSONElement locElement = oi.next();
-
- uassert(13654, str::stream() << "location object expected, location "
- "array not in correct format",
- locElement.isABSONObj());
-
- locObj = locElement.embeddedObject();
- if(locObj.isEmpty())
- continue;
- }
-
- BSONObjBuilder b(64);
-
- // Remember the actual location object if needed
- if (locs)
- locs->push_back(locObj);
-
- // Stop if we don't need to get anything but location objects
- if (!keys) {
- if (singleElement) break;
- else continue;
- }
-
- _geoHashConverter->hash(locObj, &obj).appendToBuilder(&b, "");
-
- // Go through all the other index keys
- for (vector<pair<string, int> >::const_iterator i = _other.begin();
- i != _other.end(); ++i) {
- // Get *all* fields for the index key
- BSONElementSet eSet;
- obj.getFieldsDotted(i->first, eSet);
-
- if (eSet.size() == 0)
- b.appendAs(_spec->missingField(), "");
- else if (eSet.size() == 1)
- b.appendAs(*(eSet.begin()), "");
- else {
- // If we have more than one key, store as an array of the objects
- BSONArrayBuilder aBuilder;
-
- for (BSONElementSet::iterator ei = eSet.begin(); ei != eSet.end();
- ++ei) {
- aBuilder.append(*ei);
- }
-
- b.append("", aBuilder.arr());
- }
- }
- keys->insert(b.obj());
- if(singleElement) break;
- }
- }
- }
-
- const IndexDetails* getDetails() const { return _spec->getDetails(); }
-
- const GeoHashConverter& getConverter() const { return *_geoHashConverter; }
-
- // XXX: make private with a getter
- string _geo;
- vector<pair<string, int> > _other;
- private:
- double configValueWithDefault(const IndexSpec* spec, const string& name, double def) {
- BSONElement e = spec->info[name];
- if (e.isNumber()) {
- return e.numberDouble();
- }
- return def;
- }
-
- scoped_ptr<GeoHashConverter> _geoHashConverter;
- };
-
- class Geo2dPlugin : public IndexPlugin {
- public:
- Geo2dPlugin() : IndexPlugin(GEO2DNAME) { }
-
- virtual IndexType* generate(const IndexSpec* spec) const {
- return new Geo2dType(this, spec);
- }
- } geo2dplugin;
-
- void __forceLinkGeoPlugin() {
- geo2dplugin.getName();
- }
-
- struct GeoUnitTest : public StartupTest {
- int round(double d) {
- return (int)(.5 + (d * 1000));
- }
-
-#define GEOHEQ(a,b) if (a.toString() != b){ cout << "[" << a.toString() << "] != [" << b << "]" << endl; verify(a == GeoHash(b)); }
-
- void run() {
- verify(!GeoHash::isBitSet(0, 0));
- verify(!GeoHash::isBitSet(0, 31));
- verify(GeoHash::isBitSet(1, 31));
-
- IndexSpec i(BSON("loc" << "2d"));
- Geo2dType g(&geo2dplugin, &i);
- const GeoHashConverter &conv = g.getConverter();
-
- {
- double x = 73.01212;
- double y = 41.352964;
- BSONObj in = BSON("x" << x << "y" << y);
- GeoHash h = conv.hash(in);
- BSONObj out = conv.unhashToBSONObj(h);
- verify(round(x) == round(out["x"].number()));
- verify(round(y) == round(out["y"].number()));
- verify(round(in["x"].number()) == round(out["x"].number()));
- verify(round(in["y"].number()) == round(out["y"].number()));
- }
- {
- double x = -73.01212;
- double y = 41.352964;
- BSONObj in = BSON("x" << x << "y" << y);
- GeoHash h = conv.hash(in);
- BSONObj out = conv.unhashToBSONObj(h);
- verify(round(x) == round(out["x"].number()));
- verify(round(y) == round(out["y"].number()));
- verify(round(in["x"].number()) == round(out["x"].number()));
- verify(round(in["y"].number()) == round(out["y"].number()));
- }
- {
- GeoHash h("0000");
- h.move(0, 1);
- GEOHEQ(h, "0001");
- h.move(0, -1);
- GEOHEQ(h, "0000");
-
- h = GeoHash("0001");
- h.move(0, 1);
- GEOHEQ(h, "0100");
- h.move(0, -1);
- GEOHEQ(h, "0001");
-
- h = GeoHash("0000");
- h.move(1, 0);
- GEOHEQ(h, "0010");
- }
- {
- Box b(5, 5, 2);
- verify("(5,5) -->> (7,7)" == b.toString());
- }
- {
- GeoHash a = conv.hash(1, 1);
- GeoHash b = conv.hash(4, 5);
- verify(5 == (int)(conv.distanceBetweenHashes(a, b)));
- a = conv.hash(50, 50);
- b = conv.hash(42, 44);
- verify(round(10) == round(conv.distanceBetweenHashes(a, b)));
- }
- {
- GeoHash x("0000");
- verify(0 == x.getHash());
- x = GeoHash(0, 1, 32);
- GEOHEQ(x, "0000000000000000000000000000000000000000000000000000000000000001")
-
- verify(GeoHash("1100").hasPrefix(GeoHash("11")));
- verify(!GeoHash("1000").hasPrefix(GeoHash("11")));
- }
- {
- GeoHash x("1010");
- GEOHEQ(x, "1010");
- GeoHash y = x + "01";
- GEOHEQ(y, "101001");
- }
- {
- GeoHash a = conv.hash(5, 5);
- GeoHash b = conv.hash(5, 7);
- GeoHash c = conv.hash(100, 100);
- BSONObj oa = a.wrap();
- BSONObj ob = b.wrap();
- BSONObj oc = c.wrap();
- verify(oa.woCompare(ob) < 0);
- verify(oa.woCompare(oc) < 0);
- }
- {
- GeoHash x("000000");
- x.move(-1, 0);
- GEOHEQ(x, "101010");
- x.move(1, -1);
- GEOHEQ(x, "010101");
- x.move(0, 1);
- GEOHEQ(x, "000000");
- }
- {
- GeoHash prefix("110011000000");
- GeoHash entry( "1100110000011100000111000001110000011100000111000001000000000000");
- verify(!entry.hasPrefix(prefix));
- entry = GeoHash("1100110000001100000111000001110000011100000111000001000000000000");
- verify(entry.toString().find(prefix.toString()) == 0);
- verify(entry.hasPrefix(GeoHash("1100")));
- verify(entry.hasPrefix(prefix));
- }
- {
- GeoHash a = conv.hash(50, 50);
- GeoHash b = conv.hash(48, 54);
- verify(round(4.47214) == round(conv.distanceBetweenHashes(a, b)));
- }
- {
- Box b(Point(29.762283, -95.364271), Point(29.764283000000002, -95.36227099999999));
- verify(b.inside(29.763, -95.363));
- verify(! b.inside(32.9570255, -96.1082497));
- verify(! b.inside(32.9570255, -96.1082497, .01));
- }
- {
- GeoHash a("11001111");
- verify(GeoHash("11") == a.commonPrefix(GeoHash("11")));
- verify(GeoHash("11") == a.commonPrefix(GeoHash("11110000")));
- }
- {
- int N = 10000;
-#if 0 // XXX: we want to make sure the two unhash versions both work, but private.
- {
- Timer t;
- for (int i = 0; i < N; i++) {
- unsigned x = (unsigned)rand();
- unsigned y = (unsigned)rand();
- GeoHash h(x, y);
- unsigned a, b;
- h.unhash(&a, &b);
- verify(a == x);
- verify(b == y);
- }
- //cout << "slow: " << t.millis() << endl;
- }
-#endif
- {
- Timer t;
- for (int i=0; i<N; i++) {
- unsigned x = (unsigned)rand();
- unsigned y = (unsigned)rand();
- GeoHash h(x, y);
- unsigned a, b;
- h.unhash(&a, &b);
- verify(a == x);
- verify(b == y);
- }
- //cout << "fast: " << t.millis() << endl;
- }
-
- }
-
- {
- // see http://en.wikipedia.org/wiki/Great-circle_distance#Worked_example
- {
- Point BNA (-86.67, 36.12);
- Point LAX (-118.40, 33.94);
-
- double dist1 = spheredist_deg(BNA, LAX);
- double dist2 = spheredist_deg(LAX, BNA);
-
- // target is 0.45306
- verify(0.45305 <= dist1 && dist1 <= 0.45307);
- verify(0.45305 <= dist2 && dist2 <= 0.45307);
- }
- {
- Point BNA (-1.5127, 0.6304);
- Point LAX (-2.0665, 0.5924);
-
- double dist1 = spheredist_rad(BNA, LAX);
- double dist2 = spheredist_rad(LAX, BNA);
-
- // target is 0.45306
- verify(0.45305 <= dist1 && dist1 <= 0.45307);
- verify(0.45305 <= dist2 && dist2 <= 0.45307);
- }
- {
- Point JFK (-73.77694444, 40.63861111);
- Point LAX (-118.40, 33.94);
-
- const double EARTH_RADIUS_KM = 6371;
- const double EARTH_RADIUS_MILES = EARTH_RADIUS_KM * 0.621371192;
- double dist = spheredist_deg(JFK, LAX) * EARTH_RADIUS_MILES;
- verify(dist > 2469 && dist < 2470);
- }
- {
- Point BNA (-86.67, 36.12);
- Point LAX (-118.40, 33.94);
- Point JFK (-73.77694444, 40.63861111);
- verify(spheredist_deg(BNA, BNA) < 1e-6);
- verify(spheredist_deg(LAX, LAX) < 1e-6);
- verify(spheredist_deg(JFK, JFK) < 1e-6);
-
- Point zero (0, 0);
- Point antizero (0,-180);
-
- // these were known to cause NaN
- verify(spheredist_deg(zero, zero) < 1e-6);
- verify(fabs(M_PI-spheredist_deg(zero, antizero)) < 1e-6);
- verify(fabs(M_PI-spheredist_deg(antizero, zero)) < 1e-6);
- }
- }
- }
- } geoUnitTest;
-}
diff --git a/src/mongo/db/geo/2d.h b/src/mongo/db/geo/2d.h
deleted file mode 100644
index d54e1f766c6..00000000000
--- a/src/mongo/db/geo/2d.h
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
-* Copyright (C) 2012 10gen Inc.
-*
-* This program is free software: you can redistribute it and/or modify
-* it under the terms of the GNU Affero General Public License, version 3,
-* as published by the Free Software Foundation.
-*
-* This program is distributed in the hope that it will be useful,
-* but WITHOUT ANY WARRANTY; without even the implied warranty of
-* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-* GNU Affero General Public License for more details.
-*
-* You should have received a copy of the GNU Affero General Public License
-* along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/
-
-namespace mongo {
- // We need cmdObj and parsedArgs so we can print a useful error msg
- // and pull other args out.
- bool run2DGeoNear(const IndexDetails &id, const BSONObj& cmdObj,
- const GeoNearArguments &parsedArgs, string& errmsg,
- BSONObjBuilder& result);
-} // namespace mongo
diff --git a/src/mongo/db/geo/haystack.cpp b/src/mongo/db/geo/haystack.cpp
index 6088d7c8f1e..84bb4daf4d2 100644
--- a/src/mongo/db/geo/haystack.cpp
+++ b/src/mongo/db/geo/haystack.cpp
@@ -1,8 +1,5 @@
-// XXX THIS FILE IS DEPRECATED. PLEASE DON'T MODIFY.
-// db/geo/haystack.cpp
-
/**
- * Copyright (C) 2008-2012 10gen Inc.
+ * Copyright (C) 2008-2013 10gen Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License, version 3,
@@ -17,8 +14,6 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-#include "pch.h"
-
#include <vector>
#include "mongo/db/auth/action_set.h"
@@ -28,6 +23,7 @@
#include "mongo/db/index/haystack_access_method.h"
#include "mongo/db/index/index_descriptor.h"
#include "mongo/db/index/index_access_method.h"
+#include "mongo/db/index_names.h"
#include "mongo/db/namespace-inl.h"
#include "mongo/db/jsobj.h"
#include "mongo/db/index.h"
@@ -42,268 +38,14 @@
#include "mongo/util/timer.h"
/**
- * Provides the geoHaystack index type and the command "geoSearch."
* Examines all documents in a given radius of a given point.
* Returns all documents that match a given search restriction.
* See http://dochub.mongodb.org/core/haystackindexes
*
* Use when you want to look for restaurants within 25 miles with a certain name.
- * Don't use when you want to find the closest open restaurants; see 2d.cpp for that.
+ * Don't use when you want to find the closest open restaurants.
*/
namespace mongo {
- static const string GEOSEARCHNAME = "geoHaystack";
-
- class GeoHaystackSearchHopper {
- public:
- /**
- * Constructed with a point, a max distance from that point, and a max number of
- * matched points to store.
- * @param n The centroid that we're searching
- * @param maxDistance The maximum distance to consider from that point
- * @param limit The maximum number of results to return
- * @param geoField Which field in the provided DiskLoc has the point to test.
- */
- GeoHaystackSearchHopper(const BSONObj& nearObj, double maxDistance, unsigned limit,
- const string& geoField)
- : _near(nearObj), _maxDistance(maxDistance), _limit(limit), _geoField(geoField) { }
-
- // Consider the point in loc, and keep it if it's within _maxDistance (and we have space for
- // it)
- void consider(const DiskLoc& loc) {
- if (limitReached()) return;
- Point p(loc.obj().getFieldDotted(_geoField));
- if (distance(_near, p) > _maxDistance)
- return;
- _locs.push_back(loc);
- }
-
- int appendResultsTo(BSONArrayBuilder* b) {
- for (unsigned i = 0; i <_locs.size(); i++)
- b->append(_locs[i].obj());
- return _locs.size();
- }
-
- // Have we stored as many points as we can?
- const bool limitReached() const {
- return _locs.size() >= _limit;
- }
- private:
- Point _near;
- double _maxDistance;
- unsigned _limit;
- const string _geoField;
- vector<DiskLoc> _locs;
- };
-
- /**
- * Provides the IndexType for geoSearch.
- * Maps (lat, lng) to the bucketSize-sided square bucket that contains it.
- * Usage:
- * db.foo.ensureIndex({ pos : "geoHaystack", type : 1 }, { bucketSize : 1 })
- * pos is the name of the field to be indexed that has lat/lng data in an array.
- * type is the name of the secondary field to be indexed.
- * bucketSize specifies the dimension of the square bucket for the data in pos.
- * ALL fields are mandatory.
- */
- class GeoHaystackSearchIndex : public IndexType {
- public:
- GeoHaystackSearchIndex(const IndexPlugin* plugin, const IndexSpec* spec)
- : IndexType(plugin, spec) {
-
- BSONElement e = spec->info["bucketSize"];
- uassert(13321, "need bucketSize", e.isNumber());
- _bucketSize = e.numberDouble();
- uassert(16455, "bucketSize cannot be zero", _bucketSize != 0.0);
-
- // Example:
- // db.foo.ensureIndex({ pos : "geoHaystack", type : 1 }, { bucketSize : 1 })
- BSONObjIterator i(spec->keyPattern);
- while (i.more()) {
- BSONElement e = i.next();
- if (e.type() == String && GEOSEARCHNAME == e.valuestr()) {
- uassert(13314, "can't have more than one geo field", _geoField.size() == 0);
- uassert(13315, "the geo field has to be first in index",
- _otherFields.size() == 0);
- _geoField = e.fieldName();
- } else {
- // TODO(hk): Do we want to do any checking on e.type and e.valuestr?
- uassert(13326, "geoSearch can only have 1 non-geo field for now",
- _otherFields.size() == 0);
- _otherFields.push_back(e.fieldName());
- }
- }
-
- uassert(13316, "no geo field specified", _geoField.size());
- // XXX: Fix documentation that says the other field is optional; code says it's mandatory.
- uassert(13317, "no non-geo fields specified", _otherFields.size());
- }
-
- void getKeys(const BSONObj &obj, BSONObjSet &keys) const {
- BSONElement loc = obj.getFieldDotted(_geoField);
- if (loc.eoo())
- return;
-
- uassert(13323, "latlng not an array", loc.isABSONObj());
- string root;
- {
- BSONObjIterator i(loc.Obj());
- BSONElement x = i.next();
- BSONElement y = i.next();
- root = makeString(hash(x), hash(y));
- }
-
- verify(_otherFields.size() == 1);
-
- BSONElementSet all;
-
- // This is getFieldsDotted (plural not singular) since the object we're indexing
- // may be an array.
- obj.getFieldsDotted(_otherFields[0], all);
-
- if (all.size() == 0) {
- // We're indexing a document that doesn't have the secondary non-geo field present.
- // XXX: do we want to add this even if all.size() > 0? result:empty search terms
- // match everything instead of only things w/empty search terms)
- addKey(root, BSONElement(), keys);
- } else {
- // Ex:If our secondary field is type: "foo" or type: {a:"foo", b:"bar"},
- // all.size()==1. We can query on the complete field.
- // Ex: If our secondary field is type: ["A", "B"] all.size()==2 and all has values
- // "A" and "B". The query looks for any of the fields in the array.
- for (BSONElementSet::iterator i = all.begin(); i != all.end(); ++i) {
- addKey(root, *i, keys);
- }
- }
- }
-
- void searchCommand(NamespaceDetails* nsd,
- const BSONObj& n /*near*/, double maxDistance, const BSONObj& search,
- BSONObjBuilder& result, unsigned limit) {
- Timer t;
-
- LOG(1) << "SEARCH near:" << n << " maxDistance:" << maxDistance
- << " search: " << search << endl;
- int x, y;
- {
- BSONObjIterator i(n);
- x = hash(i.next());
- y = hash(i.next());
- }
- int scale = static_cast<int>(ceil(maxDistance / _bucketSize));
-
- GeoHaystackSearchHopper hopper(n, maxDistance, limit, _geoField);
-
- long long btreeMatches = 0;
-
- // TODO(hk): Consider starting with a (or b)=0, then going to a=+-1, then a=+-2, etc.
- // Would want a HaystackKeyIterator or similar for this, but it'd be a nice
- // encapsulation allowing us to S2-ify this trivially/abstract the key details.
- for (int a = -scale; a <= scale && !hopper.limitReached(); ++a) {
- for (int b = -scale; b <= scale && !hopper.limitReached(); ++b) {
- BSONObjBuilder bb;
- bb.append("", makeString(x + a, y + b));
-
- for (unsigned i = 0; i < _otherFields.size(); i++) {
- // See if the non-geo field we're indexing on is in the provided search term.
- BSONElement e = search.getFieldDotted(_otherFields[i]);
- if (e.eoo())
- bb.appendNull("");
- else
- bb.appendAs(e, "");
- }
-
- BSONObj key = bb.obj();
-
- GEOQUADDEBUG("KEY: " << key);
-
- // TODO(hk): this keeps a set of all DiskLoc seen in this pass so that we don't
- // consider the element twice. Do we want to instead store a hash of the set?
- // Is this often big?
- set<DiskLoc> thisPass;
-
- // Lookup from key to key, inclusive.
- scoped_ptr<BtreeCursor> cursor(BtreeCursor::make(nsd,
- *getDetails(),
- key,
- key,
- true,
- 1));
- while (cursor->ok() && !hopper.limitReached()) {
- pair<set<DiskLoc>::iterator, bool> p = thisPass.insert(cursor->currLoc());
- // If a new element was inserted (haven't seen the DiskLoc before), p.second
- // is true.
- if (p.second) {
- hopper.consider(cursor->currLoc());
- GEOQUADDEBUG("\t" << cursor->current());
- btreeMatches++;
- }
- cursor->advance();
- }
- }
- }
-
- BSONArrayBuilder arr(result.subarrayStart("results"));
- int num = hopper.appendResultsTo(&arr);
- arr.done();
-
- {
- BSONObjBuilder b(result.subobjStart("stats"));
- b.append("time", t.millis());
- b.appendNumber("btreeMatches", btreeMatches);
- b.append("n", num);
- b.done();
- }
- }
-
- const IndexDetails* getDetails() const {
- return _spec->getDetails();
- }
- private:
- // TODO(hk): consider moving hash/unhash/makeString out
- int hash(const BSONElement& e) const {
- uassert(13322, "geo field is not a number", e.isNumber());
- return hash(e.numberDouble());
- }
-
- int hash(double d) const {
- d += 180;
- d /= _bucketSize;
- return static_cast<int>(d);
- }
-
- string makeString(int hashedX, int hashedY) const {
- stringstream ss;
- ss << hashedX << "_" << hashedY;
- return ss.str();
- }
-
- // Build a new BSONObj with root in it. If e is non-empty, append that to the key. Insert
- // the BSONObj into keys.
- void addKey(const string& root, const BSONElement& e, BSONObjSet& keys) const {
- BSONObjBuilder buf;
- buf.append("", root);
-
- if (e.eoo())
- buf.appendNull("");
- else
- buf.appendAs(e, "");
-
- keys.insert(buf.obj());
- }
-
- string _geoField;
- vector<string> _otherFields;
- double _bucketSize;
- };
-
- class GeoHaystackSearchIndexPlugin : public IndexPlugin {
- public:
- GeoHaystackSearchIndexPlugin() : IndexPlugin(GEOSEARCHNAME) { }
-
- virtual IndexType* generate(const IndexSpec* spec) const {
- return new GeoHaystackSearchIndex(this, spec);
- }
- } nameIndexPlugin;
class GeoHaystackSearchCommand : public Command {
public:
@@ -330,7 +72,7 @@ namespace mongo {
}
vector<int> idxs;
- nsd->findIndexByType(GEOSEARCHNAME, idxs);
+ nsd->findIndexByType(IndexNames::GEO_HAYSTACK, idxs);
if (idxs.size() == 0) {
errmsg = "no geoSearch index";
return false;
@@ -360,4 +102,5 @@ namespace mongo {
return 1;
}
} nameSearchCommand;
-}
+
+} // namespace mongo
diff --git a/src/mongo/db/geo/s2index.cpp b/src/mongo/db/geo/s2index.cpp
deleted file mode 100644
index 568a8db6145..00000000000
--- a/src/mongo/db/geo/s2index.cpp
+++ /dev/null
@@ -1,278 +0,0 @@
-// XXX THIS FILE IS DEPRECATED. PLEASE DON'T MODIFY.
-/**
-* Copyright (C) 2012 10gen Inc.
-*
-* This program is free software: you can redistribute it and/or modify
-* it under the terms of the GNU Affero General Public License, version 3,
-* as published by the Free Software Foundation.
-*
-* This program is distributed in the hope that it will be useful,
-* but WITHOUT ANY WARRANTY; without even the implied warranty of
-* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-* GNU Affero General Public License for more details.
-*
-* You should have received a copy of the GNU Affero General Public License
-* along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/
-
-#include "mongo/db/namespace-inl.h"
-#include "mongo/db/client.h"
-#include "mongo/db/curop.h"
-#include "mongo/db/jsobj.h"
-#include "mongo/db/index.h"
-#include "mongo/db/queryutil.h"
-#include "mongo/db/geo/geonear.h"
-#include "mongo/db/geo/geoparser.h"
-#include "mongo/db/geo/geoquery.h"
-#include "mongo/db/index/s2_common.h"
-#include "third_party/s2/s2.h"
-#include "third_party/s2/s2cell.h"
-#include "third_party/s2/s2polygon.h"
-#include "third_party/s2/s2polyline.h"
-#include "third_party/s2/s2regioncoverer.h"
-
-namespace {
- // Used in a handful of places in GeoSphere2DType below.
- static void keysFromRegion(S2RegionCoverer *coverer, const S2Region &region,
- vector<string> *out) {
- vector<S2CellId> covering;
- coverer->GetCovering(region, &covering);
- for (size_t i = 0; i < covering.size(); ++i) {
- out->push_back(covering[i].toString());
- }
- }
-} // namespace
-
-namespace mongo {
- class S2IndexType : public IndexType {
- public:
- // We keep track of what fields we've indexed and if they're geo or not.
- struct IndexedField {
- enum Type {
- GEO,
- LITERAL
- };
-
- Type type;
- string name;
- IndexedField(Type t, const string& n) : type(t), name(n) { }
- };
-
- S2IndexType(const string& geoIdxName, const IndexPlugin *plugin, const IndexSpec *spec,
- const S2IndexingParams &params) : IndexType(plugin, spec), _params(params) {
- int geoFields = 0;
- // Categorize the fields we're indexing and make sure we have a geo field.
- BSONObjIterator i(spec->keyPattern);
- while (i.more()) {
- BSONElement e = i.next();
- if (e.type() == String && geoIdxName == e.valuestr()) {
- _fields.push_back(IndexedField(IndexedField::GEO, e.fieldName()));
- ++geoFields;
- } else {
- _fields.push_back(IndexedField(IndexedField::LITERAL, e.fieldName()));
- }
- }
- uassert(16450, "Expect at least one geo field, spec=" + spec->keyPattern.toString(),
- geoFields >= 1);
- }
-
- virtual ~S2IndexType() { }
-
- void getKeys(const BSONObj& obj, BSONObjSet& keys) const {
- verify(_fields.size() >= 1);
-
- BSONObjSet keysToAdd;
- // We output keys in the same order as the fields we index.
- for (size_t i = 0; i < _fields.size(); ++i) {
- const IndexedField &field = _fields[i];
-
- // First, we get the keys that this field adds. Either they're added literally from
- // the value of the field, or they're transformed if the field is geo.
- BSONElementSet fieldElements;
- // false means Don't expand the last array, duh.
- obj.getFieldsDotted(field.name, fieldElements, false);
-
- BSONObjSet keysForThisField;
- if (IndexedField::GEO == field.type) {
- getGeoKeys(fieldElements, &keysForThisField);
- } else if (IndexedField::LITERAL == field.type) {
- getLiteralKeys(fieldElements, &keysForThisField);
- } else {
- verify(0);
- }
-
- // We expect there to be _spec->_missingField() present in the keys if data is
- // missing. So, this should be non-empty.
- verify(!keysForThisField.empty());
-
- // We take the Cartesian product of all of the keys. This requires that we have
- // some keys to take the Cartesian product with. If keysToAdd.empty(), we
- // initialize it.
- if (keysToAdd.empty()) {
- keysToAdd = keysForThisField;
- continue;
- }
-
- BSONObjSet updatedKeysToAdd;
- for (BSONObjSet::const_iterator it = keysToAdd.begin(); it != keysToAdd.end();
- ++it) {
- for (BSONObjSet::const_iterator newIt = keysForThisField.begin();
- newIt!= keysForThisField.end(); ++newIt) {
- BSONObjBuilder b;
- b.appendElements(*it);
- b.append(newIt->firstElement());
- updatedKeysToAdd.insert(b.obj());
- }
- }
- keysToAdd = updatedKeysToAdd;
- }
-
- if (keysToAdd.size() > _params.maxKeysPerInsert) {
- warning() << "insert of geo object generated lots of keys (" << keysToAdd.size()
- << ") consider creating larger buckets. obj="
- << obj;
- }
-
- for (BSONObjSet::const_iterator it = keysToAdd.begin(); it != keysToAdd.end(); ++it) {
- keys.insert(*it);
- }
- }
-
- const IndexDetails* getDetails() const { return _spec->getDetails(); }
-
- // These are used by the geoNear command. geoNear constructs its own cursor.
- const S2IndexingParams& getParams() const { return _params; }
- void getGeoFieldNames(vector<string> *out) const {
- for (size_t i = 0; i < _fields.size(); ++i) {
- if (IndexedField::GEO == _fields[i].type) {
- out->push_back(_fields[i].name);
- }
- }
- }
- private:
- // Get the index keys for elements that are GeoJSON.
- void getGeoKeys(const BSONElementSet &elements, BSONObjSet *out) const {
- S2RegionCoverer coverer;
- _params.configureCoverer(&coverer);
-
- // See here for GeoJSON format: geojson.org/geojson-spec.html
- for (BSONElementSet::iterator i = elements.begin(); i != elements.end(); ++i) {
- uassert(16700, "Can't parse geometry from element: " + i->toString(),
- i->isABSONObj());
- const BSONObj &obj = i->Obj();
-
- vector<string> cells;
- S2Polyline line;
- S2Cell point;
- // We only support GeoJSON polygons. Why?:
- // 1. we don't automagically do WGS84/flat -> WGS84, and
- // 2. the old polygon format must die.
- if (GeoParser::isGeoJSONPolygon(obj)) {
- S2Polygon polygon;
- GeoParser::parseGeoJSONPolygon(obj, &polygon);
- keysFromRegion(&coverer, polygon, &cells);
- } else if (GeoParser::parseLineString(obj, &line)) {
- keysFromRegion(&coverer, line, &cells);
- } else if (GeoParser::parsePoint(obj, &point)) {
- S2CellId parent(point.id().parent(_params.finestIndexedLevel));
- cells.push_back(parent.toString());
- } else {
- uasserted(16572, "Can't extract geo keys from object, malformed geometry?:"
- + obj.toString());
- }
- uassert(16673, "Unable to generate keys for (likely malformed) geometry: "
- + obj.toString(),
- cells.size() > 0);
-
- for (vector<string>::const_iterator it = cells.begin(); it != cells.end(); ++it) {
- BSONObjBuilder b;
- b.append("", *it);
- out->insert(b.obj());
- }
- }
-
- if (0 == out->size()) {
- BSONObjBuilder b;
- b.appendNull("");
- out->insert(b.obj());
- }
- }
-
- void getLiteralKeysArray(BSONObj obj, BSONObjSet *out) const {
- BSONObjIterator objIt(obj);
- if (!objIt.more()) {
- // Empty arrays are indexed as undefined.
- BSONObjBuilder b;
- b.appendUndefined("");
- out->insert(b.obj());
- } else {
- // Non-empty arrays are exploded.
- while (objIt.more()) {
- BSONObjBuilder b;
- b.appendAs(objIt.next(), "");
- out->insert(b.obj());
- }
- }
- }
-
- void getOneLiteralKey(BSONElement elt, BSONObjSet *out) const {
- if (Array == elt.type()) {
- getLiteralKeysArray(elt.Obj(), out);
- } else {
- // One thing, not an array, index as-is.
- BSONObjBuilder b;
- b.appendAs(elt, "");
- out->insert(b.obj());
- }
- }
-
- // elements is a non-geo field. Add the values literally, expanding arrays.
- void getLiteralKeys(const BSONElementSet &elements, BSONObjSet *out) const {
- if (0 == elements.size()) {
- // Missing fields are indexed as null.
- BSONObjBuilder b;
- b.appendNull("");
- out->insert(b.obj());
- } else {
- for (BSONElementSet::iterator i = elements.begin(); i != elements.end(); ++i) {
- getOneLiteralKey(*i, out);
- }
- }
- }
-
- vector<IndexedField> _fields;
- S2IndexingParams _params;
- };
-
- static const string SPHERE_2D_NAME = "2dsphere";
- class S2IndexPlugin : public IndexPlugin {
- public:
- S2IndexPlugin() : IndexPlugin(SPHERE_2D_NAME) { }
-
- virtual IndexType* generate(const IndexSpec* spec) const {
- S2IndexingParams params;
- params.maxKeysPerInsert = 200;
- // This is advisory.
- params.maxCellsInCovering = 50;
- // Near distances are specified in meters...sometimes.
- params.radius = S2IndexingParams::kRadiusOfEarthInMeters;
- // These are not advisory.
- params.finestIndexedLevel = configValueWithDefault(spec, "finestIndexedLevel",
- S2::kAvgEdge.GetClosestLevel(500.0 / params.radius));
- params.coarsestIndexedLevel = configValueWithDefault(spec, "coarsestIndexedLevel",
- S2::kAvgEdge.GetClosestLevel(100 * 1000.0 / params.radius));
- uassert(16687, "coarsestIndexedLevel must be >= 0", params.coarsestIndexedLevel >= 0);
- uassert(16688, "finestIndexedLevel must be <= 30", params.finestIndexedLevel <= 30);
- uassert(16689, "finestIndexedLevel must be >= coarsestIndexedLevel",
- params.finestIndexedLevel >= params.coarsestIndexedLevel);
- return new S2IndexType(SPHERE_2D_NAME, this, spec, params);
- }
-
- int configValueWithDefault(const IndexSpec* spec, const string& name, int def) const {
- BSONElement e = spec->info[name];
- if (e.isNumber()) { return e.numberInt(); }
- return def;
- }
- } S2IndexPluginS2D;
-
-} // namespace mongo
diff --git a/src/mongo/db/hashindex.cpp b/src/mongo/db/hashindex.cpp
deleted file mode 100644
index a658ba1467f..00000000000
--- a/src/mongo/db/hashindex.cpp
+++ /dev/null
@@ -1,112 +0,0 @@
-// XXX THIS FILE IS DEPRECATED. PLEASE DON'T MODIFY.
-
-// mongo/db/hashindex.cpp
-
-/**
-* Copyright (C) 2012 10gen Inc.
-*
-* This program is free software: you can redistribute it and/or modify
-* it under the terms of the GNU Affero General Public License, version 3,
-* as published by the Free Software Foundation.
-*
-* This program is distributed in the hope that it will be useful,
-* but WITHOUT ANY WARRANTY; without even the implied warranty of
-* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-* GNU Affero General Public License for more details.
-*
-* You should have received a copy of the GNU Affero General Public License
-* along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/
-
-#include "mongo/db/hashindex.h"
-
-#include "mongo/db/btreecursor.h"
-#include "mongo/db/json.h"
-#include "mongo/db/queryutil.h"
-
-namespace mongo {
-
- const string HashedIndexType::HASHED_INDEX_TYPE_IDENTIFIER = "hashed";
-
- HashedIndexType::HashedIndexType( const IndexPlugin* plugin , const IndexSpec* spec ) :
- IndexType( plugin , spec ) , _keyPattern( spec->keyPattern ) {
-
- //change these if single-field limitation lifted later
- uassert( 16241 , "Currently only single field hashed index supported." ,
- _keyPattern.toBSON().nFields() == 1 );
- uassert( 16242 , "Currently hashed indexes cannot guarantee uniqueness. Use a regular index." ,
- ! (spec->info).getField("unique").booleanSafe() );
-
- //Default _seed to 0 if "seed" is not included in the index spec
- //or if the value of "seed" is not a number
- _seed = (spec->info).getField("seed").numberInt();
-
- //Default _isSparse to false if "sparse" is not included in the index spec
- //or if the value of "sparse" is not a boolean
- _isSparse = (spec->info).getField("sparse").booleanSafe();
-
- //In case we have hashed indexes based on other hash functions in
- //the future, we store a hashVersion number. If hashVersion changes,
- // "makeSingleKey" will need to change accordingly.
- //Defaults to 0 if "hashVersion" is not included in the index spec
- //or if the value of "hashversion" is not a number
- _hashVersion = (spec->info).getField("hashVersion").numberInt();
-
- //Get the hashfield name
- BSONElement firstElt = spec->keyPattern.firstElement();
- massert( 16243 , "error: no hashed index field" ,
- firstElt.str().compare( HASHED_INDEX_TYPE_IDENTIFIER ) == 0 );
- _hashedField = firstElt.fieldName();
-
- // Explicit null valued fields and missing fields are both represented in hashed indexes
- // using the hash value of the null BSONElement. This is partly for historical reasons
- // (hash of null was used in the initial release of hashed indexes and changing would alter
- // the data format). Additionally, in certain places the hashed index code and the index
- // bound calculation code assume null and missing are indexed identically.
- BSONObj nullObj = BSON( "" << BSONNULL );
- _missingKey = BSON( "" << makeSingleKey( nullObj.firstElement(), _seed, _hashVersion ) );
- }
-
- HashedIndexType::~HashedIndexType() { }
-
- void HashedIndexType::getKeys( const BSONObj &obj, BSONObjSet &keys ) const {
- string hashedFieldCopy = string( _hashedField );
- const char* hashedFieldCopyPtr = hashedFieldCopy.c_str();
- BSONElement fieldVal = obj.getFieldDottedOrArray( hashedFieldCopyPtr );
-
- uassert( 16244 , "Error: hashed indexes do not currently support array values" , fieldVal.type() != Array );
-
- if ( ! fieldVal.eoo() ) {
- BSONObj key = BSON( "" << makeSingleKey( fieldVal , _seed , _hashVersion ) );
- keys.insert( key );
- }
- else if (! _isSparse ) {
- keys.insert( _missingKey.copy() );
- }
- }
-
- /* This class registers HASHED_INDEX_NAME in a global map of special index types
- * Using this pattern, any index with the pattern, {fieldname : HASHED_INDEX_NAME}
- * will be recognized as a HashedIndexType and the associated methods will be used.
- */
- class HashedIndexPlugin : public IndexPlugin {
- public:
-
- HashedIndexPlugin() : IndexPlugin( HashedIndexType::HASHED_INDEX_TYPE_IDENTIFIER ) {}
-
- virtual IndexType* generate( const IndexSpec* spec ) const {
- return new HashedIndexType( this , spec );
- }
-
- } hashedIndexPlugin;
-
-
- long long int HashedIndexType::makeSingleKey( const BSONElement& e ,
- HashSeed seed ,
- HashVersion v ) {
- massert( 16245 , "Only HashVersion 0 has been defined" , v == 0 );
- return BSONElementHasher::hash64( e , seed );
- }
-
-}
-
diff --git a/src/mongo/db/hashindex.h b/src/mongo/db/hashindex.h
deleted file mode 100644
index f7bfa4b4552..00000000000
--- a/src/mongo/db/hashindex.h
+++ /dev/null
@@ -1,105 +0,0 @@
-// XXX THIS FILE IS DEPRECATED. PLEASE DON'T MODIFY WITHOUT TALKING TO HK
-
-// hashindex.h
-
-/**
-* Copyright (C) 2012 10gen Inc.
-*
-* This program is free software: you can redistribute it and/or modify
-* it under the terms of the GNU Affero General Public License, version 3,
-* as published by the Free Software Foundation.
-*
-* This program is distributed in the hope that it will be useful,
-* but WITHOUT ANY WARRANTY; without even the implied warranty of
-* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-* GNU Affero General Public License for more details.
-*
-* You should have received a copy of the GNU Affero General Public License
-* along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/
-
-#include "mongo/db/hasher.h"
-#include "mongo/db/index.h"
-#include "mongo/db/keypattern.h"
-#include "mongo/db/matcher.h"
-#include "mongo/db/namespace-inl.h"
-#include "mongo/db/pdfile.h"
-
-namespace mongo {
-
- /* This is an index where the keys are hashes of a given field.
- *
- * Optional arguments:
- * "seed" : int (default = 0, a seed for the hash function)
- * "hashVersion : int (default = 0, determines which hash function to use)
- *
- * Example use in the mongo shell:
- * > db.foo.ensureIndex({a : "hashed"}, {seed : 3, hashVersion : 0})
- *
- * LIMITATION: Only works with a single field. The HashedIndexType
- * constructor uses uassert to ensure that the spec has the form
- * {<fieldname> : "hashed"}, and not, for example,
- * { a : "hashed" , b : 1}
- *
- * LIMITATION: Cannot be used as a unique index.
- * The HashedIndexType constructor uses uassert to ensure that
- * the spec does not contain {"unique" : true}
- *
- * LIMITATION: Cannot be used to index arrays.
- * The getKeys function uasserts that value being inserted
- * is not an array. This index will not be built if any
- * array values of the hashed field exist.
- *
- */
- class HashedIndexType : public IndexType{
- public:
-
- static const string HASHED_INDEX_TYPE_IDENTIFIER;
- typedef int HashVersion;
-
- /* Creates a new HashedIndex around a HashedIndexPlugin
- * and an IndexSpec. New HashedIndexTypes are created via
- * a factory method in the HashedIndexPlugin class.
- */
- HashedIndexType( const IndexPlugin* plugin , const IndexSpec* spec );
- virtual ~HashedIndexType();
-
- /* The input is "obj" which should have a field corresponding to the hashedfield.
- * The output is a BSONObj with a single BSONElement whose value is the hash
- * Eg if this is an index on "a" we have
- * obj is {a : 45} --> key becomes {"" : hash(45) }
- *
- * Limitation: arrays values are not currently supported. This function uasserts
- * that the value is not an array, and errors out in that case.
- */
- void getKeys( const BSONObj &obj, BSONObjSet &keys ) const;
-
- /* A field missing from a document is represented by the hash value of a null BSONElement.
- */
- BSONElement missingField() const { return _missingKey.firstElement(); }
-
- /* Takes a BSONElement, seed and hashVersion, and outputs the
- * 64-bit hash used for this index
- * E.g. if the element is {a : 3} this outputs v1-hash(3)
- * */
- static long long int makeSingleKey( const BSONElement& e ,
- HashSeed seed ,
- HashVersion v = 0 );
-
- /* Since the keys for this index are hashes, documents are not stored in order,
- * thus we will need to perform scanAndOrder whenever the "order" is non-empty.
- */
- bool scanAndOrderRequired( const BSONObj& query , const BSONObj& order ) const {
- return ! order.isEmpty();
- }
-
- private:
- string _hashedField;
- KeyPattern _keyPattern;
- HashSeed _seed; //defaults to zero if not in the IndexSpec
- HashVersion _hashVersion; //defaults to zero if not in the IndexSpec
- bool _isSparse;
- BSONObj _missingKey;
- };
-
-}
diff --git a/src/mongo/db/index.cpp b/src/mongo/db/index.cpp
index c5ff0cb80e0..3a9c2b1fa9f 100644
--- a/src/mongo/db/index.cpp
+++ b/src/mongo/db/index.cpp
@@ -26,6 +26,8 @@
#include "mongo/db/auth/authorization_manager.h"
#include "mongo/db/background.h"
#include "mongo/db/btree.h"
+#include "mongo/db/fts/fts_enabled.h"
+#include "mongo/db/fts/fts_spec.h"
#include "mongo/db/index/index_access_method.h"
#include "mongo/db/index/index_cursor.h"
#include "mongo/db/index/index_descriptor.h"
@@ -128,7 +130,7 @@ namespace mongo {
}
static bool needToUpgradeMinorVersion(const string& newPluginName) {
- if (IndexPlugin::existedBefore24(newPluginName))
+ if (IndexNames::existedBefore24(newPluginName))
return false;
DataFileHeader* dfh = cc().database()->getFile(0)->getHeader();
@@ -146,8 +148,8 @@ namespace mongo {
for ( ; cursor && cursor->ok(); cursor->advance()) {
const BSONObj index = cursor->current();
const BSONObj key = index.getObjectField("key");
- const string plugin = IndexPlugin::findPluginName(key);
- if (IndexPlugin::existedBefore24(plugin))
+ const string plugin = KeyPattern::findPluginName(key);
+ if (IndexNames::existedBefore24(plugin))
continue;
const string errmsg = str::stream()
@@ -243,13 +245,11 @@ namespace mongo {
return false;
}
- string pluginName = IndexPlugin::findPluginName( key );
- IndexPlugin * plugin = NULL;
+ string pluginName = KeyPattern::findPluginName( key );
if (pluginName.size()) {
- plugin = IndexPlugin::get(pluginName);
uassert(16734, str::stream() << "Unknown index plugin '" << pluginName << "' "
- << "in index "<< key
- , plugin);
+ << "in index "<< key,
+ IndexNames::isKnownName(pluginName));
if (needToUpgradeMinorVersion(pluginName))
upgradeMinorVersionOrAssert(pluginName);
@@ -257,8 +257,15 @@ namespace mongo {
{
BSONObj o = io;
- if ( plugin ) {
- o = plugin->adjustIndexSpec(o);
+ if (IndexNames::TEXT == pluginName || IndexNames::TEXT_INTERNAL == pluginName) {
+ StringData desc = cc().desc();
+ if ( desc.find( "conn" ) == 0 ) {
+ // this is to make sure we only complain for users
+ // if you do get a text index created an a primary
+ // want it to index on the secondary as well
+ massert(16808, "text search not enabled", fts::isTextSearchEnabled() );
+ }
+ o = fts::FTSSpec::fixSpec(o);
}
BSONObjBuilder b;
int v = DefaultIndexVersionNumber;
@@ -295,23 +302,17 @@ namespace mongo {
}
void IndexSpec::reset(const IndexDetails * details) {
- const DataFileHeader* dfh = cc().database()->getFile(0)->getHeader();
- IndexSpec::PluginRules rules = dfh->versionMinor == PDFILE_VERSION_MINOR_24_AND_NEWER
- ? IndexSpec::RulesFor24
- : IndexSpec::RulesFor22
- ;
-
_details = details;
- reset(details->info, rules);
+ reset(details->info);
}
- void IndexSpec::reset(const BSONObj& _info, PluginRules rules) {
+ void IndexSpec::reset(const BSONObj& _info) {
info = _info;
keyPattern = info["key"].embeddedObjectUserCheck();
if ( keyPattern.objsize() == 0 ) {
out() << info.toString() << endl;
verify(false);
}
- _init(rules);
+ _init();
}
}
diff --git a/src/mongo/db/index/2d_index_cursor.cpp b/src/mongo/db/index/2d_index_cursor.cpp
index 1c63db7ab46..48d7bf72c64 100644
--- a/src/mongo/db/index/2d_index_cursor.cpp
+++ b/src/mongo/db/index/2d_index_cursor.cpp
@@ -322,7 +322,7 @@ namespace mongo {
scoped_ptr<BtreeCursor> _cursor;
scoped_ptr<FieldRangeSet> _frs;
- scoped_ptr<IndexSpec> _spec;
+ BSONObj _keyPattern;
BSONObj key() { return _cursor->currKey(); }
@@ -390,11 +390,11 @@ namespace mongo {
}
BSONObj iSpec = bob.obj();
- min._spec.reset(new IndexSpec(iSpec));
- max._spec.reset(new IndexSpec(iSpec));
+ min._keyPattern = iSpec;
+ max._keyPattern = iSpec;
- shared_ptr<FieldRangeVector> frvMin(new FieldRangeVector(*min._frs, *min._spec, -1));
- shared_ptr<FieldRangeVector> frvMax(new FieldRangeVector(*max._frs, *max._spec, 1));
+ shared_ptr<FieldRangeVector> frvMin(new FieldRangeVector(*min._frs, min._keyPattern, -1));
+ shared_ptr<FieldRangeVector> frvMax(new FieldRangeVector(*max._frs, max._keyPattern, 1));
min._cursor.reset(BtreeCursor::make(nsdetails(descriptor->parentNS()),
descriptor->getOnDisk(), frvMin, 0, -1));
diff --git a/src/mongo/db/index/catalog_hack.h b/src/mongo/db/index/catalog_hack.h
index 17eaacf78e5..2b306b75462 100644
--- a/src/mongo/db/index/catalog_hack.h
+++ b/src/mongo/db/index/catalog_hack.h
@@ -27,6 +27,7 @@
#include "mongo/db/index/s2_access_method.h"
#include "mongo/db/index_names.h"
#include "mongo/db/keypattern.h"
+#include "mongo/db/pdfile.h"
namespace mongo {
@@ -35,12 +36,61 @@ namespace mongo {
*/
class CatalogHack {
public:
+ static bool shouldOverridePlugin(const BSONObj& keyPattern) {
+ string pluginName = KeyPattern::findPluginName(keyPattern);
+ bool known = IndexNames::isKnownName(pluginName);
+
+ if (NULL == cc().database()) {
+ return false;
+ }
+
+ const DataFileHeader* dfh = cc().database()->getFile(0)->getHeader();
+
+ if (dfh->versionMinor == PDFILE_VERSION_MINOR_24_AND_NEWER) {
+ // RulesFor24
+ // This assert will be triggered when downgrading from a future version that
+ // supports an index plugin unsupported by this version.
+ uassert(16736, str::stream() << "Invalid index type '" << pluginName << "' "
+ << "in index " << keyPattern,
+ known);
+ return false;
+ } else {
+ // RulesFor22
+ if (!known) {
+ log() << "warning: can't find plugin [" << pluginName << "]" << endl;
+ return true;
+ }
+
+ if (!IndexNames::existedBefore24(pluginName)) {
+ warning() << "Treating index " << keyPattern << " as ascending since "
+ << "it was created before 2.4 and '" << pluginName << "' "
+ << "was not a valid type at that time."
+ << endl;
+ return true;
+ } else {
+ return false;
+ }
+ }
+ }
+
+ static string findPluginName(const BSONObj& keyPattern) {
+ if (shouldOverridePlugin(keyPattern)) {
+ return "";
+ } else {
+ return KeyPattern::findPluginName(keyPattern);
+ }
+ }
+
static IndexDescriptor* getDescriptor(NamespaceDetails* nsd, int idxNo) {
IndexDetails& id = nsd->idx(idxNo);
return new IndexDescriptor(nsd, idxNo, &id, id.info.obj());
}
static BtreeBasedAccessMethod* getBtreeBasedIndex(IndexDescriptor* desc) {
+ if (shouldOverridePlugin(desc->keyPattern())) {
+ return new BtreeAccessMethod(desc);
+ }
+
string type = KeyPattern::findPluginName(desc->keyPattern());
if (IndexNames::HASHED == type) {
return new HashAccessMethod(desc);
@@ -62,6 +112,10 @@ namespace mongo {
}
static IndexAccessMethod* getIndex(IndexDescriptor* desc) {
+ if (shouldOverridePlugin(desc->keyPattern())) {
+ return new BtreeAccessMethod(desc);
+ }
+
string type = KeyPattern::findPluginName(desc->keyPattern());
if (IndexNames::HASHED == type) {
return new HashAccessMethod(desc);
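The shouldOverridePlugin() logic above gates plugin resolution on the data file's minor version: files already stamped 2.4-and-newer reject unknown plugin names outright, while 2.2-era files fall back to treating unknown names, or names introduced in 2.4, as plain ascending btree indexes. A minimal standalone sketch of that decision, with illustrative names and whitelists rather than the server's types:

    #include <set>
    #include <stdexcept>
    #include <string>

    enum class FileVersion { V22AndOlder, V24AndNewer };

    // Illustrative whitelists mirroring IndexNames::existedBefore24() and
    // IndexNames::isKnownName(); "" means a plain (non-plugin) index.
    static const std::set<std::string> kPre24Plugins = { "", "2d", "geoHaystack", "hashed" };
    static const std::set<std::string> kKnownPlugins = { "", "2d", "2dsphere", "geoHaystack",
                                                         "text", "hashed" };

    // Returns true when the index should be treated as a plain ascending btree.
    bool shouldTreatAsAscending(const std::string& pluginName, FileVersion version) {
        const bool known = kKnownPlugins.count(pluginName) > 0;
        if (version == FileVersion::V24AndNewer) {
            if (!known)
                throw std::runtime_error("Invalid index type '" + pluginName + "'");
            return false;  // trust the plugin name as written
        }
        // 2.2-era data files: the plugin is either unknown or did not exist when
        // the index was built, so fall back to ordinary ascending key handling.
        return !known || kPre24Plugins.count(pluginName) == 0;
    }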
diff --git a/src/mongo/db/index/hash_access_method.cpp b/src/mongo/db/index/hash_access_method.cpp
index d547f7fce1b..13a966fecf0 100644
--- a/src/mongo/db/index/hash_access_method.cpp
+++ b/src/mongo/db/index/hash_access_method.cpp
@@ -28,6 +28,20 @@ namespace mongo {
return BSONElementHasher::hash64(e, seed);
}
+ BSONObj HashAccessMethod::getMissingField(const IndexDetails& details) {
+ BSONObj infoObj = details.info.obj();
+ int hashVersion = infoObj["hashVersion"].numberInt();
+ HashSeed seed = infoObj["seed"].numberInt();
+
+ // Explicit null valued fields and missing fields are both represented in hashed indexes
+ // using the hash value of the null BSONElement. This is partly for historical reasons
+ // (hash of null was used in the initial release of hashed indexes and changing would alter
+ // the data format). Additionally, in certain places the hashed index code and the index
+ // bound calculation code assume null and missing are indexed identically.
+ BSONObj nullObj = BSON("" << BSONNULL);
+ return BSON("" << makeSingleKey(nullObj.firstElement(), seed, hashVersion));
+ }
+
HashAccessMethod::HashAccessMethod(IndexDescriptor* descriptor)
: BtreeBasedAccessMethod(descriptor) {
@@ -56,11 +70,6 @@ namespace mongo {
firstElt.str().compare(HASHED_INDEX_TYPE_IDENTIFIER) == 0);
_hashedField = firstElt.fieldName();
- // Explicit null valued fields and missing fields are both represented in hashed indexes
- // using the hash value of the null BSONElement. This is partly for historical reasons
- // (hash of null was used in the initial release of hashed indexes and changing would alter
- // the data format). Additionally, in certain places the hashed index code and the index
- // bound calculation code assume null and missing are indexed identically.
BSONObj nullObj = BSON("" << BSONNULL);
_missingKey = BSON("" << makeSingleKey(nullObj.firstElement(), _seed, _hashVersion));
}
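getMissingField() exists so that s/d_split.cpp can learn which key a hashed index stores for a document that lacks the indexed field: the same key as an explicit null, namely the hash of the null element under the index's seed. A toy illustration of that equivalence, with std::hash standing in for the server's element hasher:

    #include <cassert>
    #include <functional>
    #include <string>

    enum class FieldState { Present, ExplicitNull, Missing };

    // Both an explicit null and a missing field hash the null placeholder, so
    // they produce the same index key for a given seed.
    long long makeSingleKey(FieldState state, const std::string& value, long long seed) {
        const std::string toHash = (state == FieldState::Present) ? value : "<null>";
        return static_cast<long long>(std::hash<std::string>{}(toHash)) ^ seed;
    }

    int main() {
        const long long seed = 0x5eed;
        assert(makeSingleKey(FieldState::ExplicitNull, "", seed) ==
               makeSingleKey(FieldState::Missing, "", seed));
        return 0;
    }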
diff --git a/src/mongo/db/index/hash_access_method.h b/src/mongo/db/index/hash_access_method.h
index 5a49b69bfe7..19060cf3f96 100644
--- a/src/mongo/db/index/hash_access_method.h
+++ b/src/mongo/db/index/hash_access_method.h
@@ -43,6 +43,10 @@ namespace mongo {
return Status::OK();
}
+        // Our missing field differs from the default missing field; it needs to be
+        // exposed to s/d_split.cpp, which is the only caller.
+ static BSONObj getMissingField(const IndexDetails& details);
+
/**
* Hashing function used by both this class and the cursors we create.
*/
@@ -60,9 +64,6 @@ namespace mongo {
// _hashVersion defaults to zero.
int _hashVersion;
- // What key do we insert when the field is missing?
- // TODO: fix migration code to do the right thing.
- // TODO: see http://codereview.10gen.com/9497028/patch/3001/4007
BSONObj _missingKey;
};
diff --git a/src/mongo/db/index/hash_index_cursor.cpp b/src/mongo/db/index/hash_index_cursor.cpp
index 7909f5c843f..682ce9f8ca3 100644
--- a/src/mongo/db/index/hash_index_cursor.cpp
+++ b/src/mongo/db/index/hash_index_cursor.cpp
@@ -67,7 +67,6 @@ namespace mongo {
inObj.done();
BSONObj newQuery = newQueryBuilder.obj();
- // FieldRangeVector needs an IndexSpec so we make it one.
BSONObjBuilder specBuilder;
BSONObjIterator it(_descriptor->keyPattern());
while (it.more()) {
@@ -75,12 +74,11 @@ namespace mongo {
specBuilder.append(e.fieldName(), 1);
}
BSONObj spec = specBuilder.obj();
- IndexSpec specForFRV(spec);
//Use the point-intervals of the new query to create a Btree cursor
FieldRangeSet newfrs( "" , newQuery , true, true );
shared_ptr<FieldRangeVector> newVector(
- new FieldRangeVector( newfrs , specForFRV, 1 ) );
+ new FieldRangeVector( newfrs , spec, 1 ) );
_oldCursor.reset(
BtreeCursor::make(nsdetails(_descriptor->parentNS()),
diff --git a/src/mongo/db/index/s2_access_method.cpp b/src/mongo/db/index/s2_access_method.cpp
index 619775566c5..ca4fc60d2c8 100644
--- a/src/mongo/db/index/s2_access_method.cpp
+++ b/src/mongo/db/index/s2_access_method.cpp
@@ -110,7 +110,7 @@ namespace mongo {
getLiteralKeys(fieldElements, &keysForThisField);
}
- // We expect there to be _spec->_missingField() present in the keys if data is
+ // We expect there to be the missing field element present in the keys if data is
// missing. So, this should be non-empty.
verify(!keysForThisField.empty());
diff --git a/src/mongo/db/index/s2_near_cursor.cpp b/src/mongo/db/index/s2_near_cursor.cpp
index 7983ee4a915..780735e4b37 100644
--- a/src/mongo/db/index/s2_near_cursor.cpp
+++ b/src/mongo/db/index/s2_near_cursor.cpp
@@ -53,8 +53,7 @@ namespace mongo {
BSONElement e = specIt.next();
specBuilder.append(e.fieldName(), 1);
}
- BSONObj spec = specBuilder.obj();
- _specForFRV = IndexSpec(spec);
+ _specForFRV = specBuilder.obj();
specIt = BSONObjIterator(_descriptor->keyPattern());
while (specIt.more()) {
diff --git a/src/mongo/db/index/s2_near_cursor.h b/src/mongo/db/index/s2_near_cursor.h
index 860b92a3a16..5d922db18c1 100644
--- a/src/mongo/db/index/s2_near_cursor.h
+++ b/src/mongo/db/index/s2_near_cursor.h
@@ -117,7 +117,7 @@ namespace mongo {
S2IndexingParams _params;
// We also pass this to the FieldRangeVector ctor.
- IndexSpec _specForFRV;
+ BSONObj _specForFRV;
// Geo-related variables.
// What's the max distance (arc length) we're willing to look for results?
diff --git a/src/mongo/db/index/s2_simple_cursor.cpp b/src/mongo/db/index/s2_simple_cursor.cpp
index e6a6a4e9250..25c88aa8f3f 100644
--- a/src/mongo/db/index/s2_simple_cursor.cpp
+++ b/src/mongo/db/index/s2_simple_cursor.cpp
@@ -42,7 +42,6 @@ namespace mongo {
// false means we want to filter OUT geoFieldsToNuke, not filter to include only that.
_filteredQuery = query.filterFieldsUndotted(geoFieldsToNuke.obj(), false);
- // FieldRangeVector needs an IndexSpec so we make it one.
BSONObjBuilder specBuilder;
BSONObjIterator i(_descriptor->keyPattern());
while (i.more()) {
@@ -50,7 +49,6 @@ namespace mongo {
specBuilder.append(e.fieldName(), 1);
}
BSONObj spec = specBuilder.obj();
- IndexSpec specForFRV(spec);
BSONObj frsObj;
@@ -74,7 +72,7 @@ namespace mongo {
frsObj = frsObjBuilder.obj();
FieldRangeSet frs(_descriptor->parentNS().c_str(), frsObj, false, false);
- shared_ptr<FieldRangeVector> frv(new FieldRangeVector(frs, specForFRV, 1));
+ shared_ptr<FieldRangeVector> frv(new FieldRangeVector(frs, spec, 1));
_btreeCursor.reset(BtreeCursor::make(nsdetails(_descriptor->parentNS()),
_descriptor->getOnDisk(), frv, 0, 1));
next();
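The hashed and S2 cursor changes above share one step: they no longer build a throwaway IndexSpec, but they still rewrite the descriptor's key pattern with every field set to 1 before constructing a FieldRangeVector. A standalone sketch of that rewrite, with a vector of name/value pairs standing in for BSONObj:

    #include <string>
    #include <utility>
    #include <vector>

    // e.g. { {"a", "hashed"} } or { {"loc", "2dsphere"}, {"b", "1"} }
    using KeyPattern = std::vector<std::pair<std::string, std::string>>;

    // Keep every field name but force the direction to ascending (1), which is
    // all the range machinery needs to line ranges up with index fields.
    std::vector<std::pair<std::string, int>> allAscending(const KeyPattern& keyPattern) {
        std::vector<std::pair<std::string, int>> spec;
        spec.reserve(keyPattern.size());
        for (const auto& field : keyPattern) {
            spec.emplace_back(field.first, 1);
        }
        return spec;
    }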
diff --git a/src/mongo/db/index_names.h b/src/mongo/db/index_names.h
index db16ab0067a..c2eef144ad9 100644
--- a/src/mongo/db/index_names.h
+++ b/src/mongo/db/index_names.h
@@ -34,6 +34,28 @@ namespace mongo {
static const string TEXT;
static const string TEXT_INTERNAL;
static const string HASHED;
+
+ /**
+     * True if the name denotes a regular (non-plugin) index or a plugin that existed
+     * before 2.4. Such plugins are grandfathered in and allowed to exist in DBs with
+     * PDFILE_MINOR_VERSION_22_AND_OLDER.
+ */
+ static bool existedBefore24(const string& name) {
+ return name.empty()
+ || name == IndexNames::GEO_2D
+ || name == IndexNames::GEO_HAYSTACK
+ || name == IndexNames::HASHED;
+ }
+
+ static bool isKnownName(const string& name) {
+ return name.empty()
+ || name == IndexNames::GEO_2D
+ || name == IndexNames::GEO_2DSPHERE
+ || name == IndexNames::GEO_HAYSTACK
+ || name == IndexNames::TEXT
+ || name == IndexNames::TEXT_INTERNAL
+ || name == IndexNames::HASHED;
+ }
};
} // namespace mongo
diff --git a/src/mongo/db/index_selection.cpp b/src/mongo/db/index_selection.cpp
index deaa130d255..431d51f8e0a 100644
--- a/src/mongo/db/index_selection.cpp
+++ b/src/mongo/db/index_selection.cpp
@@ -16,6 +16,7 @@
#include "mongo/db/index_selection.h"
+#include "mongo/db/index/catalog_hack.h"
#include "mongo/db/index_names.h"
#include "mongo/db/jsobj.h"
#include "mongo/db/keypattern.h"
@@ -27,68 +28,10 @@ namespace mongo {
const FieldRangeSet& queryConstraints,
const BSONObj& order) {
- string type = KeyPattern::findPluginName(keyPattern);
+ string type = CatalogHack::findPluginName(keyPattern);
BSONObj query = queryConstraints.originalQuery();
- if (IndexNames::HASHED == type) {
- /* This index is only considered "HELPFUL" for a query
- * if it's the union of at least one equality constraint on the
- * hashed field. Otherwise it's considered USELESS.
- * Example queries (supposing the indexKey is {a : "hashed"}):
- * {a : 3} HELPFUL
- * {a : 3 , b : 3} HELPFUL
- * {a : {$in : [3,4]}} HELPFUL
- * {a : {$gte : 3, $lte : 3}} HELPFUL
- * {} USELESS
- * {b : 3} USELESS
- * {a : {$gt : 3}} USELESS
- */
- BSONElement firstElt = keyPattern.firstElement();
- if (queryConstraints.isPointIntervalSet(firstElt.fieldName())) {
- return HELPFUL;
- } else {
- return USELESS;
- }
- } else if (IndexNames::GEO_2DSPHERE == type) {
- BSONObjIterator i(keyPattern);
- while (i.more()) {
- BSONElement ie = i.next();
-
- if (ie.type() != String || IndexNames::GEO_2DSPHERE != ie.valuestr()) {
- continue;
- }
-
- BSONElement e = query.getFieldDotted(ie.fieldName());
- // Some locations are given to us as arrays. Sigh.
- if (Array == e.type()) { return HELPFUL; }
- if (Object != e.type()) { continue; }
- // getGtLtOp is horribly misnamed and really means get the operation.
- switch (e.embeddedObject().firstElement().getGtLtOp()) {
- case BSONObj::opNEAR:
- return OPTIMAL;
- case BSONObj::opWITHIN: {
- BSONElement elt = e.embeddedObject().firstElement();
- if (Object != elt.type()) { continue; }
- const char* fname = elt.embeddedObject().firstElement().fieldName();
- if (mongoutils::str::equals("$geometry", fname)
- || mongoutils::str::equals("$centerSphere", fname)) {
- return OPTIMAL;
- } else {
- return USELESS;
- }
- }
- case BSONObj::opGEO_INTERSECTS:
- return OPTIMAL;
- default:
- return USELESS;
- }
- }
- return USELESS;
- } else if (IndexNames::TEXT == type || IndexNames::TEXT_INTERNAL == type) {
- return USELESS;
- } else if (IndexNames::GEO_HAYSTACK == type) {
- return USELESS;
- } else if ("" == type) {
+ if ("" == type) {
// This is a quick first pass to determine the suitability of the index. It produces
            // some false positives (returns HELPFUL for some indexes which are not particularly helpful).
// When we return HELPFUL a more precise determination of utility is done by the query
@@ -160,6 +103,64 @@ namespace mongo {
default:
return USELESS;
}
+ } else if (IndexNames::HASHED == type) {
+ /* This index is only considered "HELPFUL" for a query
+ * if it's the union of at least one equality constraint on the
+ * hashed field. Otherwise it's considered USELESS.
+ * Example queries (supposing the indexKey is {a : "hashed"}):
+ * {a : 3} HELPFUL
+ * {a : 3 , b : 3} HELPFUL
+ * {a : {$in : [3,4]}} HELPFUL
+ * {a : {$gte : 3, $lte : 3}} HELPFUL
+ * {} USELESS
+ * {b : 3} USELESS
+ * {a : {$gt : 3}} USELESS
+ */
+ BSONElement firstElt = keyPattern.firstElement();
+ if (queryConstraints.isPointIntervalSet(firstElt.fieldName())) {
+ return HELPFUL;
+ } else {
+ return USELESS;
+ }
+ } else if (IndexNames::GEO_2DSPHERE == type) {
+ BSONObjIterator i(keyPattern);
+ while (i.more()) {
+ BSONElement ie = i.next();
+
+ if (ie.type() != String || IndexNames::GEO_2DSPHERE != ie.valuestr()) {
+ continue;
+ }
+
+ BSONElement e = query.getFieldDotted(ie.fieldName());
+ // Some locations are given to us as arrays. Sigh.
+ if (Array == e.type()) { return HELPFUL; }
+ if (Object != e.type()) { continue; }
+ // getGtLtOp is horribly misnamed and really means get the operation.
+ switch (e.embeddedObject().firstElement().getGtLtOp()) {
+ case BSONObj::opNEAR:
+ return OPTIMAL;
+ case BSONObj::opWITHIN: {
+ BSONElement elt = e.embeddedObject().firstElement();
+ if (Object != elt.type()) { continue; }
+ const char* fname = elt.embeddedObject().firstElement().fieldName();
+ if (mongoutils::str::equals("$geometry", fname)
+ || mongoutils::str::equals("$centerSphere", fname)) {
+ return OPTIMAL;
+ } else {
+ return USELESS;
+ }
+ }
+ case BSONObj::opGEO_INTERSECTS:
+ return OPTIMAL;
+ default:
+ return USELESS;
+ }
+ }
+ return USELESS;
+ } else if (IndexNames::TEXT == type || IndexNames::TEXT_INTERNAL == type) {
+ return USELESS;
+ } else if (IndexNames::GEO_HAYSTACK == type) {
+ return USELESS;
} else {
cout << "Can't find index for keypattern " << keyPattern << endl;
verify(0);
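For the hashed branch, the comment spells out the rule: only a point-interval (equality or $in) constraint on the hashed field makes the index HELPFUL; anything else, including range predicates, is USELESS. A toy classifier over simplified query constraints, not the server's FieldRangeSet API:

    #include <map>
    #include <string>

    enum Suitability { USELESS = 0, HELPFUL = 1, OPTIMAL = 2 };

    // True when the query constrains the field to a finite set of points,
    // e.g. {a: 3} or {a: {$in: [3, 4]}}; false for {a: {$gt: 3}} or no constraint.
    struct Constraint { bool pointIntervalSet; };

    Suitability hashedSuitability(const std::string& hashedField,
                                  const std::map<std::string, Constraint>& query) {
        auto it = query.find(hashedField);
        if (it != query.end() && it->second.pointIntervalSet) {
            return HELPFUL;
        }
        return USELESS;
    }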
diff --git a/src/mongo/db/indexkey.cpp b/src/mongo/db/indexkey.cpp
index b0203713ab4..56de9511ec7 100644
--- a/src/mongo/db/indexkey.cpp
+++ b/src/mongo/db/indexkey.cpp
@@ -25,6 +25,7 @@
#include "../util/text.h"
#include "mongo/db/client.h"
#include "mongo/db/database.h"
+#include "mongo/db/index/catalog_hack.h"
#include "mongo/db/pdfile.h"
#include "mongo/db/queryutil.h"
@@ -35,53 +36,7 @@ namespace mongo {
*/
const int DefaultIndexVersionNumber = 1;
- map<string,IndexPlugin*> * IndexPlugin::_plugins;
-
- IndexType::IndexType( const IndexPlugin * plugin , const IndexSpec * spec )
- : _plugin( plugin ) , _spec( spec ) {
-
- }
-
- IndexType::~IndexType() {
- }
-
- BSONElement IndexType::missingField() const {
- return _spec->_nullElt;
- }
-
- const BSONObj& IndexType::keyPattern() const {
- return _spec->keyPattern;
- }
-
- IndexPlugin::IndexPlugin( const string& name )
- : _name( name ) {
- if ( ! _plugins )
- _plugins = new map<string,IndexPlugin*>();
- (*_plugins)[name] = this;
- }
-
- string IndexPlugin::findPluginName( const BSONObj& keyPattern ) {
- string pluginName = "";
-
- BSONObjIterator i( keyPattern );
-
- while( i.more() ) {
- BSONElement e = i.next();
- if ( e.type() != String )
- continue;
-
- uassert( 13007 , "can only have 1 index plugin / bad index key pattern" , pluginName.size() == 0 || pluginName == e.String() );
- pluginName = e.String();
- }
-
- return pluginName;
- }
-
- int IndexType::compare( const BSONObj& l , const BSONObj& r ) const {
- return l.woCompare( r , _spec->keyPattern );
- }
-
- void IndexSpec::_init(PluginRules rules) {
+ void IndexSpec::_init() {
verify( keyPattern.objsize() );
// some basics
@@ -120,53 +75,14 @@ namespace mongo {
_undefinedObj = b.obj();
_undefinedElt = _undefinedObj.firstElement();
}
-
- {
- // handle plugins
- string pluginName = IndexPlugin::findPluginName( keyPattern );
- if ( pluginName.size() ) {
- IndexPlugin * plugin = IndexPlugin::get( pluginName );
-
- switch (rules) {
- case NoPlugins:
- uasserted(16735,
- str::stream()
- << "Attempting to use index type '" << pluginName << "' "
- << "where index types are not allowed (1 or -1 only).");
- break;
-
- case RulesFor22: {
- if ( ! plugin ) {
- log() << "warning: can't find plugin [" << pluginName << "]" << endl;
- }
-
- if (!IndexPlugin::existedBefore24(pluginName)) {
- warning() << "Treating index " << info << " as ascending since "
- << "it was created before 2.4 and '" << pluginName << "' "
- << "was not a valid type at that time."
- << endl;
-
- plugin = NULL;
- }
- break;
- }
- case RulesFor24:
- // This assert will be triggered when downgrading from a future version that
- // supports an index plugin unsupported by this version.
- uassert(16736, str::stream() << "Invalid index type '" << pluginName << "' "
- << "in index " << info
- , plugin);
- break;
- }
-
- if (plugin)
- _indexType.reset( plugin->generate( this ) );
- }
- }
_finishedInit = true;
}
+ string IndexSpec::getTypeName() const {
+ return CatalogHack::findPluginName(_details->keyPattern());
+ }
+
string IndexSpec::toString() const {
stringstream s;
s << "IndexSpec @ " << hex << this << dec << ", "
@@ -177,298 +93,7 @@ namespace mongo {
<< "Info: " << info;
return s.str();
}
-
- void assertParallelArrays( const char *first, const char *second ) {
- stringstream ss;
- ss << "cannot index parallel arrays [" << first << "] [" << second << "]";
- uasserted( ParallelArraysCode , ss.str() );
- }
-
- class KeyGeneratorV0 {
- public:
- KeyGeneratorV0( const IndexSpec &spec ) : _spec( spec ) {}
-
- void getKeys( const BSONObj &obj, BSONObjSet &keys ) const {
- if ( _spec._indexType.get() ) { //plugin (eg geo)
- _spec._indexType->getKeys( obj , keys );
- return;
- }
- vector<const char*> fieldNames( _spec._fieldNames );
- vector<BSONElement> fixed( _spec._fixed );
- _getKeys( fieldNames , fixed , obj, keys );
- if ( keys.empty() && ! _spec._sparse )
- keys.insert( _spec._nullKey );
- }
-
- private:
- void _getKeys( vector<const char*> fieldNames , vector<BSONElement> fixed , const BSONObj &obj, BSONObjSet &keys ) const {
- BSONElement arrElt;
- unsigned arrIdx = ~0;
- int numNotFound = 0;
-
- for( unsigned i = 0; i < fieldNames.size(); ++i ) {
- if ( *fieldNames[ i ] == '\0' )
- continue;
-
- BSONElement e = obj.getFieldDottedOrArray( fieldNames[ i ] );
-
- if ( e.eoo() ) {
- e = _spec._nullElt; // no matching field
- numNotFound++;
- }
-
- if ( e.type() != Array )
- fieldNames[ i ] = ""; // no matching field or non-array match
-
- if ( *fieldNames[ i ] == '\0' )
- fixed[ i ] = e; // no need for further object expansion (though array expansion still possible)
-
- if ( e.type() == Array && arrElt.eoo() ) { // we only expand arrays on a single path -- track the path here
- arrIdx = i;
- arrElt = e;
- }
-
- // enforce single array path here
- if ( e.type() == Array && e.rawdata() != arrElt.rawdata() ) {
- assertParallelArrays( e.fieldName(), arrElt.fieldName() );
- }
- }
-
- bool allFound = true; // have we found elements for all field names in the key spec?
- for( vector<const char*>::const_iterator i = fieldNames.begin(); i != fieldNames.end(); ++i ) {
- if ( **i != '\0' ) {
- allFound = false;
- break;
- }
- }
-
- if ( _spec._sparse && numNotFound == _spec._nFields ) {
- // we didn't find any fields
- // so we're not going to index this document
- return;
- }
-
- bool insertArrayNull = false;
-
- if ( allFound ) {
- if ( arrElt.eoo() ) {
- // no terminal array element to expand
- BSONObjBuilder b(_spec._sizeTracker);
- for( vector< BSONElement >::iterator i = fixed.begin(); i != fixed.end(); ++i )
- b.appendAs( *i, "" );
- keys.insert( b.obj() );
- }
- else {
- // terminal array element to expand, so generate all keys
- BSONObjIterator i( arrElt.embeddedObject() );
- if ( i.more() ) {
- while( i.more() ) {
- BSONObjBuilder b(_spec._sizeTracker);
- for( unsigned j = 0; j < fixed.size(); ++j ) {
- if ( j == arrIdx )
- b.appendAs( i.next(), "" );
- else
- b.appendAs( fixed[ j ], "" );
- }
- keys.insert( b.obj() );
- }
- }
- else if ( fixed.size() > 1 ) {
- insertArrayNull = true;
- }
- }
- }
- else {
- // nonterminal array element to expand, so recurse
- verify( !arrElt.eoo() );
- BSONObjIterator i( arrElt.embeddedObject() );
- if ( i.more() ) {
- while( i.more() ) {
- BSONElement e = i.next();
- if ( e.type() == Object ) {
- _getKeys( fieldNames, fixed, e.embeddedObject(), keys );
- }
- }
- }
- else {
- insertArrayNull = true;
- }
- }
-
- if ( insertArrayNull ) {
- // x : [] - need to insert undefined
- BSONObjBuilder b(_spec._sizeTracker);
- for( unsigned j = 0; j < fixed.size(); ++j ) {
- if ( j == arrIdx ) {
- b.appendUndefined( "" );
- }
- else {
- BSONElement e = fixed[j];
- if ( e.eoo() )
- b.appendNull( "" );
- else
- b.appendAs( e , "" );
- }
- }
- keys.insert( b.obj() );
- }
- }
-
- const IndexSpec &_spec;
- };
-
- class KeyGeneratorV1 {
- public:
- KeyGeneratorV1( const IndexSpec &spec ) : _spec( spec ) {}
-
- void getKeys( const BSONObj &obj, BSONObjSet &keys ) const {
- if ( _spec._indexType.get() ) { //plugin (eg geo)
- _spec._indexType->getKeys( obj , keys );
- return;
- }
- vector<const char*> fieldNames( _spec._fieldNames );
- vector<BSONElement> fixed( _spec._fixed );
- _getKeys( fieldNames , fixed , obj, keys );
- if ( keys.empty() && ! _spec._sparse )
- keys.insert( _spec._nullKey );
- }
-
- private:
- /**
- * @param arrayNestedArray - set if the returned element is an array nested directly within arr.
- */
- BSONElement extractNextElement( const BSONObj &obj, const BSONObj &arr, const char *&field, bool &arrayNestedArray ) const {
- string firstField = mongoutils::str::before( field, '.' );
- bool haveObjField = !obj.getField( firstField ).eoo();
- BSONElement arrField = arr.getField( firstField );
- bool haveArrField = !arrField.eoo();
-
- // An index component field name cannot exist in both a document array and one of that array's children.
- uassert( 15855,
- mongoutils::str::stream() <<
- "Ambiguous field name found in array (do not use numeric field names in "
- "embedded elements in an array), field: '" << arrField.fieldName() <<
- "' for array: " << arr,
- !haveObjField || !haveArrField );
-
- arrayNestedArray = false;
- if ( haveObjField ) {
- return obj.getFieldDottedOrArray( field );
- }
- else if ( haveArrField ) {
- if ( arrField.type() == Array ) {
- arrayNestedArray = true;
- }
- return arr.getFieldDottedOrArray( field );
- }
- return BSONElement();
- }
-
- void _getKeysArrEltFixed( vector<const char*> &fieldNames , vector<BSONElement> &fixed , const BSONElement &arrEntry, BSONObjSet &keys, int numNotFound, const BSONElement &arrObjElt, const set< unsigned > &arrIdxs, bool mayExpandArrayUnembedded ) const {
- // set up any terminal array values
- for( set<unsigned>::const_iterator j = arrIdxs.begin(); j != arrIdxs.end(); ++j ) {
- if ( *fieldNames[ *j ] == '\0' ) {
- fixed[ *j ] = mayExpandArrayUnembedded ? arrEntry : arrObjElt;
- }
- }
- // recurse
- _getKeys( fieldNames, fixed, ( arrEntry.type() == Object ) ? arrEntry.embeddedObject() : BSONObj(), keys, numNotFound, arrObjElt.embeddedObject() );
- }
-
- /**
- * @param fieldNames - fields to index, may be postfixes in recursive calls
- * @param fixed - values that have already been identified for their index fields
- * @param obj - object from which keys should be extracted, based on names in fieldNames
- * @param keys - set where index keys are written
- * @param numNotFound - number of index fields that have already been identified as missing
- * @param array - array from which keys should be extracted, based on names in fieldNames
- * If obj and array are both nonempty, obj will be one of the elements of array.
- */
- void _getKeys( vector<const char*> fieldNames , vector<BSONElement> fixed , const BSONObj &obj, BSONObjSet &keys, int numNotFound = 0, const BSONObj &array = BSONObj() ) const {
- BSONElement arrElt;
- set<unsigned> arrIdxs;
- bool mayExpandArrayUnembedded = true;
- for( unsigned i = 0; i < fieldNames.size(); ++i ) {
- if ( *fieldNames[ i ] == '\0' ) {
- continue;
- }
-
- bool arrayNestedArray;
- // Extract element matching fieldName[ i ] from object xor array.
- BSONElement e = extractNextElement( obj, array, fieldNames[ i ], arrayNestedArray );
-
- if ( e.eoo() ) {
- // if field not present, set to null
- fixed[ i ] = _spec._nullElt;
- // done expanding this field name
- fieldNames[ i ] = "";
- numNotFound++;
- }
- else if ( e.type() == Array ) {
- arrIdxs.insert( i );
- if ( arrElt.eoo() ) {
- // we only expand arrays on a single path -- track the path here
- arrElt = e;
- }
- else if ( e.rawdata() != arrElt.rawdata() ) {
- // enforce single array path here
- assertParallelArrays( e.fieldName(), arrElt.fieldName() );
- }
- if ( arrayNestedArray ) {
- mayExpandArrayUnembedded = false;
- }
- }
- else {
- // not an array - no need for further expansion
- fixed[ i ] = e;
- }
- }
-
- if ( arrElt.eoo() ) {
- // No array, so generate a single key.
- if ( _spec._sparse && numNotFound == _spec._nFields ) {
- return;
- }
- BSONObjBuilder b(_spec._sizeTracker);
- for( vector< BSONElement >::iterator i = fixed.begin(); i != fixed.end(); ++i ) {
- b.appendAs( *i, "" );
- }
- keys.insert( b.obj() );
- }
- else if ( arrElt.embeddedObject().firstElement().eoo() ) {
- // Empty array, so set matching fields to undefined.
- _getKeysArrEltFixed( fieldNames, fixed, _spec._undefinedElt, keys, numNotFound, arrElt, arrIdxs, true );
- }
- else {
- // Non empty array that can be expanded, so generate a key for each member.
- BSONObj arrObj = arrElt.embeddedObject();
- BSONObjIterator i( arrObj );
- while( i.more() ) {
- _getKeysArrEltFixed( fieldNames, fixed, i.next(), keys, numNotFound, arrElt, arrIdxs, mayExpandArrayUnembedded );
- }
- }
- }
-
- const IndexSpec &_spec;
- };
- void IndexSpec::getKeys( const BSONObj &obj, BSONObjSet &keys ) const {
- switch( indexVersion() ) {
- case 0: {
- KeyGeneratorV0 g( *this );
- g.getKeys( obj, keys );
- break;
- }
- case 1: {
- KeyGeneratorV1 g( *this );
- g.getKeys( obj, keys );
- break;
- }
- default:
- massert( 15869, "Invalid index version for key generation.", false );
- }
- }
-
int IndexSpec::indexVersion() const {
if ( !info.hasField( "v" ) ) {
return DefaultIndexVersionNumber;
@@ -476,8 +101,4 @@ namespace mongo {
return IndexDetails::versionForIndexObj( info );
}
- bool IndexType::scanAndOrderRequired( const BSONObj& query , const BSONObj& order ) const {
- return ! order.isEmpty();
- }
-
}
diff --git a/src/mongo/db/indexkey.h b/src/mongo/db/indexkey.h
index 2acd38e998d..7fed236db7e 100644
--- a/src/mongo/db/indexkey.h
+++ b/src/mongo/db/indexkey.h
@@ -22,6 +22,7 @@
#include "mongo/db/diskloc.h"
#include "mongo/db/index_names.h"
+#include "mongo/db/keypattern.h"
#include "mongo/db/jsobj.h"
namespace mongo {
@@ -32,126 +33,16 @@ namespace mongo {
class Cursor;
class IndexSpec;
- class IndexType; // TODO: this name sucks
- class IndexPlugin;
class IndexDetails;
class FieldRangeSet;
enum IndexSuitability { USELESS = 0 , HELPFUL = 1 , OPTIMAL = 2 };
- /**
- * this represents an instance of a index plugin
- * done this way so parsing, etc... can be cached
- * so if there is a FTS IndexPlugin, for each index using FTS
- * there will be 1 of these, and it can have things pre-parsed, etc...
- */
- class IndexType : boost::noncopyable {
- public:
- IndexType( const IndexPlugin * plugin , const IndexSpec * spec );
- virtual ~IndexType();
-
- virtual void getKeys( const BSONObj &obj, BSONObjSet &keys ) const = 0;
-
- /**
- * Returns the element placed in an index key when indexing a field absent from a document.
- * By default this is a null BSONElement.
- */
- virtual BSONElement missingField() const;
-
- /** optional op : changes query to match what's in the index */
- virtual BSONObj fixKey( const BSONObj& in ) { return in; }
-
- /** optional op : compare 2 objects with regards to this index */
- virtual int compare( const BSONObj& l , const BSONObj& r ) const;
-
- /** @return plugin */
- const IndexPlugin * getPlugin() const { return _plugin; }
-
- const BSONObj& keyPattern() const;
-
- virtual bool scanAndOrderRequired( const BSONObj& query , const BSONObj& order ) const ;
-
- protected:
- const IndexPlugin * _plugin;
- const IndexSpec * _spec;
- };
-
- /**
- * this represents a plugin
- * a plugin could be something like full text search, sparse index, etc...
- * 1 of these exists per type of index per server
- * 1 IndexType is created per index using this plugin
- */
- class IndexPlugin : boost::noncopyable {
- public:
- IndexPlugin( const string& name );
- virtual ~IndexPlugin() {}
-
- virtual IndexType* generate( const IndexSpec * spec ) const = 0;
-
- string getName() const { return _name; }
-
- /**
- * @return new keyPattern
- * if nothing changes, should return keyPattern
- */
- virtual BSONObj adjustIndexSpec( const BSONObj& spec ) const { return spec; }
-
- /**
- * Hook function to run after an index that uses this plugin is built.
- *
- * This will be called with an active write context (and lock) on the database.
- *
- * @param spec The IndexSpec of the newly built index.
- */
- virtual void postBuildHook( const IndexSpec& spec ) const { }
-
- // ------- static below -------
-
- static IndexPlugin* get( const string& name ) {
- if ( ! _plugins )
- return 0;
- map<string,IndexPlugin*>::iterator i = _plugins->find( name );
- if ( i == _plugins->end() )
- return 0;
- return i->second;
- }
-
- /**
- * @param keyPattern { x : "fts" }
- * @return "" or the name
- */
- static string findPluginName( const BSONObj& keyPattern );
-
- /**
- * True if is a regular (non-plugin) index or uses a plugin that existed before 2.4.
- * These plugins are grandfathered in and allowed to exist in DBs with
- * PDFILE_MINOR_VERSION_22_AND_OLDER
- */
- static bool existedBefore24(const string& name) {
- return name.empty()
- || name == IndexNames::GEO_2D
- || name == IndexNames::GEO_HAYSTACK
- || name == IndexNames::HASHED
- ;
- }
-
- private:
- string _name;
- static map<string,IndexPlugin*> * _plugins;
- };
-
/* precomputed details about an index, used for inserting keys on updates
stored/cached in NamespaceDetailsTransient, or can be used standalone
*/
class IndexSpec {
public:
- enum PluginRules {
- NoPlugins,
- RulesFor22, // if !IndexPlugin::existedBefore24() treat as ascending
- RulesFor24, // allow new plugins but error if unknown
- };
-
BSONObj keyPattern; // e.g., { name : 1 }
BSONObj info; // this is the same as IndexDetails::info.obj()
@@ -159,47 +50,26 @@ namespace mongo {
: _details(0) , _finishedInit(false) {
}
- explicit IndexSpec(const BSONObj& k, const BSONObj& m=BSONObj(),
- PluginRules rules=RulesFor24)
+ explicit IndexSpec(const BSONObj& k, const BSONObj& m=BSONObj())
: keyPattern(k) , info(m) , _details(0) , _finishedInit(false) {
- _init(rules);
+ _init();
}
/**
this is a DiscLoc of an IndexDetails info
should have a key field
*/
- explicit IndexSpec(const DiskLoc& loc, PluginRules rules=RulesFor24) {
- reset(loc, rules);
+ explicit IndexSpec(const DiskLoc& loc) {
+ reset(loc);
}
- void reset(const BSONObj& info, PluginRules rules=RulesFor24);
+ void reset(const BSONObj& info);
void reset(const IndexDetails * details); // determines rules based on pdfile version
- void reset(const DiskLoc& infoLoc, PluginRules rules=RulesFor24) {
- reset(infoLoc.obj(), rules);
+ void reset(const DiskLoc& infoLoc) {
+ reset(infoLoc.obj());
}
- void getKeys( const BSONObj &obj, BSONObjSet &keys ) const;
-
- /**
- * Returns the element placed in an index key when indexing a field absent from a document.
- * By default this is a null BSONElement.
- */
- BSONElement missingField() const {
- if ( _indexType.get() )
- return _indexType->missingField();
- return _nullElt;
- }
-
- string getTypeName() const {
- if ( _indexType.get() )
- return _indexType->getPlugin()->getName();
- return "";
- }
-
- IndexType* getType() const {
- return _indexType.get();
- }
+ string getTypeName() const;
const IndexDetails * getDetails() const {
return _details;
@@ -226,12 +96,10 @@ namespace mongo {
int _nFields; // number of fields in the index
bool _sparse; // if the index is sparse
- shared_ptr<IndexType> _indexType;
const IndexDetails * _details;
- void _init(PluginRules rules);
+ void _init();
- friend class IndexType;
friend class KeyGeneratorV0;
friend class KeyGeneratorV1;
public:
diff --git a/src/mongo/db/pdfile.cpp b/src/mongo/db/pdfile.cpp
index bda45e2047c..d592991a014 100644
--- a/src/mongo/db/pdfile.cpp
+++ b/src/mongo/db/pdfile.cpp
@@ -45,6 +45,7 @@ _ disallow system* manipulations from the database.
#include "mongo/db/db.h"
#include "mongo/db/dbhelpers.h"
#include "mongo/db/extsort.h"
+#include "mongo/db/index_names.h"
#include "mongo/db/index_update.h"
#include "mongo/db/index/catalog_hack.h"
#include "mongo/db/index/index_descriptor.h"
@@ -1530,12 +1531,13 @@ namespace mongo {
tableToIndex->addIndex(tabletoidxns.c_str());
getDur().writingInt(tableToIndex->indexBuildsInProgress) -= 1;
- IndexType* indexType = idx.getSpec().getType();
- const IndexPlugin *plugin = indexType ? indexType->getPlugin() : NULL;
- if (plugin) {
- plugin->postBuildHook( idx.getSpec() );
+ // If it's an FTS index, we want to set the power of 2 flag.
+ string pluginName = KeyPattern::findPluginName(idx.keyPattern());
+ if (IndexNames::TEXT == pluginName || IndexNames::TEXT_INTERNAL == pluginName) {
+ if (tableToIndex->setUserFlag(NamespaceDetails::Flag_UsePowerOf2Sizes)) {
+ tableToIndex->syncUserFlags(idx.parentNS());
+ }
}
-
}
catch (...) {
// Generally, this will be called as an exception from building the index bubbles up.
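The pdfile.cpp hunk replaces the generic postBuildHook with the one thing the text plugin's hook did: after a text index build, flag the collection to use power-of-2 record sizes and persist the user flags only if the flag was newly set. A minimal sketch of that set-then-sync-if-changed pattern; the struct is a stand-in, not NamespaceDetails:

    #include <cstdint>
    #include <iostream>

    struct CollectionMeta {
        uint32_t userFlags = 0;

        // Returns true only when the flag was not already set, so callers can
        // skip persisting the flags when nothing changed.
        bool setUserFlag(uint32_t flag) {
            if (userFlags & flag) return false;
            userFlags |= flag;
            return true;
        }
    };

    int main() {
        const uint32_t kUsePowerOf2Sizes = 1u << 0;
        CollectionMeta meta;
        if (meta.setUserFlag(kUsePowerOf2Sizes)) {
            std::cout << "flag newly set; sync user flags to disk\n";
        }
        if (!meta.setUserFlag(kUsePowerOf2Sizes)) {
            std::cout << "already set; nothing to sync\n";
        }
        return 0;
    }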
diff --git a/src/mongo/db/query_optimizer_internal.cpp b/src/mongo/db/query_optimizer_internal.cpp
index 0bed4fbdaf8..dcd7516b357 100644
--- a/src/mongo/db/query_optimizer_internal.cpp
+++ b/src/mongo/db/query_optimizer_internal.cpp
@@ -20,6 +20,7 @@
#include "mongo/client/dbclientinterface.h"
#include "mongo/db/db.h"
+#include "mongo/db/index/catalog_hack.h"
#include "mongo/db/index_selection.h"
#include "mongo/db/pagefault.h"
#include "mongo/db/parsed_query.h"
@@ -543,12 +544,13 @@ namespace mongo {
while( i.more() ) {
int j = i.pos();
IndexDetails& ii = i.next();
- const IndexSpec& spec = ii.getSpec();
- if (special.has(spec.getTypeName()) &&
- (USELESS != IndexSelection::isSuitableFor(spec.keyPattern,
+ BSONObj keyPattern = ii.keyPattern();
+ string pluginName = CatalogHack::findPluginName(keyPattern);
+ if (special.has(pluginName) &&
+ (USELESS != IndexSelection::isSuitableFor(keyPattern,
_qps.frsp().frsForIndex(d, j), _qps.order()))) {
uassert( 16330, "'special' query operator not allowed", _allowSpecial );
- _qps.setSinglePlan( newPlan( d, j, BSONObj(), BSONObj(), spec.getTypeName()));
+ _qps.setSinglePlan( newPlan( d, j, BSONObj(), BSONObj(), pluginName));
return true;
}
}
@@ -1495,7 +1497,8 @@ namespace mongo {
while( i.more() ) {
IndexDetails& ii = i.next();
if ( indexWorks( ii.keyPattern(), min.isEmpty() ? max : min, ret.first, ret.second ) ) {
- if ( ii.getSpec().getType() == 0 ) {
+            if ( ii.getSpec().getTypeName().empty() ) {
id = &ii;
keyPattern = ii.keyPattern();
break;
diff --git a/src/mongo/db/query_plan.cpp b/src/mongo/db/query_plan.cpp
index c3ee9d63cb5..6bb157369a6 100644
--- a/src/mongo/db/query_plan.cpp
+++ b/src/mongo/db/query_plan.cpp
@@ -88,7 +88,6 @@ namespace mongo {
_endKeyInclusive(),
_utility( Helpful ),
_special( special ),
- _type( 0 ),
_startOrEndSpec() {
}
@@ -112,24 +111,24 @@ namespace mongo {
return;
}
+ _descriptor.reset(CatalogHack::getDescriptor(_d, _idxNo));
_index = &_d->idx(_idxNo);
// If the parsing or index indicates this is a special query, don't continue the processing
if (!_special.empty() ||
- ( _index->getSpec().getType() && (USELESS !=
- IndexSelection::isSuitableFor(_index->getSpec().keyPattern, _frs, _order)))) {
+            (("" != CatalogHack::findPluginName(_descriptor->keyPattern())) &&
+             (USELESS != IndexSelection::isSuitableFor(_descriptor->keyPattern(), _frs, _order)))) {
- _type = _index->getSpec().getType();
- if (_special.empty()) _special = _index->getSpec().getType()->getPlugin()->getName();
+ _specialIndexName = CatalogHack::findPluginName(_descriptor->keyPattern());
+ if (_special.empty()) _special = _specialIndexName;
- massert( 13040 , (string)"no type for special: " + _special , _type );
+ massert( 13040 , (string)"no type for special: " + _special , "" != _specialIndexName);
// hopefully safe to use original query in these contexts;
// don't think we can mix special with $or clause separation yet
- _scanAndOrderRequired = _type->scanAndOrderRequired( _originalQuery , _order );
+ _scanAndOrderRequired = !_order.isEmpty();
return;
}
- const IndexSpec &idxSpec = _index->getSpec();
BSONObjIterator o( _order );
BSONObjIterator k( idxKey );
if ( !o.moreWithEOO() )
@@ -192,7 +191,7 @@ doneCheckOrder:
if ( !_scanAndOrderRequired &&
( optimalIndexedQueryCount == _frs.numNonUniversalRanges() ) )
_utility = Optimal;
- _frv.reset( new FieldRangeVector( _frs, idxSpec, _direction ) );
+ _frv.reset( new FieldRangeVector( _frs, _descriptor->keyPattern(), _direction ) );
if ( // If all field range constraints are on indexed fields and ...
_utility == Optimal &&
@@ -208,7 +207,7 @@ doneCheckOrder:
if ( originalFrsp ) {
_originalFrv.reset( new FieldRangeVector( originalFrsp->frsForIndex( _d, _idxNo ),
- idxSpec,
+ _descriptor->keyPattern(),
_direction ) );
}
else {
@@ -231,7 +230,7 @@ doneCheckOrder:
_utility = Unhelpful;
}
- if ( idxSpec.isSparse() && hasPossibleExistsFalsePredicate() ) {
+ if ( _descriptor->isSparse() && hasPossibleExistsFalsePredicate() ) {
_utility = Disallowed;
}
@@ -244,7 +243,7 @@ doneCheckOrder:
shared_ptr<Cursor> QueryPlan::newCursor( const DiskLoc& startLoc,
bool requestIntervalCursor ) const {
- if ( _type ) {
+ if ("" != _specialIndexName) {
// hopefully safe to use original query in these contexts - don't think we can mix type
// with $or clause separation yet
int numWanted = 0;
@@ -253,6 +252,7 @@ doneCheckOrder:
numWanted = _parsedQuery->getSkip() + _parsedQuery->getNumToReturn();
}
+ // Why do we get new objects here? Because EmulatedCursor takes ownership of them.
IndexDescriptor* descriptor = CatalogHack::getDescriptor(_d, _idxNo);
IndexAccessMethod* iam = CatalogHack::getIndex(descriptor);
return shared_ptr<Cursor>(EmulatedCursor::make(descriptor, iam, _originalQuery,
@@ -284,7 +284,7 @@ doneCheckOrder:
_direction >= 0 ? 1 : -1 ) );
}
- if ( _index->getSpec().getType() ) {
+        if ( !_index->getSpec().getTypeName().empty() ) {
return shared_ptr<Cursor>( BtreeCursor::make( _d,
*_index,
_frv->startKey(),
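In QueryPlan, the IndexType pointer is replaced by a plugin-name string: a plan becomes "special" when the key pattern names a plugin and the index is not USELESS for the query, and such plans need a scan-and-order step exactly when a sort order was requested, since the special cursors do not sort. A reduced sketch of that decision, with illustrative types:

    #include <string>

    enum Suitability { USELESS = 0, HELPFUL = 1, OPTIMAL = 2 };

    struct SpecialPlan {
        std::string specialIndexName;   // e.g. "2dsphere", "hashed"; "" if none
        bool scanAndOrderRequired = false;
    };

    // Returns true (and fills out 'plan') when the index drives a special plan.
    bool makeSpecialPlan(const std::string& pluginName,
                         Suitability suitability,
                         bool orderRequested,
                         SpecialPlan* plan) {
        if (pluginName.empty() || suitability == USELESS) {
            return false;                                  // ordinary btree planning applies
        }
        plan->specialIndexName = pluginName;
        plan->scanAndOrderRequired = orderRequested;       // special cursors don't sort
        return true;
    }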
diff --git a/src/mongo/db/query_plan.h b/src/mongo/db/query_plan.h
index 62cd4c7c567..3c6350cc180 100644
--- a/src/mongo/db/query_plan.h
+++ b/src/mongo/db/query_plan.h
@@ -17,6 +17,7 @@
#pragma once
#include "mongo/db/diskloc.h"
+#include "mongo/db/index/index_descriptor.h"
#include "mongo/db/indexkey.h"
#include "mongo/db/jsobj.h"
#include "mongo/db/matcher.h"
@@ -29,7 +30,6 @@ namespace mongo {
class FieldRangeSet;
class FieldRangeSetPair;
class IndexDetails;
- class IndexType;
class NamespaceDetails;
class ParsedQuery;
struct QueryPlanSummary;
@@ -176,10 +176,11 @@ namespace mongo {
bool _endKeyInclusive;
Utility _utility;
string _special;
- IndexType* _type;
bool _startOrEndSpec;
shared_ptr<Projection::KeyOnly> _keyFieldsOnly;
mutable shared_ptr<CoveredIndexMatcher> _matcher; // Lazy initialization.
+ auto_ptr<IndexDescriptor> _descriptor;
+ string _specialIndexName;
};
std::ostream &operator<< ( std::ostream& out, const QueryPlan::Utility& utility );
diff --git a/src/mongo/db/query_runner.cpp b/src/mongo/db/query_runner.cpp
index d94ff3efa23..c649bca957e 100644
--- a/src/mongo/db/query_runner.cpp
+++ b/src/mongo/db/query_runner.cpp
@@ -15,6 +15,7 @@
*/
#include "mongo/db/query_runner.h"
+
#include "mongo/db/btree.h"
#include "mongo/db/index.h"
#include "mongo/db/jsobj.h"
diff --git a/src/mongo/db/queryutil.cpp b/src/mongo/db/queryutil.cpp
index dffb52c34bd..6ac3433caf4 100644
--- a/src/mongo/db/queryutil.cpp
+++ b/src/mongo/db/queryutil.cpp
@@ -1160,15 +1160,26 @@ namespace mongo {
return true;
}
- FieldRangeVector::FieldRangeVector( const FieldRangeSet &frs, const IndexSpec &indexSpec,
+ FieldRangeVector::FieldRangeVector( const FieldRangeSet &frs, BSONObj keyPattern,
int direction ) :
- _indexSpec( indexSpec ),
+ _keyPattern(keyPattern),
_direction( direction >= 0 ? 1 : -1 ),
_hasAllIndexedRanges( true ) {
- verify( frs.matchPossibleForIndex( _indexSpec.keyPattern ) );
+ verify( frs.matchPossibleForIndex( keyPattern));
_queries = frs._queries;
- BSONObjIterator i( _indexSpec.keyPattern );
+
+ // For key generation
+ BSONObjIterator it(_keyPattern);
+ while (it.more()) {
+ BSONElement elt = it.next();
+ _fieldNames.push_back(elt.fieldName());
+ _fixed.push_back(BSONElement());
+ }
+
+ _keyGenerator.reset(new BtreeKeyGeneratorV1(_fieldNames, _fixed, false));
+
map<string,BSONElement> topFieldElemMatchContexts;
+ BSONObjIterator i(keyPattern);
while( i.more() ) {
BSONElement e = i.next();
const FieldRange *range = &frs.range( e.fieldName() );
@@ -1262,7 +1273,7 @@ namespace mongo {
BSONObj FieldRangeVector::startKey() const {
BSONObjBuilder b;
- BSONObjIterator keys( _indexSpec.keyPattern );
+ BSONObjIterator keys(_keyPattern);
vector<FieldRange>::const_iterator i = _ranges.begin();
for( ; i != _ranges.end(); ++i, ++keys ) {
// Append lower bounds until an exclusive bound is found.
@@ -1300,7 +1311,7 @@ namespace mongo {
BSONObj FieldRangeVector::endKey() const {
BSONObjBuilder b;
- BSONObjIterator keys( _indexSpec.keyPattern );
+ BSONObjIterator keys(_keyPattern);
vector<FieldRange>::const_iterator i = _ranges.begin();
for( ; i != _ranges.end(); ++i, ++keys ) {
// Append upper bounds until an exclusive bound is found.
@@ -1338,7 +1349,7 @@ namespace mongo {
BSONObj FieldRangeVector::obj() const {
BSONObjBuilder b;
- BSONObjIterator k( _indexSpec.keyPattern );
+ BSONObjIterator k(_keyPattern);
for( int i = 0; i < (int)_ranges.size(); ++i ) {
BSONArrayBuilder a( b.subarrayStart( k.next().fieldName() ) );
for( vector<FieldInterval>::const_iterator j = _ranges[ i ].intervals().begin();
@@ -1514,7 +1525,7 @@ namespace mongo {
bool FieldRangeVector::matchesKey( const BSONObj &key ) const {
BSONObjIterator j( key );
- BSONObjIterator k( _indexSpec.keyPattern );
+ BSONObjIterator k(_keyPattern);
for( int l = 0; l < (int)_ranges.size(); ++l ) {
int number = (int) k.next().number();
bool forward = ( number >= 0 ? 1 : -1 ) * ( _direction >= 0 ? 1 : -1 ) > 0;
@@ -1526,16 +1537,16 @@ namespace mongo {
}
bool FieldRangeVector::matches( const BSONObj &obj ) const {
-
bool ok = false;
+ BSONObjSet keys;
+ _keyGenerator->getKeys(obj, &keys);
+
// TODO The representation of matching keys could potentially be optimized
// more for the case at hand. (For example, we can potentially consider
// fields individually instead of constructing several bson objects using
// multikey arrays.) But getKeys() canonically defines the key set for a
// given object and for now we are using it as is.
- BSONObjSet keys;
- _indexSpec.getKeys( obj, keys );
for( BSONObjSet::const_iterator i = keys.begin(); i != keys.end(); ++i ) {
if ( matchesKey( *i ) ) {
ok = true;
@@ -1551,8 +1562,9 @@ namespace mongo {
BSONObj FieldRangeVector::firstMatch( const BSONObj &obj ) const {
// NOTE Only works in forward direction.
verify( _direction >= 0 );
- BSONObjSet keys( BSONObjCmp( _indexSpec.keyPattern ) );
- _indexSpec.getKeys( obj, keys );
+ BSONObjCmp oc(_keyPattern);
+ BSONObjSet keys(oc);
+ _keyGenerator->getKeys(obj, &keys);
for( BSONObjSet::const_iterator i = keys.begin(); i != keys.end(); ++i ) {
if ( matchesKey( *i ) ) {
return *i;
@@ -1563,7 +1575,7 @@ namespace mongo {
string FieldRangeVector::toString() const {
BSONObjBuilder bob;
- BSONObjIterator i( _indexSpec.keyPattern );
+ BSONObjIterator i(_keyPattern);
for( vector<FieldRange>::const_iterator r = _ranges.begin();
r != _ranges.end() && i.more(); ++r ) {
BSONElement e = i.next();
@@ -1585,7 +1597,7 @@ namespace mongo {
// TODO optimize more SERVER-5450.
int FieldRangeVectorIterator::advance( const BSONObj &curr ) {
BSONObjIterator j( curr );
- BSONObjIterator o( _v._indexSpec.keyPattern );
+ BSONObjIterator o( _v._keyPattern);
// track first field for which we are not at the end of the valid values,
// since we may need to advance from the key prefix ending with this field
int latestNonEndpoint = -1;
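With the IndexSpec dependency gone, FieldRangeVector seeds a BtreeKeyGeneratorV1 with one field name and one empty placeholder element per key pattern field, and matches() now asks that generator for the document's keys, accepting the document if any generated key satisfies the ranges. A toy version of that flow over simplified types:

    #include <vector>

    // Inclusive interval standing in for one indexed field's FieldRange.
    struct Range {
        long long low;
        long long high;
        bool contains(long long key) const { return key >= low && key <= high; }
    };

    // "Key generation": a scalar field yields one key, an array-valued field
    // yields one key per element (the multikey case).
    std::vector<long long> getKeys(const std::vector<long long>& fieldValues) {
        return fieldValues;
    }

    // Mirrors FieldRangeVector::matches(): the document matches if any of the
    // keys it would put in the index falls inside the range.
    bool matches(const std::vector<long long>& fieldValues, const Range& range) {
        for (long long key : getKeys(fieldValues)) {
            if (range.contains(key)) {
                return true;
            }
        }
        return false;
    }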
diff --git a/src/mongo/db/queryutil.h b/src/mongo/db/queryutil.h
index 381c1a14d1a..c53082c06af 100644
--- a/src/mongo/db/queryutil.h
+++ b/src/mongo/db/queryutil.h
@@ -17,9 +17,10 @@
#include "jsobj.h"
#include "indexkey.h"
+#include "mongo/db/index/btree_key_generator.h"
namespace mongo {
-
+
//maximum number of intervals produced by $in queries.
static const unsigned MAX_IN_COMBINATIONS = 4000000;
@@ -433,7 +434,7 @@ namespace mongo {
* @param indexSpec The index spec (key pattern and info)
* @param direction The direction of index traversal
*/
- FieldRangeVector( const FieldRangeSet &frs, const IndexSpec &indexSpec, int direction );
+ FieldRangeVector( const FieldRangeSet &frs, BSONObj keyPattern, int direction );
/**
* Methods for identifying compound start and end btree bounds describing this field range
@@ -499,8 +500,6 @@ namespace mongo {
/** @return a client readable representation of 'this' */
BSONObj obj() const;
- const IndexSpec& getSpec(){ return _indexSpec; }
-
/**
* @return true iff the provided document matches valid ranges on all
* of this FieldRangeVector's fields, which is the case iff this document
@@ -541,11 +540,15 @@ namespace mongo {
int matchingLowElement( const BSONElement &e, int i, bool direction, bool &lowEquality ) const;
bool matchesElement( const BSONElement &e, int i, bool direction ) const;
vector<FieldRange> _ranges;
- const IndexSpec _indexSpec;
+ BSONObj _keyPattern;
int _direction;
vector<BSONObj> _queries; // make sure mem owned
bool _hasAllIndexedRanges;
friend class FieldRangeVectorIterator;
+
+ vector<const char*> _fieldNames;
+ vector<BSONElement> _fixed;
+ scoped_ptr<BtreeKeyGenerator> _keyGenerator;
};
/**
diff --git a/src/mongo/db/scanandorder.cpp b/src/mongo/db/scanandorder.cpp
index 03cf5fd0346..d3ed2beffa2 100644
--- a/src/mongo/db/scanandorder.cpp
+++ b/src/mongo/db/scanandorder.cpp
@@ -103,7 +103,7 @@ namespace mongo {
void ScanAndOrder::_addIfBetter(const BSONObj& k, const BSONObj& o, const BestMap::iterator& i,
const DiskLoc* loc) {
const BSONObj& worstBestKey = i->first;
- int cmp = worstBestKey.woCompare(k, _order._spec.keyPattern);
+ int cmp = worstBestKey.woCompare(k, _order._keyPattern);
if ( cmp > 0 ) {
// k is better, 'upgrade'
_validateAndUpdateApproxSize( -i->first.objsize() + -i->second.objsize() );
diff --git a/src/mongo/db/scanandorder.h b/src/mongo/db/scanandorder.h
index c3a9e834d5a..4bcfb4e44f9 100644
--- a/src/mongo/db/scanandorder.h
+++ b/src/mongo/db/scanandorder.h
@@ -32,12 +32,11 @@ namespace mongo {
class KeyType : boost::noncopyable {
public:
- IndexSpec _spec;
+ BSONObj _keyPattern;
FieldRangeVector _keyCutter;
public:
- KeyType(const BSONObj &pattern, const FieldRangeSet &frs):
- _spec(pattern, BSONObj(), IndexSpec::NoPlugins),
- _keyCutter(frs, _spec, 1) {
+ KeyType(const BSONObj &pattern, const FieldRangeSet &frs)
+ : _keyPattern(pattern), _keyCutter(frs, pattern, 1) {
verify(!pattern.isEmpty());
}
diff --git a/src/mongo/dbtests/cursortests.cpp b/src/mongo/dbtests/cursortests.cpp
index 83e49908df7..382d7c7e9d9 100644
--- a/src/mongo/dbtests/cursortests.cpp
+++ b/src/mongo/dbtests/cursortests.cpp
@@ -52,8 +52,8 @@ namespace CursorTests {
}
}
// orphan idxSpec for this test
- IndexSpec *idxSpec = new IndexSpec( BSON( "a" << 1 ) );
- return new FieldRangeVector( s, *idxSpec, direction );
+ BSONObj kp = BSON( "a" << 1 );
+ return new FieldRangeVector( s, kp, direction );
}
DBDirectClient _c;
private:
@@ -173,9 +173,7 @@ namespace CursorTests {
Client::WriteContext ctx( ns() );
FieldRangeSet frs( ns(), spec, true, true );
- // orphan spec for this test.
- IndexSpec *idxSpec = new IndexSpec( idx() );
- boost::shared_ptr< FieldRangeVector > frv( new FieldRangeVector( frs, *idxSpec, direction() ) );
+ boost::shared_ptr< FieldRangeVector > frv( new FieldRangeVector( frs, idx(), direction() ) );
scoped_ptr<BtreeCursor> c( BtreeCursor::make( nsdetails( ns() ),
nsdetails( ns() )->idx( 1 ),
frv,
@@ -287,8 +285,8 @@ namespace CursorTests {
void run() {
_c.dropCollection( ns() );
// Set up a compound index with some data.
- IndexSpec idx( BSON( "a" << 1 << "b" << 1 ) );
- _c.ensureIndex( ns(), idx.keyPattern );
+ BSONObj kp = BSON( "a" << 1 << "b" << 1 );
+ _c.ensureIndex( ns(), kp);
for( int i = 0; i < 300; ++i ) {
_c.insert( ns(), BSON( "a" << i << "b" << i ) );
}
@@ -300,7 +298,7 @@ namespace CursorTests {
// of 'a' in the index and check for an index key with that value for 'a' and 'b'
// equal to 30.
FieldRangeSet frs( ns(), BSON( "b" << 30 ), true, true );
- boost::shared_ptr<FieldRangeVector> frv( new FieldRangeVector( frs, idx, 1 ) );
+ boost::shared_ptr<FieldRangeVector> frv( new FieldRangeVector( frs, kp, 1 ) );
Client::WriteContext ctx( ns() );
scoped_ptr<BtreeCursor> c( BtreeCursor::make( nsdetails( ns() ),
nsdetails( ns() )->idx(1),
diff --git a/src/mongo/dbtests/namespacetests.cpp b/src/mongo/dbtests/namespacetests.cpp
index e13a25202f3..82fe647cd87 100644
--- a/src/mongo/dbtests/namespacetests.cpp
+++ b/src/mongo/dbtests/namespacetests.cpp
@@ -23,9 +23,9 @@
#include "../db/db.h"
#include "../db/json.h"
-#include "mongo/db/hashindex.h"
#include "mongo/db/index_selection.h"
#include "mongo/db/index/btree_key_generator.h"
+#include "mongo/db/index/hash_access_method.h"
#include "mongo/db/queryutil.h"
#include "dbtests.h"
@@ -955,11 +955,11 @@ namespace NamespaceTests {
class IndexedQueryField {
public:
void run() {
- IndexSpec spec( BSON( "a" << 1 ), BSONObj() );
+ BSONObj kp( BSON( "a" << 1 ) );
FieldRangeSet frs( "n/a", BSON( "a" << 2 ), true , true );
// Checking a return value of HELPFUL instead of OPTIMAL is descriptive rather than
// normative. See SERVER-4485.
- ASSERT_EQUALS( HELPFUL, IndexSelection::isSuitableFor(spec.keyPattern, frs, BSONObj() ) );
+ ASSERT_EQUALS( HELPFUL, IndexSelection::isSuitableFor(kp, frs, BSONObj() ) );
}
};
@@ -967,9 +967,9 @@ namespace NamespaceTests {
class NoIndexedQueryField {
public:
void run() {
- IndexSpec spec( BSON( "a" << 1 ), BSONObj() );
+ BSONObj kp( BSON( "a" << 1 ) );
FieldRangeSet frs( "n/a", BSON( "b" << 2 ), true , true );
- ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor(spec.keyPattern, frs, BSONObj() ) );
+ ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor(kp, frs, BSONObj() ) );
}
};
@@ -977,9 +977,9 @@ namespace NamespaceTests {
class ChildOfIndexQueryField {
public:
void run() {
- IndexSpec spec( BSON( "a" << 1 ), BSONObj() );
+ BSONObj kp(BSON( "a" << 1 ));
FieldRangeSet frs( "n/a", BSON( "a.b" << 2 ), true , true );
- ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor(spec.keyPattern, frs, BSONObj() ) );
+ ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor(kp, frs, BSONObj() ) );
}
};
@@ -987,9 +987,9 @@ namespace NamespaceTests {
class ParentOfIndexQueryField {
public:
void run() {
- IndexSpec spec( BSON( "a.b" << 1 ), BSONObj() );
+ BSONObj kp(BSON( "a.b" << 1 ));
FieldRangeSet frs( "n/a", BSON( "a" << 2 ), true , true );
- ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor(spec.keyPattern, frs, BSONObj() ) );
+ ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor(kp, frs, BSONObj() ) );
}
};
@@ -1000,9 +1000,9 @@ namespace NamespaceTests {
class ObjectMatchCompletingIndexField {
public:
void run() {
- IndexSpec spec( BSON( "a.b" << 1 ), BSONObj() );
+ BSONObj kp(BSON( "a.b" << 1 ));
FieldRangeSet frs( "n/a", BSON( "a" << BSON( "b" << 2 ) ), true , true );
- ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor(spec.keyPattern, frs, BSONObj() ) );
+ ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor(kp, frs, BSONObj() ) );
}
};
@@ -1010,9 +1010,9 @@ namespace NamespaceTests {
class IndexedOrderField {
public:
void run() {
- IndexSpec spec( BSON( "a" << 1 ), BSONObj() );
+ BSONObj kp(BSON( "a" << 1 ));
FieldRangeSet frs( "n/a", BSONObj(), true , true );
- ASSERT_EQUALS( HELPFUL, IndexSelection::isSuitableFor(spec.keyPattern, frs, BSON( "a" << 1 ) ) );
+ ASSERT_EQUALS( HELPFUL, IndexSelection::isSuitableFor(kp, frs, BSON( "a" << 1 ) ) );
}
};
@@ -1020,9 +1020,9 @@ namespace NamespaceTests {
class IndexedReverseOrderField {
public:
void run() {
- IndexSpec spec( BSON( "a" << -1 ), BSONObj() );
+ BSONObj kp(BSON( "a" << -1 ));
FieldRangeSet frs( "n/a", BSONObj(), true , true );
- ASSERT_EQUALS( HELPFUL, IndexSelection::isSuitableFor(spec.keyPattern, frs, BSON( "a" << 1 ) ) );
+ ASSERT_EQUALS( HELPFUL, IndexSelection::isSuitableFor(kp, frs, BSON( "a" << 1 ) ) );
}
};
@@ -1033,9 +1033,9 @@ namespace NamespaceTests {
class NonPrefixIndexedOrderField {
public:
void run() {
- IndexSpec spec( BSON( "a" << 1 ), BSONObj() );
+ BSONObj kp( BSON( "a" << 1 ));
FieldRangeSet frs( "n/a", BSONObj(), true , true );
- ASSERT_EQUALS( HELPFUL, IndexSelection::isSuitableFor(spec.keyPattern, frs, BSON( "b" << 1 << "a" << 1 ) ) );
+ ASSERT_EQUALS( HELPFUL, IndexSelection::isSuitableFor(kp, frs, BSON( "b" << 1 << "a" << 1 ) ) );
}
};
@@ -1043,9 +1043,9 @@ namespace NamespaceTests {
class NoIndexedOrderField {
public:
void run() {
- IndexSpec spec( BSON( "a" << 1 ), BSONObj() );
+ BSONObj kp(BSON( "a" << 1 ));
FieldRangeSet frs( "n/a", BSONObj(), true , true );
- ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor(spec.keyPattern, frs, BSON( "b" << 1 ) ) );
+ ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor(kp, frs, BSON( "b" << 1 ) ) );
}
};
@@ -1053,9 +1053,9 @@ namespace NamespaceTests {
class ChildOfIndexOrderField {
public:
void run() {
- IndexSpec spec( BSON( "a" << 1 ), BSONObj() );
+ BSONObj kp( BSON( "a" << 1 ));
FieldRangeSet frs( "n/a", BSONObj(), true , true );
- ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor(spec.keyPattern, frs, BSON( "a.b" << 1 ) ) );
+ ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor(kp, frs, BSON( "a.b" << 1 ) ) );
}
};
@@ -1063,9 +1063,9 @@ namespace NamespaceTests {
class ParentOfIndexOrderField {
public:
void run() {
- IndexSpec spec( BSON( "a.b" << 1 ), BSONObj() );
+ BSONObj kp( BSON( "a.b" << 1 ) );
FieldRangeSet frs( "n/a", BSONObj(), true , true );
- ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor(spec.keyPattern, frs, BSON( "a" << 1 ) ) );
+ ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor(kp, frs, BSON( "a" << 1 ) ) );
}
};
@@ -1073,13 +1073,13 @@ namespace NamespaceTests {
class NumericFieldSuitability {
public:
void run() {
- IndexSpec spec( BSON( "1" << 1 ), BSONObj() );
+ BSONObj kp( BSON( "1" << 1 ));
FieldRangeSet frs1( "n/a", BSON( "1" << 2 ), true , true );
- ASSERT_EQUALS( HELPFUL, IndexSelection::isSuitableFor(spec.keyPattern, frs1, BSONObj() ) );
+ ASSERT_EQUALS( HELPFUL, IndexSelection::isSuitableFor(kp, frs1, BSONObj() ) );
FieldRangeSet frs2( "n/a", BSON( "01" << 3), true , true );
- ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor(spec.keyPattern, frs2, BSON( "01" << 1 ) ) );
+ ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor(kp, frs2, BSON( "01" << 1 ) ) );
FieldRangeSet frs3( "n/a", BSONObj() , true , true );
- ASSERT_EQUALS( HELPFUL, IndexSelection::isSuitableFor(spec.keyPattern, frs3, BSON( "1" << 1 ) ) );
+ ASSERT_EQUALS( HELPFUL, IndexSelection::isSuitableFor(kp, frs3, BSON( "1" << 1 ) ) );
}
};
@@ -1471,63 +1471,6 @@ namespace NamespaceTests {
// GeoHaystack does not implement its own suitability(). See SERVER-8645.
} // namespace IndexSpecSuitability
-
- namespace IndexSpecTests {
-
- /** A missing field is represented as null in a btree index. */
- class BtreeIndexMissingField {
- public:
- void run() {
- IndexSpec spec( BSON( "a" << 1 ) );
- ASSERT_EQUALS( jstNULL, spec.missingField().type() );
- }
- };
-
- /** A missing field is represented as null in a 2d index. */
- class TwoDIndexMissingField {
- public:
- void run() {
- IndexSpec spec( BSON( "a" << "2d" ) );
- ASSERT_EQUALS( jstNULL, spec.missingField().type() );
- }
- };
-
- /** A missing field is represented with the hash of null in a hashed index. */
- class HashedIndexMissingField {
- public:
- void run() {
- IndexSpec spec( BSON( "a" << "hashed" ) );
- BSONObj nullObj = BSON( "a" << BSONNULL );
- BSONObjSet nullFieldKeySet;
- spec.getKeys( nullObj, nullFieldKeySet );
- BSONElement nullFieldFromKey = nullFieldKeySet.begin()->firstElement();
- ASSERT_EQUALS( HashedIndexType::makeSingleKey( nullObj.firstElement(), 0 ),
- nullFieldFromKey.Long() );
- ASSERT_EQUALS( NumberLong, spec.missingField().type() );
- ASSERT_EQUALS( nullFieldFromKey, spec.missingField() );
- }
- };
-
- /**
- * A missing field is represented with the hash of null in a hashed index. This hash value
- * depends on the hash seed.
- */
- class HashedIndexMissingFieldAlternateSeed {
- public:
- void run() {
- IndexSpec spec( BSON( "a" << "hashed" ), BSON( "seed" << 0x5eed ) );
- BSONObj nullObj = BSON( "a" << BSONNULL );
- BSONObjSet nullFieldKeySet;
- spec.getKeys( BSONObj(), nullFieldKeySet );
- BSONElement nullFieldFromKey = nullFieldKeySet.begin()->firstElement();
- ASSERT_EQUALS( HashedIndexType::makeSingleKey( nullObj.firstElement(), 0x5eed ),
- nullFieldFromKey.Long() );
- ASSERT_EQUALS( NumberLong, spec.missingField().type() );
- ASSERT_EQUALS( nullFieldFromKey, spec.missingField() );
- }
- };
-
- } // namespace IndexSpecTests
namespace NamespaceDetailsTests {
@@ -2397,10 +2340,6 @@ namespace NamespaceTests {
add< IndexSpecSuitability::Hashed::EqualityInsideNonStandaloneSingletonOr >();
add< IndexSpecSuitability::Hashed::EqualityInsideNonSingletonOr >();
add< IndexSpecSuitability::Hashed::EqualityOutsideOr >();
- add< IndexSpecTests::BtreeIndexMissingField >();
- add< IndexSpecTests::TwoDIndexMissingField >();
- add< IndexSpecTests::HashedIndexMissingField >();
- add< IndexSpecTests::HashedIndexMissingFieldAlternateSeed >();
add< NamespaceDetailsTests::Create >();
add< NamespaceDetailsTests::SingleAlloc >();
add< NamespaceDetailsTests::Realloc >();
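Throughout the suitability tests above the IndexSpec wrapper is gone: IndexSelection::isSuitableFor() now takes the key-pattern BSONObj directly, and the IndexSpecTests covering missingField() are deleted outright. A minimal sketch of the post-change call shape, mirroring two of the cases exercised in these hunks (ASSERT_EQUALS, FieldRangeSet, and the suitability constants are the ones already used in this test file):

    // Post-change form: pass the key pattern BSONObj straight through;
    // no IndexSpec construction is needed.
    BSONObj kp = BSON( "a" << 1 );
    FieldRangeSet constrained( "n/a", BSON( "a" << 2 ), true, true );
    FieldRangeSet unconstrained( "n/a", BSONObj(), true, true );
    // Equality on the indexed field makes the index HELPFUL ...
    ASSERT_EQUALS( HELPFUL, IndexSelection::isSuitableFor( kp, constrained, BSONObj() ) );
    // ... while only an order on an unindexed field leaves it USELESS.
    ASSERT_EQUALS( USELESS, IndexSelection::isSuitableFor( kp, unconstrained, BSON( "b" << 1 ) ) );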
diff --git a/src/mongo/dbtests/queryoptimizercursortests.cpp b/src/mongo/dbtests/queryoptimizercursortests.cpp
index 39073090db0..43062b6a555 100644
--- a/src/mongo/dbtests/queryoptimizercursortests.cpp
+++ b/src/mongo/dbtests/queryoptimizercursortests.cpp
@@ -32,7 +32,6 @@
#include "mongo/dbtests/dbtests.h"
namespace mongo {
- void __forceLinkGeoPlugin();
shared_ptr<Cursor> newQueryOptimizerCursor( const char *ns, const BSONObj &query,
const BSONObj &order = BSONObj(),
const QueryPlanSelectionPolicy &planPolicy =
@@ -4826,7 +4825,6 @@ namespace QueryOptimizerCursorTests {
All() : Suite( "queryoptimizercursor" ) {}
void setupTests() {
- __forceLinkGeoPlugin();
add<CachedMatchCounter::Count>();
add<CachedMatchCounter::Accumulate>();
add<CachedMatchCounter::Dedup>();
diff --git a/src/mongo/dbtests/queryoptimizertests.cpp b/src/mongo/dbtests/queryoptimizertests.cpp
index 760ea484fa3..d4edbb16fc6 100644
--- a/src/mongo/dbtests/queryoptimizertests.cpp
+++ b/src/mongo/dbtests/queryoptimizertests.cpp
@@ -1059,7 +1059,6 @@ namespace {
All() : Suite( "queryoptimizer" ) {}
void setupTests() {
- __forceLinkGeoPlugin();
add<QueryPlanTests::ToString>();
add<QueryPlanTests::NoIndex>();
add<QueryPlanTests::SimpleOrder>();
diff --git a/src/mongo/dbtests/queryoptimizertests2.cpp b/src/mongo/dbtests/queryoptimizertests2.cpp
index eb031e95a5a..f671063d972 100644
--- a/src/mongo/dbtests/queryoptimizertests2.cpp
+++ b/src/mongo/dbtests/queryoptimizertests2.cpp
@@ -30,7 +30,6 @@
namespace mongo {
extern void runQuery(Message& m, QueryMessage& q, Message &response );
- extern void __forceLinkGeoPlugin();
} // namespace mongo
namespace {
@@ -794,7 +793,6 @@ namespace {
All() : Suite( "queryoptimizer2" ) {}
void setupTests() {
- __forceLinkGeoPlugin();
add<QueryPlanSetTests::ToString>();
add<QueryPlanSetTests::NoIndexes>();
add<QueryPlanSetTests::Optimal>();
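The three dbtests suites above also drop their calls to __forceLinkGeoPlugin(). That call appears to have been the usual force-link hook: an empty function in the geo plugin's translation unit, called from the test binary so the linker cannot discard the object file whose static initializers register the plugin. A rough sketch of the idiom for context; the names in this sketch are illustrative and not taken from this tree:

    // geo_plugin.cpp -- library side: a static registration the linker could
    // otherwise drop when nothing in the final binary references this TU.
    #include <iostream>
    namespace mongo {
        namespace {
            struct GeoPluginRegistration {
                GeoPluginRegistration() { std::cout << "geo plugin registered\n"; }
            } geoPluginRegistration;
        }
        // Empty hook: calling it from another TU forces this object file,
        // and with it the static initializer above, into the link.
        void __forceLinkGeoPlugin() {}
    }

    // dbtests side -- the kind of call this diff deletes.
    namespace mongo { void __forceLinkGeoPlugin(); }
    void setupTests() {
        mongo::__forceLinkGeoPlugin();
        // ... add<...>() test registrations ...
    }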
diff --git a/src/mongo/dbtests/queryutiltests.cpp b/src/mongo/dbtests/queryutiltests.cpp
index 10e8d95a75d..9074afa204a 100644
--- a/src/mongo/dbtests/queryutiltests.cpp
+++ b/src/mongo/dbtests/queryutiltests.cpp
@@ -1760,7 +1760,7 @@ namespace QueryUtilTests {
void run() {
BSONObj obj = BSON( "a" << 1 );
FieldRangeSet fieldRangeSet( "", obj, true, true );
- IndexSpec indexSpec( BSON( "a" << 1 ) );
+ BSONObj indexSpec( BSON( "a" << 1 ) );
FieldRangeVector fieldRangeVector( fieldRangeSet, indexSpec, 1 );
fieldRangeVector.toString(); // Just test that we don't crash.
}
@@ -1799,7 +1799,7 @@ namespace QueryUtilTests {
private:
bool rangesRepresented( const BSONObj& index, bool singleKey, const BSONObj& query ) {
FieldRangeSet fieldRangeSet( "", query, singleKey, true );
- IndexSpec indexSpec( index );
+ BSONObj indexSpec( index );
FieldRangeVector fieldRangeVector( fieldRangeSet, indexSpec, 1 );
return fieldRangeVector.hasAllIndexedRanges();
}
@@ -1811,12 +1811,12 @@ namespace QueryUtilTests {
void run() {
// Equality on a single field is a single interval.
FieldRangeVector frv1( FieldRangeSet( "dummy", BSON( "a" << 5 ), true, true ),
- IndexSpec( BSON( "a" << 1 ) ),
+ ( BSON( "a" << 1 ) ),
1 );
ASSERT( frv1.isSingleInterval() );
// Single interval on a single field is a single interval.
FieldRangeVector frv2( FieldRangeSet( "dummy", BSON( "a" << GT << 5 ), true, true ),
- IndexSpec( BSON( "a" << 1 ) ),
+ ( BSON( "a" << 1 ) ),
1 );
ASSERT( frv2.isSingleInterval() );
// Multiple intervals on a single field is not a single interval.
@@ -1824,7 +1824,7 @@ namespace QueryUtilTests {
fromjson( "{a:{$in:[4,5]}}" ),
true,
true ),
- IndexSpec( BSON( "a" << 1 ) ),
+ ( BSON( "a" << 1 ) ),
1 );
ASSERT( !frv3.isSingleInterval() );
@@ -1833,7 +1833,7 @@ namespace QueryUtilTests {
BSON( "a" << 5 << "b" << 6 ),
true,
true ),
- IndexSpec( BSON( "a" << 1 << "b" << 1 ) ),
+ ( BSON( "a" << 1 << "b" << 1 ) ),
1 );
ASSERT( frv4.isSingleInterval() );
// Equality on first field and single interval on second field is a compound
@@ -1842,7 +1842,7 @@ namespace QueryUtilTests {
BSON( "a" << 5 << "b" << GT << 6 ),
true,
true ),
- IndexSpec( BSON( "a" << 1 << "b" << 1 ) ),
+ ( BSON( "a" << 1 << "b" << 1 ) ),
1 );
ASSERT( frv5.isSingleInterval() );
// Single interval on first field and single interval on second field is not a
@@ -1851,7 +1851,7 @@ namespace QueryUtilTests {
BSON( "a" << LT << 5 << "b" << GT << 6 ),
true,
true ),
- IndexSpec( BSON( "a" << 1 << "b" << 1 ) ),
+ ( BSON( "a" << 1 << "b" << 1 ) ),
1 );
ASSERT( !frv6.isSingleInterval() );
// Multiple intervals on two fields is not a compound single interval.
@@ -1859,7 +1859,7 @@ namespace QueryUtilTests {
fromjson( "{a:{$in:[4,5]},b:{$in:[7,8]}}" ),
true,
true ),
- IndexSpec( BSON( "a" << 1 << "b" << 1 ) ),
+ ( BSON( "a" << 1 << "b" << 1 ) ),
1 );
ASSERT( !frv7.isSingleInterval() );
@@ -1868,7 +1868,7 @@ namespace QueryUtilTests {
BSON( "a" << 5 ),
true,
true ),
- IndexSpec( BSON( "a" << 1 << "b" << 1 ) ),
+ ( BSON( "a" << 1 << "b" << 1 ) ),
1 );
ASSERT( frv8.isSingleInterval() );
// With missing second field is still a single compound interval.
@@ -1876,7 +1876,7 @@ namespace QueryUtilTests {
BSON( "b" << 5 ),
true,
true ),
- IndexSpec( BSON( "a" << 1 << "b" << 1 ) ),
+ ( BSON( "a" << 1 << "b" << 1 ) ),
1 );
ASSERT( !frv9.isSingleInterval() );
@@ -1886,7 +1886,7 @@ namespace QueryUtilTests {
fromjson( "{a:5,b:6,c:{$gt:7}}" ),
true,
true ),
- IndexSpec( BSON( "a" << 1 << "b" << 1 << "c" << 1 ) ),
+ ( BSON( "a" << 1 << "b" << 1 << "c" << 1 ) ),
1 );
ASSERT( frv10.isSingleInterval() );
@@ -1895,7 +1895,7 @@ namespace QueryUtilTests {
fromjson( "{a:5,b:{$gt:7}}" ),
true,
true ),
- IndexSpec( BSON( "a" << 1 << "b" << 1 << "c" << 1 ) ),
+ ( BSON( "a" << 1 << "b" << 1 << "c" << 1 ) ),
1 );
ASSERT( frv11.isSingleInterval() );
// Equality, then single interval, then missing, then missing is a compound single
@@ -1904,7 +1904,7 @@ namespace QueryUtilTests {
fromjson( "{a:5,b:{$gt:7}}" ),
true,
true ),
- IndexSpec( BSON( "a" << 1 <<
+ ( BSON( "a" << 1 <<
"b" << 1 <<
"c" << 1 <<
"d" << 1 ) ),
@@ -1916,7 +1916,7 @@ namespace QueryUtilTests {
fromjson( "{a:5,b:{$gt:7}}" ),
true,
true ),
- IndexSpec( BSON( "a" << 1 <<
+ ( BSON( "a" << 1 <<
"b" << 1 <<
"c" << 1 <<
"d" << -1 ) ),
@@ -1928,7 +1928,7 @@ namespace QueryUtilTests {
fromjson( "{a:5,b:{$gt:7},d:{$gt:1}}" ),
true,
true ),
- IndexSpec( BSON( "a" << 1 <<
+ ( BSON( "a" << 1 <<
"b" << 1 <<
"c" << 1 <<
"d" << 1 ) ),
@@ -1943,7 +1943,7 @@ namespace QueryUtilTests {
void run() {
// Equality on a single field.
FieldRangeVector frv1( FieldRangeSet( "dummy", BSON( "a" << 5 ), true, true ),
- IndexSpec( BSON( "a" << 1 ) ),
+ ( BSON( "a" << 1 ) ),
1 );
ASSERT_EQUALS( BSON( "" << 5 ), frv1.startKey() );
ASSERT( frv1.startKeyInclusive() );
@@ -1951,7 +1951,7 @@ namespace QueryUtilTests {
ASSERT( frv1.endKeyInclusive() );
// Single interval on a single field.
FieldRangeVector frv2( FieldRangeSet( "dummy", BSON( "a" << GT << 5 ), true, true ),
- IndexSpec( BSON( "a" << 1 ) ),
+ ( BSON( "a" << 1 ) ),
1 );
ASSERT_EQUALS( BSON( "" << 5 ), frv2.startKey() );
ASSERT( !frv2.startKeyInclusive() );
@@ -1963,7 +1963,7 @@ namespace QueryUtilTests {
BSON( "a" << 5 << "b" << 6 ),
true,
true ),
- IndexSpec( BSON( "a" << 1 << "b" << 1 ) ),
+ ( BSON( "a" << 1 << "b" << 1 ) ),
1 );
ASSERT_EQUALS( BSON( "" << 5 << "" << 6 ), frv3.startKey() );
ASSERT( frv3.startKeyInclusive() );
@@ -1974,7 +1974,7 @@ namespace QueryUtilTests {
BSON( "a" << 5 << "b" << LT << 6 ),
true,
true ),
- IndexSpec( BSON( "a" << 1 << "b" << 1 ) ),
+ ( BSON( "a" << 1 << "b" << 1 ) ),
1 );
ASSERT_EQUALS( BSON( "" << 5 << "" << -numeric_limits<double>::max() ),
frv4.startKey() );
@@ -1988,7 +1988,7 @@ namespace QueryUtilTests {
BSON( "a" << 5 ),
true,
true ),
- IndexSpec( BSON( "a" << 1 << "b" << 1 ) ),
+ ( BSON( "a" << 1 << "b" << 1 ) ),
1 );
ASSERT_EQUALS( BSON( "" << 5 << "" << MINKEY ), frv5.startKey() );
ASSERT( frv5.startKeyInclusive() );
@@ -1999,7 +1999,7 @@ namespace QueryUtilTests {
fromjson( "{a:5,b:{$gt:7}}" ),
true,
true ),
- IndexSpec( BSON( "a" << 1 << "b" << 1 << "c" << 1 ) ),
+ ( BSON( "a" << 1 << "b" << 1 << "c" << 1 ) ),
1 );
ASSERT_EQUALS( BSON( "" << 5 << "" << 7 << "" << MAXKEY ), frv6.startKey() );
ASSERT( !frv6.startKeyInclusive() );
@@ -2013,7 +2013,7 @@ namespace QueryUtilTests {
fromjson( "{a:5,b:{$gt:7}}" ),
true,
true ),
- IndexSpec( BSON( "a" << 1 <<
+ ( BSON( "a" << 1 <<
"b" << 1 <<
"c" << 1 <<
"d" << 1 ) ),
@@ -2034,7 +2034,7 @@ namespace QueryUtilTests {
fromjson( "{a:5,b:{$gt:7,$lt:10}}" ),
true,
true ),
- IndexSpec( BSON( "a" << 1 <<
+ ( BSON( "a" << 1 <<
"b" << 1 <<
"c" << 1 <<
"d" << -1 ) ),
@@ -2051,7 +2051,7 @@ namespace QueryUtilTests {
fromjson( "{a:5,b:{$gt:7,$lt:10}}" ),
true,
true ),
- IndexSpec( BSON( "a" << 1 <<
+ ( BSON( "a" << 1 <<
"b" << 1 <<
"c" << 1 <<
"d" << -1 ) ),
@@ -2075,8 +2075,7 @@ namespace QueryUtilTests {
virtual ~Base() {}
void run() {
FieldRangeSet fieldRangeSet( "", query(), true, true );
- IndexSpec indexSpec( index(), BSONObj() );
- FieldRangeVector fieldRangeVector( fieldRangeSet, indexSpec, 1 );
+ FieldRangeVector fieldRangeVector( fieldRangeSet, index(), 1 );
_iterator.reset( new FieldRangeVectorIterator( fieldRangeVector,
singleIntervalLimit() ) );
_iterator->advance( fieldRangeVector.startKey() );
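Same pattern in queryutiltests.cpp: FieldRangeVector is now constructed from the key-pattern BSONObj itself rather than a temporary IndexSpec. A minimal sketch of the new constructor call together with the accessors these hunks exercise, reusing a query/index pair that appears in the tests above:

    // Post-change construction: field ranges + raw key pattern + direction.
    FieldRangeSet frs( "dummy", BSON( "a" << 5 << "b" << 6 ), true, true );
    FieldRangeVector frv( frs, BSON( "a" << 1 << "b" << 1 ), 1 );
    // Equality on both indexed fields is a single compound interval that
    // starts inclusively at {"": 5, "": 6}.
    ASSERT( frv.isSingleInterval() );
    ASSERT_EQUALS( BSON( "" << 5 << "" << 6 ), frv.startKey() );
    ASSERT( frv.startKeyInclusive() );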
diff --git a/src/mongo/s/d_split.cpp b/src/mongo/s/d_split.cpp
index 5a42aa1e8f9..98e4847e3fa 100644
--- a/src/mongo/s/d_split.cpp
+++ b/src/mongo/s/d_split.cpp
@@ -31,6 +31,7 @@
#include "mongo/db/btreecursor.h"
#include "mongo/db/clientcursor.h"
#include "mongo/db/commands.h"
+#include "mongo/db/index/hash_access_method.h"
#include "mongo/db/instance.h"
#include "mongo/db/jsobj.h"
#include "mongo/s/chunk.h" // for static genID only
@@ -134,7 +135,14 @@ namespace mongo {
// this index.
// NOTE A local copy of 'missingField' is made because IndexSpec objects may be
// invalidated during a db lock yield.
- BSONObj missingFieldObj = idx->getSpec().missingField().wrap();
+ BSONObj missingFieldObj;
+ if (IndexNames::HASHED == KeyPattern::findPluginName(kp.toBSON())) {
+ missingFieldObj = HashAccessMethod::getMissingField(*idx);
+ } else {
+ BSONObjBuilder b;
+ b.appendNull("");
+ missingFieldObj = b.obj();
+ }
BSONElement missingField = missingFieldObj.firstElement();
// for now, the only check is that all shard keys are filled
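The d_split.cpp hunk replaces the old IndexSpec::missingField() lookup with an inline decision: hashed indexes get their missing-field key from HashAccessMethod::getMissingField(), every other index type uses a plain null element. This preserves the behaviour the deleted HashedIndexMissingField tests documented, namely that a hashed index keys an absent field as the (seed-dependent) hash of null. Restated as a standalone helper purely as a sketch; the helper name is hypothetical, while the calls it wraps are the ones added above:

    // Sketch only: wraps the branch added in this hunk in a hypothetical helper.
    // Hashed indexes must ask the access method for the hash of null; btree-style
    // indexes simply represent a missing field as null.
    static BSONObj missingFieldFor( const IndexDetails& idx, const KeyPattern& kp ) {
        if ( IndexNames::HASHED == KeyPattern::findPluginName( kp.toBSON() ) ) {
            return HashAccessMethod::getMissingField( idx );
        }
        BSONObjBuilder b;
        b.appendNull( "" );
        return b.obj();
    }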