summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorHarishabd Khalsa <hk@Harishabds-MacBook-Pro.local>2012-10-15 14:10:08 -0400
committerHari Khalsa <hkhalsa@10gen.com>2012-10-23 11:41:33 -0400
commit2bb94caf04dc5bf7a37368118feaaf17976eb982 (patch)
tree411ff39f341b7d97ff4e486a88298e24034964c2
parent4bfc3bc15e23eeca6292cbc018075dcb10e1cd86 (diff)
downloadmongo-2bb94caf04dc5bf7a37368118feaaf17976eb982.tar.gz
This is my first Geo cleanup change.
-rw-r--r--docs/building.md2
-rw-r--r--jstests/geo_haystack3.js28
-rw-r--r--src/mongo/SConscript5
-rw-r--r--src/mongo/db/geo/2d.cpp2332
-rw-r--r--src/mongo/db/geo/core.h549
-rw-r--r--src/mongo/db/geo/hash.cpp602
-rw-r--r--src/mongo/db/geo/hash.h194
-rw-r--r--src/mongo/db/geo/hash_test.cpp72
-rw-r--r--src/mongo/db/geo/haystack.cpp377
-rw-r--r--src/mongo/db/geo/shapes.cpp459
-rw-r--r--src/mongo/db/geo/shapes.h104
11 files changed, 2589 insertions, 2135 deletions
diff --git a/docs/building.md b/docs/building.md
index 7b4fc2e7657..949832e524f 100644
--- a/docs/building.md
+++ b/docs/building.md
@@ -9,7 +9,7 @@ For detail information about building, please see [the wiki](http://dochub.mongo
If you want to build everything (mongod, mongo, tools, etc):
- $ scons .
+ $ scons all
If you only want to build the database:
diff --git a/jstests/geo_haystack3.js b/jstests/geo_haystack3.js
new file mode 100644
index 00000000000..f5a2ab7becb
--- /dev/null
+++ b/jstests/geo_haystack3.js
@@ -0,0 +1,28 @@
+t = db.geo_haystack3
+t.drop()
+
+t.insert({ pos : { long : 34, lat : 33 }})
+t.insert({ pos : { long : 34.2, lat : 33.3 }, type : ["bar", "restaurant" ]})
+t.insert({ pos : { long : 34.2, lat : 37.3 }, type : ["bar", "chicken" ]})
+t.insert({ pos : { long : 59.1, lat : 87.2 }, type : ["baz", "office" ]})
+t.ensureIndex({ pos : "geoHaystack", type : 1 }, { bucketSize : 1 })
+
+// This only matches the first insert. What do we want? First 3 or just the first?
+res = t.runCommand("geoSearch", { near : [33, 33], maxDistance : 6, search : {}, limit : 30 })
+assert.eq(1, res.stats.n, "Right # of matches");
+assert.eq(34, res.results[0].pos.long, "expected longitude");
+assert.eq(33, res.results[0].pos.lat, "expected latitude");
+
+// This matches the middle 2 of the 4 elements above.
+res = t.runCommand("geoSearch", { near : [33, 33], maxDistance : 6, search : { type : "bar" },
+ limit : 2 })
+assert.eq(2, res.stats.n, "Right # of matches");
+assert.eq("bar", res.results[0].type[0], "expected value for type");
+assert.eq("bar", res.results[1].type[0], "expected value for type");
+assert.neq(res.results[0].type[1], res.results[1].type[1], "should get 2 diff results");
+
+// This is a test for the limit being reached/only 1 returned.
+res = t.runCommand("geoSearch", { near : [33, 33], maxDistance : 6, search : { type : "bar" },
+ limit : 1 })
+assert.eq(1, res.stats.n, "Right # of matches");
+assert.eq("bar", res.results[0].type[0], "expected value for type");
diff --git a/src/mongo/SConscript b/src/mongo/SConscript
index 382ecb6190f..f6c4595025a 100644
--- a/src/mongo/SConscript
+++ b/src/mongo/SConscript
@@ -388,10 +388,15 @@ serverOnlyFiles += [ "s/d_logic.cpp",
env.StaticLibrary("defaultversion", "s/default_version.cpp")
+env.StaticLibrary("geometry", [ "db/geo/hash.cpp", "db/geo/shapes.cpp" ], LIBDEPS = [ "bson" ])
+
+env.CppUnitTest("hash_test", [ "db/geo/hash_test.cpp" ], LIBDEPS = ["geometry"],)
+
env.StaticLibrary("serveronly", serverOnlyFiles,
LIBDEPS=["coreshard",
"dbcmdline",
"defaultversion",
+ "geometry",
'$BUILD_DIR/third_party/shim_snappy'])
# These files go into mongos and mongod only, not into the shell or any tools.
diff --git a/src/mongo/db/geo/2d.cpp b/src/mongo/db/geo/2d.cpp
index 0bca16db138..152de7f6ff1 100644
--- a/src/mongo/db/geo/2d.cpp
+++ b/src/mongo/db/geo/2d.cpp
@@ -17,18 +17,20 @@
*/
#include "pch.h"
-#include "../namespace-inl.h"
-#include "../jsobj.h"
-#include "../index.h"
-#include "../../util/startup_test.h"
-#include "../commands.h"
-#include "../pdfile.h"
-#include "../btree.h"
-#include "../curop-inl.h"
-#include "../matcher.h"
-#include "../queryutil.h"
-#include "core.h"
-#include "../../util/timer.h"
+#include "mongo/db/namespace-inl.h"
+#include "mongo/db/jsobj.h"
+#include "mongo/db/index.h"
+#include "mongo/util/startup_test.h"
+#include "mongo/db/commands.h"
+#include "mongo/db/pdfile.h"
+#include "mongo/db/btree.h"
+#include "mongo/db/curop-inl.h"
+#include "mongo/db/matcher.h"
+#include "mongo/db/queryutil.h"
+#include "mongo/db/geo/core.h"
+#include "mongo/db/geo/hash.h"
+#include "mongo/db/geo/shapes.h"
+#include "mongo/util/timer.h"
// Note: we use indexinterface herein to talk to the btree code. In the future it would be nice to
// be able to use the V1 key class (see key.h) instead of toBson() which has some cost.
@@ -40,55 +42,14 @@ namespace mongo {
class GeoKeyNode {
GeoKeyNode();
public:
- GeoKeyNode( DiskLoc bucket, int keyOfs, DiskLoc r, BSONObj k) : _bucket( bucket ), _keyOfs( keyOfs ), recordLoc(r), _key(k) { }
+ GeoKeyNode(DiskLoc bucket, int keyOfs, DiskLoc r, BSONObj k)
+ : _bucket(bucket), _keyOfs(keyOfs), recordLoc(r), _key(k) { }
const DiskLoc _bucket;
const int _keyOfs;
const DiskLoc recordLoc;
const BSONObj _key;
};
- // just use old indexes for geo for now. todo.
-// typedef BtreeBucket<V0> GeoBtreeBucket;
-// typedef GeoBtreeBucket::KeyNode GeoKeyNode;
-
-//#define BTREE btree<V0>
-
-#if 0
-# define CDEBUG -1
-#else
-# define CDEBUG 10
-#endif
-
-#if 0
-# define GEODEBUGGING
-# define GEODEBUG(x) cout << x << endl;
-# define GEODEBUGPRINT(x) PRINT(x)
- inline void PREFIXDEBUG(GeoHash prefix, const GeoConvert* g) {
- if (!prefix.constrains()) {
- cout << "\t empty prefix" << endl;
- return ;
- }
-
- Point ll (g, prefix); // lower left
- prefix.move(1,1);
- Point tr (g, prefix); // top right
-
- Point center ( (ll._x+tr._x)/2, (ll._y+tr._y)/2 );
- double radius = fabs(ll._x - tr._x) / 2;
-
- cout << "\t ll: " << ll.toString() << " tr: " << tr.toString()
- << " center: " << center.toString() << " radius: " << radius << endl;
-
- }
-#else
-# define GEODEBUG(x)
-# define GEODEBUGPRINT(x)
-# define PREFIXDEBUG(x, y)
-#endif
-
- const double EARTH_RADIUS_KM = 6371;
- const double EARTH_RADIUS_MILES = EARTH_RADIUS_KM * 0.621371192;
-
enum GeoDistType {
GEO_PLAIN,
GEO_SPHERE
@@ -100,124 +61,95 @@ namespace mongo {
cos(deg2rad(max(-89.0, y - maxDistDegrees))));
}
- GeoBitSets geoBitSets;
-
const string GEO2DNAME = "2d";
- class Geo2dType : public IndexType , public GeoConvert {
+ class Geo2dType : public IndexType {
public:
virtual ~Geo2dType() { }
- Geo2dType( const IndexPlugin * plugin , const IndexSpec* spec )
- : IndexType( plugin , spec ) {
-
+ Geo2dType(const IndexPlugin *plugin, const IndexSpec* spec) : IndexType(plugin, spec) {
BSONObjBuilder orderBuilder;
- BSONObjIterator i( spec->keyPattern );
- while ( i.more() ) {
+ BSONObjIterator i(spec->keyPattern);
+ while (i.more()) {
BSONElement e = i.next();
- if ( e.type() == String && GEO2DNAME == e.valuestr() ) {
- uassert( 13022 , "can't have 2 geo field" , _geo.size() == 0 );
- uassert( 13023 , "2d has to be first in index" , _other.size() == 0 );
+ if (e.type() == String && GEO2DNAME == e.valuestr()) {
+ uassert(13022, "can't have 2 geo field", _geo.size() == 0);
+ uassert(13023, "2d has to be first in index", _other.size() == 0);
_geo = e.fieldName();
+ } else {
+ _other.push_back(e.fieldName());
}
- else {
- _other.push_back( e.fieldName() );
- }
- orderBuilder.append( "" , 1 );
+ orderBuilder.append("", 1);
}
+ uassert(13024, "no geo field specified", _geo.size());
- uassert( 13024 , "no geo field specified" , _geo.size() );
-
- double bits = _configval( spec , "bits" , 26 ); // for lat/long, ~ 1ft
-
- uassert( 13028 , "bits in geo index must be between 1 and 32" , bits > 0 && bits <= 32 );
-
- _bits = (unsigned) bits;
-
- _max = _configval( spec , "max" , 180.0 );
- _min = _configval( spec , "min" , -180.0 );
+ double bits = configValueWithDefault(spec, "bits", 26); // for lat/long, ~ 1ft
+ uassert(13028, "bits in geo index must be between 1 and 32", bits > 0 && bits <= 32);
+ GeoHashConverter::Parameters params;
+ params.bits = static_cast<unsigned>(bits);
+ params.max = configValueWithDefault(spec, "max", 180.0);
+ params.min = configValueWithDefault(spec, "min", -180.0);
double numBuckets = (1024 * 1024 * 1024 * 4.0);
+ params.scaling = numBuckets / (params.max - params.min);
- _scaling = numBuckets / ( _max - _min );
+ _geoHashConverter.reset(new GeoHashConverter(params));
_order = orderBuilder.obj();
-
- GeoHash a(0, 0, _bits);
- GeoHash b = a;
- b.move(1, 1);
-
- // Epsilon is 1/100th of a bucket size
- // TODO: Can we actually find error bounds for the sqrt function?
- double epsilon = 0.001 / _scaling;
- _error = distance(a, b) + epsilon;
-
- // Error in radians
- _errorSphere = deg2rad( _error );
-
- }
-
- double _configval( const IndexSpec* spec , const string& name , double def ) {
- BSONElement e = spec->info[name];
- if ( e.isNumber() ) {
- return e.numberDouble();
- }
- return def;
}
- virtual BSONObj fixKey( const BSONObj& in ) {
- if ( in.firstElement().type() == BinData )
+ // XXX: what does this do
+ virtual BSONObj fixKey(const BSONObj& in) {
+ if (in.firstElement().type() == BinData)
return in;
- BSONObjBuilder b(in.objsize()+16);
+ BSONObjBuilder b(in.objsize() + 16);
- if ( in.firstElement().isABSONObj() )
- _hash( in.firstElement().embeddedObject() ).append( b , "" );
- else if ( in.firstElement().type() == String )
- GeoHash( in.firstElement().valuestr() ).append( b , "" );
- else if ( in.firstElement().type() == RegEx )
- GeoHash( in.firstElement().regex() ).append( b , "" );
+ if (in.firstElement().isABSONObj())
+ _geoHashConverter->hash(in.firstElement().embeddedObject()).appendToBuilder(&b, "");
+ else if (in.firstElement().type() == String)
+ GeoHash(in.firstElement().valuestr()).appendToBuilder(&b, "");
+ else if (in.firstElement().type() == RegEx)
+ GeoHash(in.firstElement().regex()).appendToBuilder(&b, "");
else
return in;
BSONObjIterator i(in);
i.next();
- while ( i.more() )
- b.append( i.next() );
+ while (i.more())
+ b.append(i.next());
return b.obj();
}
/** Finds the key objects to put in an index */
- virtual void getKeys( const BSONObj& obj, BSONObjSet& keys ) const {
- getKeys( obj, &keys, NULL );
+ virtual void getKeys(const BSONObj& obj, BSONObjSet& keys) const {
+ getKeys(obj, &keys, NULL);
}
/** Finds all locations in a geo-indexed object */
// TODO: Can we just return references to the locs, if they won't change?
- void getKeys( const BSONObj& obj, vector< BSONObj >& locs ) const {
- getKeys( obj, NULL, &locs );
+ void getKeys(const BSONObj& obj, vector<BSONObj>& locs) const {
+ getKeys(obj, NULL, &locs);
}
/** Finds the key objects and/or locations for a geo-indexed object */
- void getKeys( const BSONObj &obj, BSONObjSet* keys, vector< BSONObj >* locs ) const {
-
+ void getKeys(const BSONObj &obj, BSONObjSet* keys, vector<BSONObj>* locs) const {
BSONElementMSet bSet;
// Get all the nested location fields, but don't return individual elements from
// the last array, if it exists.
obj.getFieldsDotted(_geo.c_str(), bSet, false);
- if( bSet.empty() )
+ if (bSet.empty())
return;
- for( BSONElementMSet::iterator setI = bSet.begin(); setI != bSet.end(); ++setI ) {
-
+ for (BSONElementMSet::iterator setI = bSet.begin(); setI != bSet.end(); ++setI) {
BSONElement geo = *setI;
- GEODEBUG( "Element " << geo << " found for query " << _geo.c_str() );
+ GEODEBUG("Element " << geo << " found for query " << _geo.c_str());
- if ( geo.eoo() || ! geo.isABSONObj() )
+ if (geo.eoo() || !geo.isABSONObj())
continue;
//
@@ -229,7 +161,7 @@ namespace mongo {
//
BSONObj embed = geo.embeddedObject();
- if ( embed.isEmpty() )
+ if (embed.isEmpty())
continue;
// Differentiate between location arrays and locations
@@ -238,231 +170,78 @@ namespace mongo {
BSONObjIterator oi(embed);
- while( oi.more() ) {
-
+ while (oi.more()) {
BSONObj locObj;
- if( singleElement ) locObj = embed;
- else {
+ if (singleElement) {
+ locObj = embed;
+ } else {
BSONElement locElement = oi.next();
- uassert( 13654, str::stream() << "location object expected, location array not in correct format",
- locElement.isABSONObj() );
+ uassert(13654, str::stream() << "location object expected, location "
+ "array not in correct format",
+ locElement.isABSONObj());
locObj = locElement.embeddedObject();
-
- if( locObj.isEmpty() )
+ if(locObj.isEmpty())
continue;
}
BSONObjBuilder b(64);
// Remember the actual location object if needed
- if( locs )
- locs->push_back( locObj );
+ if (locs)
+ locs->push_back(locObj);
// Stop if we don't need to get anything but location objects
- if( ! keys ) {
- if( singleElement ) break;
+ if (!keys) {
+ if (singleElement) break;
else continue;
}
- _hash( locObj, &obj ).append( b , "" );
+ _geoHashConverter->hash(locObj, &obj).appendToBuilder(&b, "");
// Go through all the other index keys
- for ( vector<string>::const_iterator i = _other.begin(); i != _other.end(); ++i ) {
-
+ for (vector<string>::const_iterator i = _other.begin(); i != _other.end(); ++i) {
// Get *all* fields for the index key
BSONElementSet eSet;
- obj.getFieldsDotted( *i, eSet );
-
+ obj.getFieldsDotted(*i, eSet);
- if ( eSet.size() == 0 )
- b.appendAs( _spec->missingField(), "" );
- else if ( eSet.size() == 1 )
- b.appendAs( *(eSet.begin()), "" );
+ if (eSet.size() == 0)
+ b.appendAs(_spec->missingField(), "");
+ else if (eSet.size() == 1)
+ b.appendAs(*(eSet.begin()), "");
else {
-
// If we have more than one key, store as an array of the objects
-
BSONArrayBuilder aBuilder;
- for( BSONElementSet::iterator ei = eSet.begin(); ei != eSet.end(); ++ei ) {
- aBuilder.append( *ei );
+ for (BSONElementSet::iterator ei = eSet.begin(); ei != eSet.end();
+ ++ei) {
+ aBuilder.append(*ei);
}
- BSONArray arr = aBuilder.arr();
-
- b.append( "", arr );
-
+ b.append("", aBuilder.arr());
}
-
}
-
- keys->insert( b.obj() );
-
- if( singleElement ) break;
-
+ keys->insert(b.obj());
+ if(singleElement) break;
}
}
-
- }
-
- BSONObj _fromBSONHash( const BSONElement& e ) const {
- return _unhash( _tohash( e ) );
- }
-
- BSONObj _fromBSONHash( const BSONObj& o ) const {
- return _unhash( _tohash( o.firstElement() ) );
- }
-
- GeoHash _tohash( const BSONElement& e ) const {
- if ( e.isABSONObj() )
- return _hash( e.embeddedObject() );
-
- return GeoHash( e , _bits );
- }
-
- GeoHash _hash( const BSONObj& o ) const {
- return _hash( o , NULL );
- }
-
- GeoHash _hash( const BSONObj& o, const BSONObj* src ) const {
-
- BSONObjIterator i(o);
- uassert( 13067 ,
- str::stream() << "geo field is empty"
- << ( src ? causedBy((*src).toString()) : "" ),
-
- i.more() );
-
- BSONElement x = i.next();
- uassert( 13068 ,
- str::stream() << "geo field only has 1 element"
- << causedBy( src ? (*src).toString() : x.toString() ),
-
- i.more() );
-
- BSONElement y = i.next();
- uassert( 13026 ,
- str::stream() << "geo values have to be numbers"
- << causedBy( src ? (*src).toString() :
- BSON_ARRAY( x << y ).toString() ),
-
- x.isNumber() && y.isNumber() );
-
- uassert( 13027 ,
- str::stream() << "point not in interval of [ " << _min << ", " << _max << " ]"
- << causedBy( src ? (*src).toString() :
- BSON_ARRAY( x.number() << y.number() ).toString() ),
-
- x.number() <= _max && x.number() >= _min &&
- y.number() <= _max && y.number() >= _min );
-
- return GeoHash( _convert(x.number()), _convert(y.number()) , _bits );
- }
-
- GeoHash hash( const Point& p ) const {
- return hash( p._x, p._y );
- }
-
- GeoHash hash( double x, double y ) const {
-
- uassert( 16433,
- str::stream() << "point not in interval of [ " << _min << ", " << _max << " ]"
- << causedBy( BSON_ARRAY( x << y ).toString() ),
-
- x <= _max && x >= _min &&
- y <= _max && y >= _min );
-
- return GeoHash( _convert(x), _convert(y) , _bits );
- }
-
- BSONObj _unhash( const GeoHash& h ) const {
- unsigned x , y;
- h.unhash( x , y );
- BSONObjBuilder b;
- b.append( "x" , _unconvert( x ) );
- b.append( "y" , _unconvert( y ) );
- return b.obj();
- }
-
- unsigned _convert( double in ) const {
-
- verify( in <= _max && in >= _min );
-
- if (in == _max) {
- // prevent aliasing with _min by moving inside the "box"
- // makes 180 == 179.999 (roughly)
- in -= _error / 2;
- }
-
- in -= _min;
- verify( in >= 0 );
- return (unsigned)(in * _scaling);
- }
-
- double _unconvert( unsigned in ) const {
- double x = in;
- x /= _scaling;
- x += _min;
- return x;
- }
-
- void unhash( const GeoHash& h , double& x , double& y ) const {
- unsigned a,b;
- h.unhash(a,b);
- x = _unconvert( a );
- y = _unconvert( b );
- }
-
- double distance( const GeoHash& a , const GeoHash& b ) const {
- double ax,ay,bx,by;
- unhash( a , ax , ay );
- unhash( b , bx , by );
-
- double dx = bx - ax;
- double dy = by - ay;
-
- return sqrt( ( dx * dx ) + ( dy * dy ) );
- }
-
- double sizeDiag( const GeoHash& a ) const {
- GeoHash b = a;
- b.move( 1 , 1 );
- return distance( a , b );
- }
-
- double sizeEdge( const GeoHash& a ) const {
-
- if( ! a.constrains() )
- return _max - _min;
-
- double ax,ay,bx,by;
- GeoHash b = a;
- b.move( 1 , 1 );
- unhash( a, ax, ay );
- unhash( b, bx, by );
-
- // _min and _max are a singularity
- if (bx == _min)
- bx = _max;
-
- return (fabs(ax-bx));
}
const IndexDetails* getDetails() const {
return _spec->getDetails();
}
- virtual shared_ptr<Cursor> newCursor( const BSONObj& query , const BSONObj& order , int numWanted ) const;
+ virtual shared_ptr<Cursor> newCursor(const BSONObj& query, const BSONObj& order,
+ int numWanted) const;
- virtual IndexSuitability suitability( const BSONObj& query , const BSONObj& order ) const {
+ virtual IndexSuitability suitability(const BSONObj& query, const BSONObj& order) const {
BSONElement e = query.getFieldDotted(_geo.c_str());
- switch ( e.type() ) {
+ switch (e.type()) {
case Object: {
BSONObj sub = e.embeddedObject();
- switch ( sub.firstElement().getGtLtOp() ) {
+ switch (sub.firstElement().getGtLtOp()) {
case BSONObj::opNEAR:
case BSONObj::opWITHIN:
return OPTIMAL;
@@ -481,370 +260,30 @@ namespace mongo {
}
}
+ const GeoHashConverter& getConverter() const { return *_geoHashConverter; }
+
+ // XXX: make private with a getter
string _geo;
vector<string> _other;
-
- unsigned _bits;
- double _max;
- double _min;
- double _scaling;
-
- BSONObj _order;
- double _error;
- double _errorSphere;
- };
-
- class Box {
- public:
-
- Box( const Geo2dType * g , const GeoHash& hash )
- : _min( g , hash ) ,
- _max( _min._x + g->sizeEdge( hash ) , _min._y + g->sizeEdge( hash ) ) {
- }
-
- Box( double x , double y , double size )
- : _min( x , y ) ,
- _max( x + size , y + size ) {
- }
-
- Box( Point min , Point max )
- : _min( min ) , _max( max ) {
- }
-
- Box() {}
-
- BSONArray toBSON() const {
- return BSON_ARRAY( BSON_ARRAY( _min._x << _min._y ) << BSON_ARRAY( _max._x << _max._y ) );
- }
-
- string toString() const {
- StringBuilder buf;
- buf << _min.toString() << " -->> " << _max.toString();
- return buf.str();
- }
-
- bool between( double min , double max , double val , double fudge=0) const {
- return val + fudge >= min && val <= max + fudge;
- }
-
- bool onBoundary( double bound, double val, double fudge = 0 ) const {
- return ( val >= bound - fudge && val <= bound + fudge );
- }
-
- bool mid( double amin , double amax , double bmin , double bmax , bool min , double& res ) const {
- verify( amin <= amax );
- verify( bmin <= bmax );
-
- if ( amin < bmin ) {
- if ( amax < bmin )
- return false;
- res = min ? bmin : amax;
- return true;
- }
- if ( amin > bmax )
- return false;
- res = min ? amin : bmax;
- return true;
- }
-
- double intersects( const Box& other ) const {
-
- Point boundMin(0,0);
- Point boundMax(0,0);
-
- if ( mid( _min._x , _max._x , other._min._x , other._max._x , true , boundMin._x ) == false ||
- mid( _min._x , _max._x , other._min._x , other._max._x , false , boundMax._x ) == false ||
- mid( _min._y , _max._y , other._min._y , other._max._y , true , boundMin._y ) == false ||
- mid( _min._y , _max._y , other._min._y , other._max._y , false , boundMax._y ) == false )
- return 0;
-
- Box intersection( boundMin , boundMax );
-
- return intersection.area() / area();
- }
-
- double area() const {
- return ( _max._x - _min._x ) * ( _max._y - _min._y );
- }
-
- double maxDim() const {
- return max( _max._x - _min._x, _max._y - _min._y );
- }
-
- Point center() const {
- return Point( ( _min._x + _max._x ) / 2 ,
- ( _min._y + _max._y ) / 2 );
- }
-
- void truncate( const Geo2dType* g ) {
- if( _min._x < g->_min ) _min._x = g->_min;
- if( _min._y < g->_min ) _min._y = g->_min;
- if( _max._x > g->_max ) _max._x = g->_max;
- if( _max._y > g->_max ) _max._y = g->_max;
- }
-
- void fudge( const Geo2dType* g ) {
- _min._x -= g->_error;
- _min._y -= g->_error;
- _max._x += g->_error;
- _max._y += g->_error;
- }
-
- bool onBoundary( Point p, double fudge = 0 ) {
- return onBoundary( _min._x, p._x, fudge ) ||
- onBoundary( _max._x, p._x, fudge ) ||
- onBoundary( _min._y, p._y, fudge ) ||
- onBoundary( _max._y, p._y, fudge );
- }
-
- bool inside( Point p , double fudge = 0 ) {
- bool res = inside( p._x , p._y , fudge );
- //cout << "is : " << p.toString() << " in " << toString() << " = " << res << endl;
- return res;
- }
-
- bool inside( double x , double y , double fudge = 0 ) {
- return
- between( _min._x , _max._x , x , fudge ) &&
- between( _min._y , _max._y , y , fudge );
- }
-
- bool contains(const Box& other, double fudge=0) {
- return inside(other._min, fudge) && inside(other._max, fudge);
- }
-
- Point _min;
- Point _max;
- };
-
-
- class Polygon {
- public:
-
- Polygon( void ) : _centroidCalculated( false ) {}
-
- Polygon( vector<Point> points ) : _centroidCalculated( false ),
- _points( points ) { }
-
- void add( Point p ) {
- _centroidCalculated = false;
- _points.push_back( p );
- }
-
- int size( void ) const {
- return _points.size();
- }
-
- /**
- * Determine if the point supplied is contained by the current polygon.
- *
- * The algorithm uses a ray casting method.
- */
- bool contains( const Point& p ) const {
- return contains( p, 0 ) > 0;
- }
-
- int contains( const Point &p, double fudge ) const {
-
- Box fudgeBox( Point( p._x - fudge, p._y - fudge ), Point( p._x + fudge, p._y + fudge ) );
-
- int counter = 0;
- Point p1 = _points[0];
- for ( int i = 1; i <= size(); i++ ) {
- Point p2 = _points[i % size()];
-
- GEODEBUG( "Doing intersection check of " << fudgeBox.toString() << " with seg " << p1.toString() << " to " << p2.toString() );
-
- // We need to check whether or not this segment intersects our error box
- if( fudge > 0 &&
- // Points not too far below box
- fudgeBox._min._y <= std::max( p1._y, p2._y ) &&
- // Points not too far above box
- fudgeBox._max._y >= std::min( p1._y, p2._y ) &&
- // Points not too far to left of box
- fudgeBox._min._x <= std::max( p1._x, p2._x ) &&
- // Points not too far to right of box
- fudgeBox._max._x >= std::min( p1._x, p2._x ) ) {
-
- GEODEBUG( "Doing detailed check" );
-
- // If our box contains one or more of these points, we need to do an exact check.
- if( fudgeBox.inside(p1) ) {
- GEODEBUG( "Point 1 inside" );
- return 0;
- }
- if( fudgeBox.inside(p2) ) {
- GEODEBUG( "Point 2 inside" );
- return 0;
- }
-
- // Do intersection check for vertical sides
- if ( p1._y != p2._y ) {
-
- double invSlope = ( p2._x - p1._x ) / ( p2._y - p1._y );
-
- double xintersT = ( fudgeBox._max._y - p1._y ) * invSlope + p1._x;
- if( fudgeBox._min._x <= xintersT && fudgeBox._max._x >= xintersT ) {
- GEODEBUG( "Top intersection @ " << xintersT );
- return 0;
- }
-
- double xintersB = ( fudgeBox._min._y - p1._y ) * invSlope + p1._x;
- if( fudgeBox._min._x <= xintersB && fudgeBox._max._x >= xintersB ) {
- GEODEBUG( "Bottom intersection @ " << xintersB );
- return 0;
- }
-
- }
-
- // Do intersection check for horizontal sides
- if( p1._x != p2._x ) {
-
- double slope = ( p2._y - p1._y ) / ( p2._x - p1._x );
-
- double yintersR = ( p1._x - fudgeBox._max._x ) * slope + p1._y;
- if( fudgeBox._min._y <= yintersR && fudgeBox._max._y >= yintersR ) {
- GEODEBUG( "Right intersection @ " << yintersR );
- return 0;
- }
-
- double yintersL = ( p1._x - fudgeBox._min._x ) * slope + p1._y;
- if( fudgeBox._min._y <= yintersL && fudgeBox._max._y >= yintersL ) {
- GEODEBUG( "Left intersection @ " << yintersL );
- return 0;
- }
-
- }
-
- }
- else if( fudge == 0 ){
-
- // If this is an exact vertex, we won't intersect, so check this
- if( p._y == p1._y && p._x == p1._x ) return 1;
- else if( p._y == p2._y && p._x == p2._x ) return 1;
-
- // If this is a horizontal line we won't intersect, so check this
- if( p1._y == p2._y && p._y == p1._y ){
- // Check that the x-coord lies in the line
- if( p._x >= std::min( p1._x, p2._x ) && p._x <= std::max( p1._x, p2._x ) ) return 1;
- }
-
- }
-
- // Normal intersection test.
- // TODO: Invert these for clearer logic?
- if ( p._y > std::min( p1._y, p2._y ) ) {
- if ( p._y <= std::max( p1._y, p2._y ) ) {
- if ( p._x <= std::max( p1._x, p2._x ) ) {
- if ( p1._y != p2._y ) {
- double xinters = (p._y-p1._y)*(p2._x-p1._x)/(p2._y-p1._y)+p1._x;
- // Special case of point on vertical line
- if ( p1._x == p2._x && p._x == p1._x ){
-
- // Need special case for the vertical edges, for example:
- // 1) \e pe/----->
- // vs.
- // 2) \ep---e/----->
- //
- // if we count exact as intersection, then 1 is in but 2 is out
- // if we count exact as no-int then 1 is out but 2 is in.
-
- return 1;
- }
- else if( p1._x == p2._x || p._x <= xinters ) {
- counter++;
- }
- }
- }
- }
- }
-
- p1 = p2;
- }
-
- if ( counter % 2 == 0 ) {
- return -1;
- }
- else {
- return 1;
- }
- }
-
- /**
- * Calculate the centroid, or center of mass of the polygon object.
- */
- Point centroid( void ) {
-
- /* Centroid is cached, it won't change betwen points */
- if ( _centroidCalculated ) {
- return _centroid;
- }
-
- Point cent;
- double signedArea = 0.0;
- double area = 0.0; // Partial signed area
-
- /// For all vertices except last
- int i = 0;
- for ( i = 0; i < size() - 1; ++i ) {
- area = _points[i]._x * _points[i+1]._y - _points[i+1]._x * _points[i]._y ;
- signedArea += area;
- cent._x += ( _points[i]._x + _points[i+1]._x ) * area;
- cent._y += ( _points[i]._y + _points[i+1]._y ) * area;
- }
-
- // Do last vertex
- area = _points[i]._x * _points[0]._y - _points[0]._x * _points[i]._y;
- cent._x += ( _points[i]._x + _points[0]._x ) * area;
- cent._y += ( _points[i]._y + _points[0]._y ) * area;
- signedArea += area;
- signedArea *= 0.5;
- cent._x /= ( 6 * signedArea );
- cent._y /= ( 6 * signedArea );
-
- _centroidCalculated = true;
- _centroid = cent;
-
- return cent;
- }
-
- Box bounds( void ) {
-
- // TODO: Cache this
-
- _bounds._max = _points[0];
- _bounds._min = _points[0];
-
- for ( int i = 1; i < size(); i++ ) {
-
- _bounds._max._x = max( _bounds._max._x, _points[i]._x );
- _bounds._max._y = max( _bounds._max._y, _points[i]._y );
- _bounds._min._x = min( _bounds._min._x, _points[i]._x );
- _bounds._min._y = min( _bounds._min._y, _points[i]._y );
-
+ private:
+ double configValueWithDefault(const IndexSpec* spec, const string& name, double def) {
+ BSONElement e = spec->info[name];
+ if (e.isNumber()) {
+ return e.numberDouble();
}
-
- return _bounds;
-
+ return def;
}
- private:
-
- bool _centroidCalculated;
- Point _centroid;
-
- Box _bounds;
-
- vector<Point> _points;
+ scoped_ptr<GeoHashConverter> _geoHashConverter;
+ BSONObj _order;
};
class Geo2dPlugin : public IndexPlugin {
public:
- Geo2dPlugin() : IndexPlugin( GEO2DNAME ) {
- }
+ Geo2dPlugin() : IndexPlugin(GEO2DNAME) { }
- virtual IndexType* generate( const IndexSpec* spec ) const {
- return new Geo2dType( this , spec );
+ virtual IndexType* generate(const IndexSpec* spec) const {
+ return new Geo2dType(this, spec);
}
} geo2dplugin;
@@ -852,35 +291,32 @@ namespace mongo {
geo2dplugin.getName();
}
-
-
class GeoHopper;
class GeoPoint {
public:
-
- GeoPoint() : _distance( -1 ), _exact( false ), _dirty( false )
- {}
+ GeoPoint() : _distance(-1), _exact(false), _dirty(false) { }
//// Distance not used ////
- GeoPoint( const GeoKeyNode& node )
- : _key( node._key ) , _loc( node.recordLoc ) , _o( node.recordLoc.obj() ), _distance( -1 ) , _exact( false ), _dirty( false ), _bucket( node._bucket ), _pos( node._keyOfs ) {
- }
+ GeoPoint(const GeoKeyNode& node)
+ : _key(node._key), _loc(node.recordLoc), _o(node.recordLoc.obj()),
+ _distance(-1), _exact(false), _dirty(false), _bucket(node._bucket),
+ _pos(node._keyOfs) { }
//// Immediate initialization of distance ////
- GeoPoint( const GeoKeyNode& node, double distance, bool exact )
- : _key( node._key ) , _loc( node.recordLoc ) , _o( node.recordLoc.obj() ), _distance( distance ), _exact( exact ), _dirty( false ) {
+ GeoPoint(const GeoKeyNode& node, double distance, bool exact)
+ : _key(node._key), _loc(node.recordLoc), _o(node.recordLoc.obj()), _distance(distance), _exact(exact), _dirty(false) {
}
- GeoPoint( const GeoPoint& pt, double distance, bool exact )
- : _key( pt.key() ) , _loc( pt.loc() ) , _o( pt.obj() ), _distance( distance ), _exact( exact ), _dirty( false ) {
+ GeoPoint(const GeoPoint& pt, double distance, bool exact)
+ : _key(pt.key()), _loc(pt.loc()), _o(pt.obj()), _distance(distance), _exact(exact), _dirty(false) {
}
- bool operator<( const GeoPoint& other ) const {
- if( _distance != other._distance ) return _distance < other._distance;
- if( _exact != other._exact ) return _exact < other._exact;
+ bool operator<(const GeoPoint& other) const {
+ if(_distance != other._distance) return _distance < other._distance;
+ if(_exact != other._exact) return _exact < other._exact;
return _loc < other._loc;
}
@@ -901,7 +337,7 @@ namespace mongo {
}
DiskLoc loc() const {
- verify( ! _dirty );
+ verify(! _dirty);
return _loc;
}
@@ -922,17 +358,17 @@ namespace mongo {
}
string toString() const {
- return str::stream() << "Point from " << _key << " - " << _o << " dist : " << _distance << ( _exact ? " (ex)" : " (app)" );
+ return str::stream() << "Point from " << _key << " - " << _o << " dist : " << _distance << (_exact ? " (ex)" : " (app)");
}
// TODO: Recover from yield by finding all the changed disk locs here, modifying the _seenPts array.
// Not sure yet the correct thing to do about _seen.
// Definitely need to re-find our current max/min locations too
- bool unDirty( const Geo2dType* g, DiskLoc& oldLoc ){
+ bool unDirty(const Geo2dType* g, DiskLoc& oldLoc){
- verify( _dirty );
- verify( ! _id.isEmpty() );
+ verify(_dirty);
+ verify(! _id.isEmpty());
oldLoc = _loc;
_loc = DiskLoc();
@@ -940,20 +376,20 @@ namespace mongo {
// Fast undirty
IndexInterface& ii = g->getDetails()->idxInterface();
// Check this position and the one immediately preceding
- for( int i = 0; i < 2; i++ ){
- if( _pos - i < 0 ) continue;
+ for(int i = 0; i < 2; i++){
+ if(_pos - i < 0) continue;
// log() << "bucket : " << _bucket << " pos " << _pos << endl;
BSONObj key;
DiskLoc loc;
- ii.keyAt( _bucket, _pos - i, key, loc );
+ ii.keyAt(_bucket, _pos - i, key, loc);
// log() << "Loc: " << loc << " Key : " << key << endl;
- if( loc.isNull() ) continue;
+ if(loc.isNull()) continue;
- if( key.binaryEqual( _key ) && loc.obj()["_id"].wrap( "" ).binaryEqual( _id ) ){
+ if(key.binaryEqual(_key) && loc.obj()["_id"].wrap("").binaryEqual(_id)){
_pos = _pos - i;
_loc = loc;
_dirty = false;
@@ -963,13 +399,13 @@ namespace mongo {
}
// Slow undirty
- scoped_ptr<BtreeCursor> cursor( BtreeCursor::make( nsdetails( g->getDetails()->parentNS().c_str() ),
- *( g->getDetails() ), _key, _key, true, 1 ) );
+ scoped_ptr<BtreeCursor> cursor(BtreeCursor::make(nsdetails(g->getDetails()->parentNS().c_str()),
+ *(g->getDetails()), _key, _key, true, 1));
int count = 0;
- while( cursor->ok() ){
+ while(cursor->ok()){
count++;
- if( cursor->current()["_id"].wrap( "" ).binaryEqual( _id ) ){
+ if(cursor->current()["_id"].wrap("").binaryEqual(_id)){
_bucket = cursor->getBucket();
_pos = cursor->getKeyOfs();
_loc = cursor->currLoc();
@@ -977,12 +413,12 @@ namespace mongo {
break;
}
else{
- LOG( CDEBUG + 1 ) << "Key doesn't match : " << cursor->current()["_id"] << " saved : " << _id << endl;
+ LOG(CDEBUG + 1) << "Key doesn't match : " << cursor->current()["_id"] << " saved : " << _id << endl;
}
cursor->advance();
}
- if( ! count ) { LOG( CDEBUG ) << "No key found for " << _key << endl; }
+ if(! count) { LOG(CDEBUG) << "No key found for " << _key << endl; }
_dirty = false;
@@ -994,13 +430,13 @@ namespace mongo {
}
bool makeDirty(){
- if( ! _dirty ){
- verify( ! obj()["_id"].eoo() );
- verify( ! _bucket.isNull() );
- verify( _pos >= 0 );
+ if(! _dirty){
+ verify(! obj()["_id"].eoo());
+ verify(! _bucket.isNull());
+ verify(_pos >= 0);
- if( _id.isEmpty() ){
- _id = obj()["_id"].wrap( "" ).getOwned();
+ if(_id.isEmpty()){
+ _id = obj()["_id"].wrap("").getOwned();
}
_o = BSONObj();
_key = _key.getOwned();
@@ -1030,19 +466,19 @@ namespace mongo {
// GeoBrowse subclasses this
class GeoAccumulator {
public:
- GeoAccumulator( const Geo2dType * g , const BSONObj& filter, bool uniqueDocs, bool needDistance )
- : _g(g) ,
- _lookedAt(0) ,
- _matchesPerfd(0) ,
- _objectsLoaded(0) ,
- _pointsLoaded(0) ,
- _found(0) ,
- _uniqueDocs( uniqueDocs ) ,
- _needDistance( needDistance )
+ GeoAccumulator(const Geo2dType * g, const BSONObj& filter, bool uniqueDocs, bool needDistance)
+ : _g(g),
+ _lookedAt(0),
+ _matchesPerfd(0),
+ _objectsLoaded(0),
+ _pointsLoaded(0),
+ _found(0),
+ _uniqueDocs(uniqueDocs),
+ _needDistance(needDistance)
{
- if ( ! filter.isEmpty() ) {
- _matcher.reset( new CoveredIndexMatcher( filter , g->keyPattern() ) );
- GEODEBUG( "Matcher is now " << _matcher->docMatcher().toString() );
+ if (! filter.isEmpty()) {
+ _matcher.reset(new CoveredIndexMatcher(filter, g->keyPattern()));
+ GEODEBUG("Matcher is now " << _matcher->docMatcher().toString());
}
}
@@ -1050,9 +486,9 @@ namespace mongo {
enum KeyResult { BAD, BORDER, GOOD };
- virtual void add( const GeoKeyNode& node ) {
+ virtual void add(const GeoKeyNode& node) {
- GEODEBUG( "\t\t\t\t checking key " << node._key.toString() )
+ GEODEBUG("\t\t\t\t checking key " << node._key.toString())
_lookedAt++;
@@ -1060,36 +496,37 @@ namespace mongo {
// Approximate distance check using key data
////
double keyD = 0;
- Point keyP( _g, GeoHash( node._key.firstElement(), _g->_bits ) );
- KeyResult keyOk = approxKeyCheck( keyP, keyD );
- if ( keyOk == BAD ) {
- GEODEBUG( "\t\t\t\t bad distance : " << node.recordLoc.obj() << "\t" << keyD );
+ //Point keyP(_g, GeoHash(node._key.firstElement(), _g->_bits));
+ Point keyP(_g->getConverter().unhashToPoint(node._key.firstElement()));
+ KeyResult keyOk = approxKeyCheck(keyP, keyD);
+ if (keyOk == BAD) {
+ GEODEBUG("\t\t\t\t bad distance : " << node.recordLoc.obj() << "\t" << keyD);
return;
}
- GEODEBUG( "\t\t\t\t good distance : " << node.recordLoc.obj() << "\t" << keyD );
+ GEODEBUG("\t\t\t\t good distance : " << node.recordLoc.obj() << "\t" << keyD);
////
// Check for match using other key (and potentially doc) criteria
////
// Remember match results for each object
- map<DiskLoc, bool>::iterator match = _matched.find( node.recordLoc );
+ map<DiskLoc, bool>::iterator match = _matched.find(node.recordLoc);
bool newDoc = match == _matched.end();
- if( newDoc ) {
+ if(newDoc) {
- GEODEBUG( "\t\t\t\t matching new doc with " << (_matcher ? _matcher->docMatcher().toString() : "(empty)" ) );
+ GEODEBUG("\t\t\t\t matching new doc with " << (_matcher ? _matcher->docMatcher().toString() : "(empty)"));
// matcher
MatchDetails details;
- if ( _matcher.get() ) {
- bool good = _matcher->matchesWithSingleKeyIndex( node._key , node.recordLoc , &details );
+ if (_matcher.get()) {
+ bool good = _matcher->matchesWithSingleKeyIndex(node._key, node.recordLoc, &details);
_matchesPerfd++;
- if ( details.hasLoadedRecord() )
+ if (details.hasLoadedRecord())
_objectsLoaded++;
- if ( ! good ) {
- GEODEBUG( "\t\t\t\t didn't match : " << node.recordLoc.obj()["_id"] );
+ if (! good) {
+ GEODEBUG("\t\t\t\t didn't match : " << node.recordLoc.obj()["_id"]);
_matched[ node.recordLoc ] = false;
return;
}
@@ -1097,12 +534,12 @@ namespace mongo {
_matched[ node.recordLoc ] = true;
- if ( ! details.hasLoadedRecord() ) // don't double count
+ if (! details.hasLoadedRecord()) // don't double count
_objectsLoaded++;
}
- else if( !((*match).second) ) {
- GEODEBUG( "\t\t\t\t previously didn't match : " << node.recordLoc.obj()["_id"] );
+ else if(!((*match).second)) {
+ GEODEBUG("\t\t\t\t previously didn't match : " << node.recordLoc.obj()["_id"]);
return;
}
@@ -1110,50 +547,43 @@ namespace mongo {
// Exact check with particular data fields
////
// Can add multiple points
- int diff = addSpecific( node , keyP, keyOk == BORDER, keyD, newDoc );
- if( diff > 0 ) _found += diff;
+ int diff = addSpecific(node, keyP, keyOk == BORDER, keyD, newDoc);
+ if(diff > 0) _found += diff;
else _found -= -diff;
}
- virtual void getPointsFor( const BSONObj& key, const BSONObj& obj, vector< BSONObj >& locsForNode, bool allPoints = false ){
-
+ virtual void getPointsFor(const BSONObj& key, const BSONObj& obj,
+ vector<BSONObj> &locsForNode, bool allPoints = false) {
// Find all the location objects from the keys
- vector< BSONObj > locs;
- _g->getKeys( obj, allPoints ? locsForNode : locs );
- _pointsLoaded++;
+ vector<BSONObj> locs;
+ _g->getKeys(obj, allPoints ? locsForNode : locs);
+ ++_pointsLoaded;
- if( allPoints ) return;
- if( locs.size() == 1 ){
- locsForNode.push_back( locs[0] );
+ if (allPoints) return;
+ if (locs.size() == 1){
+ locsForNode.push_back(locs[0]);
return;
}
// Find the particular location we want
- GeoHash keyHash( key.firstElement(), _g->_bits );
-
- // log() << "Hash: " << node.key << " and " << keyHash.getHash() << " unique " << _uniqueDocs << endl;
- for( vector< BSONObj >::iterator i = locs.begin(); i != locs.end(); ++i ) {
+ GeoHash keyHash(key.firstElement(), _g->getConverter().getBits());
+ for(vector< BSONObj >::iterator i = locs.begin(); i != locs.end(); ++i) {
// Ignore all locations not hashed to the key's hash, since we may see
// those later
- if( _g->_hash( *i ) != keyHash ) continue;
-
- locsForNode.push_back( *i );
-
+ if(_g->getConverter().hash(*i) != keyHash) continue;
+ locsForNode.push_back(*i);
}
-
}
- virtual int addSpecific( const GeoKeyNode& node, const Point& p , bool inBounds, double d, bool newDoc ) = 0;
- virtual KeyResult approxKeyCheck( const Point& p , double& keyD ) = 0;
- virtual bool exactDocCheck( const Point& p , double& d ) = 0;
+ virtual int addSpecific(const GeoKeyNode& node, const Point& p, bool inBounds, double d,
+ bool newDoc) = 0;
+ virtual KeyResult approxKeyCheck(const Point& p, double& keyD) = 0;
+ virtual bool exactDocCheck(const Point& p, double& d) = 0;
virtual bool expensiveExactCheck(){ return false; }
-
- long long found() const {
- return _found;
- }
+ long long found() const { return _found; }
const Geo2dType * _g;
map<DiskLoc, bool> _matched;
@@ -1167,10 +597,8 @@ namespace mongo {
bool _uniqueDocs;
bool _needDistance;
-
};
-
struct BtreeLocation {
BtreeLocation() { }
@@ -1182,21 +610,21 @@ namespace mongo {
return _cursor->currKey();
}
- bool hasPrefix( const GeoHash& hash ) {
+ bool hasPrefix(const GeoHash& hash) {
BSONObj k = key();
BSONElement e = k.firstElement();
- if ( e.eoo() )
+ if (e.eoo())
return false;
- return GeoHash( e ).hasPrefix( hash );
+ return GeoHash(e).hasPrefix(hash);
}
- bool checkAndAdvance( const GeoHash& hash, int& totalFound, GeoAccumulator* all ){
- if( ! _cursor->ok() || ! hasPrefix( hash ) ) return false;
+ bool checkAndAdvance(const GeoHash& hash, int& totalFound, GeoAccumulator* all){
+ if(! _cursor->ok() || ! hasPrefix(hash)) return false;
- if( all ){
+ if(all){
totalFound++;
- GeoKeyNode n( _cursor->getBucket(), _cursor->getKeyOfs(), _cursor->currLoc(), _cursor->currKey() );
- all->add( n );
+ GeoKeyNode n(_cursor->getBucket(), _cursor->getKeyOfs(), _cursor->currLoc(), _cursor->currKey());
+ all->add(n);
}
_cursor->advance();
@@ -1214,64 +642,64 @@ namespace mongo {
string toString() {
stringstream ss;
ss << "bucket: " << _cursor->getBucket().toString() << " pos: " << _cursor->getKeyOfs() <<
- ( _cursor->ok() ? ( str::stream() << " k: " << _cursor->currKey() << " o : " << _cursor->current()["_id"] ) : (string)"[none]" ) << endl;
+ (_cursor->ok() ? (str::stream() << " k: " << _cursor->currKey() << " o : " << _cursor->current()["_id"]) : (string)"[none]") << endl;
return ss.str();
}
// Returns the min and max keys which bound a particular location.
// The only time these may be equal is when we actually equal the location
// itself, otherwise our expanding algorithm will fail.
- static bool initial( const IndexDetails& id , const Geo2dType * spec ,
- BtreeLocation& min , BtreeLocation& max ,
- GeoHash start ,
- int & found , GeoAccumulator * hopper ) {
+ static bool initial(const IndexDetails& id, const Geo2dType * spec,
+ BtreeLocation& min, BtreeLocation& max,
+ GeoHash start,
+ int & found, GeoAccumulator * hopper) {
//Ordering ordering = Ordering::make(spec->_order);
// Would be nice to build this directly, but bug in max/min queries SERVER-3766 and lack of interface
// makes this easiest for now.
- BSONObj minQuery = BSON( spec->_geo << BSON( "$gt" << MINKEY << start.wrap( "$lte" ).firstElement() ) );
- BSONObj maxQuery = BSON( spec->_geo << BSON( "$lt" << MAXKEY << start.wrap( "$gt" ).firstElement() ) );
+ BSONObj minQuery = BSON(spec->_geo << BSON("$gt" << MINKEY << start.wrap("$lte").firstElement()));
+ BSONObj maxQuery = BSON(spec->_geo << BSON("$lt" << MAXKEY << start.wrap("$gt").firstElement()));
// log() << "MinQuery: " << minQuery << endl;
// log() << "MaxQuery: " << maxQuery << endl;
- min._frs.reset( new FieldRangeSet( spec->getDetails()->parentNS().c_str(),
+ min._frs.reset(new FieldRangeSet(spec->getDetails()->parentNS().c_str(),
minQuery,
true,
- false ) );
+ false));
- max._frs.reset( new FieldRangeSet( spec->getDetails()->parentNS().c_str(),
+ max._frs.reset(new FieldRangeSet(spec->getDetails()->parentNS().c_str(),
maxQuery,
true,
- false ) );
+ false));
BSONObjBuilder bob;
- bob.append( spec->_geo, 1 );
- for( vector<string>::const_iterator i = spec->_other.begin(); i != spec->_other.end(); i++ ){
- bob.append( *i, 1 );
+ bob.append(spec->_geo, 1);
+ for(vector<string>::const_iterator i = spec->_other.begin(); i != spec->_other.end(); i++){
+ bob.append(*i, 1);
}
BSONObj iSpec = bob.obj();
- min._spec.reset( new IndexSpec( iSpec ) );
- max._spec.reset( new IndexSpec( iSpec ) );
+ min._spec.reset(new IndexSpec(iSpec));
+ max._spec.reset(new IndexSpec(iSpec));
- shared_ptr<FieldRangeVector> frvMin( new FieldRangeVector( *(min._frs), *(min._spec), -1 ) );
- shared_ptr<FieldRangeVector> frvMax( new FieldRangeVector( *(max._frs), *(max._spec), 1 ) );
+ shared_ptr<FieldRangeVector> frvMin(new FieldRangeVector(*(min._frs), *(min._spec), -1));
+ shared_ptr<FieldRangeVector> frvMax(new FieldRangeVector(*(max._frs), *(max._spec), 1));
min._cursor.reset(
- BtreeCursor::make( nsdetails( spec->getDetails()->parentNS().c_str() ), *( spec->getDetails() ),
- frvMin, -1 )
- );
+ BtreeCursor::make(nsdetails(spec->getDetails()->parentNS().c_str()), *(spec->getDetails()),
+ frvMin, -1)
+ );
max._cursor.reset(
- BtreeCursor::make( nsdetails( spec->getDetails()->parentNS().c_str() ), *( spec->getDetails() ),
- frvMax, 1 )
- );
+ BtreeCursor::make(nsdetails(spec->getDetails()->parentNS().c_str()), *(spec->getDetails()),
+ frvMax, 1)
+ );
- // if( hopper ) min.checkCur( found, hopper );
- // if( hopper ) max.checkCur( found, hopper );
+ // if(hopper) min.checkCur(found, hopper);
+ // if(hopper) max.checkCur(found, hopper);
return min._cursor->ok() || max._cursor->ok();
}
@@ -1283,8 +711,8 @@ namespace mongo {
static const shared_ptr< CoveredIndexMatcher > emptyMatcher;
- GeoCursorBase( const Geo2dType * spec )
- : _spec( spec ), _id( _spec->getDetails() ) {
+ GeoCursorBase(const Geo2dType * spec)
+ : _spec(spec), _id(_spec->getDetails()) {
}
@@ -1316,12 +744,12 @@ namespace mongo {
const IndexDetails * _id;
};
- const shared_ptr< CoveredIndexMatcher > GeoCursorBase::emptyMatcher( new CoveredIndexMatcher( BSONObj(), BSONObj() ) );
+ const shared_ptr< CoveredIndexMatcher > GeoCursorBase::emptyMatcher(new CoveredIndexMatcher(BSONObj(), BSONObj()));
// TODO: Pull out the cursor bit from the browse, have GeoBrowse as field of cursor to clean up
// this hierarchy a bit. Also probably useful to look at whether GeoAccumulator can be a member instead
// of a superclass.
- class GeoBrowse : public GeoCursorBase , public GeoAccumulator {
+ class GeoBrowse : public GeoCursorBase, public GeoAccumulator {
public:
// The max points which should be added to an expanding box at one time
@@ -1329,15 +757,15 @@ namespace mongo {
// Expand states
enum State {
- START ,
- DOING_EXPAND ,
- DONE_NEIGHBOR ,
+ START,
+ DOING_EXPAND,
+ DONE_NEIGHBOR,
DONE
} _state;
- GeoBrowse( const Geo2dType * g , string type , BSONObj filter = BSONObj(), bool uniqueDocs = true, bool needDistance = false )
- : GeoCursorBase( g ), GeoAccumulator( g , filter, uniqueDocs, needDistance ) ,
- _type( type ) , _filter( filter ) , _firstCall(true), _noted( false ), _nscanned(), _nDirtied(0), _nChangedOnYield(0), _nRemovedOnYield(0), _centerPrefix(0, 0, 0) {
+ GeoBrowse(const Geo2dType * g, string type, BSONObj filter = BSONObj(), bool uniqueDocs = true, bool needDistance = false)
+ : GeoCursorBase(g), GeoAccumulator(g, filter, uniqueDocs, needDistance),
+ _type(type), _filter(filter), _firstCall(true), _noted(false), _nscanned(), _nDirtied(0), _nChangedOnYield(0), _nRemovedOnYield(0), _centerPrefix(0, 0, 0) {
// Set up the initial expand state
_state = START;
@@ -1354,110 +782,110 @@ namespace mongo {
bool filled = false;
- LOG( CDEBUG ) << "Checking cursor, in state " << (int) _state << ", first call " << _firstCall <<
+ LOG(CDEBUG) << "Checking cursor, in state " << (int) _state << ", first call " << _firstCall <<
", empty : " << _cur.isEmpty() << ", dirty : " << _cur.isDirty() << ", stack : " << _stack.size() << endl;
bool first = _firstCall;
- if ( _firstCall ) {
- fillStack( maxPointsHeuristic );
+ if (_firstCall) {
+ fillStack(maxPointsHeuristic);
filled = true;
_firstCall = false;
}
- if ( ! _cur.isCleanAndEmpty() || _stack.size() ) {
- if ( first ) {
+ if (! _cur.isCleanAndEmpty() || _stack.size()) {
+ if (first) {
++_nscanned;
}
- if( _noted && filled ) noteLocation();
+ if(_noted && filled) noteLocation();
return true;
}
- while ( moreToDo() ) {
+ while (moreToDo()) {
- LOG( CDEBUG ) << "Refilling stack..." << endl;
+ LOG(CDEBUG) << "Refilling stack..." << endl;
- fillStack( maxPointsHeuristic );
+ fillStack(maxPointsHeuristic);
filled = true;
- if ( ! _cur.isCleanAndEmpty() ) {
- if ( first ) {
+ if (! _cur.isCleanAndEmpty()) {
+ if (first) {
++_nscanned;
}
- if( _noted && filled ) noteLocation();
+ if(_noted && filled) noteLocation();
return true;
}
}
- if( _noted && filled ) noteLocation();
+ if(_noted && filled) noteLocation();
return false;
}
virtual bool advance() {
_cur._o = BSONObj();
- if ( _stack.size() ) {
+ if (_stack.size()) {
_cur = _stack.front();
_stack.pop_front();
++_nscanned;
return true;
}
- if ( ! moreToDo() )
+ if (! moreToDo())
return false;
bool filled = false;
- while ( _cur.isCleanAndEmpty() && moreToDo() ){
- fillStack( maxPointsHeuristic );
+ while (_cur.isCleanAndEmpty() && moreToDo()){
+ fillStack(maxPointsHeuristic);
filled = true;
}
- if( _noted && filled ) noteLocation();
+ if(_noted && filled) noteLocation();
return ! _cur.isCleanAndEmpty() && ++_nscanned;
}
virtual void noteLocation() {
_noted = true;
- LOG( CDEBUG ) << "Noting location with " << _stack.size() << ( _cur.isEmpty() ? "" : " + 1 " ) << " points " << endl;
+ LOG(CDEBUG) << "Noting location with " << _stack.size() << (_cur.isEmpty() ? "" : " + 1 ") << " points " << endl;
// Make sure we advance past the point we're at now,
// since the current location may move on an update/delete
- // if( _state == DOING_EXPAND ){
- // if( _min.hasPrefix( _prefix ) ){ _min.advance( -1, _foundInExp, this ); }
- // if( _max.hasPrefix( _prefix ) ){ _max.advance( 1, _foundInExp, this ); }
+ // if(_state == DOING_EXPAND){
+ // if(_min.hasPrefix(_prefix)){ _min.advance(-1, _foundInExp, this); }
+ // if(_max.hasPrefix(_prefix)){ _max.advance( 1, _foundInExp, this); }
// }
// Remember where our _max, _min are
_min.save();
_max.save();
- LOG( CDEBUG ) << "Min " << _min.toString() << endl;
- LOG( CDEBUG ) << "Max " << _max.toString() << endl;
+ LOG(CDEBUG) << "Min " << _min.toString() << endl;
+ LOG(CDEBUG) << "Max " << _max.toString() << endl;
// Dirty all our queued stuff
- for( list<GeoPoint>::iterator i = _stack.begin(); i != _stack.end(); i++ ){
+ for(list<GeoPoint>::iterator i = _stack.begin(); i != _stack.end(); i++){
- LOG( CDEBUG ) << "Undirtying stack point with id " << i->_id << endl;
+ LOG(CDEBUG) << "Undirtying stack point with id " << i->_id << endl;
- if( i->makeDirty() ) _nDirtied++;
- verify( i->isDirty() );
+ if(i->makeDirty()) _nDirtied++;
+ verify(i->isDirty());
}
// Check current item
- if( ! _cur.isEmpty() ){
- if( _cur.makeDirty() ) _nDirtied++;
+ if(! _cur.isEmpty()){
+ if(_cur.makeDirty()) _nDirtied++;
}
// Our cached matches become invalid now
_matched.clear();
}
- void fixMatches( DiskLoc oldLoc, DiskLoc newLoc ){
- map<DiskLoc, bool>::iterator match = _matched.find( oldLoc );
- if( match != _matched.end() ){
+ void fixMatches(DiskLoc oldLoc, DiskLoc newLoc){
+ map<DiskLoc, bool>::iterator match = _matched.find(oldLoc);
+ if(match != _matched.end()){
bool val = match->second;
- _matched.erase( oldLoc );
+ _matched.erase(oldLoc);
_matched[ newLoc ] = val;
}
}
@@ -1465,7 +893,7 @@ namespace mongo {
/* called before query getmore block is iterated */
virtual void checkLocation() {
- LOG( CDEBUG ) << "Restoring location with " << _stack.size() << ( ! _cur.isDirty() ? "" : " + 1 " ) << " points " << endl;
+ LOG(CDEBUG) << "Restoring location with " << _stack.size() << (! _cur.isDirty() ? "" : " + 1 ") << " points " << endl;
// We can assume an error was thrown earlier if this database somehow disappears
@@ -1473,63 +901,63 @@ namespace mongo {
_min.restore();
_max.restore();
- LOG( CDEBUG ) << "Min " << _min.toString() << endl;
- LOG( CDEBUG ) << "Max " << _max.toString() << endl;
+ LOG(CDEBUG) << "Min " << _min.toString() << endl;
+ LOG(CDEBUG) << "Max " << _max.toString() << endl;
// If the current key moved, we may have been advanced past the current point - need to check this
- // if( _state == DOING_EXPAND ){
- // if( _min.hasPrefix( _prefix ) ){ _min.advance( -1, _foundInExp, this ); }
- // if( _max.hasPrefix( _prefix ) ){ _max.advance( 1, _foundInExp, this ); }
+ // if(_state == DOING_EXPAND){
+ // if(_min.hasPrefix(_prefix)){ _min.advance(-1, _foundInExp, this); }
+ // if(_max.hasPrefix(_prefix)){ _max.advance( 1, _foundInExp, this); }
//}
// Undirty all the queued stuff
// Dirty all our queued stuff
list<GeoPoint>::iterator i = _stack.begin();
- while( i != _stack.end() ){
+ while(i != _stack.end()){
- LOG( CDEBUG ) << "Undirtying stack point with id " << i->_id << endl;
+ LOG(CDEBUG) << "Undirtying stack point with id " << i->_id << endl;
DiskLoc oldLoc;
- if( i->unDirty( _spec, oldLoc ) ){
+ if(i->unDirty(_spec, oldLoc)){
// Document is in same location
- LOG( CDEBUG ) << "Undirtied " << oldLoc << endl;
+ LOG(CDEBUG) << "Undirtied " << oldLoc << endl;
i++;
}
- else if( ! i->loc().isNull() ){
+ else if(! i->loc().isNull()){
// Re-found document somewhere else
- LOG( CDEBUG ) << "Changed location of " << i->_id << " : " << i->loc() << " vs " << oldLoc << endl;
+ LOG(CDEBUG) << "Changed location of " << i->_id << " : " << i->loc() << " vs " << oldLoc << endl;
_nChangedOnYield++;
- fixMatches( oldLoc, i->loc() );
+ fixMatches(oldLoc, i->loc());
i++;
}
else {
// Can't re-find document
- LOG( CDEBUG ) << "Removing document " << i->_id << endl;
+ LOG(CDEBUG) << "Removing document " << i->_id << endl;
_nRemovedOnYield++;
_found--;
- verify( _found >= 0 );
+ verify(_found >= 0);
// Can't find our key again, remove
- i = _stack.erase( i );
+ i = _stack.erase(i);
}
}
- if( _cur.isDirty() ){
- LOG( CDEBUG ) << "Undirtying cur point with id : " << _cur._id << endl;
+ if(_cur.isDirty()){
+ LOG(CDEBUG) << "Undirtying cur point with id : " << _cur._id << endl;
}
// Check current item
DiskLoc oldLoc;
- if( _cur.isDirty() && ! _cur.unDirty( _spec, oldLoc ) ){
- if( _cur.loc().isNull() ){
+ if(_cur.isDirty() && ! _cur.unDirty(_spec, oldLoc)){
+ if(_cur.loc().isNull()){
// Document disappeared!
- LOG( CDEBUG ) << "Removing cur point " << _cur._id << endl;
+ LOG(CDEBUG) << "Removing cur point " << _cur._id << endl;
_nRemovedOnYield++;
advance();
@@ -1537,23 +965,23 @@ namespace mongo {
else{
// Document moved
- LOG( CDEBUG ) << "Changed location of cur point " << _cur._id << " : " << _cur.loc() << " vs " << oldLoc << endl;
+ LOG(CDEBUG) << "Changed location of cur point " << _cur._id << " : " << _cur.loc() << " vs " << oldLoc << endl;
_nChangedOnYield++;
- fixMatches( oldLoc, _cur.loc() );
+ fixMatches(oldLoc, _cur.loc());
}
}
_noted = false;
}
- virtual Record* _current() { verify(ok()); LOG( CDEBUG + 1 ) << "_current " << _cur._loc.obj()["_id"] << endl; return _cur._loc.rec(); }
- virtual BSONObj current() { verify(ok()); LOG( CDEBUG + 1 ) << "current " << _cur._o << endl; return _cur._o; }
- virtual DiskLoc currLoc() { verify(ok()); LOG( CDEBUG + 1 ) << "currLoc " << _cur._loc << endl; return _cur._loc; }
+ virtual Record* _current() { verify(ok()); LOG(CDEBUG + 1) << "_current " << _cur._loc.obj()["_id"] << endl; return _cur._loc.rec(); }
+ virtual BSONObj current() { verify(ok()); LOG(CDEBUG + 1) << "current " << _cur._o << endl; return _cur._o; }
+ virtual DiskLoc currLoc() { verify(ok()); LOG(CDEBUG + 1) << "currLoc " << _cur._loc << endl; return _cur._loc; }
virtual BSONObj currKey() const { return _cur._key; }
virtual CoveredIndexMatcher* matcher() const {
- if( _matcher.get() ) return _matcher.get();
+ if(_matcher.get()) return _matcher.get();
else return GeoCursorBase::emptyMatcher.get();
}
@@ -1564,102 +992,108 @@ namespace mongo {
virtual bool supportGetMore() { return true; }
+ // XXX: make private, move into geohashconverter
+ Box makeBox(const GeoHash &hash) const {
+ double sizeEdge = _g->getConverter().sizeEdge(hash);
+ Point min(_g->getConverter().unhashToPoint(hash));
+ Point max(min.x + sizeEdge, min.y + sizeEdge);
+ return Box(min, max);
+ }
+
// Fills the stack, but only checks a maximum number of maxToCheck points at a time.
// Further calls to this function will continue the expand/check neighbors algorithm.
- virtual void fillStack( int maxToCheck, int maxToAdd = -1, bool onlyExpand = false ) {
+ virtual void fillStack(int maxToCheck, int maxToAdd = -1, bool onlyExpand = false) {
#ifdef GEODEBUGGING
log() << "Filling stack with maximum of " << maxToCheck << ", state : " << (int) _state << endl;
#endif
- if( maxToAdd < 0 ) maxToAdd = maxToCheck;
+ if(maxToAdd < 0) maxToAdd = maxToCheck;
int maxFound = _foundInExp + maxToCheck;
- verify( maxToCheck > 0 );
- verify( maxFound > 0 );
- verify( _found <= 0x7fffffff ); // conversion to int
+ verify(maxToCheck > 0);
+ verify(maxFound > 0);
+ verify(_found <= 0x7fffffff); // conversion to int
int maxAdded = static_cast<int>(_found) + maxToAdd;
- verify( maxAdded >= 0 ); // overflow check
+ verify(maxAdded >= 0); // overflow check
bool isNeighbor = _centerPrefix.constrains();
// Starting a box expansion
- if ( _state == START ) {
+ if (_state == START) {
// Get the very first hash point, if required
- if( ! isNeighbor )
+ if(! isNeighbor)
_prefix = expandStartHash();
- GEODEBUG( "initializing btree" );
+ GEODEBUG("initializing btree");
#ifdef GEODEBUGGING
- log() << "Initializing from b-tree with hash of " << _prefix << " @ " << Box( _g, _prefix ) << endl;
+ log() << "Initializing from b-tree with hash of " << _prefix << " @ " << Box(_g, _prefix) << endl;
#endif
- if ( ! BtreeLocation::initial( *_id , _spec , _min , _max , _prefix , _foundInExp , this ) )
+ if (! BtreeLocation::initial(*_id, _spec, _min, _max, _prefix, _foundInExp, this))
_state = isNeighbor ? DONE_NEIGHBOR : DONE;
else {
_state = DOING_EXPAND;
_lastPrefix.reset();
}
- GEODEBUG( (_state == DONE_NEIGHBOR || _state == DONE ? "not initialized" : "initializedFig") );
+ GEODEBUG((_state == DONE_NEIGHBOR || _state == DONE ? "not initialized" : "initializedFig"));
}
// Doing the actual box expansion
- if ( _state == DOING_EXPAND ) {
+ if (_state == DOING_EXPAND) {
- while ( true ) {
+ while (true) {
- GEODEBUG( "box prefix [" << _prefix << "]" );
+ GEODEBUG("box prefix [" << _prefix << "]");
#ifdef GEODEBUGGING
- if( _prefix.constrains() ) {
- log() << "current expand box : " << Box( _g, _prefix ).toString() << endl;
+ if(_prefix.constrains()) {
+ log() << "current expand box : " << Box(_g, _prefix).toString() << endl;
}
else {
log() << "max expand box." << endl;
}
#endif
- GEODEBUG( "expanding box points... ");
+ GEODEBUG("expanding box points... ");
// Record the prefix we're actively exploring...
- _expPrefix.reset( new GeoHash( _prefix ) );
+ _expPrefix.reset(new GeoHash(_prefix));
// Find points inside this prefix
- while ( _min.checkAndAdvance( _prefix, _foundInExp, this ) && _foundInExp < maxFound && _found < maxAdded );
- while ( _max.checkAndAdvance( _prefix, _foundInExp, this ) && _foundInExp < maxFound && _found < maxAdded );
+ while (_min.checkAndAdvance(_prefix, _foundInExp, this) && _foundInExp < maxFound && _found < maxAdded);
+ while (_max.checkAndAdvance(_prefix, _foundInExp, this) && _foundInExp < maxFound && _found < maxAdded);
#ifdef GEODEBUGGING
- log() << "finished expand, checked : " << ( maxToCheck - ( maxFound - _foundInExp ) )
- << " found : " << ( maxToAdd - ( maxAdded - _found ) )
+ log() << "finished expand, checked : " << (maxToCheck - (maxFound - _foundInExp))
+ << " found : " << (maxToAdd - (maxAdded - _found))
<< " max : " << maxToCheck << " / " << maxToAdd << endl;
#endif
- GEODEBUG( "finished expand, found : " << ( maxToAdd - ( maxAdded - _found ) ) );
- if( _foundInExp >= maxFound || _found >= maxAdded ) return;
+ GEODEBUG("finished expand, found : " << (maxToAdd - (maxAdded - _found)));
+ if(_foundInExp >= maxFound || _found >= maxAdded) return;
// We've searched this prefix fully, remember
- _lastPrefix.reset( new GeoHash( _prefix ));
+ _lastPrefix.reset(new GeoHash(_prefix));
// If we've searched the entire space, we're finished.
- if ( ! _prefix.constrains() ) {
- GEODEBUG( "box exhausted" );
+ if (! _prefix.constrains()) {
+ GEODEBUG("box exhausted");
_state = DONE;
notePrefix();
return;
}
// If we won't fit in the box, and we're not doing a sub-scan, increase the size
- if ( ! fitsInBox( _g->sizeEdge( _prefix ) ) && _fringe.size() == 0 ) {
-
+ if (! fitsInBox(_g->getConverter().sizeEdge(_prefix)) && _fringe.size() == 0) {
// If we're still not expanded bigger than the box size, expand again
// TODO: Is there an advantage to scanning prior to expanding?
_prefix = _prefix.up();
continue;
-
}
// log() << "finished box prefix [" << _prefix << "]" << endl;
@@ -1668,47 +1102,44 @@ namespace mongo {
_state = DONE_NEIGHBOR;
// Go to the next sub-box, if applicable
- if( _fringe.size() > 0 ) _fringe.pop_back();
+ if(_fringe.size() > 0) _fringe.pop_back();
// Go to the next neighbor if this was the last sub-search
- if( _fringe.size() == 0 ) _neighbor++;
+ if(_fringe.size() == 0) _neighbor++;
break;
-
}
notePrefix();
}
// If we doeighbors
- if( onlyExpand ) return;
+ if(onlyExpand) return;
// If we're done expanding the current box...
- if( _state == DONE_NEIGHBOR ) {
-
+ if(_state == DONE_NEIGHBOR) {
// Iterate to the next neighbor
// Loop is useful for cases where we want to skip over boxes entirely,
// otherwise recursion increments the neighbors.
- for ( ; _neighbor < 9; _neighbor++ ) {
-
+ for (; _neighbor < 9; _neighbor++) {
// If we have no fringe for the neighbor, make sure we have the default fringe
- if( _fringe.size() == 0 ) _fringe.push_back( "" );
+ if(_fringe.size() == 0) _fringe.push_back("");
- if( ! isNeighbor ) {
+ if(! isNeighbor) {
_centerPrefix = _prefix;
- _centerBox = Box( _g, _centerPrefix );
+ _centerBox = makeBox(_centerPrefix);
isNeighbor = true;
}
int i = (_neighbor / 3) - 1;
int j = (_neighbor % 3) - 1;
- if ( ( i == 0 && j == 0 ) ||
- ( i < 0 && _centerPrefix.atMinX() ) ||
- ( i > 0 && _centerPrefix.atMaxX() ) ||
- ( j < 0 && _centerPrefix.atMinY() ) ||
- ( j > 0 && _centerPrefix.atMaxY() ) ) {
+ if ((i == 0 && j == 0) ||
+ (i < 0 && _centerPrefix.atMinX()) ||
+ (i > 0 && _centerPrefix.atMaxX()) ||
+ (j < 0 && _centerPrefix.atMinY()) ||
+ (j > 0 && _centerPrefix.atMaxY())) {
- //log() << "not moving to neighbor " << _neighbor << " @ " << i << " , " << j << " fringe : " << _fringe.size() << " " << _centerPrefix << endl;
+ //log() << "not moving to neighbor " << _neighbor << " @ " << i << ", " << j << " fringe : " << _fringe.size() << " " << _centerPrefix << endl;
//log() << _centerPrefix.atMinX() << " "
// << _centerPrefix.atMinY() << " "
// << _centerPrefix.atMaxX() << " "
@@ -1720,44 +1151,45 @@ namespace mongo {
}
// Make sure we've got a reasonable center
- verify( _centerPrefix.constrains() );
+ verify(_centerPrefix.constrains());
GeoHash _neighborPrefix = _centerPrefix;
- _neighborPrefix.move( i, j );
+ _neighborPrefix.move(i, j);
- //log() << "moving to neighbor " << _neighbor << " @ " << i << " , " << j << " fringe : " << _fringe.size() << " " << _centerPrefix << " " << _neighborPrefix << endl;
+ //log() << "moving to neighbor " << _neighbor << " @ " << i << ", " << j << " fringe : " << _fringe.size() << " " << _centerPrefix << " " << _neighborPrefix << endl;
- GEODEBUG( "moving to neighbor " << _neighbor << " @ " << i << " , " << j << " fringe : " << _fringe.size() );
- PREFIXDEBUG( _centerPrefix, _g );
- PREFIXDEBUG( _neighborPrefix , _g );
- while( _fringe.size() > 0 ) {
+ GEODEBUG("moving to neighbor " << _neighbor << " @ " << i << ", " << j
+ << " fringe : " << _fringe.size());
+ PREFIXDEBUG(_centerPrefix, _g);
+ PREFIXDEBUG(_neighborPrefix, _g);
+ while(_fringe.size() > 0) {
_prefix = _neighborPrefix + _fringe.back();
- Box cur( _g , _prefix );
+ Box cur(makeBox(_prefix));
- PREFIXDEBUG( _prefix, _g );
+ PREFIXDEBUG(_prefix, _g);
- double intAmt = intersectsBox( cur );
+ double intAmt = intersectsBox(cur);
// No intersection
- if( intAmt <= 0 ) {
- GEODEBUG( "skipping box" << cur.toString() );
+ if(intAmt <= 0) {
+ GEODEBUG("skipping box" << cur.toString());
_fringe.pop_back();
continue;
}
// Small intersection, refine search
- else if( intAmt < 0.5 && _prefix.canRefine() && _fringe.back().size() < 4 /* two bits */ ) {
+ else if(intAmt < 0.5 && _prefix.canRefine() && _fringe.back().size() < 4 /* two bits */) {
- GEODEBUG( "Intersection small : " << intAmt << ", adding to fringe: " << _fringe.back() << " curr prefix : " << _prefix << " bits : " << _prefix.getBits() );
+ GEODEBUG("Intersection small : " << intAmt << ", adding to fringe: " << _fringe.back() << " curr prefix : " << _prefix << " bits : " << _prefix.getBits());
- // log() << "Diving to level : " << ( _fringe.back().size() / 2 + 1 ) << endl;
+ // log() << "Diving to level : " << (_fringe.back().size() / 2 + 1) << endl;
string lastSuffix = _fringe.back();
_fringe.pop_back();
- _fringe.push_back( lastSuffix + "00" );
- _fringe.push_back( lastSuffix + "01" );
- _fringe.push_back( lastSuffix + "11" );
- _fringe.push_back( lastSuffix + "10" );
+ _fringe.push_back(lastSuffix + "00");
+ _fringe.push_back(lastSuffix + "01");
+ _fringe.push_back(lastSuffix + "11");
+ _fringe.push_back(lastSuffix + "10");
continue;
}
@@ -1765,23 +1197,23 @@ namespace mongo {
// Restart our search from a diff box.
_state = START;
- verify( ! onlyExpand );
+ verify(! onlyExpand);
- verify( _found <= 0x7fffffff );
- fillStack( maxFound - _foundInExp, maxAdded - static_cast<int>(_found) );
+ verify(_found <= 0x7fffffff);
+ fillStack(maxFound - _foundInExp, maxAdded - static_cast<int>(_found));
// When we return from the recursive fillStack call, we'll either have checked enough points or
// be entirely done. Max recurse depth is < 8 * 16.
// If we're maxed out on points, return
- if( _foundInExp >= maxFound || _found >= maxAdded ) {
+ if(_foundInExp >= maxFound || _found >= maxAdded) {
// Make sure we'll come back to add more points
- verify( _state == DOING_EXPAND );
+ verify(_state == DOING_EXPAND);
return;
}
// Otherwise we must be finished to return
- verify( _state == DONE );
+ verify(_state == DONE);
return;
}
@@ -1798,42 +1230,42 @@ namespace mongo {
virtual GeoHash expandStartHash() = 0;
// Whether the current box width is big enough for our search area
- virtual bool fitsInBox( double width ) = 0;
+ virtual bool fitsInBox(double width) = 0;
// The amount the current box overlaps our search area
- virtual double intersectsBox( Box& cur ) = 0;
+ virtual double intersectsBox(Box& cur) = 0;
- bool remembered( BSONObj o ){
+ bool remembered(BSONObj o){
BSONObj seenId = o["_id"].wrap("").getOwned();
- if( _seenIds.find( seenId ) != _seenIds.end() ){
- LOG( CDEBUG + 1 ) << "Object " << o["_id"] << " already seen." << endl;
+ if(_seenIds.find(seenId) != _seenIds.end()){
+ LOG(CDEBUG + 1) << "Object " << o["_id"] << " already seen." << endl;
return true;
}
else{
- _seenIds.insert( seenId );
- LOG( CDEBUG + 1 ) << "Object " << o["_id"] << " remembered." << endl;
+ _seenIds.insert(seenId);
+ LOG(CDEBUG + 1) << "Object " << o["_id"] << " remembered." << endl;
return false;
}
}
- virtual int addSpecific( const GeoKeyNode& node , const Point& keyP , bool onBounds , double keyD , bool potentiallyNewDoc ) {
+ virtual int addSpecific(const GeoKeyNode& node, const Point& keyP, bool onBounds, double keyD, bool potentiallyNewDoc) {
int found = 0;
// We need to handle every possible point in this method, even those not in the key value, to
// avoid us tracking which hashes we've already seen.
- if( ! potentiallyNewDoc ){
+ if(! potentiallyNewDoc){
// log() << "Already handled doc!" << endl;
return 0;
}
// Final check for new doc
// OK to touch, since we're probably returning this object now
- if( remembered( node.recordLoc.obj() ) ) return 0;
+ if(remembered(node.recordLoc.obj())) return 0;
- if( _uniqueDocs && ! onBounds ) {
+ if(_uniqueDocs && ! onBounds) {
//log() << "Added ind to " << _type << endl;
- _stack.push_front( GeoPoint( node ) );
+ _stack.push_front(GeoPoint(node));
found++;
}
else {
@@ -1845,33 +1277,33 @@ namespace mongo {
bool expensiveExact = expensiveExactCheck();
vector< BSONObj > locs;
- getPointsFor( node._key, node.recordLoc.obj(), locs, true );
- for( vector< BSONObj >::iterator i = locs.begin(); i != locs.end(); ++i ){
+ getPointsFor(node._key, node.recordLoc.obj(), locs, true);
+ for(vector< BSONObj >::iterator i = locs.begin(); i != locs.end(); ++i){
double d = -1;
- Point p( *i );
+ Point p(*i);
// We can avoid exact document checks by redoing approx checks,
// if the exact checks are more expensive.
bool needExact = true;
- if( expensiveExact ){
- verify( false );
- KeyResult result = approxKeyCheck( p, d );
- if( result == BAD ) continue;
- else if( result == GOOD ) needExact = false;
+ if(expensiveExact){
+ verify(false);
+ KeyResult result = approxKeyCheck(p, d);
+ if(result == BAD) continue;
+ else if(result == GOOD) needExact = false;
}
- if( ! needExact || exactDocCheck( p, d ) ){
+ if(! needExact || exactDocCheck(p, d)){
//log() << "Added mult to " << _type << endl;
- _stack.push_front( GeoPoint( node ) );
+ _stack.push_front(GeoPoint(node));
found++;
// If returning unique, just exit after first point is added
- if( _uniqueDocs ) break;
+ if(_uniqueDocs) break;
}
}
}
- while( _cur.isCleanAndEmpty() && _stack.size() > 0 ){
+ while(_cur.isCleanAndEmpty() && _stack.size() > 0){
_cur = _stack.front();
_stack.pop_front();
}
@@ -1880,13 +1312,13 @@ namespace mongo {
}
virtual long long nscanned() {
- if ( _firstCall ) {
+ if (_firstCall) {
ok();
}
return _nscanned;
}
- virtual void explainDetails( BSONObjBuilder& b ){
+ virtual void explainDetails(BSONObjBuilder& b){
b << "lookedAt" << _lookedAt;
b << "matchesPerfd" << _matchesPerfd;
b << "objectsLoaded" << _objectsLoaded;
@@ -1899,13 +1331,13 @@ namespace mongo {
virtual BSONObj prettyIndexBounds() const {
vector<GeoHash>::const_iterator i = _expPrefixes.end();
- if( _expPrefixes.size() > 0 && *(--i) != *( _expPrefix.get() ) )
- _expPrefixes.push_back( *( _expPrefix.get() ) );
+ if(_expPrefixes.size() > 0 && *(--i) != *(_expPrefix.get()))
+ _expPrefixes.push_back(*(_expPrefix.get()));
BSONObjBuilder bob;
BSONArrayBuilder bab;
- for( i = _expPrefixes.begin(); i != _expPrefixes.end(); ++i ){
- bab << Box( _g, *i ).toBSON();
+ for(i = _expPrefixes.begin(); i != _expPrefixes.end(); ++i){
+ bab << makeBox(*i).toBSON();
}
bob << _g->_geo << bab.arr();
@@ -1914,7 +1346,7 @@ namespace mongo {
}
void notePrefix() {
- _expPrefixes.push_back( _prefix );
+ _expPrefixes.push_back(_prefix);
}
string _type;
@@ -1960,55 +1392,65 @@ namespace mongo {
public:
typedef multiset<GeoPoint> Holder;
- GeoHopper( const Geo2dType * g , unsigned max , const Point& n , const BSONObj& filter = BSONObj() , double maxDistance = numeric_limits<double>::max() , GeoDistType type=GEO_PLAIN, bool uniqueDocs = false, bool needDistance = true )
- : GeoBrowse( g, "search", filter, uniqueDocs, needDistance ), _max( max ) , _near( n ), _maxDistance( maxDistance ), _type( type ), _distError( type == GEO_PLAIN ? g->_error : g->_errorSphere ), _farthest(0)
+ GeoHopper(const Geo2dType * g,
+ unsigned max,
+ const Point& n,
+ const BSONObj& filter = BSONObj(),
+ double maxDistance = numeric_limits<double>::max(),
+ GeoDistType type = GEO_PLAIN,
+ bool uniqueDocs = false,
+ bool needDistance = true)
+ : GeoBrowse(g, "search", filter, uniqueDocs, needDistance),
+ _max(max),
+ _near(n),
+ _maxDistance(maxDistance),
+ _type(type),
+ _distError(type == GEO_PLAIN ? g->getConverter().getError()
+ : g->getConverter().getErrorSphere()),
+ _farthest(0)
{}
- virtual KeyResult approxKeyCheck( const Point& p, double& d ) {
-
+ virtual KeyResult approxKeyCheck(const Point& p, double& d) {
// Always check approximate distance, since it lets us avoid doing
// checks of the rest of the object if it succeeds
-
switch (_type) {
case GEO_PLAIN:
- d = _near.distance( p );
+ d = distance(_near, p);
break;
case GEO_SPHERE:
- checkEarthBounds( p );
- d = spheredist_deg( _near, p );
+ checkEarthBounds(p);
+ d = spheredist_deg(_near, p);
break;
- default: verify( false );
+ default: verify(false);
}
- verify( d >= 0 );
+ verify(d >= 0);
- GEODEBUG( "\t\t\t\t\t\t\t checkDistance " << _near.toString()
+ GEODEBUG("\t\t\t\t\t\t\t checkDistance " << _near.toString()
<< "\t" << p.toString() << "\t" << d
- << " farthest: " << farthest() );
+ << " farthest: " << farthest());
// If we need more points
- double borderDist = ( _points.size() < _max ? _maxDistance : farthest() );
+ double borderDist = (_points.size() < _max ? _maxDistance : farthest());
- if( d >= borderDist - 2 * _distError && d <= borderDist + 2 * _distError ) return BORDER;
+ if (d >= borderDist - 2 * _distError && d <= borderDist + 2 * _distError) return BORDER;
else return d < borderDist ? GOOD : BAD;
-
}
- virtual bool exactDocCheck( const Point& p, double& d ){
-
+ virtual bool exactDocCheck(const Point& p, double& d){
bool within = false;
// Get the appropriate distance for the type
- switch ( _type ) {
+ switch (_type) {
case GEO_PLAIN:
- d = _near.distance( p );
- within = _near.distanceWithin( p, _maxDistance );
+ d = distance(_near, p);
+ within = distanceWithin(_near, p, _maxDistance);
break;
case GEO_SPHERE:
- checkEarthBounds( p );
- d = spheredist_deg( _near, p );
- within = ( d <= _maxDistance );
+ checkEarthBounds(p);
+ d = spheredist_deg(_near, p);
+ within = (d <= _maxDistance);
break;
- default: verify( false );
+ default: verify(false);
}
return within;
@@ -2019,39 +1461,39 @@ namespace mongo {
return _farthest;
}
- virtual int addSpecific( const GeoKeyNode& node, const Point& keyP, bool onBounds, double keyD, bool potentiallyNewDoc ) {
+ virtual int addSpecific(const GeoKeyNode& node, const Point& keyP, bool onBounds, double keyD, bool potentiallyNewDoc) {
// Unique documents
- GeoPoint newPoint( node, keyD, false );
+ GeoPoint newPoint(node, keyD, false);
int prevSize = _points.size();
// STEP 1 : Remove old duplicate points from the set if needed
- if( _uniqueDocs ){
+ if(_uniqueDocs){
// Lookup old point with same doc
- map< DiskLoc , Holder::iterator >::iterator oldPointIt = _seenPts.find( newPoint.loc() );
+ map< DiskLoc, Holder::iterator >::iterator oldPointIt = _seenPts.find(newPoint.loc());
- if( oldPointIt != _seenPts.end() ){
+ if(oldPointIt != _seenPts.end()){
const GeoPoint& oldPoint = *(oldPointIt->second);
// We don't need to care if we've already seen this same approx pt or better,
// or we've already gone to disk once for the point
- if( oldPoint < newPoint ){
- GEODEBUG( "\t\tOld point closer than new point" );
+ if(oldPoint < newPoint){
+ GEODEBUG("\t\tOld point closer than new point");
return 0;
}
- GEODEBUG( "\t\tErasing old point " << oldPointIt->first.obj() );
- _points.erase( oldPointIt->second );
+ GEODEBUG("\t\tErasing old point " << oldPointIt->first.obj());
+ _points.erase(oldPointIt->second);
}
}
- Holder::iterator newIt = _points.insert( newPoint );
- if( _uniqueDocs ) _seenPts[ newPoint.loc() ] = newIt;
+ Holder::iterator newIt = _points.insert(newPoint);
+ if(_uniqueDocs) _seenPts[ newPoint.loc() ] = newIt;
- GEODEBUG( "\t\tInserted new point " << newPoint.toString() << " approx : " << keyD );
+ GEODEBUG("\t\tInserted new point " << newPoint.toString() << " approx : " << keyD);
- verify( _max > 0 );
+ verify(_max > 0);
Holder::iterator lastPtIt = _points.end();
lastPtIt--;
@@ -2066,7 +1508,7 @@ namespace mongo {
// so we'll do this every once and awhile.
void processExtraPoints(){
- if( _points.size() == 0 ) return;
+ if(_points.size() == 0) return;
int prevSize = _points.size();
@@ -2074,32 +1516,32 @@ namespace mongo {
// whose distance isn't close to the _max - 1 position distance
int numToErase = _points.size() - _max;
- if( numToErase < 0 ) numToErase = 0;
+ if(numToErase < 0) numToErase = 0;
// Get the first point definitely in the _points array
Holder::iterator startErase = _points.end();
- for( int i = 0; i < numToErase + 1; i++ ) startErase--;
+ for(int i = 0; i < numToErase + 1; i++) startErase--;
_farthest = startErase->distance() + 2 * _distError;
- GEODEBUG( "\t\tPotentially erasing " << numToErase << " points, " << " size : " << _points.size() << " max : " << _max << " dist : " << startErase->distance() << " farthest dist : " << _farthest << " from error : " << _distError );
+ GEODEBUG("\t\tPotentially erasing " << numToErase << " points, " << " size : " << _points.size() << " max : " << _max << " dist : " << startErase->distance() << " farthest dist : " << _farthest << " from error : " << _distError);
startErase++;
- while( numToErase > 0 && startErase->distance() <= _farthest ){
- GEODEBUG( "\t\tNot erasing point " << startErase->toString() );
+ while(numToErase > 0 && startErase->distance() <= _farthest){
+ GEODEBUG("\t\tNot erasing point " << startErase->toString());
numToErase--;
startErase++;
- verify( startErase != _points.end() || numToErase == 0 );
+ verify(startErase != _points.end() || numToErase == 0);
}
- if( _uniqueDocs ){
- for( Holder::iterator i = startErase; i != _points.end(); ++i )
- _seenPts.erase( i->loc() );
+ if(_uniqueDocs){
+ for(Holder::iterator i = startErase; i != _points.end(); ++i)
+ _seenPts.erase(i->loc());
}
- _points.erase( startErase, _points.end() );
+ _points.erase(startErase, _points.end());
int diff = _points.size() - prevSize;
- if( diff > 0 ) _found += diff;
+ if(diff > 0) _found += diff;
else _found -= -diff;
}
@@ -2114,7 +1556,7 @@ namespace mongo {
// Safe to use currently since we don't yield in $near searches. If we do start to yield, we may need to
// replace dirtied disklocs in our holder / ensure our logic is correct.
- map< DiskLoc , Holder::iterator > _seenPts;
+ map< DiskLoc, Holder::iterator > _seenPts;
};
@@ -2122,31 +1564,39 @@ namespace mongo {
class GeoSearch : public GeoHopper {
public:
- GeoSearch( const Geo2dType * g , const Point& startPt , int numWanted=100 , BSONObj filter=BSONObj() , double maxDistance = numeric_limits<double>::max() , GeoDistType type=GEO_PLAIN, bool uniqueDocs = false, bool needDistance = false )
- : GeoHopper( g , numWanted , startPt , filter , maxDistance, type, uniqueDocs, needDistance ),
- _start( g->hash( startPt._x, startPt._y ) ),
+ GeoSearch(const Geo2dType * g,
+ const Point& startPt,
+ int numWanted = 100,
+ BSONObj filter = BSONObj(),
+ double maxDistance = numeric_limits<double>::max(),
+ GeoDistType type = GEO_PLAIN,
+ bool uniqueDocs = false,
+ bool needDistance = false)
+ : GeoHopper(g, numWanted, startPt, filter, maxDistance, type, uniqueDocs, needDistance),
+ _start(g->getConverter().hash(startPt.x, startPt.y)),
// TODO: Remove numWanted...
- _numWanted( numWanted ),
+ _numWanted(numWanted),
_type(type)
{
- verify( g->getDetails() );
+ verify(g->getDetails());
_nscanned = 0;
_found = 0;
- if( _maxDistance < 0 ){
+ if(_maxDistance < 0){
_scanDistance = numeric_limits<double>::max();
}
else if (type == GEO_PLAIN) {
- _scanDistance = maxDistance + _spec->_error;
+ _scanDistance = maxDistance + _spec->getConverter().getError();
}
else if (type == GEO_SPHERE) {
- checkEarthBounds( startPt );
+ checkEarthBounds(startPt);
// TODO: consider splitting into x and y scan distances
- _scanDistance = computeXScanDistance( startPt._y, rad2deg( _maxDistance ) + _spec->_error );
+ _scanDistance = computeXScanDistance(startPt.y,
+ rad2deg(_maxDistance) + _spec->getConverter().getError());
}
- verify( _scanDistance > 0 );
+ verify(_scanDistance > 0);
}
@@ -2161,7 +1611,7 @@ namespace mongo {
void exec() {
- if( _numWanted == 0 ) return;
+ if(_numWanted == 0) return;
/*
* Search algorithm
@@ -2181,15 +1631,16 @@ namespace mongo {
{
do {
long long f = found();
- verify( f <= 0x7fffffff );
- fillStack( maxPointsHeuristic, _numWanted - static_cast<int>(f) , true );
+ verify(f <= 0x7fffffff);
+ fillStack(maxPointsHeuristic, _numWanted - static_cast<int>(f), true);
processExtraPoints();
- } while( _state != DONE && _state != DONE_NEIGHBOR &&
+ } while(_state != DONE && _state != DONE_NEIGHBOR &&
found() < _numWanted &&
- (! _prefix.constrains() || _g->sizeEdge( _prefix ) <= _scanDistance ) );
+ (!_prefix.constrains() ||
+ _g->getConverter().sizeEdge(_prefix) <= _scanDistance));
// If we couldn't scan or scanned everything, we're done
- if( _state == DONE ){
+ if(_state == DONE){
expandEndPoints();
return;
}
@@ -2198,34 +1649,35 @@ namespace mongo {
#ifdef GEODEBUGGING
log() << "part 1 of near search completed, found " << found() << " points (out of " << _foundInExp << " scanned)"
- << " in expanded region " << _prefix << " @ " << Box( _g, _prefix )
+ << " in expanded region " << _prefix << " @ " << Box(_g, _prefix)
<< " with furthest distance " << farthest() << endl;
#endif
// Part 2
{
-
// Find farthest distance for completion scan
double farDist = farthest();
- if( found() < _numWanted ) {
+ if(found() < _numWanted) {
// Not enough found in Phase 1
farDist = _scanDistance;
}
- else if ( _type == GEO_PLAIN ) {
+ else if (_type == GEO_PLAIN) {
// Enough found, but need to search neighbor boxes
- farDist += _spec->_error;
+ farDist += _spec->getConverter().getError();
}
- else if ( _type == GEO_SPHERE ) {
+ else if (_type == GEO_SPHERE) {
// Enough found, but need to search neighbor boxes
- farDist = std::min( _scanDistance, computeXScanDistance( _near._y, rad2deg( farDist ) ) + 2 * _spec->_error );
+ farDist = std::min(_scanDistance,
+ computeXScanDistance(_near.y,
+ rad2deg(farDist)) + 2 * _spec->getConverter().getError());
}
- verify( farDist >= 0 );
- GEODEBUGPRINT( farDist );
+ verify(farDist >= 0);
+ GEODEBUGPRINT(farDist);
// Find the box that includes all the points we need to return
- _want = Box( _near._x - farDist , _near._y - farDist , farDist * 2 );
- GEODEBUGPRINT( _want.toString() );
+ _want = Box(_near.x - farDist, _near.y - farDist, farDist * 2);
+ GEODEBUGPRINT(_want.toString());
// log() << "Found : " << found() << " wanted : " << _numWanted << " Far distance : " << farDist << " box : " << _want << endl;
@@ -2233,87 +1685,87 @@ namespace mongo {
_scanDistance = farDist;
// Reset the search, our distances have probably changed
- if( _state == DONE_NEIGHBOR ){
+ if(_state == DONE_NEIGHBOR){
_state = DOING_EXPAND;
_neighbor = -1;
}
#ifdef GEODEBUGGING
- log() << "resetting search with start at " << _start << " (edge length " << _g->sizeEdge( _start ) << ")" << endl;
+ log() << "resetting search with start at " << _start << " (edge length " << _g->sizeEdge(_start) << ")" << endl;
#endif
// Do regular search in the full region
do {
- fillStack( maxPointsHeuristic );
+ fillStack(maxPointsHeuristic);
processExtraPoints();
}
- while( _state != DONE );
+ while(_state != DONE);
}
- GEODEBUG( "done near search with " << _points.size() << " points " );
+ GEODEBUG("done near search with " << _points.size() << " points ");
expandEndPoints();
}
- void addExactPoints( const GeoPoint& pt, Holder& points, bool force ){
+ void addExactPoints(const GeoPoint& pt, Holder& points, bool force){
int before, after;
- addExactPoints( pt, points, before, after, force );
+ addExactPoints(pt, points, before, after, force);
}
- void addExactPoints( const GeoPoint& pt, Holder& points, int& before, int& after, bool force ){
+ void addExactPoints(const GeoPoint& pt, Holder& points, int& before, int& after, bool force){
before = 0;
after = 0;
- GEODEBUG( "Adding exact points for " << pt.toString() );
+ GEODEBUG("Adding exact points for " << pt.toString());
- if( pt.isExact() ){
- if( force ) points.insert( pt );
+ if(pt.isExact()){
+ if(force) points.insert(pt);
return;
}
vector<BSONObj> locs;
- getPointsFor( pt.key(), pt.obj(), locs, _uniqueDocs );
+ getPointsFor(pt.key(), pt.obj(), locs, _uniqueDocs);
- GeoPoint nearestPt( pt, -1, true );
+ GeoPoint nearestPt(pt, -1, true);
- for( vector<BSONObj>::iterator i = locs.begin(); i != locs.end(); i++ ){
+ for(vector<BSONObj>::iterator i = locs.begin(); i != locs.end(); i++){
- Point loc( *i );
+ Point loc(*i);
double d;
- if( ! exactDocCheck( loc, d ) ) continue;
+ if(! exactDocCheck(loc, d)) continue;
- if( _uniqueDocs && ( nearestPt.distance() < 0 || d < nearestPt.distance() ) ){
+ if(_uniqueDocs && (nearestPt.distance() < 0 || d < nearestPt.distance())){
nearestPt._distance = d;
nearestPt._pt = *i;
continue;
}
- else if( ! _uniqueDocs ){
- GeoPoint exactPt( pt, d, true );
+ else if(! _uniqueDocs){
+ GeoPoint exactPt(pt, d, true);
exactPt._pt = *i;
- GEODEBUG( "Inserting exact pt " << exactPt.toString() << " for " << pt.toString() << " exact : " << d << " is less? " << ( exactPt < pt ) << " bits : " << _g->_bits );
- points.insert( exactPt );
+ GEODEBUG("Inserting exact pt " << exactPt.toString() << " for " << pt.toString() << " exact : " << d << " is less? " << (exactPt < pt) << " bits : " << _g->_bits);
+ points.insert(exactPt);
exactPt < pt ? before++ : after++;
}
}
- if( _uniqueDocs && nearestPt.distance() >= 0 ){
- GEODEBUG( "Inserting unique exact pt " << nearestPt.toString() << " for " << pt.toString() << " exact : " << nearestPt.distance() << " is less? " << ( nearestPt < pt ) << " bits : " << _g->_bits );
- points.insert( nearestPt );
- if( nearestPt < pt ) before++;
+ if(_uniqueDocs && nearestPt.distance() >= 0){
+ GEODEBUG("Inserting unique exact pt " << nearestPt.toString() << " for " << pt.toString() << " exact : " << nearestPt.distance() << " is less? " << (nearestPt < pt) << " bits : " << _g->_bits);
+ points.insert(nearestPt);
+ if(nearestPt < pt) before++;
else after++;
}
}
// TODO: Refactor this back into holder class, allow to run periodically when we are seeing a lot of pts
- void expandEndPoints( bool finish = true ){
+ void expandEndPoints(bool finish = true){
processExtraPoints();
@@ -2322,11 +1774,11 @@ namespace mongo {
// Step 1 : Trim points to max size
// TODO: This check will do little for now, but is skeleton for future work in incremental $near
// searches
- if( _max > 0 ){
+ if(_max > 0){
int numToErase = _points.size() - _max;
- if( numToErase > 0 ){
+ if(numToErase > 0){
Holder tested;
@@ -2335,85 +1787,83 @@ namespace mongo {
maybePointIt--;
double approxMin = maybePointIt->distance() - 2 * _distError;
- GEODEBUG( "\t\tNeed to erase " << numToErase << " max : " << _max << " min dist " << approxMin << " error : " << _distError << " starting from : " << (*maybePointIt).toString() );
+ GEODEBUG("\t\tNeed to erase " << numToErase << " max : " << _max << " min dist " << approxMin << " error : " << _distError << " starting from : " << (*maybePointIt).toString());
// Insert all
int erased = 0;
- while( _points.size() > 0 && ( maybePointIt->distance() >= approxMin || erased < numToErase ) ){
+ while(_points.size() > 0 && (maybePointIt->distance() >= approxMin || erased < numToErase)){
Holder::iterator current = maybePointIt--;
- addExactPoints( *current, tested, true );
- _points.erase( current );
+ addExactPoints(*current, tested, true);
+ _points.erase(current);
erased++;
- if( tested.size() )
+ if(tested.size())
approxMin = tested.begin()->distance() - 2 * _distError;
}
- GEODEBUG( "\t\tEnding search at point " << ( _points.size() == 0 ? "(beginning)" : maybePointIt->toString() ) );
+ GEODEBUG("\t\tEnding search at point " << (_points.size() == 0 ? "(beginning)" : maybePointIt->toString()));
int numToAddBack = erased - numToErase;
- verify( numToAddBack >= 0 );
+ verify(numToAddBack >= 0);
- GEODEBUG( "\t\tNum tested valid : " << tested.size() << " erased : " << erased << " added back : " << numToAddBack );
+ GEODEBUG("\t\tNum tested valid : " << tested.size() << " erased : " << erased << " added back : " << numToAddBack);
#ifdef GEODEBUGGING
- for( Holder::iterator it = tested.begin(); it != tested.end(); it++ ){
+ for(Holder::iterator it = tested.begin(); it != tested.end(); it++){
log() << "Tested Point: " << *it << endl;
}
#endif
Holder::iterator testedIt = tested.begin();
- for( int i = 0; i < numToAddBack && testedIt != tested.end(); i++ ){
- _points.insert( *testedIt );
+ for(int i = 0; i < numToAddBack && testedIt != tested.end(); i++){
+ _points.insert(*testedIt);
testedIt++;
}
}
}
#ifdef GEODEBUGGING
- for( Holder::iterator it = _points.begin(); it != _points.end(); it++ ){
+ for(Holder::iterator it = _points.begin(); it != _points.end(); it++){
log() << "Point: " << *it << endl;
}
#endif
// We've now trimmed first set of unneeded points
- GEODEBUG( "\t\t Start expanding, num points : " << _points.size() << " max : " << _max );
+ GEODEBUG("\t\t Start expanding, num points : " << _points.size() << " max : " << _max);
// Step 2: iterate through all points and add as needed
-
unsigned expandedPoints = 0;
Holder::iterator it = _points.begin();
double expandWindowEnd = -1;
- while( it != _points.end() ){
- const GeoPoint& currPt = *it;
+ while(it != _points.end()){
+ const GeoPoint& currPt = *it;
// TODO: If one point is exact, maybe not 2 * _distError
// See if we're in an expand window
bool inWindow = currPt.distance() <= expandWindowEnd;
// If we're not, and we're done with points, break
- if( ! inWindow && expandedPoints >= _max ) break;
-
- bool expandApprox = ! currPt.isExact() && ( ! _uniqueDocs || ( finish && _needDistance ) || inWindow );
+ if(! inWindow && expandedPoints >= _max) break;
- if( expandApprox ){
+ bool expandApprox = !currPt.isExact() &&
+ (!_uniqueDocs || (finish && _needDistance) || inWindow);
- // Add new point(s)
- // These will only be added in a radius of 2 * _distError around the current point,
- // so should not affect previously valid points.
+ if (expandApprox) {
+ // Add new point(s). These will only be added in a radius of 2 * _distError
+ // around the current point, so should not affect previously valid points.
int before, after;
- addExactPoints( currPt, _points, before, after, false );
+ addExactPoints(currPt, _points, before, after, false);
expandedPoints += before;
- if( _max > 0 && expandedPoints < _max )
+ if(_max > 0 && expandedPoints < _max)
expandWindowEnd = currPt.distance() + 2 * _distError;
// Iterate to the next point
Holder::iterator current = it++;
// Erase the current point
- _points.erase( current );
+ _points.erase(current);
}
else{
@@ -2422,15 +1872,16 @@ namespace mongo {
}
}
- GEODEBUG( "\t\tFinished expanding, num points : " << _points.size() << " max : " << _max );
+ GEODEBUG("\t\tFinished expanding, num points : " << _points.size()
+ << " max : " << _max);
// Finish
// TODO: Don't really need to trim?
- for( ; expandedPoints > _max; expandedPoints-- ) it--;
- _points.erase( it, _points.end() );
+ for(; expandedPoints > _max; expandedPoints--) it--;
+ _points.erase(it, _points.end());
#ifdef GEODEBUGGING
- for( Holder::iterator it = _points.begin(); it != _points.end(); it++ ){
+ for(Holder::iterator it = _points.begin(); it != _points.end(); it++){
log() << "Point: " << *it << endl;
}
#endif
@@ -2441,13 +1892,13 @@ namespace mongo {
}
// Whether the current box width is big enough for our search area
- virtual bool fitsInBox( double width ){
+ virtual bool fitsInBox(double width){
return width >= _scanDistance;
}
// Whether the current box overlaps our search area
- virtual double intersectsBox( Box& cur ){
- return cur.intersects( _want );
+ virtual double intersectsBox(Box& cur){
+ return cur.intersects(_want);
}
GeoHash _start;
@@ -2463,11 +1914,10 @@ namespace mongo {
class GeoSearchCursor : public GeoCursorBase {
public:
-
- GeoSearchCursor( shared_ptr<GeoSearch> s )
- : GeoCursorBase( s->_spec ) ,
- _s( s ) , _cur( s->_points.begin() ) , _end( s->_points.end() ), _nscanned() {
- if ( _cur != _end ) {
+ GeoSearchCursor(shared_ptr<GeoSearch> s)
+ : GeoCursorBase(s->_spec),
+ _s(s), _cur(s->_points.begin()), _end(s->_points.end()), _nscanned() {
+ if (_cur != _end) {
++_nscanned;
}
}
@@ -2482,7 +1932,7 @@ namespace mongo {
virtual BSONObj current() { verify(ok()); return _cur->_o; }
virtual DiskLoc currLoc() { verify(ok()); return _cur->_loc; }
virtual bool advance() {
- if( ok() ){
+ if(ok()){
_cur++;
incNscanned();
return ok();
@@ -2497,18 +1947,18 @@ namespace mongo {
virtual BSONObj prettyStartKey() const {
- return BSON( _s->_g->_geo << _s->_prefix.toString() );
+ return BSON(_s->_g->_geo << _s->_prefix.toString());
}
virtual BSONObj prettyEndKey() const {
GeoHash temp = _s->_prefix;
- temp.move( 1 , 1 );
- return BSON( _s->_g->_geo << temp.toString() );
+ temp.move(1, 1);
+ return BSON(_s->_g->_geo << temp.toString());
}
virtual long long nscanned() { return _nscanned; }
virtual CoveredIndexMatcher* matcher() const {
- if( _s->_matcher.get() ) return _s->_matcher.get();
+ if(_s->_matcher.get()) return _s->_matcher.get();
else return emptyMatcher.get();
}
@@ -2516,51 +1966,56 @@ namespace mongo {
GeoHopper::Holder::iterator _cur;
GeoHopper::Holder::iterator _end;
- void incNscanned() { if ( ok() ) { ++_nscanned; } }
+ void incNscanned() { if (ok()) { ++_nscanned; } }
long long _nscanned;
};
class GeoCircleBrowse : public GeoBrowse {
public:
+ GeoCircleBrowse(const Geo2dType * g, const BSONObj& circle, BSONObj filter = BSONObj(),
+ const string& type = "$center", bool uniqueDocs = true)
+ : GeoBrowse(g, "circle", filter, uniqueDocs) {
- GeoCircleBrowse( const Geo2dType * g , const BSONObj& circle , BSONObj filter = BSONObj() , const string& type="$center", bool uniqueDocs = true )
- : GeoBrowse( g , "circle" , filter, uniqueDocs ) {
-
- uassert( 13060 , "$center needs 2 fields (middle,max distance)" , circle.nFields() == 2 );
+ uassert(13060, "$center needs 2 fields (middle,max distance)", circle.nFields() == 2);
BSONObjIterator i(circle);
BSONElement center = i.next();
- uassert( 13656 , "the first field of $center object must be a location object" , center.isABSONObj() );
+ uassert(13656, "the first field of $center object must be a location object",
+ center.isABSONObj());
// Get geohash and exact center point
- // TODO: For wrapping search, may be useful to allow center points outside-of-bounds here.
- // Calculating the nearest point as a hash start inside the region would then be required.
- _start = g->_tohash(center);
+ // TODO: For wrapping search, may be useful to allow center points outside-of-bounds
+ // here. Calculating the nearest point as a hash start inside the region would then be
+ // required.
+ _start = g->getConverter().hash(center);
_startPt = Point(center);
_maxDistance = i.next().numberDouble();
- uassert( 13061 , "need a max distance >= 0 " , _maxDistance >= 0 );
+ uassert(13061, "need a max distance >= 0 ", _maxDistance >= 0);
if (type == "$center") {
// Look in box with bounds of maxDistance in either direction
_type = GEO_PLAIN;
- _xScanDistance = _maxDistance + _g->_error;
- _yScanDistance = _maxDistance + _g->_error;
+ xScanDistance = _maxDistance + _g->getConverter().getError();
+ yScanDistance = _maxDistance + _g->getConverter().getError();
}
else if (type == "$centerSphere") {
// Same, but compute maxDistance using spherical transform
-
- uassert(13461, "Spherical MaxDistance > PI. Are you sure you are using radians?", _maxDistance < M_PI);
- checkEarthBounds( _startPt );
+ uassert(13461, "Spherical MaxDistance > PI. Are you sure you are using radians?",
+ _maxDistance < M_PI);
+ checkEarthBounds(_startPt);
_type = GEO_SPHERE;
- _yScanDistance = rad2deg( _maxDistance ) + _g->_error;
- _xScanDistance = computeXScanDistance(_startPt._y, _yScanDistance);
+ // should this be sphere error?
+ yScanDistance = rad2deg(_maxDistance) + _g->getConverter().getError();
+ xScanDistance = computeXScanDistance(_startPt.y, yScanDistance);
- uassert(13462, "Spherical distance would require wrapping, which isn't implemented yet",
- (_startPt._x + _xScanDistance < 180) && (_startPt._x - _xScanDistance > -180) &&
- (_startPt._y + _yScanDistance < 90) && (_startPt._y - _yScanDistance > -90));
+ uassert(13462, "Spherical distance would require (unimplemented) wrapping",
+ (_startPt.x + xScanDistance < 180) &&
+ (_startPt.x - xScanDistance > -180) &&
+ (_startPt.y + yScanDistance < 90) &&
+ (_startPt.y - yScanDistance > -90));
}
else {
uassert(13460, "invalid $center query type: " + type, false);
@@ -2568,11 +2023,12 @@ namespace mongo {
// Bounding box includes fudge factor.
// TODO: Is this correct, since fudge factor may be spherically transformed?
- _bBox._min = Point( _startPt._x - _xScanDistance, _startPt._y - _yScanDistance );
- _bBox._max = Point( _startPt._x + _xScanDistance, _startPt._y + _yScanDistance );
-
- GEODEBUG( "Bounding box for circle query : " << _bBox.toString() << " (max distance : " << _maxDistance << ")" << " starting from " << _startPt.toString() );
+ _bBox._min = Point(_startPt.x - xScanDistance, _startPt.y - yScanDistance);
+ _bBox._max = Point(_startPt.x + xScanDistance, _startPt.y + yScanDistance);
+ GEODEBUG("Bounding box for circle query : " << _bBox.toString()
+ << " (max distance : " << _maxDistance << ")"
+ << " starting from " << _startPt.toString());
ok();
}
@@ -2580,49 +2036,47 @@ namespace mongo {
return _start;
}
- virtual bool fitsInBox( double width ) {
- return width >= std::max(_xScanDistance, _yScanDistance);
+ virtual bool fitsInBox(double width) {
+ return width >= std::max(xScanDistance, yScanDistance);
}
- virtual double intersectsBox( Box& cur ) {
- return cur.intersects( _bBox );
+ virtual double intersectsBox(Box& cur) {
+ return cur.intersects(_bBox);
}
- virtual KeyResult approxKeyCheck( const Point& p, double& d ) {
-
+ virtual KeyResult approxKeyCheck(const Point& p, double& d) {
// Inexact hash distance checks.
double error = 0;
switch (_type) {
case GEO_PLAIN:
- d = _startPt.distance( p );
- error = _g->_error;
+ d = distance(_startPt, p);
+ error = _g->getConverter().getError();
break;
case GEO_SPHERE: {
- checkEarthBounds( p );
- d = spheredist_deg( _startPt, p );
- error = _g->_errorSphere;
+ checkEarthBounds(p);
+ d = spheredist_deg(_startPt, p);
+ error = _g->getConverter().getErrorSphere();
break;
}
- default: verify( false );
+ default: verify(false);
}
// If our distance is in the error bounds...
- if( d >= _maxDistance - error && d <= _maxDistance + error ) return BORDER;
+ if(d >= _maxDistance - error && d <= _maxDistance + error) return BORDER;
return d > _maxDistance ? BAD : GOOD;
}
- virtual bool exactDocCheck( const Point& p, double& d ){
-
+ virtual bool exactDocCheck(const Point& p, double& d){
switch (_type) {
case GEO_PLAIN: {
- if( _startPt.distanceWithin( p, _maxDistance ) ) return true;
+ if(distanceWithin(_startPt, p, _maxDistance)) return true;
break;
}
case GEO_SPHERE:
- checkEarthBounds( p );
- if( spheredist_deg( _startPt , p ) <= _maxDistance ) return true;
+ checkEarthBounds(p);
+ if(spheredist_deg(_startPt, p) <= _maxDistance) return true;
break;
- default: verify( false );
+ default: verify(false);
}
return false;
@@ -2632,61 +2086,64 @@ namespace mongo {
GeoHash _start;
Point _startPt;
double _maxDistance; // user input
- double _xScanDistance; // effected by GeoDistType
- double _yScanDistance; // effected by GeoDistType
+                double xScanDistance; // affected by GeoDistType
+                double yScanDistance; // affected by GeoDistType
Box _bBox;
};
class GeoBoxBrowse : public GeoBrowse {
public:
+ GeoBoxBrowse(const Geo2dType * g, const BSONObj& box, BSONObj filter = BSONObj(),
+ bool uniqueDocs = true)
+ : GeoBrowse(g, "box", filter, uniqueDocs) {
- GeoBoxBrowse( const Geo2dType * g , const BSONObj& box , BSONObj filter = BSONObj(), bool uniqueDocs = true )
- : GeoBrowse( g , "box" , filter, uniqueDocs ) {
-
- uassert( 13063 , "$box needs 2 fields (bottomLeft,topRight)" , box.nFields() == 2 );
+ uassert(13063, "$box needs 2 fields (bottomLeft,topRight)", box.nFields() == 2);
// Initialize an *exact* box from the given obj.
BSONObjIterator i(box);
- _want._min = Point( i.next() );
- _want._max = Point( i.next() );
+ _want._min = Point(i.next());
+ _want._max = Point(i.next());
_wantRegion = _want;
- _wantRegion.fudge( g ); // Need to make sure we're checking regions within error bounds of where we want
- fixBox( g, _wantRegion );
- fixBox( g, _want );
+ // Need to make sure we're checking regions within error bounds of where we want
+ _wantRegion.fudge(g->getConverter().getError());
+ fixBox(g, _wantRegion);
+ fixBox(g, _want);
+
+                // XXX: why do we use both _g and g? I think they're the same...
- uassert( 13064 , "need an area > 0 " , _want.area() > 0 );
+ uassert(13064, "need an area > 0 ", _want.area() > 0);
Point center = _want.center();
- _start = _g->hash( center._x , center._y );
+ _start = _g->getConverter().hash(center.x, center.y);
- GEODEBUG( "center : " << center.toString() << "\t" << _prefix );
+ GEODEBUG("center : " << center.toString() << "\t" << _prefix);
- _fudge = _g->_error;
+ _fudge = _g->getConverter().getError();
_wantLen = _fudge +
- std::max( ( _want._max._x - _want._min._x ) ,
- ( _want._max._y - _want._min._y ) ) / 2;
+ std::max((_want._max.x - _want._min.x),
+ (_want._max.y - _want._min.y)) / 2;
ok();
}
- void fixBox( const Geo2dType* g, Box& box ) {
- if( box._min._x > box._max._x )
- swap( box._min._x, box._max._x );
- if( box._min._y > box._max._y )
- swap( box._min._y, box._max._y );
+ void fixBox(const Geo2dType* g, Box& box) {
+ if(box._min.x > box._max.x)
+ swap(box._min.x, box._max.x);
+ if(box._min.y > box._max.y)
+ swap(box._min.y, box._max.y);
- double gMin = g->_min;
- double gMax = g->_max;
+ double gMin = g->getConverter().getMin();
+ double gMax = g->getConverter().getMax();
- if( box._min._x < gMin ) box._min._x = gMin;
- if( box._min._y < gMin ) box._min._y = gMin;
- if( box._max._x > gMax) box._max._x = gMax;
- if( box._max._y > gMax ) box._max._y = gMax;
+ if(box._min.x < gMin) box._min.x = gMin;
+ if(box._min.y < gMin) box._min.y = gMin;
+ if(box._max.x > gMax) box._max.x = gMax;
+ if(box._max.y > gMax) box._max.y = gMax;
}
- void swap( double& a, double& b ) {
+ void swap(double& a, double& b) {
double swap = a;
a = b;
b = swap;
@@ -2696,121 +2153,118 @@ namespace mongo {
return _start;
}
- virtual bool fitsInBox( double width ) {
+ virtual bool fitsInBox(double width) {
return width >= _wantLen;
}
- virtual double intersectsBox( Box& cur ) {
- return cur.intersects( _wantRegion );
+ virtual double intersectsBox(Box& cur) {
+ return cur.intersects(_wantRegion);
}
- virtual KeyResult approxKeyCheck( const Point& p, double& d ) {
- if( _want.onBoundary( p, _fudge ) ) return BORDER;
- else return _want.inside( p, _fudge ) ? GOOD : BAD;
+ virtual KeyResult approxKeyCheck(const Point& p, double& d) {
+ if(_want.onBoundary(p, _fudge)) return BORDER;
+ else return _want.inside(p, _fudge) ? GOOD : BAD;
}
- virtual bool exactDocCheck( const Point& p, double& d ){
- return _want.inside( p );
+ virtual bool exactDocCheck(const Point& p, double& d){
+ return _want.inside(p);
}
Box _want;
Box _wantRegion;
double _wantLen;
double _fudge;
-
GeoHash _start;
-
};
class GeoPolygonBrowse : public GeoBrowse {
public:
+ GeoPolygonBrowse(const Geo2dType* g, const BSONObj& polyPoints,
+ BSONObj filter = BSONObj(), bool uniqueDocs = true)
+ : GeoBrowse(g, "polygon", filter, uniqueDocs) {
- GeoPolygonBrowse( const Geo2dType* g , const BSONObj& polyPoints ,
- BSONObj filter = BSONObj(), bool uniqueDocs = true ) : GeoBrowse( g , "polygon" , filter, uniqueDocs ) {
+ GEODEBUG("In Polygon")
- GEODEBUG( "In Polygon" )
-
- BSONObjIterator i( polyPoints );
+ BSONObjIterator i(polyPoints);
BSONElement first = i.next();
- _poly.add( Point( first ) );
+ _poly.add(Point(first));
- while ( i.more() ) {
- _poly.add( Point( i.next() ) );
+ while (i.more()) {
+ _poly.add(Point(i.next()));
}
- uassert( 14030, "polygon must be defined by three points or more", _poly.size() >= 3 );
+ uassert(14030, "polygon must be defined by three points or more", _poly.size() >= 3);
_bounds = _poly.bounds();
- _bounds.fudge( g ); // We need to check regions within the error bounds of these bounds
- _bounds.truncate( g ); // We don't need to look anywhere outside the space
-
- _maxDim = _g->_error + _bounds.maxDim() / 2;
+ // We need to check regions within the error bounds of these bounds
+ _bounds.fudge(g->getConverter().getError());
+ // We don't need to look anywhere outside the space
+ _bounds.truncate(g->getConverter().getMin(), g->getConverter().getMax());
+ _maxDim = g->getConverter().getError() + _bounds.maxDim() / 2;
ok();
}
// The initial geo hash box for our first expansion
virtual GeoHash expandStartHash() {
- return _g->hash( _bounds.center() );
+ return _g->getConverter().hash(_bounds.center());
}
// Whether the current box width is big enough for our search area
- virtual bool fitsInBox( double width ) {
+ virtual bool fitsInBox(double width) {
return _maxDim <= width;
}
// Whether the current box overlaps our search area
- virtual double intersectsBox( Box& cur ) {
- return cur.intersects( _bounds );
+ virtual double intersectsBox(Box& cur) {
+ return cur.intersects(_bounds);
}
- virtual KeyResult approxKeyCheck( const Point& p, double& d ) {
-
- int in = _poly.contains( p, _g->_error );
-
- if( in == 0 ) return BORDER;
+ virtual KeyResult approxKeyCheck(const Point& p, double& d) {
+ int in = _poly.contains(p, _g->getConverter().getError());
+ if(in == 0) return BORDER;
else return in > 0 ? GOOD : BAD;
-
}
- virtual bool exactDocCheck( const Point& p, double& d ){
- return _poly.contains( p );
+ virtual bool exactDocCheck(const Point& p, double& d){
+ return _poly.contains(p);
}
private:
-
Polygon _poly;
Box _bounds;
double _maxDim;
-
GeoHash _start;
};
- shared_ptr<Cursor> Geo2dType::newCursor( const BSONObj& query , const BSONObj& order , int numWanted ) const {
- if ( numWanted < 0 )
+ shared_ptr<Cursor> Geo2dType::newCursor(const BSONObj& query, const BSONObj& order,
+ int numWanted) const {
+ if (numWanted < 0)
numWanted = numWanted * -1;
- else if ( numWanted == 0 )
+ else if (numWanted == 0)
numWanted = 100;
BSONObjIterator i(query);
- while ( i.more() ) {
+ while (i.more()) {
BSONElement e = i.next();
- if ( _geo != e.fieldName() )
+ if (_geo != e.fieldName())
continue;
- if ( e.type() == Array ) {
- // If we get an array query, assume it is a location, and do a $within { $center : [[x, y], 0] } search
- shared_ptr<Cursor> c( new GeoCircleBrowse( this , BSON( "0" << e.embeddedObjectUserCheck() << "1" << 0 ), query.filterFieldsUndotted( BSON( _geo << "" ), false ), "$center", true ) );
+ if (e.type() == Array) {
+ // If we get an array query, assume it is a location, and do a $within { $center :
+ // [[x, y], 0] } search
+ BSONObj circle = BSON("0" << e.embeddedObjectUserCheck() << "1" << 0);
+ BSONObj filter = query.filterFieldsUndotted(BSON(_geo << ""), false);
+ shared_ptr<Cursor> c(new GeoCircleBrowse(this, circle, filter, "$center", true));
return c;
}
- else if ( e.type() == Object ) {
-
- // TODO: Filter out _geo : { $special... } field so it doesn't get matched accidentally,
- // if matcher changes
+ else if (e.type() == Object) {
+ // TODO: Filter out _geo : { $special... } field so it doesn't get matched
+ // accidentally, if matcher changes
- switch ( e.embeddedObject().firstElement().getGtLtOp() ) {
+ switch (e.embeddedObject().firstElement().getGtLtOp()) {
case BSONObj::opNEAR: {
BSONObj n = e.embeddedObject();
e = n.firstElement();
@@ -2829,67 +2283,77 @@ namespace mongo {
}
double maxDistance = numeric_limits<double>::max();
- if ( e.isABSONObj() && e.embeddedObject().nFields() > 2 ) {
+ if (e.isABSONObj() && e.embeddedObject().nFields() > 2) {
BSONObjIterator i(e.embeddedObject());
i.next();
i.next();
BSONElement e = i.next();
- if ( e.isNumber() )
+ if (e.isNumber())
maxDistance = e.numberDouble();
}
{
BSONElement e = n["$maxDistance"];
- if ( e.isNumber() )
+ if (e.isNumber())
maxDistance = e.numberDouble();
}
bool uniqueDocs = false;
- if( ! n["$uniqueDocs"].eoo() ) uniqueDocs = n["$uniqueDocs"].trueValue();
+ if(! n["$uniqueDocs"].eoo()) uniqueDocs = n["$uniqueDocs"].trueValue();
- shared_ptr<GeoSearch> s( new GeoSearch( this , Point( e ) , numWanted , query , maxDistance, type, uniqueDocs ) );
+ shared_ptr<GeoSearch> s(new GeoSearch(this, Point(e), numWanted, query,
+ maxDistance, type, uniqueDocs));
s->exec();
shared_ptr<Cursor> c;
- c.reset( new GeoSearchCursor( s ) );
+ c.reset(new GeoSearchCursor(s));
return c;
}
case BSONObj::opWITHIN: {
e = e.embeddedObject().firstElement();
- uassert( 13057 , "$within has to take an object or array" , e.isABSONObj() );
+ uassert(13057, "$within has to take an object or array", e.isABSONObj());
BSONObj context = e.embeddedObject();
e = e.embeddedObject().firstElement();
string type = e.fieldName();
bool uniqueDocs = true;
- if( ! context["$uniqueDocs"].eoo() ) uniqueDocs = context["$uniqueDocs"].trueValue();
+ if (!context["$uniqueDocs"].eoo())
+ uniqueDocs = context["$uniqueDocs"].trueValue();
- if ( startsWith(type, "$center") ) {
- uassert( 13059 , "$center has to take an object or array" , e.isABSONObj() );
- shared_ptr<Cursor> c( new GeoCircleBrowse( this , e.embeddedObjectUserCheck() , query , type, uniqueDocs ) );
+ if (startsWith(type, "$center")) {
+ uassert(13059, "$center has to take an object or array", e.isABSONObj());
+ shared_ptr<Cursor> c(new GeoCircleBrowse(this, e.embeddedObjectUserCheck(),
+ query, type, uniqueDocs));
return c;
}
- else if ( type == "$box" ) {
- uassert( 13065 , "$box has to take an object or array" , e.isABSONObj() );
- shared_ptr<Cursor> c( new GeoBoxBrowse( this , e.embeddedObjectUserCheck() , query, uniqueDocs ) );
+ else if (type == "$box") {
+ uassert(13065, "$box has to take an object or array", e.isABSONObj());
+ shared_ptr<Cursor> c(new GeoBoxBrowse(this, e.embeddedObjectUserCheck(),
+ query, uniqueDocs));
return c;
}
- else if ( startsWith( type, "$poly" ) ) {
- uassert( 14029 , "$polygon has to take an object or array" , e.isABSONObj() );
- shared_ptr<Cursor> c( new GeoPolygonBrowse( this , e.embeddedObjectUserCheck() , query, uniqueDocs ) );
+ else if (startsWith(type, "$poly")) {
+ uassert(14029, "$polygon has to take an object or array", e.isABSONObj());
+ shared_ptr<Cursor> c(new GeoPolygonBrowse(this, e.embeddedObjectUserCheck(),
+ query, uniqueDocs));
return c;
}
- throw UserException( 13058 , str::stream() << "unknown $within information : " << context << ", a shape must be specified." );
+ throw UserException(13058, str::stream() << "unknown $within information : "
+ << context
+ << ", a shape must be specified.");
}
default:
- // Otherwise... assume the object defines a point, and we want to do a zero-radius $within $center
- shared_ptr<Cursor> c( new GeoCircleBrowse( this , BSON( "0" << e.embeddedObjectUserCheck() << "1" << 0 ), query.filterFieldsUndotted( BSON( _geo << "" ), false ) ) );
+ // Otherwise... assume the object defines a point, and we want to do a
+ // zero-radius $within $center
+
+ shared_ptr<Cursor> c(new GeoCircleBrowse(this, BSON("0" << e.embeddedObjectUserCheck() << "1" << 0), query.filterFieldsUndotted(BSON(_geo << ""), false)));
+
return c;
}
}
}
- throw UserException( 13042 , (string)"missing geo field (" + _geo + ") in : " + query.toString() );
+ throw UserException(13042, (string)"missing geo field (" + _geo + ") in : " + query.toString());
}
// ------
@@ -2898,7 +2362,7 @@ namespace mongo {
class Geo2dFindNearCmd : public Command {
public:
- Geo2dFindNearCmd() : Command( "geoNear" ) {}
+ Geo2dFindNearCmd() : Command("geoNear") {}
virtual LockType locktype() const { return READ; }
bool slaveOk() const { return true; }
void help(stringstream& h) const { h << "http://dochub.mongodb.org/core/geo#GeospatialIndexing-geoNearCommand"; }
@@ -2906,64 +2370,64 @@ namespace mongo {
bool run(const string& dbname, BSONObj& cmdObj, int, string& errmsg, BSONObjBuilder& result, bool fromRepl) {
string ns = dbname + "." + cmdObj.firstElement().valuestr();
- NamespaceDetails * d = nsdetails( ns.c_str() );
- if ( ! d ) {
+ NamespaceDetails * d = nsdetails(ns.c_str());
+ if (! d) {
errmsg = "can't find ns";
return false;
}
vector<int> idxs;
- d->findIndexByType( GEO2DNAME , idxs );
+ d->findIndexByType(GEO2DNAME, idxs);
- if ( idxs.size() > 1 ) {
+ if (idxs.size() > 1) {
errmsg = "more than 1 geo indexes :(";
return false;
}
- if ( idxs.size() == 0 ) {
+ if (idxs.size() == 0) {
errmsg = "no geo index :(";
return false;
}
int geoIdx = idxs[0];
- result.append( "ns" , ns );
+ result.append("ns", ns);
- IndexDetails& id = d->idx( geoIdx );
+ IndexDetails& id = d->idx(geoIdx);
Geo2dType * g = (Geo2dType*)id.getSpec().getType();
- verify( &id == g->getDetails() );
+ verify(&id == g->getDetails());
int numWanted = 100;
- if ( cmdObj["num"].isNumber() ) {
+ if (cmdObj["num"].isNumber()) {
numWanted = cmdObj["num"].numberInt();
- verify( numWanted >= 0 );
+ verify(numWanted >= 0);
}
bool uniqueDocs = false;
- if( ! cmdObj["uniqueDocs"].eoo() ) uniqueDocs = cmdObj["uniqueDocs"].trueValue();
+ if(! cmdObj["uniqueDocs"].eoo()) uniqueDocs = cmdObj["uniqueDocs"].trueValue();
bool includeLocs = false;
- if( ! cmdObj["includeLocs"].eoo() ) includeLocs = cmdObj["includeLocs"].trueValue();
+ if(! cmdObj["includeLocs"].eoo()) includeLocs = cmdObj["includeLocs"].trueValue();
uassert(13046, "'near' param missing/invalid", !cmdObj["near"].eoo());
- const Point n( cmdObj["near"] );
- result.append( "near" , g->_tohash( cmdObj["near"] ).toString() );
+ const Point n(cmdObj["near"]);
+ result.append("near", g->getConverter().hash(cmdObj["near"]).toString());
BSONObj filter;
- if ( cmdObj["query"].type() == Object )
+ if (cmdObj["query"].type() == Object)
filter = cmdObj["query"].embeddedObject();
double maxDistance = numeric_limits<double>::max();
- if ( cmdObj["maxDistance"].isNumber() )
+ if (cmdObj["maxDistance"].isNumber())
maxDistance = cmdObj["maxDistance"].number();
GeoDistType type = GEO_PLAIN;
- if ( cmdObj["spherical"].trueValue() )
+ if (cmdObj["spherical"].trueValue())
type = GEO_SPHERE;
- GeoSearch gs( g , n , numWanted , filter , maxDistance , type, uniqueDocs, true );
+ GeoSearch gs(g, n, numWanted, filter, maxDistance, type, uniqueDocs, true);
- if ( cmdObj["start"].type() == String) {
+ if (cmdObj["start"].type() == String) {
GeoHash start ((string) cmdObj["start"].valuestr());
gs._start = start;
}
@@ -2971,42 +2435,42 @@ namespace mongo {
gs.exec();
double distanceMultiplier = 1;
- if ( cmdObj["distanceMultiplier"].isNumber() )
+ if (cmdObj["distanceMultiplier"].isNumber())
distanceMultiplier = cmdObj["distanceMultiplier"].number();
double totalDistance = 0;
- BSONObjBuilder arr( result.subarrayStart( "results" ) );
+ BSONObjBuilder arr(result.subarrayStart("results"));
int x = 0;
- for ( GeoHopper::Holder::iterator i=gs._points.begin(); i!=gs._points.end(); i++ ) {
+ for (GeoHopper::Holder::iterator i=gs._points.begin(); i!=gs._points.end(); i++) {
const GeoPoint& p = *i;
double dis = distanceMultiplier * p.distance();
totalDistance += dis;
- BSONObjBuilder bb( arr.subobjStart( BSONObjBuilder::numStr( x++ ) ) );
- bb.append( "dis" , dis );
- if( includeLocs ){
- if( p._pt.couldBeArray() ) bb.append( "loc", BSONArray( p._pt ) );
- else bb.append( "loc" , p._pt );
+ BSONObjBuilder bb(arr.subobjStart(BSONObjBuilder::numStr(x++)));
+ bb.append("dis", dis);
+ if(includeLocs){
+ if(p._pt.couldBeArray()) bb.append("loc", BSONArray(p._pt));
+ else bb.append("loc", p._pt);
}
- bb.append( "obj" , p._o );
+ bb.append("obj", p._o);
bb.done();
- if ( arr.len() > BSONObjMaxUserSize ) {
+ if (arr.len() > BSONObjMaxUserSize) {
warning() << "Too many results to fit in single document. Truncating..." << endl;
break;
}
}
arr.done();
- BSONObjBuilder stats( result.subobjStart( "stats" ) );
- stats.append( "time" , cc().curop()->elapsedMillis() );
- stats.appendNumber( "btreelocs" , gs._nscanned );
- stats.appendNumber( "nscanned" , gs._lookedAt );
- stats.appendNumber( "objectsLoaded" , gs._objectsLoaded );
- stats.append( "avgDistance" , totalDistance / x );
- stats.append( "maxDistance" , gs.farthest() );
+ BSONObjBuilder stats(result.subobjStart("stats"));
+ stats.append("time", cc().curop()->elapsedMillis());
+ stats.appendNumber("btreelocs", gs._nscanned);
+ stats.appendNumber("nscanned", gs._lookedAt);
+ stats.appendNumber("objectsLoaded", gs._objectsLoaded);
+ stats.append("avgDistance", totalDistance / x);
+ stats.append("maxDistance", gs.farthest());
stats.done();
return true;
@@ -3016,15 +2480,15 @@ namespace mongo {
class GeoWalkCmd : public Command {
public:
- GeoWalkCmd() : Command( "geoWalk" ) {}
+ GeoWalkCmd() : Command("geoWalk") {}
virtual LockType locktype() const { return READ; }
bool slaveOk() const { return true; }
bool slaveOverrideOk() const { return true; }
bool run(const string& dbname, BSONObj& cmdObj, int, string& errmsg, BSONObjBuilder& result, bool fromRepl) {
string ns = dbname + "." + cmdObj.firstElement().valuestr();
- NamespaceDetails * d = nsdetails( ns.c_str() );
- if ( ! d ) {
+ NamespaceDetails * d = nsdetails(ns.c_str());
+ if (! d) {
errmsg = "can't find ns";
return false;
}
@@ -3032,10 +2496,10 @@ namespace mongo {
int geoIdx = -1;
{
NamespaceDetails::IndexIterator ii = d->ii();
- while ( ii.more() ) {
+ while (ii.more()) {
IndexDetails& id = ii.next();
- if ( id.getSpec().getTypeName() == GEO2DNAME ) {
- if ( geoIdx >= 0 ) {
+ if (id.getSpec().getTypeName() == GEO2DNAME) {
+ if (geoIdx >= 0) {
errmsg = "2 geo indexes :(";
return false;
}
@@ -3044,22 +2508,22 @@ namespace mongo {
}
}
- if ( geoIdx < 0 ) {
+ if (geoIdx < 0) {
errmsg = "no geo index :(";
return false;
}
- IndexDetails& id = d->idx( geoIdx );
+ IndexDetails& id = d->idx(geoIdx);
Geo2dType * g = (Geo2dType*)id.getSpec().getType();
- verify( &id == g->getDetails() );
+ verify(&id == g->getDetails());
int max = 100000;
- auto_ptr<BtreeCursor> bc( BtreeCursor::make( d , geoIdx , id , BSONObj() , BSONObj() , true , 1 ) );
+ auto_ptr<BtreeCursor> bc(BtreeCursor::make(d, geoIdx, id, BSONObj(), BSONObj(), true, 1));
BtreeCursor &c = *bc;
- while ( c.ok() && max-- ) {
- GeoHash h( c.currKey().firstElement() );
+ while (c.ok() && max--) {
+ GeoHash h(c.currKey().firstElement());
int len;
cout << "\t" << h.toString()
<< "\t" << c.current()[g->_geo]
@@ -3072,192 +2536,161 @@ namespace mongo {
return true;
}
-
} geoWalkCmd;
struct GeoUnitTest : public StartupTest {
-
- int round( double d ) {
- return (int)(.5+(d*1000));
+ int round(double d) {
+ return (int)(.5 + (d * 1000));
}
-#define GEOHEQ(a,b) if ( a.toString() != b ){ cout << "[" << a.toString() << "] != [" << b << "]" << endl; verify( a == GeoHash(b) ); }
+#define GEOHEQ(a,b) if (a.toString() != b){ cout << "[" << a.toString() << "] != [" << b << "]" << endl; verify(a == GeoHash(b)); }
void run() {
- verify( ! GeoHash::isBitSet( 0 , 0 ) );
- verify( ! GeoHash::isBitSet( 0 , 31 ) );
- verify( GeoHash::isBitSet( 1 , 31 ) );
+ verify(!GeoHash::isBitSet(0, 0));
+ verify(!GeoHash::isBitSet(0, 31));
+ verify(GeoHash::isBitSet(1, 31));
+
+ IndexSpec i(BSON("loc" << "2d"));
+ Geo2dType g(&geo2dplugin, &i);
+ const GeoHashConverter &conv = g.getConverter();
- IndexSpec i( BSON( "loc" << "2d" ) );
- Geo2dType g( &geo2dplugin , &i );
{
double x = 73.01212;
double y = 41.352964;
- BSONObj in = BSON( "x" << x << "y" << y );
- GeoHash h = g._hash( in );
- BSONObj out = g._unhash( h );
- verify( round(x) == round( out["x"].number() ) );
- verify( round(y) == round( out["y"].number() ) );
- verify( round( in["x"].number() ) == round( out["x"].number() ) );
- verify( round( in["y"].number() ) == round( out["y"].number() ) );
+ BSONObj in = BSON("x" << x << "y" << y);
+ GeoHash h = conv.hash(in);
+ BSONObj out = conv.unhashToBSONObj(h);
+ verify(round(x) == round(out["x"].number()));
+ verify(round(y) == round(out["y"].number()));
+ verify(round(in["x"].number()) == round(out["x"].number()));
+ verify(round(in["y"].number()) == round(out["y"].number()));
}
-
{
double x = -73.01212;
double y = 41.352964;
- BSONObj in = BSON( "x" << x << "y" << y );
- GeoHash h = g._hash( in );
- BSONObj out = g._unhash( h );
- verify( round(x) == round( out["x"].number() ) );
- verify( round(y) == round( out["y"].number() ) );
- verify( round( in["x"].number() ) == round( out["x"].number() ) );
- verify( round( in["y"].number() ) == round( out["y"].number() ) );
+ BSONObj in = BSON("x" << x << "y" << y);
+ GeoHash h = conv.hash(in);
+ BSONObj out = conv.unhashToBSONObj(h);
+ verify(round(x) == round(out["x"].number()));
+ verify(round(y) == round(out["y"].number()));
+ verify(round(in["x"].number()) == round(out["x"].number()));
+ verify(round(in["y"].number()) == round(out["y"].number()));
}
-
{
- GeoHash h( "0000" );
- h.move( 0 , 1 );
- GEOHEQ( h , "0001" );
- h.move( 0 , -1 );
- GEOHEQ( h , "0000" );
-
- h.init( "0001" );
- h.move( 0 , 1 );
- GEOHEQ( h , "0100" );
- h.move( 0 , -1 );
- GEOHEQ( h , "0001" );
-
-
- h.init( "0000" );
- h.move( 1 , 0 );
- GEOHEQ( h , "0010" );
+ GeoHash h("0000");
+ h.move(0, 1);
+ GEOHEQ(h, "0001");
+ h.move(0, -1);
+ GEOHEQ(h, "0000");
+
+ h = GeoHash("0001");
+ h.move(0, 1);
+ GEOHEQ(h, "0100");
+ h.move(0, -1);
+ GEOHEQ(h, "0001");
+
+ h = GeoHash("0000");
+ h.move(1, 0);
+ GEOHEQ(h, "0010");
}
-
{
- Box b( 5 , 5 , 2 );
- verify( "(5,5) -->> (7,7)" == b.toString() );
+ Box b(5, 5, 2);
+ verify("(5,5) -->> (7,7)" == b.toString());
}
-
{
- GeoHash a = g.hash( 1 , 1 );
- GeoHash b = g.hash( 4 , 5 );
- verify( 5 == (int)(g.distance( a , b ) ) );
- a = g.hash( 50 , 50 );
- b = g.hash( 42 , 44 );
- verify( round(10) == round(g.distance( a , b )) );
+ GeoHash a = conv.hash(1, 1);
+ GeoHash b = conv.hash(4, 5);
+ verify(5 == (int)(conv.distanceBetweenHashes(a, b)));
+ a = conv.hash(50, 50);
+ b = conv.hash(42, 44);
+ verify(round(10) == round(conv.distanceBetweenHashes(a, b)));
}
-
{
GeoHash x("0000");
- verify( 0 == x.getHash() );
- x.init( 0 , 1 , 32 );
- GEOHEQ( x , "0000000000000000000000000000000000000000000000000000000000000001" )
+ verify(0 == x.getHash());
+ x = GeoHash(0, 1, 32);
+ GEOHEQ(x, "0000000000000000000000000000000000000000000000000000000000000001")
- verify( GeoHash( "1100").hasPrefix( GeoHash( "11" ) ) );
- verify( ! GeoHash( "1000").hasPrefix( GeoHash( "11" ) ) );
+ verify(GeoHash("1100").hasPrefix(GeoHash("11")));
+ verify(!GeoHash("1000").hasPrefix(GeoHash("11")));
}
-
{
GeoHash x("1010");
- GEOHEQ( x , "1010" );
+ GEOHEQ(x, "1010");
GeoHash y = x + "01";
- GEOHEQ( y , "101001" );
+ GEOHEQ(y, "101001");
}
-
{
-
- GeoHash a = g.hash( 5 , 5 );
- GeoHash b = g.hash( 5 , 7 );
- GeoHash c = g.hash( 100 , 100 );
- /*
- cout << "a: " << a << endl;
- cout << "b: " << b << endl;
- cout << "c: " << c << endl;
-
- cout << "a: " << a.toStringHex1() << endl;
- cout << "b: " << b.toStringHex1() << endl;
- cout << "c: " << c.toStringHex1() << endl;
- */
+ GeoHash a = conv.hash(5, 5);
+ GeoHash b = conv.hash(5, 7);
+ GeoHash c = conv.hash(100, 100);
BSONObj oa = a.wrap();
BSONObj ob = b.wrap();
BSONObj oc = c.wrap();
- /*
- cout << "a: " << oa.hexDump() << endl;
- cout << "b: " << ob.hexDump() << endl;
- cout << "c: " << oc.hexDump() << endl;
- */
- verify( oa.woCompare( ob ) < 0 );
- verify( oa.woCompare( oc ) < 0 );
-
+ verify(oa.woCompare(ob) < 0);
+ verify(oa.woCompare(oc) < 0);
}
-
{
- GeoHash x( "000000" );
- x.move( -1 , 0 );
- GEOHEQ( x , "101010" );
- x.move( 1 , -1 );
- GEOHEQ( x , "010101" );
- x.move( 0 , 1 );
- GEOHEQ( x , "000000" );
+ GeoHash x("000000");
+ x.move(-1, 0);
+ GEOHEQ(x, "101010");
+ x.move(1, -1);
+ GEOHEQ(x, "010101");
+ x.move(0, 1);
+ GEOHEQ(x, "000000");
}
-
{
- GeoHash prefix( "110011000000" );
- GeoHash entry( "1100110000011100000111000001110000011100000111000001000000000000" );
- verify( ! entry.hasPrefix( prefix ) );
-
+ GeoHash prefix("110011000000");
+ GeoHash entry( "1100110000011100000111000001110000011100000111000001000000000000");
+ verify(!entry.hasPrefix(prefix));
entry = GeoHash("1100110000001100000111000001110000011100000111000001000000000000");
- verify( entry.toString().find( prefix.toString() ) == 0 );
- verify( entry.hasPrefix( GeoHash( "1100" ) ) );
- verify( entry.hasPrefix( prefix ) );
+ verify(entry.toString().find(prefix.toString()) == 0);
+ verify(entry.hasPrefix(GeoHash("1100")));
+ verify(entry.hasPrefix(prefix));
}
-
{
- GeoHash a = g.hash( 50 , 50 );
- GeoHash b = g.hash( 48 , 54 );
- verify( round( 4.47214 ) == round( g.distance( a , b ) ) );
+ GeoHash a = conv.hash(50, 50);
+ GeoHash b = conv.hash(48, 54);
+ verify(round(4.47214) == round(conv.distanceBetweenHashes(a, b)));
}
-
-
{
- Box b( Point( 29.762283 , -95.364271 ) , Point( 29.764283000000002 , -95.36227099999999 ) );
- verify( b.inside( 29.763 , -95.363 ) );
- verify( ! b.inside( 32.9570255 , -96.1082497 ) );
- verify( ! b.inside( 32.9570255 , -96.1082497 , .01 ) );
+ Box b(Point(29.762283, -95.364271), Point(29.764283000000002, -95.36227099999999));
+ verify(b.inside(29.763, -95.363));
+ verify(! b.inside(32.9570255, -96.1082497));
+ verify(! b.inside(32.9570255, -96.1082497, .01));
}
-
{
- GeoHash a( "11001111" );
- verify( GeoHash( "11" ) == a.commonPrefix( GeoHash("11") ) );
- verify( GeoHash( "11" ) == a.commonPrefix( GeoHash("11110000") ) );
+ GeoHash a("11001111");
+ verify(GeoHash("11") == a.commonPrefix(GeoHash("11")));
+ verify(GeoHash("11") == a.commonPrefix(GeoHash("11110000")));
}
-
{
int N = 10000;
+#if 0 // XXX: we want to verify that both unhash versions work, but they are now private.
{
Timer t;
- for ( int i=0; i<N; i++ ) {
+ for (int i = 0; i < N; i++) {
unsigned x = (unsigned)rand();
unsigned y = (unsigned)rand();
- GeoHash h( x , y );
- unsigned a,b;
- h.unhash_slow( a,b );
- verify( a == x );
- verify( b == y );
+ GeoHash h(x, y);
+ unsigned a, b;
+ h.unhash(&a, &b);
+ verify(a == x);
+ verify(b == y);
}
//cout << "slow: " << t.millis() << endl;
}
-
+#endif
{
Timer t;
- for ( int i=0; i<N; i++ ) {
+ for (int i=0; i<N; i++) {
unsigned x = (unsigned)rand();
unsigned y = (unsigned)rand();
- GeoHash h( x , y );
- unsigned a,b;
- h.unhash_fast( a,b );
- verify( a == x );
- verify( b == y );
+ GeoHash h(x, y);
+ unsigned a, b;
+ h.unhash(&a, &b);
+ verify(a == x);
+ verify(b == y);
}
//cout << "fast: " << t.millis() << endl;
}
@@ -3266,7 +2699,6 @@ namespace mongo {
{
// see http://en.wikipedia.org/wiki/Great-circle_distance#Worked_example
-
{
Point BNA (-86.67, 36.12);
Point LAX (-118.40, 33.94);
@@ -3275,8 +2707,8 @@ namespace mongo {
double dist2 = spheredist_deg(LAX, BNA);
// target is 0.45306
- verify( 0.45305 <= dist1 && dist1 <= 0.45307 );
- verify( 0.45305 <= dist2 && dist2 <= 0.45307 );
+ verify(0.45305 <= dist1 && dist1 <= 0.45307);
+ verify(0.45305 <= dist2 && dist2 <= 0.45307);
}
{
Point BNA (-1.5127, 0.6304);
@@ -3286,37 +2718,35 @@ namespace mongo {
double dist2 = spheredist_rad(LAX, BNA);
// target is 0.45306
- verify( 0.45305 <= dist1 && dist1 <= 0.45307 );
- verify( 0.45305 <= dist2 && dist2 <= 0.45307 );
+ verify(0.45305 <= dist1 && dist1 <= 0.45307);
+ verify(0.45305 <= dist2 && dist2 <= 0.45307);
}
{
- Point JFK (-73.77694444, 40.63861111 );
+ Point JFK (-73.77694444, 40.63861111);
Point LAX (-118.40, 33.94);
+ const double EARTH_RADIUS_KM = 6371;
+ const double EARTH_RADIUS_MILES = EARTH_RADIUS_KM * 0.621371192;
double dist = spheredist_deg(JFK, LAX) * EARTH_RADIUS_MILES;
- verify( dist > 2469 && dist < 2470 );
+ verify(dist > 2469 && dist < 2470);
}
-
{
Point BNA (-86.67, 36.12);
Point LAX (-118.40, 33.94);
- Point JFK (-73.77694444, 40.63861111 );
- verify( spheredist_deg(BNA, BNA) < 1e-6);
- verify( spheredist_deg(LAX, LAX) < 1e-6);
- verify( spheredist_deg(JFK, JFK) < 1e-6);
+ Point JFK (-73.77694444, 40.63861111);
+ verify(spheredist_deg(BNA, BNA) < 1e-6);
+ verify(spheredist_deg(LAX, LAX) < 1e-6);
+ verify(spheredist_deg(JFK, JFK) < 1e-6);
Point zero (0, 0);
Point antizero (0,-180);
// these were known to cause NaN
- verify( spheredist_deg(zero, zero) < 1e-6);
- verify( fabs(M_PI-spheredist_deg(zero, antizero)) < 1e-6);
- verify( fabs(M_PI-spheredist_deg(antizero, zero)) < 1e-6);
+ verify(spheredist_deg(zero, zero) < 1e-6);
+ verify(fabs(M_PI-spheredist_deg(zero, antizero)) < 1e-6);
+ verify(fabs(M_PI-spheredist_deg(antizero, zero)) < 1e-6);
}
}
}
} geoUnitTest;
-
-
}
-
diff --git a/src/mongo/db/geo/core.h b/src/mongo/db/geo/core.h
index 25a2f3b9239..0fc46a53609 100644
--- a/src/mongo/db/geo/core.h
+++ b/src/mongo/db/geo/core.h
@@ -1,7 +1,7 @@
// core.h
/**
-* Copyright (C) 2008 10gen Inc.
+* Copyright (C) 2008-2012 10gen Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License, version 3,
@@ -19,7 +19,8 @@
#pragma once
#include "mongo/pch.h"
-#include "../jsobj.h"
+#include "mongo/db/jsobj.h"
+#include "mongo/util/mongoutils/str.h"
#include <cmath>
@@ -27,524 +28,46 @@
# define M_PI 3.14159265358979323846
#endif
-namespace mongo {
-
- class GeoBitSets {
- public:
- GeoBitSets() {
- for ( int i=0; i<32; i++ ) {
- masks32[i] = ( 1 << ( 31 - i ) );
- }
- for ( int i=0; i<64; i++ ) {
- masks64[i] = ( 1LL << ( 63 - i ) );
- }
-
- for ( unsigned i=0; i<16; i++ ) {
- unsigned fixed = 0;
- for ( int j=0; j<4; j++ ) {
- if ( i & ( 1 << j ) )
- fixed |= ( 1 << ( j * 2 ) );
- }
- hashedToNormal[fixed] = i;
- }
-
- long long currAllX = 0, currAllY = 0;
- for ( int i = 0; i < 64; i++ ){
- if( i % 2 == 0 ){
- allX[ i / 2 ] = currAllX;
- currAllX = currAllX + ( 1LL << ( 63 - i ) );
- }
- else{
- allY[ i / 2 ] = currAllY;
- currAllY = currAllY + ( 1LL << ( 63 - i ) );
- }
- }
- }
- int masks32[32];
- long long masks64[64];
- long long allX[32];
- long long allY[32];
-
- unsigned hashedToNormal[256];
- };
-
- extern GeoBitSets geoBitSets;
-
- class GeoHash {
- public:
-
- GeoHash()
- : _hash(0),_bits(0) {
- }
-
- explicit GeoHash( const char * hash ) {
- init( hash );
- }
-
- explicit GeoHash( const string& hash ) {
- init( hash );
- }
-
- static GeoHash makeFromBinData(const char *bindata, unsigned bits) {
- GeoHash h;
- h._bits = bits;
- h._copy( (char*)&h._hash , bindata );
- h._fix();
- return h;
- }
-
- explicit GeoHash( const BSONElement& e , unsigned bits=32 ) {
- _bits = bits;
- if ( e.type() == BinData ) {
- int len = 0;
- _copy( (char*)&_hash , e.binData( len ) );
- verify( len == 8 );
- _bits = bits;
- }
- else {
- cout << "GeoHash bad element: " << e << endl;
- uassert(13047,"wrong type for geo index. if you're using a pre-release version, need to rebuild index",0);
- }
- _fix();
- }
-
- GeoHash( unsigned x , unsigned y , unsigned bits=32) {
- init( x , y , bits );
- }
-
- GeoHash( const GeoHash& old ) {
- _hash = old._hash;
- _bits = old._bits;
- }
-
- GeoHash( long long hash , unsigned bits )
- : _hash( hash ) , _bits( bits ) {
- _fix();
- }
-
- void init( unsigned x , unsigned y , unsigned bits ) {
- verify( bits <= 32 );
- _hash = 0;
- _bits = bits;
- for ( unsigned i=0; i<bits; i++ ) {
- if ( isBitSet( x , i ) ) _hash |= geoBitSets.masks64[i*2];
- if ( isBitSet( y , i ) ) _hash |= geoBitSets.masks64[(i*2)+1];
- }
- }
-
- void unhash_fast( unsigned& x , unsigned& y ) const {
- x = 0;
- y = 0;
- char * c = (char*)(&_hash);
- for ( int i=0; i<8; i++ ) {
- unsigned t = (unsigned)(c[i]) & 0x55;
- y |= ( geoBitSets.hashedToNormal[t] << (4*(i)) );
-
- t = ( (unsigned)(c[i]) >> 1 ) & 0x55;
- x |= ( geoBitSets.hashedToNormal[t] << (4*(i)) );
- }
- }
-
- void unhash_slow( unsigned& x , unsigned& y ) const {
- x = 0;
- y = 0;
- for ( unsigned i=0; i<_bits; i++ ) {
- if ( getBitX(i) )
- x |= geoBitSets.masks32[i];
- if ( getBitY(i) )
- y |= geoBitSets.masks32[i];
- }
- }
-
- void unhash( unsigned& x , unsigned& y ) const {
- unhash_fast( x , y );
- }
-
- /**
- * @param 0 = high
- */
- static bool isBitSet( unsigned val , unsigned bit ) {
- return geoBitSets.masks32[bit] & val;
- }
-
- GeoHash up() const {
- return GeoHash( _hash , _bits - 1 );
- }
-
- bool hasPrefix( const GeoHash& other ) const {
- verify( other._bits <= _bits );
- if ( other._bits == 0 )
- return true;
- long long x = other._hash ^ _hash;
- x = x >> (64-(other._bits*2));
- return x == 0;
- }
-
-
- string toString() const {
- StringBuilder buf;
- for ( unsigned x=0; x<_bits*2; x++ )
- buf.append( _hash & geoBitSets.masks64[x] ? "1" : "0" );
- return buf.str();
- }
-
- string toStringHex1() const {
- stringstream ss;
- ss << hex << _hash;
- return ss.str();
- }
-
- void init( const string& s ) {
- _hash = 0;
- _bits = s.size() / 2;
- for ( unsigned pos=0; pos<s.size(); pos++ )
- if ( s[pos] == '1' )
- setBit( pos , 1 );
- }
-
- void setBit( unsigned pos , bool one ) {
- verify( pos < _bits * 2 );
- if ( one )
- _hash |= geoBitSets.masks64[pos];
- else if ( _hash & geoBitSets.masks64[pos] )
- _hash &= ~geoBitSets.masks64[pos];
- }
-
- bool getBit( unsigned pos ) const {
- return _hash & geoBitSets.masks64[pos];
- }
-
- bool getBitX( unsigned pos ) const {
- verify( pos < 32 );
- return getBit( pos * 2 );
- }
-
- bool getBitY( unsigned pos ) const {
- verify( pos < 32 );
- return getBit( ( pos * 2 ) + 1 );
- }
-
- BSONObj wrap( const char* name = "" ) const {
- BSONObjBuilder b(20);
- append( b , name );
- BSONObj o = b.obj();
- if( ! strlen( name ) ) verify( o.objsize() == 20 );
- return o;
- }
-
- bool constrains() const {
- return _bits > 0;
- }
-
- bool canRefine() const {
- return _bits < 32;
- }
-
- bool atMinX() const {
- return ( _hash & geoBitSets.allX[ _bits ] ) == 0;
- }
-
- bool atMinY() const {
- //log() << " MinY : " << hex << (unsigned long long) _hash << " " << _bits << " " << hex << (unsigned long long) geoBitSets.allY[ _bits ] << endl;
- return ( _hash & geoBitSets.allY[ _bits ] ) == 0;
- }
-
- bool atMaxX() const {
- return ( _hash & geoBitSets.allX[ _bits ] ) == geoBitSets.allX[ _bits ];
- }
-
- bool atMaxY() const {
- return ( _hash & geoBitSets.allY[ _bits ] ) == geoBitSets.allY[ _bits ];
- }
-
- void move( int x , int y ) {
- verify( _bits );
- _move( 0 , x );
- _move( 1 , y );
- }
-
- void _move( unsigned offset , int d ) {
- if ( d == 0 )
- return;
- verify( d <= 1 && d>= -1 ); // TEMP
-
- bool from, to;
- if ( d > 0 ) {
- from = 0;
- to = 1;
- }
- else {
- from = 1;
- to = 0;
- }
-
- unsigned pos = ( _bits * 2 ) - 1;
- if ( offset == 0 )
- pos--;
- while ( true ) {
- if ( getBit(pos) == from ) {
- setBit( pos , to );
- return;
- }
-
- if ( pos < 2 ) {
- // overflow
- for ( ; pos < ( _bits * 2 ) ; pos += 2 ) {
- setBit( pos , from );
- }
- return;
- }
-
- setBit( pos , from );
- pos -= 2;
- }
-
- verify(0);
- }
-
- GeoHash& operator=(const GeoHash& h) {
- _hash = h._hash;
- _bits = h._bits;
- return *this;
- }
-
- bool operator==(const GeoHash& h ) const {
- return _hash == h._hash && _bits == h._bits;
- }
-
- bool operator!=(const GeoHash& h ) const {
- return !( *this == h );
- }
-
- bool operator<(const GeoHash& h ) const {
- if( _hash != h._hash ) return _hash < h._hash;
- return _bits < h._bits;
- }
-
- GeoHash& operator+=( const char * s ) {
- unsigned pos = _bits * 2;
- _bits += strlen(s) / 2;
- verify( _bits <= 32 );
- while ( s[0] ) {
- if ( s[0] == '1' )
- setBit( pos , 1 );
- pos++;
- s++;
- }
-
- return *this;
- }
-
- GeoHash operator+( const char * s ) const {
- GeoHash n = *this;
- n+=s;
- return n;
- }
-
- GeoHash operator+( const std::string& s ) const {
- return operator+( s.c_str() );
- }
-
- void _fix() {
- static long long FULL = 0xFFFFFFFFFFFFFFFFLL;
- long long mask = FULL << ( 64 - ( _bits * 2 ) );
- _hash &= mask;
- }
-
- void append( BSONObjBuilder& b , const char * name ) const {
- char buf[8];
- _copy( buf , (char*)&_hash );
- b.appendBinData( name , 8 , bdtCustom , buf );
- }
-
- long long getHash() const {
- return _hash;
- }
-
- unsigned getBits() const {
- return _bits;
- }
-
- GeoHash commonPrefix( const GeoHash& other ) const {
- unsigned i=0;
- for ( ; i<_bits && i<other._bits; i++ ) {
- if ( getBitX( i ) == other.getBitX( i ) &&
- getBitY( i ) == other.getBitY( i ) )
- continue;
- break;
- }
- return GeoHash(_hash,i);
- }
-
- private:
-
- static void _copy( char * dst , const char * src ) {
- for ( unsigned a=0; a<8; a++ ) {
- dst[a] = src[7-a];
- }
- }
-
- long long _hash;
- unsigned _bits; // bits per field, so 1 to 32
- };
-
- inline ostream& operator<<( ostream &s, const GeoHash &h ) {
- s << h.toString();
- return s;
- }
-
- class GeoConvert {
- public:
- virtual ~GeoConvert() {}
-
- virtual void unhash( const GeoHash& h , double& x , double& y ) const = 0;
- virtual GeoHash hash( double x , double y ) const = 0;
- };
-
- class Point {
- public:
-
- Point( const GeoConvert * g , const GeoHash& hash ) {
- g->unhash( hash , _x , _y );
- }
-
- explicit Point( const BSONElement& e ) {
- BSONObjIterator i(e.Obj());
- _x = i.next().number();
- _y = i.next().number();
- }
-
- explicit Point( const BSONObj& o ) {
- BSONObjIterator i(o);
- _x = i.next().number();
- _y = i.next().number();
- }
-
- Point( double x , double y )
- : _x( x ) , _y( y ) {
- }
-
- Point() : _x(0),_y(0) {
- }
-
- GeoHash hash( const GeoConvert * g ) {
- return g->hash( _x , _y );
- }
-
- double distance( const Point& p ) const {
- double a = _x - p._x;
- double b = _y - p._y;
-
- // Avoid numerical error if possible...
- if( a == 0 ) return abs( _y - p._y );
- if( b == 0 ) return abs( _x - p._x );
-
- return sqrt( ( a * a ) + ( b * b ) );
- }
-
- /**
- * Distance method that compares x or y coords when other direction is zero,
- * avoids numerical error when distances are very close to radius but axis-aligned.
- *
- * An example of the problem is:
- * (52.0 - 51.9999) - 0.0001 = 3.31965e-15 and 52.0 - 51.9999 > 0.0001 in double arithmetic
- * but:
- * 51.9999 + 0.0001 <= 52.0
- *
- * This avoids some (but not all!) suprising results in $center queries where points are
- * ( radius + center.x, center.y ) or vice-versa.
- */
- bool distanceWithin( const Point& p, double radius ) const {
- double a = _x - p._x;
- double b = _y - p._y;
-
- if( a == 0 ) {
- //
- // Note: For some, unknown reason, when a 32-bit g++ optimizes this call, the sum is
- // calculated imprecisely. We need to force the compiler to always evaluate it correctly,
- // hence the weirdness.
- //
- // On some 32-bit linux machines, removing the volatile keyword or calculating the sum inline
- // will make certain geo tests fail. Of course this check will force volatile for all 32-bit systems,
- // not just affected systems.
- if( sizeof(void*) <= 4 ){
- volatile double sum = _y > p._y ? p._y + radius : _y + radius;
- return _y > p._y ? sum >= _y : sum >= p._y;
- }
- else {
- // Original math, correct for most systems
- return _y > p._y ? p._y + radius >= _y : _y + radius >= p._y;
- }
- }
- if( b == 0 ) {
- if( sizeof(void*) <= 4 ){
- volatile double sum = _x > p._x ? p._x + radius : _x + radius;
- return _x > p._x ? sum >= _x : sum >= p._x;
- }
- else {
- return _x > p._x ? p._x + radius >= _x : _x + radius >= p._x;
- }
- }
-
- return sqrt( ( a * a ) + ( b * b ) ) <= radius;
- }
-
- string toString() const {
- StringBuilder buf;
- buf << "(" << _x << "," << _y << ")";
- return buf.str();
+#if 0
+# define CDEBUG -1
+#else
+# define CDEBUG 10
+#endif
+#if 0
+# define GEODEBUGGING
+# define GEODEBUG(x) cout << x << endl;
+# define GEODEBUGPRINT(x) PRINT(x)
+ inline void PREFIXDEBUG(GeoHash prefix, const GeoConvert* g) {
+ if (!prefix.constrains()) {
+ cout << "\t empty prefix" << endl;
+ return ;
}
- double _x;
- double _y;
- };
-
-
- extern const double EARTH_RADIUS_KM;
- extern const double EARTH_RADIUS_MILES;
+ Point ll (g, prefix); // lower left
+ prefix.move(1,1);
+ Point tr (g, prefix); // top right
- // Technically lat/long bounds, not really tied to earth radius.
- inline void checkEarthBounds( Point p ) {
- uassert( 14808, str::stream() << "point " << p.toString() << " must be in earth-like bounds of long : [-180, 180], lat : [-90, 90] ",
- p._x >= -180 && p._x <= 180 && p._y >= -90 && p._y <= 90 );
- }
-
- inline double deg2rad(double deg) { return deg * (M_PI/180); }
- inline double rad2deg(double rad) { return rad * (180/M_PI); }
-
- // WARNING: _x and _y MUST be longitude and latitude in that order
- // note: multiply by earth radius for distance
- inline double spheredist_rad( const Point& p1, const Point& p2 ) {
- // this uses the n-vector formula: http://en.wikipedia.org/wiki/N-vector
- // If you try to match the code to the formula, note that I inline the cross-product.
- // TODO: optimize with SSE
+ Point center ((ll._x+tr._x)/2, (ll._y+tr._y)/2);
+ double radius = fabs(ll._x - tr._x) / 2;
- double sin_x1(sin(p1._x)), cos_x1(cos(p1._x));
- double sin_y1(sin(p1._y)), cos_y1(cos(p1._y));
- double sin_x2(sin(p2._x)), cos_x2(cos(p2._x));
- double sin_y2(sin(p2._y)), cos_y2(cos(p2._y));
+ cout << "\t ll: " << ll.toString() << " tr: " << tr.toString()
+ << " center: " << center.toString() << " radius: " << radius << endl;
- double cross_prod =
- (cos_y1*cos_x1 * cos_y2*cos_x2) +
- (cos_y1*sin_x1 * cos_y2*sin_x2) +
- (sin_y1 * sin_y2);
-
- if (cross_prod >= 1 || cross_prod <= -1) {
- // fun with floats
- verify( fabs(cross_prod)-1 < 1e-6 );
- return cross_prod > 0 ? 0 : M_PI;
- }
-
- return acos(cross_prod);
}
+#else
+# define GEODEBUG(x)
+# define GEODEBUGPRINT(x)
+# define PREFIXDEBUG(x, y)
+#endif
- // note: return is still in radians as that can be multiplied by radius to get arc length
- inline double spheredist_deg( const Point& p1, const Point& p2 ) {
- return spheredist_rad(
- Point( deg2rad(p1._x), deg2rad(p1._y) ),
- Point( deg2rad(p2._x), deg2rad(p2._y) )
- );
- }
+// Used by haystack.cpp. XXX: change to something else/only have one of these geo things/nuke em
+// all?
+#define GEOQUADDEBUG(x)
+//#define GEOQUADDEBUG(x) cout << x << endl
+// XXX: move elsewhere?
+namespace mongo {
+ inline double deg2rad(const double deg) { return deg * (M_PI / 180.0); }
+ inline double rad2deg(const double rad) { return rad * (180.0 / M_PI); }
}
diff --git a/src/mongo/db/geo/hash.cpp b/src/mongo/db/geo/hash.cpp
new file mode 100644
index 00000000000..b1841cfae76
--- /dev/null
+++ b/src/mongo/db/geo/hash.cpp
@@ -0,0 +1,602 @@
+/**
+ * Copyright (C) 2012 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "mongo/pch.h"
+#include "mongo/db/jsobj.h"
+#include "mongo/db/geo/core.h"
+#include "mongo/db/geo/hash.h"
+#include "mongo/db/geo/shapes.h"
+#include "mongo/util/mongoutils/str.h"
+
+// So we can get at the str namespace.
+using namespace mongoutils;
+
+namespace mongo {
+
+ inline std::ostream& operator<<(std::ostream &s, const GeoHash &h) {
+ s << h.toString();
+ return s;
+ }
+
+ /*
+ * GeoBitSets fills out various bit patterns that are used by GeoHash.
+ * What patterns? Look at the comments next to the fields.
+ * TODO(hk): hashedToNormal is still a bit of a mystery.
+ */
+ class GeoBitSets {
+ public:
+ GeoBitSets() {
+ for (unsigned i = 0; i < 16; i++) {
+ unsigned fixed = 0;
+ for (int j = 0; j < 4; j++) {
+ if (i & (1 << j))
+ fixed |= (1 << (j * 2));
+ }
+ hashedToNormal[fixed] = i;
+ }
+
+ long long currAllX = 0, currAllY = 0;
+ for (int i = 0; i < 64; i++){
+ long long thisBit = 1LL << (63 - i);
+
+ if (i % 2 == 0) {
+ allX[i / 2] = currAllX;
+ currAllX |= thisBit;
+ } else{
+ allY[i / 2] = currAllY;
+ currAllY |= thisBit;
+ }
+ }
+ }
+
+ // The 0-th entries of each all[XY] is 0.
+ // The i-th entry of allX has i alternating bits turned on starting
+ // with the most significant. Example:
+ // allX[1] = 8000000000000000
+ // allX[2] = a000000000000000
+ // allX[3] = a800000000000000
+ long long allX[32];
+ // Same alternating bits but starting with one from the MSB:
+ // allY[1] = 4000000000000000
+ // allY[2] = 5000000000000000
+ // allY[3] = 5400000000000000
+ long long allY[32];
+
+ unsigned hashedToNormal[256];
+ };
+
+ // Oh global variables.
+ GeoBitSets geoBitSets;
+
+ // For i return the i-th most significant bit.
+ // masks(0) = 80000..000
+ // masks(1) = 40000..000
+ // etc.
+ // Number of 0s depends on 32 vs. 64 bit.
+ inline static int mask32For(const int i) {
+ return 1 << (31 - i);
+ }
+
+ inline static long long mask64For(const int i) {
+ return 1LL << (63 - i);
+ }
+
+ /* This class maps an x,y coordinate pair to a hash value.
+ * This should probably be renamed/generalized so that it's more of a planar hash,
+ * and we also have a spherical hash, etc.
+ */
+ GeoHash::GeoHash() : _hash(0), _bits(0) { }
+
+ GeoHash::GeoHash(const string& hash) {
+ initFromString(hash.c_str());
+ }
+
+ GeoHash::GeoHash(const char *s) {
+ initFromString(s);
+ }
+
+ void GeoHash::initFromString(const char *s) {
+ int length = strlen(s);
+ uassert(16457, "initFromString passed a too-long string", length <= 64);
+ uassert(16458, "initFromString passed an odd length string ", 0 == (length % 2));
+ _hash = 0;
+ // _bits is how many bits for X or Y, not both, so we divide by 2.
+ _bits = length / 2;
+ for (int i = 0; s[i] != '\0'; ++i)
+ if (s[i] == '1')
+ setBit(i, 1);
+ }
+
+ // This only works if e is BinData.
+ GeoHash::GeoHash(const BSONElement& e, unsigned bits) {
+ _bits = bits;
+ if (e.type() == BinData) {
+ int len = 0;
+ _copyAndReverse((char*)&_hash, e.binData(len));
+ verify(len == 8);
+ } else {
+ cout << "GeoHash bad element: " << e << endl;
+ uassert(13047, "wrong type for geo index. if you're using a pre-release version,"
+ " need to rebuild index", 0);
+ }
+ clearUnusedBits();
+ }
+
+ GeoHash::GeoHash(unsigned x, unsigned y, unsigned bits) {
+ verify(bits <= 32);
+ _hash = 0;
+ _bits = bits;
+ for (unsigned i = 0; i < bits; i++) {
+ if (isBitSet(x, i)) _hash |= mask64For(i * 2);
+ if (isBitSet(y, i)) _hash |= mask64For((i * 2) + 1);
+ }
+ }
+
+ GeoHash::GeoHash(const GeoHash& old) {
+ _hash = old._hash;
+ _bits = old._bits;
+ }
+
+ GeoHash::GeoHash(long long hash, unsigned bits) : _hash(hash) , _bits(bits) {
+ clearUnusedBits();
+ }
+
+ // TODO(hk): This is nasty and has no examples.
+ void GeoHash::unhash_fast(unsigned *x, unsigned *y) const {
+ *x = 0;
+ *y = 0;
+ const char *c = reinterpret_cast<const char*>(&_hash);
+ for (int i = 0; i < 8; i++) {
+ unsigned t = (unsigned)(c[i]) & 0x55;
+ *y |= (geoBitSets.hashedToNormal[t] << (4 * i));
+
+ t = ((unsigned)(c[i]) >> 1) & 0x55;
+ *x |= (geoBitSets.hashedToNormal[t] << (4 * i));
+ }
+ }
+
+ void GeoHash::unhash_slow(unsigned *x, unsigned *y) const {
+ *x = 0;
+ *y = 0;
+ for (unsigned i = 0; i < _bits; i++) {
+ if (getBitX(i))
+ *x |= mask32For(i);
+ if (getBitY(i))
+ *y |= mask32For(i);
+ }
+ }
+
+ void GeoHash::unhash(unsigned *x, unsigned *y) const {
+ unhash_fast(x, y);
+ }
+
+ /** Is the 'bit'-th most significant bit set? (NOT the least significant) */
+ bool GeoHash::isBitSet(unsigned val, unsigned bit) {
+ return mask32For(bit) & val;
+ }
+
+ /** Return a GeoHash with one bit of precision lost. */
+ GeoHash GeoHash::up() const {
+ return GeoHash(_hash, _bits - 1);
+ }
+
+ bool GeoHash::hasPrefix(const GeoHash& other) const {
+ verify(other._bits <= _bits);
+ if (other._bits == 0)
+ return true;
+
+ long long x = other._hash ^ _hash;
+ // We only care about the leftmost other._bits (well, really _bits*2 since we have x and
+ // y)
+ x = x >> (64 - (other._bits * 2));
+ return x == 0;
+ }
+
+ string GeoHash::toString() const {
+ StringBuilder buf;
+ for (unsigned x = 0; x < _bits * 2; x++)
+ buf.append((_hash & mask64For(x)) ? "1" : "0");
+ return buf.str();
+ }
+
+ string GeoHash::toStringHex1() const {
+ stringstream ss;
+ ss << hex << _hash;
+ return ss.str();
+ }
+
+ void GeoHash::setBit(unsigned pos, bool value) {
+ verify(pos < _bits * 2);
+ const long long mask = mask64For(pos);
+ if (value)
+ _hash |= mask;
+ else // if (_hash & mask)
+ _hash &= ~mask;
+ }
+
+ bool GeoHash::getBit(unsigned pos) const {
+ return _hash & mask64For(pos);
+ }
+
+ bool GeoHash::getBitX(unsigned pos) const {
+ verify(pos < 32);
+ return getBit(pos * 2);
+ }
+
+ bool GeoHash::getBitY(unsigned pos) const {
+ verify(pos < 32);
+ return getBit((pos * 2) + 1);
+ }
+
+ // TODO(hk): Comment this.
+ BSONObj GeoHash::wrap(const char* name) const {
+ BSONObjBuilder b(20);
+ appendToBuilder(&b, name);
+ BSONObj o = b.obj();
+ if ('\0' == name[0]) verify(o.objsize() == 20);
+ return o;
+ }
+
+ // Do we have a non-trivial GeoHash?
+ bool GeoHash::constrains() const {
+ return _bits > 0;
+ }
+
+ // Could our GeoHash have higher precision?
+ bool GeoHash::canRefine() const {
+ return _bits < 32;
+ }
+
+ /**
+ * Hashing works like this:
+ * Divide the world into 4 buckets. Label each one as such:
+ * -----------------
+ * | | |
+ * | | |
+ * | 0,1 | 1,1 |
+ * -----------------
+ * | | |
+ * | | |
+ * | 0,0 | 1,0 |
+ * -----------------
+ * We recursively divide each cell, furthermore.
+ * The functions below tell us what quadrant we're in *at the finest level
+ * of the subdivision.*
+ */
+ bool GeoHash::atMinX() const {
+ return (_hash & geoBitSets.allX[_bits]) == 0;
+ }
+ bool GeoHash::atMinY() const {
+ return (_hash & geoBitSets.allY[_bits]) == 0;
+ }
+ bool GeoHash::atMaxX() const {
+ return (_hash & geoBitSets.allX[_bits]) == geoBitSets.allX[_bits];
+ }
+ bool GeoHash::atMaxY() const {
+ return (_hash & geoBitSets.allY[_bits]) == geoBitSets.allY[_bits];
+ }
+
+ // TODO(hk): comment better
+ void GeoHash::move(int x, int y) {
+ verify(_bits);
+ _move(0, x);
+ _move(1, y);
+ }
+
+ // TODO(hk): comment much better
+ void GeoHash::_move(unsigned offset, int d) {
+ if (d == 0)
+ return;
+ verify(d <= 1 && d>= -1); // TEMP
+
+ bool from, to;
+ if (d > 0) {
+ from = 0;
+ to = 1;
+ }
+ else {
+ from = 1;
+ to = 0;
+ }
+
+ unsigned pos = (_bits * 2) - 1;
+ if (offset == 0)
+ pos--;
+ while (true) {
+ if (getBit(pos) == from) {
+ setBit(pos , to);
+ return;
+ }
+
+ if (pos < 2) {
+ // overflow
+ for (; pos < (_bits * 2) ; pos += 2) {
+ setBit(pos , from);
+ }
+ return;
+ }
+
+ setBit(pos , from);
+ pos -= 2;
+ }
+
+ verify(0);
+ }
+
+ GeoHash& GeoHash::operator=(const GeoHash& h) {
+ _hash = h._hash;
+ _bits = h._bits;
+ return *this;
+ }
+
+ bool GeoHash::operator==(const GeoHash& h) const {
+ return _hash == h._hash && _bits == h._bits;
+ }
+
+ bool GeoHash::operator!=(const GeoHash& h) const {
+ return !(*this == h);
+ }
+
+ bool GeoHash::operator<(const GeoHash& h) const {
+ if (_hash != h._hash) return _hash < h._hash;
+ return _bits < h._bits;
+ }
+
+ // Append the hash in s to our current hash. We expect s to be '0' or '1' or '\0',
+ // though we also treat non-'1' values as '0'.
+ GeoHash& GeoHash::operator+=(const char* s) {
+ unsigned pos = _bits * 2;
+ _bits += strlen(s) / 2;
+ verify(_bits <= 32);
+ while ('\0' != s[0]) {
+ if (s[0] == '1')
+ setBit(pos , 1);
+ pos++;
+ s++;
+ }
+ return *this;
+ }
+
+ GeoHash GeoHash::operator+(const char *s) const {
+ GeoHash n = *this;
+ n += s;
+ return n;
+ }
+
+ GeoHash GeoHash::operator+(const std::string& s) const {
+ return operator+(s.c_str());
+ }
+
+ /*
+ * Keep the upper _bits*2 bits of _hash, clear the lower bits.
+ * Maybe there's junk in there? Not sure why this is done.
+ */
+ void GeoHash::clearUnusedBits() {
+ static long long FULL = 0xFFFFFFFFFFFFFFFFLL;
+ long long mask = FULL << (64 - (_bits * 2));
+ _hash &= mask;
+ }
+
+ // Append the hash to the builder provided.
+ void GeoHash::appendToBuilder(BSONObjBuilder* b, const char * name) const {
+ char buf[8];
+ _copyAndReverse(buf, (char*)&_hash);
+ b->appendBinData(name, 8, bdtCustom, buf);
+ }
+
+ long long GeoHash::getHash() const {
+ return _hash;
+ }
+
+ unsigned GeoHash::getBits() const {
+ return _bits;
+ }
+
+ GeoHash GeoHash::commonPrefix(const GeoHash& other) const {
+ unsigned i = 0;
+ for (; i < _bits && i < other._bits; i++) {
+ if (getBitX(i) == other.getBitX(i) && getBitY(i) == other.getBitY(i))
+ continue;
+ break;
+ }
+ // i is how many bits match between this and other.
+ return GeoHash(_hash, i);
+ }
+
+ // Binary data is stored in some particular byte ordering that requires this.
+ void GeoHash::_copyAndReverse(char *dst, const char *src) {
+ for (unsigned a = 0; a < 8; a++) {
+ dst[a] = src[7 - a];
+ }
+ }
+
+ GeoHashConverter::GeoHashConverter(const Parameters &params) : _params(params) {
+ // TODO(hk): What do we require of the values in params?
+
+ // Compute how much error there is so it can be used as a fudge factor.
+ GeoHash a(0, 0, _params.bits);
+ GeoHash b = a;
+ b.move(1, 1);
+
+ // Epsilon is 1/100th of a bucket size
+ // TODO: Can we actually find error bounds for the sqrt function?
+ double epsilon = 0.001 / _params.scaling;
+ _error = distanceBetweenHashes(a, b) + epsilon;
+
+ // Error in radians
+ _errorSphere = deg2rad(_error);
+ }
+
+ double GeoHashConverter::distanceBetweenHashes(const GeoHash& a, const GeoHash& b) const {
+ double ax, ay, bx, by;
+ unhash(a, &ax, &ay);
+ unhash(b, &bx, &by);
+
+ double dx = bx - ax;
+ double dy = by - ay;
+
+ return sqrt((dx * dx) + (dy * dy));
+ }
+
+ /**
+ * Hashing functions. Convert the following types (which have a double precision point)
+ * to a GeoHash:
+ * BSONElement
+ * BSONObj
+ * Point
+ * double, double
+ */
+
+ GeoHash GeoHashConverter::hash(const Point &p) const {
+ return hash(p.x, p.y);
+ }
+
+ GeoHash GeoHashConverter::hash(const BSONElement& e) const {
+ if (e.isABSONObj())
+ return hash(e.embeddedObject());
+ return GeoHash(e, _params.bits);
+ }
+
+ GeoHash GeoHashConverter::hash(const BSONObj& o) const {
+ return hash(o, NULL);
+ }
+
+ // src is printed out as debugging information. Maybe it is actually somehow the 'source' of o?
+ GeoHash GeoHashConverter::hash(const BSONObj& o, const BSONObj* src) const {
+ BSONObjIterator i(o);
+ uassert(13067,
+ str::stream() << "geo field is empty"
+ << (src ? causedBy((*src).toString()) : ""),
+ i.more());
+
+ BSONElement x = i.next();
+ uassert(13068,
+ str::stream() << "geo field only has 1 element"
+ << causedBy(src ? (*src).toString() : x.toString()),
+ i.more());
+
+ BSONElement y = i.next();
+ uassert(13026,
+ str::stream() << "geo values have to be numbers"
+ << causedBy(src ? (*src).toString() :
+ BSON_ARRAY(x << y).toString()),
+ x.isNumber() && y.isNumber());
+
+ uassert(13027,
+ str::stream() << "point not in interval of [ " << _params.min << ", "
+ << _params.max << " ]"
+ << causedBy(src ? (*src).toString() :
+ BSON_ARRAY(x.number() << y.number()).toString()),
+ x.number() <= _params.max && x.number() >= _params.min &&
+ y.number() <= _params.max && y.number() >= _params.min);
+
+ return GeoHash(convertToHashScale(x.number()), convertToHashScale(y.number()),
+ _params.bits);
+ }
+
+ GeoHash GeoHashConverter::hash(double x, double y) const {
+ uassert(16433,
+ str::stream() << "point not in interval of [ " << _params.min << ", "
+ << _params.max << " ]"
+ << causedBy(BSON_ARRAY(x << y).toString()),
+ x <= _params.max && x >= _params.min &&
+ y <= _params.max && y >= _params.min);
+
+ return GeoHash(convertToHashScale(x), convertToHashScale(y) , _params.bits);
+ }
+
+ /**
+ * Unhashing functions. These convert from a "discretized" GeoHash to the "continuous"
+ * doubles according to our scaling parameters.
+ *
+ * Possible outputs:
+ * double, double
+ * Point
+ * BSONObj
+ */
+ // TODO(hk): these should have consistent naming
+ Point GeoHashConverter::unhashToPoint(const GeoHash &h) const {
+ Point point;
+ unhash(h, &point.x, &point.y);
+ return point;
+ }
+
+ Point GeoHashConverter::unhashToPoint(const BSONElement &e) const {
+ return unhashToPoint(hash(e));
+ }
+
+ BSONObj GeoHashConverter::unhashToBSONObj(const GeoHash& h) const {
+ unsigned x, y;
+ h.unhash(&x, &y);
+ BSONObjBuilder b;
+ b.append("x", convertFromHashScale(x));
+ b.append("y", convertFromHashScale(y));
+ return b.obj();
+ }
+
+ void GeoHashConverter::unhash(const GeoHash &h, double *x, double *y) const {
+ unsigned a, b;
+ h.unhash(&a, &b);
+ *x = convertFromHashScale(a);
+ *y = convertFromHashScale(b);
+ }
+
+ double GeoHashConverter::sizeOfDiag(const GeoHash& a) const {
+ GeoHash b = a;
+ b.move(1, 1);
+ return distanceBetweenHashes(a, b);
+ }
+
+ double GeoHashConverter::sizeEdge(const GeoHash& a) const {
+ if (!a.constrains())
+ return _params.max - _params.min;
+
+ double ax, ay, bx, by;
+ GeoHash b = a;
+ b.move(1, 1);
+ unhash(a, &ax, &ay);
+ unhash(b, &bx, &by);
+
+ // _min and _max are a singularity
+ if (bx == _params.min)
+ bx = _params.max;
+
+ return fabs(ax - bx);
+ }
+
+ // Convert from an unsigned in [0, (max-min)*scaling] to [min, max]
+ double GeoHashConverter::convertFromHashScale(unsigned in) const {
+ double x = in;
+ x /= _params.scaling;
+ x += _params.min;
+ return x;
+ }
+
+ // Convert from a double that is [min, max] to an unsigned in [0, (max-min)*scaling]
+ unsigned GeoHashConverter::convertToHashScale(double in) const {
+ verify(in <= _params.max && in >= _params.min);
+
+ if (in == _params.max) {
+ // prevent aliasing with _min by moving inside the "box"
+ // makes 180 == 179.999 (roughly)
+ in -= _error / 2;
+ }
+
+ in -= _params.min;
+ verify(in >= 0);
+ return static_cast<unsigned>(in * _params.scaling);
+ }
+} // namespace mongo
diff --git a/src/mongo/db/geo/hash.h b/src/mongo/db/geo/hash.h
new file mode 100644
index 00000000000..0fd4fb6f75a
--- /dev/null
+++ b/src/mongo/db/geo/hash.h
@@ -0,0 +1,194 @@
+/**
+* Copyright (C) 2008-2012 10gen Inc.
+*
+* This program is free software: you can redistribute it and/or modify
+* it under the terms of the GNU Affero General Public License, version 3,
+* as published by the Free Software Foundation.
+*
+* This program is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU Affero General Public License for more details.
+*
+* You should have received a copy of the GNU Affero General Public License
+* along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+
+#pragma once
+
+#include "mongo/pch.h"
+#include "mongo/db/jsobj.h"
+#include <iostream>
+
+namespace mongo {
+
+ class GeoHash;
+ class Point;
+ std::ostream& operator<<(std::ostream &s, const GeoHash &h);
+
+ /* This class maps an unsigned x,y coordinate pair to a hash value.
+ * To hash values more interesting than unsigned, use the GeoHashConverter,
+ * which maps doubles to unsigned values.
+ */
+ class GeoHash {
+ public:
+ GeoHash();
+ // The strings are binary values of length <= 64,
+ // examples: 1001010100101, 1
+ explicit GeoHash(const string& hash);
+ explicit GeoHash(const char *s);
+ // bits is how many bits are used to hash each of x and y.
+ GeoHash(unsigned x, unsigned y, unsigned bits = 32);
+ GeoHash(const GeoHash& old);
+ // hash is a raw hash value. we just copy these into our private fields.
+ GeoHash(long long hash, unsigned bits);
+ // This only works if e is BinData. To get a GeoHash from other BSONElements,
+ // use the converter class.
+ explicit GeoHash(const BSONElement& e, unsigned bits = 32);
+
+ // Convert from the hashed value to unsigned.
+ void unhash(unsigned *x, unsigned *y) const;
+
+ /** Is the 'bit'-th most significant bit set? (NOT the least significant) */
+ static bool isBitSet(unsigned val, unsigned bit);
+
+ /** Return a GeoHash with one bit of precision lost. */
+ GeoHash up() const;
+
+ bool hasPrefix(const GeoHash& other) const;
+
+ string toString() const;
+ string toStringHex1() const;
+
+ void setBit(unsigned pos, bool value);
+ bool getBit(unsigned pos) const;
+
+ bool getBitX(unsigned pos) const;
+ bool getBitY(unsigned pos) const;
+
+ // XXX: what does this really do?
+ BSONObj wrap(const char* name = "") const;
+
+ // XXX what does this do
+ bool constrains() const;
+ bool canRefine() const;
+
+ // XXX comment better
+ bool atMinX() const;
+ bool atMinY() const;
+
+ // XXX comment better
+ bool atMaxX() const;
+ bool atMaxY() const;
+
+ // XXX: what does this do
+ void move(int x, int y);
+
+ GeoHash& operator=(const GeoHash& h);
+ bool operator==(const GeoHash& h) const;
+ bool operator!=(const GeoHash& h) const;
+ bool operator<(const GeoHash& h) const;
+ // Append the hash in s to our current hash. We expect s to be '0' or '1' or '\0',
+ // though we also treat non-'1' values as '0'.
+ GeoHash& operator+=(const char* s);
+ GeoHash operator+(const char *s) const;
+ GeoHash operator+(const std::string& s) const;
+
+ // Append the hash to the builder provided.
+ void appendToBuilder(BSONObjBuilder* b, const char * name) const;
+ long long getHash() const;
+ unsigned getBits() const;
+
+ GeoHash commonPrefix(const GeoHash& other) const;
+ private:
+ // XXX not sure why this is done exactly. Why does binary
+ // data need to be reversed? byte ordering of some sort?
+ static void _copyAndReverse(char *dst, const char *src);
+ // Create a hash from the provided string. Used by the string and char* cons.
+ void initFromString(const char *s);
+ /* Keep the upper _bits*2 bits of _hash, clear the lower bits.
+ * Maybe there's junk in there? XXX Not sure why this is done.
+ */
+ void clearUnusedBits();
+ // XXX: what does this do
+ void _move(unsigned offset, int d);
+ // XXX: this is nasty and has no example
+ void unhash_fast(unsigned *x, unsigned *y) const;
+ void unhash_slow(unsigned *x, unsigned *y) const;
+
+ long long _hash;
+ // Bits per field. Our hash is 64 bits, and we have an X and a Y field,
+ // so this is 1 to 32.
+ unsigned _bits;
+ };
+
+ /* Convert between various types and the GeoHash. We need additional information (scaling etc.)
+ * to convert to/from GeoHash. The additional information doesn't change often and is the same
+ * for all conversions, so we stick all the conversion methods here with their associated
+ * data.
+ */
+ class GeoHashConverter {
+ public:
+ struct Parameters {
+ // How many bits to use for the hash?
+ int bits;
+ // X/Y values must be [min, max]
+ double min;
+ double max;
+ // Values are scaled by this when converted to/from hash scale.
+ double scaling;
+ };
+
+ GeoHashConverter(const Parameters &params);
+
+ int getBits() const { return _params.bits; }
+ double getError() const { return _error; }
+ double getErrorSphere() const { return _errorSphere ;}
+ double getMin() const { return _params.min; }
+ double getMax() const { return _params.max; }
+
+ double distanceBetweenHashes(const GeoHash& a, const GeoHash& b) const;
+
+ /**
+ * Hashing functions. Convert the following types to a GeoHash:
+ * BSONElement
+ * BSONObj
+ * Point
+ * double, double
+ */
+ GeoHash hash(const Point &p) const;
+ GeoHash hash(const BSONElement& e) const;
+ GeoHash hash(const BSONObj& o) const;
+ // src is printed out as debugging information. I'm not sure if it's actually
+ // somehow the 'source' of o? Anyway, this is nasty, very nasty. XXX
+ GeoHash hash(const BSONObj& o, const BSONObj* src) const;
+ GeoHash hash(double x, double y) const;
+
+ /** Unhashing functions.
+ * Convert from a hash to the following types:
+ * double, double
+ * Point
+ * BSONObj
+ */
+ // XXX: these should have consistent naming
+ Point unhashToPoint(const GeoHash &h) const;
+ Point unhashToPoint(const BSONElement &e) const;
+ BSONObj unhashToBSONObj(const GeoHash& h) const;
+ void unhash(const GeoHash &h, double *x, double *y) const;
+
+ double sizeOfDiag(const GeoHash& a) const;
+ // XXX: understand/clean this.
+ double sizeEdge(const GeoHash& a) const;
+ private:
+ // Convert from an unsigned in [0, (max-min)*scaling] to [min, max]
+ double convertFromHashScale(unsigned in) const;
+
+ // Convert from a double that is [min, max] to an unsigned in [0, (max-min)*scaling]
+ unsigned convertToHashScale(double in) const;
+
+ Parameters _params;
+ // We compute these based on the _params:
+ double _error;
+ double _errorSphere;
+ };
+} // namespace mongo
diff --git a/src/mongo/db/geo/hash_test.cpp b/src/mongo/db/geo/hash_test.cpp
new file mode 100644
index 00000000000..593bedf4b76
--- /dev/null
+++ b/src/mongo/db/geo/hash_test.cpp
@@ -0,0 +1,72 @@
+/**
+ * Copyright (C) 2012 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+/**
+ * This file contains tests for mongo/db/geo/hash.cpp.
+ */
+
+#include <string>
+#include <sstream>
+
+#include "mongo/db/geo/hash.h"
+#include "mongo/unittest/unittest.h"
+#include "util/assert_util.h"
+
+using mongo::GeoHash;
+using std::string;
+using std::stringstream;
+
+namespace {
+ TEST(GeoHash, MakeZeroHash) {
+ unsigned x = 0, y = 0;
+ GeoHash hash(x, y);
+ }
+
+ static string makeRandomBitString(int length) {
+ stringstream ss;
+ for (int i = 0; i < length; ++i) {
+ if (random() & 1) {
+ ss << "1";
+ } else {
+ ss << "0";
+ }
+ }
+ return ss.str();
+ }
+
+ TEST(GeoHash, MakeRandomValidHashes) {
+ int maxStringLength = 64;
+ for (int i = 0; i < maxStringLength; i += 2) {
+ string a = makeRandomBitString(i);
+ GeoHash hashA = GeoHash(a);
+ (void)hashA.isBitSet(i, 0);
+ (void)hashA.isBitSet(i, 1);
+ }
+ }
+
+ // ASSERT_THROWS does not work if we try to put GeoHash(a) in the macro.
+ static GeoHash makeHash(const string& a) { return GeoHash(a); }
+
+ TEST(GeoHash, MakeTooLongHash) {
+ string a = makeRandomBitString(100);
+ ASSERT_THROWS(makeHash(a), mongo::UserException);
+ }
+
+ TEST(GeoHash, MakeOddHash) {
+ string a = makeRandomBitString(13);
+ ASSERT_THROWS(makeHash(a), mongo::UserException);
+ }
+}
diff --git a/src/mongo/db/geo/haystack.cpp b/src/mongo/db/geo/haystack.cpp
index bcad610494c..089a3891464 100644
--- a/src/mongo/db/geo/haystack.cpp
+++ b/src/mongo/db/geo/haystack.cpp
@@ -1,7 +1,7 @@
// db/geo/haystack.cpp
/**
- * Copyright (C) 2008 10gen Inc.
+ * Copyright (C) 2008-2012 10gen Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License, version 3,
@@ -17,219 +17,233 @@
*/
#include "pch.h"
-#include "../namespace-inl.h"
-#include "../jsobj.h"
-#include "../index.h"
-#include "../commands.h"
-#include "../pdfile.h"
-#include "../btree.h"
-#include "../curop-inl.h"
-#include "../matcher.h"
-#include "core.h"
-#include "../../util/timer.h"
-
-#define GEOQUADDEBUG(x)
-//#define GEOQUADDEBUG(x) cout << x << endl
+#include "mongo/db/namespace-inl.h"
+#include "mongo/db/jsobj.h"
+#include "mongo/db/index.h"
+#include "mongo/db/commands.h"
+#include "mongo/db/pdfile.h"
+#include "mongo/db/btree.h"
+#include "mongo/db/curop-inl.h"
+#include "mongo/db/matcher.h"
+#include "mongo/db/geo/core.h"
+#include "mongo/db/geo/hash.h"
+#include "mongo/db/geo/shapes.h"
+#include "mongo/util/timer.h"
/**
- * this is a geo based search piece, which is different than regular geo lookup
- * this is useful when you want to look for something within a region where the ratio is low
- * works well for search for restaurants withing 25 miles with a certain name
- * should not be used for finding the closest restaurants that are open
+ * Provides the geoHaystack index type and the command "geoSearch."
+ * Examines all documents in a given radius of a given point.
+ * Returns all documents that match a given search restriction.
+ * See http://www.mongodb.org/display/DOCS/Geospatial+Haystack+Indexing
+ *
+ * Use when you want to look for restaurants within 25 miles with a certain name.
+ * Don't use when you want to find the closest open restaurants; see 2d.cpp for that.
*/
namespace mongo {
-
- string GEOSEARCHNAME = "geoHaystack";
+ static const string GEOSEARCHNAME = "geoHaystack";
class GeoHaystackSearchHopper {
public:
- GeoHaystackSearchHopper( const BSONObj& n , double maxDistance , unsigned limit , const string& geoField )
- : _near( n ) , _maxDistance( maxDistance ) , _limit( limit ) , _geoField(geoField) {
-
- }
-
- void got( const DiskLoc& loc ) {
- Point p( loc.obj().getFieldDotted( _geoField ) );
- if ( _near.distance( p ) > _maxDistance )
+ /**
+ * Constructed with a point, a max distance from that point, and a max number of
+ * matched points to store.
+ * @param n The centroid that we're searching
+ * @param maxDistance The maximum distance to consider from that point
+ * @param limit The maximum number of results to return
+ * @param geoField Which field in the provided DiskLoc has the point to test.
+ */
+ GeoHaystackSearchHopper(const BSONObj& near, double maxDistance, unsigned limit,
+ const string& geoField)
+ : _near(near), _maxDistance(maxDistance), _limit(limit), _geoField(geoField) { }
+
+ // Consider the point in loc, and keep it if it's within _maxDistance (and we have space for
+ // it)
+ void consider(const DiskLoc& loc) {
+ if (limitReached()) return;
+ Point p(loc.obj().getFieldDotted(_geoField));
+ if (distance(_near, p) > _maxDistance)
return;
- _locs.push_back( loc );
+ _locs.push_back(loc);
}
- int append( BSONArrayBuilder& b ) {
- for ( unsigned i=0; i<_locs.size() && i<_limit; i++ )
- b.append( _locs[i].obj() );
+ int appendResultsTo(BSONArrayBuilder* b) {
+ for (unsigned i = 0; i <_locs.size(); i++)
+ b->append(_locs[i].obj());
return _locs.size();
}
+ // Have we stored as many points as we can?
+ const bool limitReached() const {
+ return _locs.size() >= _limit;
+ }
+ private:
Point _near;
double _maxDistance;
unsigned _limit;
- string _geoField;
-
+ const string _geoField;
vector<DiskLoc> _locs;
};
+ /**
+ * Provides the IndexType for geoSearch.
+ * Maps (lat, lng) to the bucketSize-sided square bucket that contains it.
+ * Usage:
+ * db.foo.ensureIndex({ pos : "geoHaystack", type : 1 }, { bucketSize : 1 })
+ * pos is the name of the field to be indexed that has lat/lng data in an array.
+ * type is the name of the secondary field to be indexed.
+ * bucketSize specifies the dimension of the square bucket for the data in pos.
+ * ALL fields are mandatory.
+ */
class GeoHaystackSearchIndex : public IndexType {
-
public:
-
- GeoHaystackSearchIndex( const IndexPlugin* plugin , const IndexSpec* spec )
- : IndexType( plugin , spec ) {
+ GeoHaystackSearchIndex(const IndexPlugin* plugin, const IndexSpec* spec)
+ : IndexType(plugin, spec) {
BSONElement e = spec->info["bucketSize"];
- uassert( 13321 , "need bucketSize" , e.isNumber() );
+ uassert(13321, "need bucketSize", e.isNumber());
_bucketSize = e.numberDouble();
+ uassert(16455, "bucketSize cannot be zero", _bucketSize != 0.0);
- BSONObjBuilder orderBuilder;
-
- BSONObjIterator i( spec->keyPattern );
- while ( i.more() ) {
+ // Example:
+ // db.foo.ensureIndex({ pos : "geoHaystack", type : 1 }, { bucketSize : 1 })
+ BSONObjIterator i(spec->keyPattern);
+ while (i.more()) {
BSONElement e = i.next();
- if ( e.type() == String && GEOSEARCHNAME == e.valuestr() ) {
- uassert( 13314 , "can't have 2 geo fields" , _geo.size() == 0 );
- uassert( 13315 , "2d has to be first in index" , _other.size() == 0 );
- _geo = e.fieldName();
- }
- else {
- _other.push_back( e.fieldName() );
+ if (e.type() == String && GEOSEARCHNAME == e.valuestr()) {
+ uassert(13314, "can't have more than one geo field", _geoField.size() == 0);
+ uassert(13315, "the geo field has to be first in index",
+ _otherFields.size() == 0);
+ _geoField = e.fieldName();
+ } else {
+ // TODO(hk): Do we want to do any checking on e.type and e.valuestr?
+ uassert(13326, "geoSearch can only have 1 non-geo field for now",
+ _otherFields.size() == 0);
+ _otherFields.push_back(e.fieldName());
}
- orderBuilder.append( "" , 1 );
}
- uassert( 13316 , "no geo field specified" , _geo.size() );
- uassert( 13317 , "no other fields specified" , _other.size() );
- uassert( 13326 , "quadrant search can only have 1 other field for now" , _other.size() == 1 );
- _order = orderBuilder.obj();
- }
-
- int hash( const BSONElement& e ) const {
- uassert( 13322 , "not a number" , e.isNumber() );
- return hash( e.numberDouble() );
- }
-
- int hash( double d ) const {
- d += 180;
- d /= _bucketSize;
- return (int)d;
- }
-
- string makeString( int hashedX , int hashedY ) const {
- stringstream ss;
- ss << hashedX << "_" << hashedY;
- return ss.str();
- }
-
- void _add( const BSONObj& obj, const string& root , const BSONElement& e , BSONObjSet& keys ) const {
- BSONObjBuilder buf;
- buf.append( "" , root );
- if ( e.eoo() )
- buf.appendNull( "" );
- else
- buf.appendAs( e , "" );
-
- BSONObj key = buf.obj();
- GEOQUADDEBUG( obj << "\n\t" << root << "\n\t" << key );
- keys.insert( key );
+ uassert(13316, "no geo field specified", _geoField.size());
+ // XXX: Fix documentation that says the other field is optional; code says it's mandatory.
+ uassert(13317, "no non-geo fields specified", _otherFields.size());
}
- void getKeys( const BSONObj &obj, BSONObjSet &keys ) const {
-
- BSONElement loc = obj.getFieldDotted( _geo );
- if ( loc.eoo() )
+ void getKeys(const BSONObj &obj, BSONObjSet &keys) const {
+ BSONElement loc = obj.getFieldDotted(_geoField);
+ if (loc.eoo())
return;
- uassert( 13323 , "latlng not an array" , loc.isABSONObj() );
+ uassert(13323, "latlng not an array", loc.isABSONObj());
string root;
{
- BSONObjIterator i( loc.Obj() );
+ BSONObjIterator i(loc.Obj());
BSONElement x = i.next();
BSONElement y = i.next();
- root = makeString( hash(x) , hash(y) );
+ root = makeString(hash(x), hash(y));
}
-
- verify( _other.size() == 1 );
+ verify(_otherFields.size() == 1);
BSONElementSet all;
- obj.getFieldsDotted( _other[0] , all );
- if ( all.size() == 0 ) {
- _add( obj , root , BSONElement() , keys );
- }
- else {
- for ( BSONElementSet::iterator i=all.begin(); i!=all.end(); ++i ) {
- _add( obj , root , *i , keys );
+ // This is getFieldsDotted (plural not singular) since the object we're indexing
+ // may be an array.
+ obj.getFieldsDotted(_otherFields[0], all);
+
+ if (all.size() == 0) {
+ // We're indexing a document that doesn't have the secondary non-geo field present.
+                // XXX: do we want to add this even if all.size() > 0? (result: empty search
+                // terms would match everything instead of only things with empty search terms)
+ addKey(root, BSONElement(), keys);
+ } else {
+ // Ex:If our secondary field is type: "foo" or type: {a:"foo", b:"bar"},
+ // all.size()==1. We can query on the complete field.
+ // Ex: If our secondary field is type: ["A", "B"] all.size()==2 and all has values
+ // "A" and "B". The query looks for any of the fields in the array.
+ for (BSONElementSet::iterator i = all.begin(); i != all.end(); ++i) {
+ addKey(root, *i, keys);
}
}
-
}
- shared_ptr<Cursor> newCursor( const BSONObj& query , const BSONObj& order , int numWanted ) const {
+ // XXX: Who could call this and how do they know not to actually do so?
+ shared_ptr<Cursor> newCursor(const BSONObj& query, const BSONObj& order,
+ int numWanted) const {
shared_ptr<Cursor> c;
verify(0);
return c;
}
- void searchCommand( NamespaceDetails* nsd , int idxNo ,
- const BSONObj& n /*near*/ , double maxDistance , const BSONObj& search ,
- BSONObjBuilder& result , unsigned limit ) {
-
+ void searchCommand(NamespaceDetails* nsd, int idxNo,
+ const BSONObj& n /*near*/, double maxDistance, const BSONObj& search,
+ BSONObjBuilder& result, unsigned limit) {
Timer t;
- log(1) << "SEARCH near:" << n << " maxDistance:" << maxDistance << " search: " << search << endl;
- int x,y;
+ log(1) << "SEARCH near:" << n << " maxDistance:" << maxDistance
+ << " search: " << search << endl;
+ int x, y;
{
- BSONObjIterator i( n );
- x = hash( i.next() );
- y = hash( i.next() );
+ BSONObjIterator i(n);
+ x = hash(i.next());
+ y = hash(i.next());
}
- int scale = (int)ceil( maxDistance / _bucketSize );
+ int scale = static_cast<int>(ceil(maxDistance / _bucketSize));
- GeoHaystackSearchHopper hopper(n,maxDistance,limit,_geo);
+ GeoHaystackSearchHopper hopper(n, maxDistance, limit, _geoField);
long long btreeMatches = 0;
- for ( int a=-scale; a<=scale; a++ ) {
- for ( int b=-scale; b<=scale; b++ ) {
-
+ // TODO(hk): Consider starting with a (or b)=0, then going to a=+-1, then a=+-2, etc.
+ // Would want a HaystackKeyIterator or similar for this, but it'd be a nice
+ // encapsulation allowing us to S2-ify this trivially/abstract the key details.
+ for (int a = -scale; a <= scale && !hopper.limitReached(); ++a) {
+ for (int b = -scale; b <= scale && !hopper.limitReached(); ++b) {
BSONObjBuilder bb;
- bb.append( "" , makeString( x + a , y + b ) );
- for ( unsigned i=0; i<_other.size(); i++ ) {
- BSONElement e = search.getFieldDotted( _other[i] );
- if ( e.eoo() )
- bb.appendNull( "" );
+ bb.append("", makeString(x + a, y + b));
+
+ for (unsigned i = 0; i < _otherFields.size(); i++) {
+ // See if the non-geo field we're indexing on is in the provided search term.
+ BSONElement e = search.getFieldDotted(_otherFields[i]);
+ if (e.eoo())
+ bb.appendNull("");
else
- bb.appendAs( e , "" );
+ bb.appendAs(e, "");
}
BSONObj key = bb.obj();
- GEOQUADDEBUG( "KEY: " << key );
+ GEOQUADDEBUG("KEY: " << key);
+ // TODO(hk): this keeps a set of all DiskLoc seen in this pass so that we don't
+ // consider the element twice. Do we want to instead store a hash of the set?
+ // Is this often big?
set<DiskLoc> thisPass;
- scoped_ptr<BtreeCursor> cursor( BtreeCursor::make( nsd , idxNo , *getDetails() , key , key , true , 1 ) );
- while ( cursor->ok() ) {
- pair<set<DiskLoc>::iterator, bool> p = thisPass.insert( cursor->currLoc() );
- if ( p.second ) {
- hopper.got( cursor->currLoc() );
- GEOQUADDEBUG( "\t" << cursor->current() );
+
+ // Lookup from key to key, inclusive.
+ scoped_ptr<BtreeCursor> cursor(BtreeCursor::make(nsd, idxNo, *getDetails(),
+ key, key, true, 1));
+ while (cursor->ok() && !hopper.limitReached()) {
+ pair<set<DiskLoc>::iterator, bool> p = thisPass.insert(cursor->currLoc());
+ // If a new element was inserted (haven't seen the DiskLoc before), p.second
+ // is true.
+ if (p.second) {
+ hopper.consider(cursor->currLoc());
+ GEOQUADDEBUG("\t" << cursor->current());
btreeMatches++;
}
cursor->advance();
}
}
-
}
- BSONArrayBuilder arr( result.subarrayStart( "results" ) );
- int num = hopper.append( arr );
+ BSONArrayBuilder arr(result.subarrayStart("results"));
+ int num = hopper.appendResultsTo(&arr);
arr.done();
{
- BSONObjBuilder b( result.subobjStart( "stats" ) );
- b.append( "time" , t.millis() );
- b.appendNumber( "btreeMatches" , btreeMatches );
- b.append( "n" , num );
+ BSONObjBuilder b(result.subobjStart("stats"));
+ b.append("time", t.millis());
+ b.appendNumber("btreeMatches", btreeMatches);
+ b.append("n", num);
b.done();
}
}
@@ -237,81 +251,104 @@ namespace mongo {
const IndexDetails* getDetails() const {
return _spec->getDetails();
}
+ private:
+ // TODO(hk): consider moving hash/unhash/makeString out
+ int hash(const BSONElement& e) const {
+ uassert(13322, "geo field is not a number", e.isNumber());
+ return hash(e.numberDouble());
+ }
+
+ int hash(double d) const {
+ d += 180;
+ d /= _bucketSize;
+ return static_cast<int>(d);
+ }
- string _geo;
- vector<string> _other;
+ string makeString(int hashedX, int hashedY) const {
+ stringstream ss;
+ ss << hashedX << "_" << hashedY;
+ return ss.str();
+ }
- BSONObj _order;
+ // Build a new BSONObj with root in it. If e is non-empty, append that to the key. Insert
+ // the BSONObj into keys.
+ void addKey(const string& root, const BSONElement& e, BSONObjSet& keys) const {
+ BSONObjBuilder buf;
+ buf.append("", root);
+ if (e.eoo())
+ buf.appendNull("");
+ else
+ buf.appendAs(e, "");
+
+ keys.insert(buf.obj());
+ }
+
+ string _geoField;
+ vector<string> _otherFields;
double _bucketSize;
};
class GeoHaystackSearchIndexPlugin : public IndexPlugin {
public:
- GeoHaystackSearchIndexPlugin() : IndexPlugin( GEOSEARCHNAME ) {
- }
+ GeoHaystackSearchIndexPlugin() : IndexPlugin(GEOSEARCHNAME) { }
- virtual IndexType* generate( const IndexSpec* spec ) const {
- return new GeoHaystackSearchIndex( this , spec );
+ virtual IndexType* generate(const IndexSpec* spec) const {
+ return new GeoHaystackSearchIndex(this, spec);
}
-
} nameIndexPlugin;
-
class GeoHaystackSearchCommand : public Command {
public:
- GeoHaystackSearchCommand() : Command( "geoSearch" ) {}
+ GeoHaystackSearchCommand() : Command("geoSearch") {}
+
virtual LockType locktype() const { return READ; }
bool slaveOk() const { return true; }
bool slaveOverrideOk() const { return true; }
- bool run(const string& dbname , BSONObj& cmdObj, int, string& errmsg, BSONObjBuilder& result, bool fromRepl) {
+ bool run(const string& dbname, BSONObj& cmdObj, int,
+ string& errmsg, BSONObjBuilder& result, bool fromRepl) {
string ns = dbname + "." + cmdObj.firstElement().valuestr();
- NamespaceDetails * d = nsdetails( ns.c_str() );
- if ( ! d ) {
+ NamespaceDetails *nsd = nsdetails(ns.c_str());
+ if (NULL == nsd) {
errmsg = "can't find ns";
return false;
}
vector<int> idxs;
- d->findIndexByType( GEOSEARCHNAME , idxs );
- if ( idxs.size() == 0 ) {
+ nsd->findIndexByType(GEOSEARCHNAME, idxs);
+ if (idxs.size() == 0) {
errmsg = "no geoSearch index";
return false;
}
- if ( idxs.size() > 1 ) {
+ if (idxs.size() > 1) {
errmsg = "more than 1 geosearch index";
return false;
}
int idxNum = idxs[0];
- IndexDetails& id = d->idx( idxNum );
- GeoHaystackSearchIndex * si = (GeoHaystackSearchIndex*)id.getSpec().getType();
- verify( &id == si->getDetails() );
+ IndexDetails& id = nsd->idx(idxNum);
+ GeoHaystackSearchIndex *si =
+ static_cast<GeoHaystackSearchIndex*>(id.getSpec().getType());
+ verify(&id == si->getDetails());
- BSONElement n = cmdObj["near"];
+ BSONElement near = cmdObj["near"];
BSONElement maxDistance = cmdObj["maxDistance"];
BSONElement search = cmdObj["search"];
- uassert( 13318 , "near needs to be an array" , n.isABSONObj() );
- uassert( 13319 , "maxDistance needs a number" , maxDistance.isNumber() );
- uassert( 13320 , "search needs to be an object" , search.type() == Object );
+ uassert(13318, "near needs to be an array", near.isABSONObj());
+ uassert(13319, "maxDistance needs a number", maxDistance.isNumber());
+ uassert(13320, "search needs to be an object", search.type() == Object);
unsigned limit = 50;
- if ( cmdObj["limit"].isNumber() )
- limit = (unsigned)cmdObj["limit"].numberInt();
-
- si->searchCommand( d , idxNum , n.Obj() , maxDistance.numberDouble() , search.Obj() , result , limit );
+ if (cmdObj["limit"].isNumber())
+ limit = static_cast<unsigned>(cmdObj["limit"].numberInt());
+ si->searchCommand(nsd, idxNum, near.Obj(), maxDistance.numberDouble(), search.Obj(),
+ result, limit);
return 1;
}
-
} nameSearchCommand;
-
-
-
-
-
}
diff --git a/src/mongo/db/geo/shapes.cpp b/src/mongo/db/geo/shapes.cpp
new file mode 100644
index 00000000000..dab6ede2a9d
--- /dev/null
+++ b/src/mongo/db/geo/shapes.cpp
@@ -0,0 +1,459 @@
+/**
+* Copyright (C) 2008-2012 10gen Inc.
+*
+* This program is free software: you can redistribute it and/or modify
+* it under the terms of the GNU Affero General Public License, version 3,
+* as published by the Free Software Foundation.
+*
+* This program is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU Affero General Public License for more details.
+*
+* You should have received a copy of the GNU Affero General Public License
+* along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+
+#include "mongo/pch.h"
+#include "mongo/db/jsobj.h"
+#include "mongo/db/geo/core.h"
+#include "mongo/db/geo/shapes.h"
+#include "mongo/util/mongoutils/str.h"
+
+// So we can get at the str namespace.
+using namespace mongoutils;
+
+namespace mongo {
+
+////////////// Point
+
+ Point::Point() : x(0), y(0) { }
+
+ Point::Point(double x, double y) : x(x), y(y) { }
+
+ Point::Point(const BSONElement& e) {
+ BSONObjIterator i(e.Obj());
+ x = i.next().number();
+ y = i.next().number();
+ }
+
+ Point::Point(const BSONObj& o) {
+ BSONObjIterator i(o);
+ x = i.next().number();
+ y = i.next().number();
+ }
+
+ string Point::toString() const {
+ StringBuilder buf;
+ buf << "(" << x << "," << y << ")";
+ return buf.str();
+ }
+
+
+////////////// Box
+
+ Box::Box() {}
+
+ Box::Box(double x, double y, double size) : _min(x, y), _max(x + size, y + size) { }
+
+ Box::Box(Point min, Point max) : _min(min), _max(max) { }
+
+ BSONArray Box::toBSON() const {
+ return BSON_ARRAY(BSON_ARRAY(_min.x << _min.y) << BSON_ARRAY(_max.x << _max.y));
+ }
+
+ string Box::toString() const {
+ StringBuilder buf;
+ buf << _min.toString() << " -->> " << _max.toString();
+ return buf.str();
+ }
+
+ bool Box::between(double min, double max, double val, double fudge) const {
+ return val + fudge >= min && val <= max + fudge;
+ }
+
+ bool Box::onBoundary(double bound, double val, double fudge) const {
+ return (val >= bound - fudge && val <= bound + fudge);
+ }
+
+ bool Box::mid(double amin, double amax,
+ double bmin, double bmax, bool min, double* res) const {
+ verify(amin <= amax);
+ verify(bmin <= bmax);
+
+ if (amin < bmin) {
+ if (amax < bmin)
+ return false;
+ *res = min ? bmin : amax;
+ return true;
+ }
+ if (amin > bmax)
+ return false;
+ *res = min ? amin : bmax;
+ return true;
+ }
+
+ double Box::intersects(const Box& other) const {
+ Point boundMin(0,0);
+ Point boundMax(0,0);
+
+ if (!mid(_min.x, _max.x, other._min.x, other._max.x, true, &boundMin.x) ||
+ !mid(_min.x, _max.x, other._min.x, other._max.x, false, &boundMax.x) ||
+ !mid(_min.y, _max.y, other._min.y, other._max.y, true, &boundMin.y) ||
+ !mid(_min.y, _max.y, other._min.y, other._max.y, false, &boundMax.y))
+ return 0;
+
+ Box intersection(boundMin, boundMax);
+ return intersection.area() / area();
+ }
+
+ double Box::area() const {
+ return (_max.x - _min.x) * (_max.y - _min.y);
+ }
+
+ double Box::maxDim() const {
+ return max(_max.x - _min.x, _max.y - _min.y);
+ }
+
+ Point Box::center() const {
+ return Point((_min.x + _max.x) / 2,
+ (_min.y + _max.y) / 2);
+ }
+
+ void Box::truncate(double min, double max) {
+ if (_min.x < min) _min.x = min;
+ if (_min.y < min) _min.y = min;
+ if (_max.x > max) _max.x = max;
+ if (_max.y > max) _max.y = max;
+ }
+
+ void Box::fudge(double error) {
+ _min.x -= error;
+ _min.y -= error;
+ _max.x += error;
+ _max.y += error;
+ }
+
+ bool Box::onBoundary(Point p, double fudge) {
+ return onBoundary(_min.x, p.x, fudge) ||
+ onBoundary(_max.x, p.x, fudge) ||
+ onBoundary(_min.y, p.y, fudge) ||
+ onBoundary(_max.y, p.y, fudge);
+ }
+
+ bool Box::inside(Point p, double fudge) {
+ bool res = inside(p.x, p.y, fudge);
+ return res;
+ }
+
+ bool Box::inside(double x, double y, double fudge) {
+ return between(_min.x, _max.x , x, fudge) &&
+ between(_min.y, _max.y , y, fudge);
+ }
+
+ bool Box::contains(const Box& other, double fudge) {
+ return inside(other._min, fudge) && inside(other._max, fudge);
+ }
+
+////////////// Polygon
+
+ Polygon::Polygon(void) : _centroidCalculated(false), _boundsCalculated(false) {}
+
+ Polygon::Polygon(vector<Point> points) : _centroidCalculated(false),
+ _boundsCalculated(false), _points(points) { }
+
+ void Polygon::add(Point p) {
+ _centroidCalculated = false;
+ _boundsCalculated = false;
+ _points.push_back(p);
+ }
+
+ int Polygon::size(void) const { return _points.size(); }
+
+ bool Polygon::contains(const Point& p) const { return contains(p, 0) > 0; }
+
+ /*
+ * Return values:
+ * -1 if no intersection
+ * 0 if maybe an intersection (using fudge)
+ * 1 if there is an intersection
+ *
+ * A ray casting intersection method is used.
+ */
+ int Polygon::contains(const Point &p, double fudge) const {
+ Box fudgeBox(Point(p.x - fudge, p.y - fudge), Point(p.x + fudge, p.y + fudge));
+
+ int counter = 0;
+ Point p1 = _points[0];
+ for (int i = 1; i <= size(); i++) {
+ // XXX: why is there a mod here?
+ Point p2 = _points[i % size()];
+
+ GEODEBUG("Doing intersection check of " << fudgeBox.toString()
+ << " with seg " << p1.toString() << " to " << p2.toString());
+
+ // We need to check whether or not this segment intersects our error box
+ if (fudge > 0 &&
+ // Points not too far below box
+ fudgeBox._min.y <= std::max(p1.y, p2.y) &&
+ // Points not too far above box
+ fudgeBox._max.y >= std::min(p1.y, p2.y) &&
+ // Points not too far to left of box
+ fudgeBox._min.x <= std::max(p1.x, p2.x) &&
+ // Points not too far to right of box
+ fudgeBox._max.x >= std::min(p1.x, p2.x)) {
+
+ GEODEBUG("Doing detailed check");
+
+ // If our box contains one or more of these points, we need to do an exact
+ // check.
+ if (fudgeBox.inside(p1)) {
+ GEODEBUG("Point 1 inside");
+ return 0;
+ }
+ if (fudgeBox.inside(p2)) {
+ GEODEBUG("Point 2 inside");
+ return 0;
+ }
+
+ // Do intersection check for vertical sides
+ if (p1.y != p2.y) {
+ double invSlope = (p2.x - p1.x) / (p2.y - p1.y);
+
+ double xintersT = (fudgeBox._max.y - p1.y) * invSlope + p1.x;
+ if (fudgeBox._min.x <= xintersT && fudgeBox._max.x >= xintersT) {
+ GEODEBUG("Top intersection @ " << xintersT);
+ return 0;
+ }
+
+ double xintersB = (fudgeBox._min.y - p1.y) * invSlope + p1.x;
+ if (fudgeBox._min.x <= xintersB && fudgeBox._max.x >= xintersB) {
+ GEODEBUG("Bottom intersection @ " << xintersB);
+ return 0;
+ }
+ }
+
+ // Do intersection check for horizontal sides
+ if (p1.x != p2.x) {
+ double slope = (p2.y - p1.y) / (p2.x - p1.x);
+
+ double yintersR = (p1.x - fudgeBox._max.x) * slope + p1.y;
+ if (fudgeBox._min.y <= yintersR && fudgeBox._max.y >= yintersR) {
+ GEODEBUG("Right intersection @ " << yintersR);
+ return 0;
+ }
+
+ double yintersL = (p1.x - fudgeBox._min.x) * slope + p1.y;
+ if (fudgeBox._min.y <= yintersL && fudgeBox._max.y >= yintersL) {
+ GEODEBUG("Left intersection @ " << yintersL);
+ return 0;
+ }
+ }
+ } else if (fudge == 0){
+ // If this is an exact vertex, we won't intersect, so check this
+ if (p.y == p1.y && p.x == p1.x) return 1;
+ else if (p.y == p2.y && p.x == p2.x) return 1;
+
+ // If this is a horizontal line we won't intersect, so check this
+ if (p1.y == p2.y && p.y == p1.y){
+ // Check that the x-coord lies in the line
+ if (p.x >= std::min(p1.x, p2.x) && p.x <= std::max(p1.x, p2.x))
+ return 1;
+ }
+ }
+
+ // Normal intersection test.
+ // TODO: Invert these for clearer logic?
+ if (p.y > std::min(p1.y, p2.y)) {
+ if (p.y <= std::max(p1.y, p2.y)) {
+ if (p.x <= std::max(p1.x, p2.x)) {
+ if (p1.y != p2.y) {
+ double xinters = (p.y-p1.y)*(p2.x-p1.x)/(p2.y-p1.y)+p1.x;
+ // Special case of point on vertical line
+ if (p1.x == p2.x && p.x == p1.x){
+
+ // Need special case for the vertical edges, for example:
+ // 1) \e pe/----->
+ // vs.
+ // 2) \ep---e/----->
+ //
+ // if we count exact as intersection, then 1 is in but 2 is out
+ // if we count exact as no-int then 1 is out but 2 is in.
+
+ return 1;
+ } else if (p1.x == p2.x || p.x <= xinters) {
+ counter++;
+ }
+ }
+ }
+ }
+ }
+
+ p1 = p2;
+ }
+
+ if (counter % 2 == 0) {
+ return -1;
+ } else {
+ return 1;
+ }
+ }
+
+ Point Polygon::centroid(void) {
+        /* Centroid is cached, it won't change between points */
+ if (_centroidCalculated) {
+ return _centroid;
+ }
+
+ Point cent;
+ double signedArea = 0.0;
+ double area = 0.0; // Partial signed area
+
+ /// For all vertices except last
+ int i = 0;
+ for (i = 0; i < size() - 1; ++i) {
+ area = _points[i].x * _points[i+1].y - _points[i+1].x * _points[i].y ;
+ signedArea += area;
+ cent.x += (_points[i].x + _points[i+1].x) * area;
+ cent.y += (_points[i].y + _points[i+1].y) * area;
+ }
+
+ // Do last vertex
+ area = _points[i].x * _points[0].y - _points[0].x * _points[i].y;
+ cent.x += (_points[i].x + _points[0].x) * area;
+ cent.y += (_points[i].y + _points[0].y) * area;
+ signedArea += area;
+ signedArea *= 0.5;
+ cent.x /= (6 * signedArea);
+ cent.y /= (6 * signedArea);
+
+ _centroidCalculated = true;
+ _centroid = cent;
+
+ return cent;
+ }
+
+ Box Polygon::bounds(void) {
+ if (_boundsCalculated) {
+ return _bounds;
+ }
+
+ _bounds._max = _points[0];
+ _bounds._min = _points[0];
+
+ for (int i = 1; i < size(); i++) {
+ _bounds._max.x = max(_bounds._max.x, _points[i].x);
+ _bounds._max.y = max(_bounds._max.y, _points[i].y);
+ _bounds._min.x = min(_bounds._min.x, _points[i].x);
+ _bounds._min.y = min(_bounds._min.y, _points[i].y);
+
+ }
+
+ _boundsCalculated = true;
+ return _bounds;
+ }
+
+ /////// Other methods
+
+ // TODO(hk): is this really worthwhile?
+ /**
+ * Distance method that compares x or y coords when other direction is zero,
+ * avoids numerical error when distances are very close to radius but axis-aligned.
+ *
+ * An example of the problem is:
+ * (52.0 - 51.9999) - 0.0001 = 3.31965e-15 and 52.0 - 51.9999 > 0.0001 in double arithmetic
+ * but:
+ * 51.9999 + 0.0001 <= 52.0
+ *
+     * This avoids some (but not all!) surprising results in $center queries where points are
+ * (radius + center.x, center.y) or vice-versa.
+ */
+ bool distanceWithin(const Point &p1, const Point &p2, double radius) {
+ double a = p2.x - p1.x;
+ double b = p2.y - p1.y;
+
+ if (a == 0) {
+ //
+ // Note: For some, unknown reason, when a 32-bit g++ optimizes this call, the sum is
+ // calculated imprecisely. We need to force the compiler to always evaluate it
+ // correctly, hence the weirdness.
+ //
+ // On some 32-bit linux machines, removing the volatile keyword or calculating the sum
+ // inline will make certain geo tests fail. Of course this check will force volatile
+ // for all 32-bit systems, not just affected systems.
+ if (sizeof(void*) <= 4){
+ volatile double sum = p2.y > p1.y ? p1.y + radius : p2.y + radius;
+ return p2.y > p1.y ? sum >= p2.y : sum >= p1.y;
+ } else {
+ // Original math, correct for most systems
+ return p2.y > p1.y ? p1.y + radius >= p2.y : p2.y + radius >= p1.y;
+ }
+ }
+
+ if (b == 0) {
+ if (sizeof(void*) <= 4){
+ volatile double sum = p2.x > p1.x ? p1.x + radius : p2.x + radius;
+ return p2.x > p1.x ? sum >= p2.x : sum >= p1.x;
+ } else {
+ return p2.x > p1.x ? p1.x + radius >= p2.x : p2.x + radius >= p1.x;
+ }
+ }
+
+ return sqrt((a * a) + (b * b)) <= radius;
+ }
+
+ // Technically lat/long bounds, not really tied to earth radius.
+ void checkEarthBounds(const Point &p) {
+ uassert(14808, str::stream() << "point " << p.toString()
+ << " must be in earth-like bounds of long "
+ << ": [-180, 180], lat : [-90, 90] ",
+ p.x >= -180 && p.x <= 180 && p.y >= -90 && p.y <= 90);
+ }
+
+
+ // WARNING: x and y MUST be longitude and latitude in that order
+ // note: multiply by earth radius for distance
+ double spheredist_rad(const Point& p1, const Point& p2) {
+ // this uses the n-vector formula: http://en.wikipedia.org/wiki/N-vector
+ // If you try to match the code to the formula, note that I inline the cross-product.
+
+ double sinx1(sin(p1.x)), cosx1(cos(p1.x));
+ double siny1(sin(p1.y)), cosy1(cos(p1.y));
+ double sinx2(sin(p2.x)), cosx2(cos(p2.x));
+ double siny2(sin(p2.y)), cosy2(cos(p2.y));
+
+ double cross_prod =
+ (cosy1*cosx1 * cosy2*cosx2) +
+ (cosy1*sinx1 * cosy2*sinx2) +
+ (siny1 * siny2);
+
+ if (cross_prod >= 1 || cross_prod <= -1) {
+ // fun with floats
+ verify(fabs(cross_prod)-1 < 1e-6);
+ return cross_prod > 0 ? 0 : M_PI;
+ }
+
+ return acos(cross_prod);
+ }
+
+ // @param p1 A point on the sphere where x and y are degrees.
+ // @param p2 A point on the sphere where x and y are degrees.
+ // @return The distance between the two points in RADIANS. Multiply by radius to get arc
+ // length.
+ double spheredist_deg(const Point& p1, const Point& p2) {
+ return spheredist_rad(Point(deg2rad(p1.x), deg2rad(p1.y)),
+ Point(deg2rad(p2.x), deg2rad(p2.y)));
+ }
+
+ double distance(const Point& p1, const Point &p2) {
+ double a = p1.x - p2.x;
+ double b = p1.y - p2.y;
+
+ // Avoid numerical error if possible...
+ // TODO(hk): not convinced this is worth it
+ if (a == 0) return abs(b);
+ if (b == 0) return abs(a);
+
+ return sqrt((a * a) + (b * b));
+ }
+} // namespace mongo
diff --git a/src/mongo/db/geo/shapes.h b/src/mongo/db/geo/shapes.h
new file mode 100644
index 00000000000..ee7451d1c05
--- /dev/null
+++ b/src/mongo/db/geo/shapes.h
@@ -0,0 +1,104 @@
+/**
+* Copyright (C) 2008-2012 10gen Inc.
+*
+* This program is free software: you can redistribute it and/or modify
+* it under the terms of the GNU Affero General Public License, version 3,
+* as published by the Free Software Foundation.
+*
+* This program is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU Affero General Public License for more details.
+*
+* You should have received a copy of the GNU Affero General Public License
+* along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+
+#pragma once
+
+#include "mongo/pch.h"
+#include "mongo/db/jsobj.h"
+
+namespace mongo {
+
+    // TODO(hk): move these distance things to something like geomath.h
+    struct Point;
+    // 2D euclidean distance between p1 and p2.
+    double distance(const Point& p1, const Point &p2);
+    // NOTE(review): body not visible here; presumably returns whether the
+    // euclidean distance between p1 and p2 is within radius -- confirm
+    // whether the bound is inclusive.
+    bool distanceWithin(const Point &p1, const Point &p2, double radius);
+    // uasserts unless p is within long [-180, 180] and lat [-90, 90].
+    void checkEarthBounds(const Point &p);
+    // Spherical distance in radians between two points given in radians.
+    double spheredist_rad(const Point& p1, const Point& p2);
+    // Spherical distance in radians between two points given in degrees.
+    double spheredist_deg(const Point& p1, const Point& p2);
+
+    // A simple 2D point. When used geographically, x is longitude and y is
+    // latitude (in that order -- see spheredist_rad).
+    struct Point {
+        Point();
+        Point(double x, double y);
+        // Construct from BSON; exact accepted formats live in the .cpp.
+        explicit Point(const BSONElement& e);
+        explicit Point(const BSONObj& o);
+        string toString() const;
+
+        double x;
+        double y;
+    };
+
+    // An axis-aligned 2D box, stored as its min and max corner points.
+    class Box {
+    public:
+        Box();
+        // Square box with corner (x, y) and the given edge size.
+        Box(double x, double y, double size);
+        Box(Point min, Point max);
+
+        BSONArray toBSON() const;
+        string toString() const;
+
+        // True if val lies in [min, max], widened by fudge.
+        bool between(double min, double max, double val, double fudge = 0) const;
+        // True if val is within fudge of bound.
+        bool onBoundary(double bound, double val, double fudge = 0) const;
+        // TODO(hk) comment
+        bool mid(double amin, double amax, double bmin, double bmax, bool min, double* res) const;
+
+        // TODO(hk): What is the return here? comment
+        // NOTE(review): returns double, not bool -- presumably a measure of
+        // overlap with other; verify against the .cpp before relying on it.
+        double intersects(const Box& other) const;
+        double area() const;
+        // Longest edge length of the box.
+        double maxDim() const;
+        Point center() const;
+        // Clamp both corners into [min, max] on each axis.
+        void truncate(double min, double max);
+        // Expand the box outward by error on all sides.
+        void fudge(double error);
+        bool onBoundary(Point p, double fudge = 0);
+        bool inside(Point p, double fudge = 0);
+        bool inside(double x, double y, double fudge = 0);
+        // True if other lies entirely within this box (widened by fudge).
+        bool contains(const Box& other, double fudge = 0);
+        // TODO(hk): This could be private and Polygon could be our friend, or we could
+        // have getters/setters (lots of code change).
+        Point _min;
+        Point _max;
+    };
+
+    // A simple polygon described by an ordered list of vertices.
+    class Polygon {
+    public:
+        Polygon();
+        Polygon(vector<Point> points);
+
+        // Append a vertex to the polygon.
+        void add(Point p);
+        // Number of vertices.
+        int size() const;
+
+        // True if p is inside the polygon.
+        bool contains(const Point& p) const;
+
+        /*
+         * Return values:
+         * -1 if no intersection
+         * 0 if maybe an intersection (using fudge)
+         * 1 if there is an intersection
+         */
+        int contains(const Point &p, double fudge) const;
+
+        /**
+         * Calculate the centroid, or center of mass of the polygon object.
+         */
+        // NOTE(review): non-const together with the _centroidCalculated /
+        // _boundsCalculated flags suggests lazy caching -- confirm in .cpp.
+        Point centroid();
+        // Axis-aligned bounding box of the polygon.
+        Box bounds();
+    private:
+        bool _centroidCalculated;
+        Point _centroid;
+        Box _bounds;
+        bool _boundsCalculated;
+        vector<Point> _points;
+    };
+} // namespace mongo