Diffstat (limited to 'src/mongo/db/geo')
-rw-r--r--  src/mongo/db/geo/big_polygon.cpp              298
-rw-r--r--  src/mongo/db/geo/big_polygon.h                102
-rw-r--r--  src/mongo/db/geo/big_polygon_test.cpp        1010
-rw-r--r--  src/mongo/db/geo/geoconstants.h                 6
-rw-r--r--  src/mongo/db/geo/geometry_container.cpp      1901
-rw-r--r--  src/mongo/db/geo/geometry_container.h         245
-rw-r--r--  src/mongo/db/geo/geoparser.cpp               1228
-rw-r--r--  src/mongo/db/geo/geoparser.h                  111
-rw-r--r--  src/mongo/db/geo/geoparser_test.cpp           690
-rw-r--r--  src/mongo/db/geo/hash.cpp                    1491
-rw-r--r--  src/mongo/db/geo/hash.h                       453
-rw-r--r--  src/mongo/db/geo/hash_test.cpp                766
-rw-r--r--  src/mongo/db/geo/haystack.cpp                 137
-rw-r--r--  src/mongo/db/geo/r2_region_coverer.cpp        446
-rw-r--r--  src/mongo/db/geo/r2_region_coverer.h          217
-rw-r--r--  src/mongo/db/geo/r2_region_coverer_test.cpp  1117
-rw-r--r--  src/mongo/db/geo/s2.h                           1
-rw-r--r--  src/mongo/db/geo/shapes.cpp                  1290
-rw-r--r--  src/mongo/db/geo/shapes.h                     583
19 files changed, 6102 insertions, 5990 deletions
diff --git a/src/mongo/db/geo/big_polygon.cpp b/src/mongo/db/geo/big_polygon.cpp
index fb496bfa96e..f50bdf1ae37 100644
--- a/src/mongo/db/geo/big_polygon.cpp
+++ b/src/mongo/db/geo/big_polygon.cpp
@@ -35,197 +35,191 @@
namespace mongo {
- using std::unique_ptr;
- using std::vector;
+using std::unique_ptr;
+using std::vector;
- BigSimplePolygon::BigSimplePolygon() {
- }
-
- // Caller should ensure loop is valid.
- BigSimplePolygon::BigSimplePolygon(S2Loop* loop) :
- _loop(loop), _isNormalized(loop->IsNormalized()) {
- }
+BigSimplePolygon::BigSimplePolygon() {}
- BigSimplePolygon::~BigSimplePolygon() {
- }
+// Caller should ensure loop is valid.
+BigSimplePolygon::BigSimplePolygon(S2Loop* loop)
+ : _loop(loop), _isNormalized(loop->IsNormalized()) {}
- void BigSimplePolygon::Init(S2Loop* loop) {
- _loop.reset(loop);
- _isNormalized = loop->IsNormalized();
- _borderLine.reset();
- _borderPoly.reset();
- }
+BigSimplePolygon::~BigSimplePolygon() {}
- double BigSimplePolygon::GetArea() const {
- return _loop->GetArea();
- }
+void BigSimplePolygon::Init(S2Loop* loop) {
+ _loop.reset(loop);
+ _isNormalized = loop->IsNormalized();
+ _borderLine.reset();
+ _borderPoly.reset();
+}
- bool BigSimplePolygon::Contains(const S2Polygon& polygon) const {
- const S2Polygon& polyBorder = GetPolygonBorder();
+double BigSimplePolygon::GetArea() const {
+ return _loop->GetArea();
+}
- if (_isNormalized) {
- // Polygon border is the same as the loop
- return polyBorder.Contains(&polygon);
- }
+bool BigSimplePolygon::Contains(const S2Polygon& polygon) const {
+ const S2Polygon& polyBorder = GetPolygonBorder();
- // Polygon border is the complement of the loop
- //
- // Return true iff big polygon's complement (polyBorder) doesn't intersect with polygon.
- // We don't guarantee whether the points on border are contained or not.
- return !polyBorder.Intersects(&polygon);
+ if (_isNormalized) {
+ // Polygon border is the same as the loop
+ return polyBorder.Contains(&polygon);
}
- bool BigSimplePolygon::Contains(const S2Polyline& line) const {
- //
- // A line is contained within a loop if the result of subtracting the loop from the line is
- // nothing.
- //
- // Also, a line is contained within a loop if the result of clipping the line to the
- // complement of the loop is nothing.
- //
- // If we can't subtract the loop itself using S2, we clip (intersect) to the inverse. Every
- // point in S2 is contained in exactly one of these loops.
- //
- // TODO: Polygon borders are actually kind of weird, and this is somewhat inconsistent with
- // Intersects(). A point might Intersect() a boundary exactly, but not be Contain()ed
- // within the Polygon. Think the right thing to do here is custom intersection functions.
- //
- const S2Polygon& polyBorder = GetPolygonBorder();
+ // Polygon border is the complement of the loop
+ //
+ // Return true iff big polygon's complement (polyBorder) doesn't intersect with polygon.
+ // We don't guarantee whether the points on border are contained or not.
+ return !polyBorder.Intersects(&polygon);
+}
- OwnedPointerVector<S2Polyline> clippedOwned;
- vector<S2Polyline*>& clipped = clippedOwned.mutableVector();
-
- if (_isNormalized) {
- // Polygon border is the same as the loop
- polyBorder.SubtractFromPolyline(&line, &clipped);
- return clipped.size() == 0;
- }
- else {
- // Polygon border is the complement of the loop
- polyBorder.IntersectWithPolyline(&line, &clipped);
- return clipped.size() == 0;
- }
+bool BigSimplePolygon::Contains(const S2Polyline& line) const {
+ //
+ // A line is contained within a loop if the result of subtracting the loop from the line is
+ // nothing.
+ //
+ // Also, a line is contained within a loop if the result of clipping the line to the
+ // complement of the loop is nothing.
+ //
+ // If we can't subtract the loop itself using S2, we clip (intersect) to the inverse. Every
+ // point in S2 is contained in exactly one of these loops.
+ //
+ // TODO: Polygon borders are actually kind of weird, and this is somewhat inconsistent with
+ // Intersects(). A point might Intersect() a boundary exactly, but not be Contain()ed
+ // within the Polygon. Think the right thing to do here is custom intersection functions.
+ //
+ const S2Polygon& polyBorder = GetPolygonBorder();
+
+ OwnedPointerVector<S2Polyline> clippedOwned;
+ vector<S2Polyline*>& clipped = clippedOwned.mutableVector();
+
+ if (_isNormalized) {
+ // Polygon border is the same as the loop
+ polyBorder.SubtractFromPolyline(&line, &clipped);
+ return clipped.size() == 0;
+ } else {
+ // Polygon border is the complement of the loop
+ polyBorder.IntersectWithPolyline(&line, &clipped);
+ return clipped.size() == 0;
}
+}
- bool BigSimplePolygon::Contains(S2Point const& point) const {
- return _loop->Contains(point);
- }
+bool BigSimplePolygon::Contains(S2Point const& point) const {
+ return _loop->Contains(point);
+}
- bool BigSimplePolygon::Intersects(const S2Polygon& polygon) const {
- // If the loop area is at most 2*Pi, treat it as a simple Polygon.
- if (_isNormalized) {
- const S2Polygon& polyBorder = GetPolygonBorder();
- return polyBorder.Intersects(&polygon);
- }
-
- // The loop area is greater than 2*Pi, so it intersects a polygon (even with holes) if it
- // intersects any of the top-level polygon loops, since any valid polygon is less than
- // a hemisphere.
- //
- // Intersecting a polygon hole requires that the loop must have intersected the containing
- // loop - topology ftw.
- //
- // Another approach is to check polyBorder doesn't contain polygon, but the following
- // approach is cheaper.
-
- // Iterate over all the top-level polygon loops
- for (int i = 0; i < polygon.num_loops(); i = polygon.GetLastDescendant(i) + 1) {
- const S2Loop* polyLoop = polygon.loop(i);
- if (_loop->Intersects(polyLoop))
- return true;
- }
-
- return false;
+bool BigSimplePolygon::Intersects(const S2Polygon& polygon) const {
+ // If the loop area is at most 2*Pi, treat it as a simple Polygon.
+ if (_isNormalized) {
+ const S2Polygon& polyBorder = GetPolygonBorder();
+ return polyBorder.Intersects(&polygon);
}
- bool BigSimplePolygon::Intersects(const S2Polyline& line) const {
- //
- // A loop intersects a line if line intersects the loop border or, if it doesn't, either
- // line is contained in the loop, or line is disjoint with the loop. So checking any
- // vertex of the line is sufficient.
- //
- // TODO: Make a general Polygon/Line relation tester which uses S2 primitives
- //
- return GetLineBorder().Intersects(&line) || _loop->Contains(line.vertex(0));
- }
+ // The loop area is greater than 2*Pi, so it intersects a polygon (even with holes) if it
+ // intersects any of the top-level polygon loops, since any valid polygon is less than
+ // a hemisphere.
+ //
+ // Intersecting a polygon hole requires that the loop must have intersected the containing
+ // loop - topology ftw.
+ //
+ // Another approach is to check polyBorder doesn't contain polygon, but the following
+ // approach is cheaper.
- bool BigSimplePolygon::Intersects(S2Point const& point) const {
- return Contains(point);
+ // Iterate over all the top-level polygon loops
+ for (int i = 0; i < polygon.num_loops(); i = polygon.GetLastDescendant(i) + 1) {
+ const S2Loop* polyLoop = polygon.loop(i);
+ if (_loop->Intersects(polyLoop))
+ return true;
}
- void BigSimplePolygon::Invert() {
- _loop->Invert();
- _isNormalized = _loop->IsNormalized();
- }
+ return false;
+}
- const S2Polygon& BigSimplePolygon::GetPolygonBorder() const {
- if (_borderPoly)
- return *_borderPoly;
+bool BigSimplePolygon::Intersects(const S2Polyline& line) const {
+ //
+ // A loop intersects a line if line intersects the loop border or, if it doesn't, either
+ // line is contained in the loop, or line is disjoint with the loop. So checking any
+ // vertex of the line is sufficient.
+ //
+ // TODO: Make a general Polygon/Line relation tester which uses S2 primitives
+ //
+ return GetLineBorder().Intersects(&line) || _loop->Contains(line.vertex(0));
+}
- unique_ptr<S2Loop> cloned(_loop->Clone());
+bool BigSimplePolygon::Intersects(S2Point const& point) const {
+ return Contains(point);
+}
- // Any loop in polygon should be than a hemisphere (2*Pi).
- cloned->Normalize();
+void BigSimplePolygon::Invert() {
+ _loop->Invert();
+ _isNormalized = _loop->IsNormalized();
+}
- OwnedPointerVector<S2Loop> loops;
- loops.mutableVector().push_back(cloned.release());
- _borderPoly.reset(new S2Polygon(&loops.mutableVector()));
+const S2Polygon& BigSimplePolygon::GetPolygonBorder() const {
+ if (_borderPoly)
return *_borderPoly;
- }
- const S2Polyline& BigSimplePolygon::GetLineBorder() const {
- if (_borderLine)
- return *_borderLine;
+ unique_ptr<S2Loop> cloned(_loop->Clone());
- vector<S2Point> points;
- int numVertices = _loop->num_vertices();
- for (int i = 0; i <= numVertices; ++i) {
- // vertex() maps "numVertices" to 0 internally, so we don't have to deal with
- // the index out of range.
- points.push_back(_loop->vertex(i));
- }
+    // Any loop in a polygon should be smaller than a hemisphere (2*Pi).
+ cloned->Normalize();
- _borderLine.reset(new S2Polyline(points));
+ OwnedPointerVector<S2Loop> loops;
+ loops.mutableVector().push_back(cloned.release());
+ _borderPoly.reset(new S2Polygon(&loops.mutableVector()));
+ return *_borderPoly;
+}
+const S2Polyline& BigSimplePolygon::GetLineBorder() const {
+ if (_borderLine)
return *_borderLine;
- }
- BigSimplePolygon* BigSimplePolygon::Clone() const {
- return new BigSimplePolygon(_loop->Clone());
+ vector<S2Point> points;
+ int numVertices = _loop->num_vertices();
+ for (int i = 0; i <= numVertices; ++i) {
+ // vertex() maps "numVertices" to 0 internally, so we don't have to deal with
+ // the index out of range.
+ points.push_back(_loop->vertex(i));
}
- S2Cap BigSimplePolygon::GetCapBound() const {
- return _loop->GetCapBound();
- }
+ _borderLine.reset(new S2Polyline(points));
- S2LatLngRect BigSimplePolygon::GetRectBound() const {
- return _loop->GetRectBound();
- }
+ return *_borderLine;
+}
- bool BigSimplePolygon::Contains(const S2Cell& cell) const {
- return _loop->Contains(cell);
- }
+BigSimplePolygon* BigSimplePolygon::Clone() const {
+ return new BigSimplePolygon(_loop->Clone());
+}
- bool BigSimplePolygon::MayIntersect(const S2Cell& cell) const {
- return _loop->MayIntersect(cell);
- }
+S2Cap BigSimplePolygon::GetCapBound() const {
+ return _loop->GetCapBound();
+}
- bool BigSimplePolygon::VirtualContainsPoint(const S2Point& p) const {
- return _loop->VirtualContainsPoint(p);
- }
+S2LatLngRect BigSimplePolygon::GetRectBound() const {
+ return _loop->GetRectBound();
+}
- void BigSimplePolygon::Encode(Encoder* const encoder) const {
- invariant(false);
- }
+bool BigSimplePolygon::Contains(const S2Cell& cell) const {
+ return _loop->Contains(cell);
+}
- bool BigSimplePolygon::Decode(Decoder* const decoder) {
- invariant(false);
- }
+bool BigSimplePolygon::MayIntersect(const S2Cell& cell) const {
+ return _loop->MayIntersect(cell);
+}
- bool BigSimplePolygon::DecodeWithinScope(Decoder* const decoder) {
- invariant(false);
- }
+bool BigSimplePolygon::VirtualContainsPoint(const S2Point& p) const {
+ return _loop->VirtualContainsPoint(p);
+}
+
+void BigSimplePolygon::Encode(Encoder* const encoder) const {
+ invariant(false);
+}
+bool BigSimplePolygon::Decode(Decoder* const decoder) {
+ invariant(false);
}
+bool BigSimplePolygon::DecodeWithinScope(Decoder* const decoder) {
+ invariant(false);
+}
+}
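
As an aside (not part of the commit, only a sketch): the reformatted code above answers Contains()/Intersects() for loops larger than a hemisphere by working with the normalized complement. A minimal usage example, assuming only the interfaces visible in this diff (BigSimplePolygon, S2Loop, S2LatLng) plus a hypothetical P() helper:

    #include <cassert>
    #include <vector>
    #include "mongo/db/geo/big_polygon.h"

    // Hypothetical helper: degrees -> S2Point (mirrors LatLng()/ToPoint() in the tests below).
    static S2Point P(double lat, double lng) {
        return S2LatLng::FromDegrees(lat, lng).ToPoint();
    }

    void bigPolygonSketch() {
        // CCW 20x20 square centered at [0,0]; BigSimplePolygon takes ownership of the loop.
        std::vector<S2Point> corners{P(10.0, 10.0), P(10.0, -10.0), P(-10.0, -10.0), P(-10.0, 10.0)};
        mongo::BigSimplePolygon big(new S2Loop(corners));
        assert(big.Contains(P(0.0, 0.0)));  // the center is inside the square

        // Invert() flips the region to everything *outside* the square (area > 2*Pi), so the
        // border polygon used internally becomes the complement of the loop.
        big.Invert();
        assert(!big.Contains(P(0.0, 0.0)));
    }
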
diff --git a/src/mongo/db/geo/big_polygon.h b/src/mongo/db/geo/big_polygon.h
index c5a913ac05b..9551ecb4b8f 100644
--- a/src/mongo/db/geo/big_polygon.h
+++ b/src/mongo/db/geo/big_polygon.h
@@ -40,82 +40,78 @@
namespace mongo {
- // Simple GeoJSON polygon with a custom CRS identifier as having a strict winding order.
- // The winding order will determine unambiguously the inside/outside of the polygon even
- // if larger than one hemisphere.
- //
- // BigSimplePolygon uses S2Loop internally, which follows a left-foot rule (inside to the
- // left when walking the edge of the polygon, counter-clockwise)
- class BigSimplePolygon : public S2Region {
- public:
-
- BigSimplePolygon();
-
- BigSimplePolygon(S2Loop* loop);
-
- virtual ~BigSimplePolygon();
-
- void Init(S2Loop* loop);
+// Simple GeoJSON polygon with a custom CRS identifier that mandates a strict winding order.
+// The winding order unambiguously determines the inside/outside of the polygon, even when it
+// is larger than one hemisphere.
+//
+// BigSimplePolygon uses S2Loop internally, which follows a left-foot rule (the inside is to
+// the left when walking the edge of the polygon counter-clockwise).
+class BigSimplePolygon : public S2Region {
+public:
+ BigSimplePolygon();
- double GetArea() const;
+ BigSimplePolygon(S2Loop* loop);
- bool Contains(const S2Polygon& polygon) const;
+ virtual ~BigSimplePolygon();
- bool Contains(const S2Polyline& line) const;
+ void Init(S2Loop* loop);
- // Needs to be this way for S2 compatibility
- bool Contains(S2Point const& point) const;
+ double GetArea() const;
- bool Intersects(const S2Polygon& polygon) const;
+ bool Contains(const S2Polygon& polygon) const;
- bool Intersects(const S2Polyline& line) const;
+ bool Contains(const S2Polyline& line) const;
- bool Intersects(S2Point const& point) const;
+ // Needs to be this way for S2 compatibility
+ bool Contains(S2Point const& point) const;
- // Only used in tests
- void Invert();
+ bool Intersects(const S2Polygon& polygon) const;
- const S2Polygon& GetPolygonBorder() const;
+ bool Intersects(const S2Polyline& line) const;
- const S2Polyline& GetLineBorder() const;
+ bool Intersects(S2Point const& point) const;
- //
- // S2Region interface
- //
+ // Only used in tests
+ void Invert();
- BigSimplePolygon* Clone() const;
+ const S2Polygon& GetPolygonBorder() const;
- S2Cap GetCapBound() const;
+ const S2Polyline& GetLineBorder() const;
- S2LatLngRect GetRectBound() const;
+ //
+ // S2Region interface
+ //
- bool Contains(S2Cell const& cell) const;
+ BigSimplePolygon* Clone() const;
- bool MayIntersect(S2Cell const& cell) const;
+ S2Cap GetCapBound() const;
- bool VirtualContainsPoint(S2Point const& p) const;
+ S2LatLngRect GetRectBound() const;
- void Encode(Encoder* const encoder) const;
+ bool Contains(S2Cell const& cell) const;
- bool Decode(Decoder* const decoder);
+ bool MayIntersect(S2Cell const& cell) const;
- bool DecodeWithinScope(Decoder* const decoder);
+ bool VirtualContainsPoint(S2Point const& p) const;
- private:
+ void Encode(Encoder* const encoder) const;
- std::unique_ptr<S2Loop> _loop;
+ bool Decode(Decoder* const decoder);
- // Cache whether the loop area is at most 2*Pi (the area of hemisphere).
- //
- // S2 guarantees that any loop in a valid (normalized) polygon, no matter a hole
- // or a shell, has to be less than 2*Pi. So if the loop is normalized, it's the same
- // with the border polygon, otherwise, the border polygon is its complement.
- bool _isNormalized;
+ bool DecodeWithinScope(Decoder* const decoder);
- // Cached to do Intersects() and Contains() with S2Polylines.
- mutable std::unique_ptr<S2Polyline> _borderLine;
- mutable std::unique_ptr<S2Polygon> _borderPoly;
- };
+private:
+ std::unique_ptr<S2Loop> _loop;
+    // Cache whether the loop area is at most 2*Pi (the area of a hemisphere).
+    //
+    // S2 guarantees that the area of any loop in a valid (normalized) polygon, whether a hole
+    // or a shell, is less than 2*Pi. So if the loop is normalized, it is the same as the border
+    // polygon; otherwise, the border polygon is its complement.
+ bool _isNormalized;
+
+ // Cached to do Intersects() and Contains() with S2Polylines.
+ mutable std::unique_ptr<S2Polyline> _borderLine;
+ mutable std::unique_ptr<S2Polygon> _borderPoly;
+};
}
-
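
Another aside on the _isNormalized member cached above (again not from the commit, and using only S2 calls that appear elsewhere in this diff): a clockwise-wound loop encloses more than a hemisphere, so it is not normalized, and GetPolygonBorder() must build the border S2Polygon from a normalized clone and then negate the query result.

    #include <cassert>
    #include <memory>
    #include <vector>
    #include "mongo/db/geo/big_polygon.h"

    // Reuses the hypothetical P(lat, lng) helper from the previous sketch.
    void normalizationSketch() {
        // The same square wound clockwise: the loop now encloses everything outside it (> 2*Pi).
        std::vector<S2Point> cw{P(10.0, 10.0), P(-10.0, 10.0), P(-10.0, -10.0), P(10.0, -10.0)};
        std::unique_ptr<S2Loop> loop(new S2Loop(cw));
        assert(!loop->IsNormalized());

        // This mirrors what GetPolygonBorder() does for such a loop: clone, normalize, and keep
        // the complement; Contains(polygon) is then answered as !complement.Intersects(polygon).
        std::unique_ptr<S2Loop> border(loop->Clone());
        border->Normalize();
        assert(border->IsNormalized());
    }
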
diff --git a/src/mongo/db/geo/big_polygon_test.cpp b/src/mongo/db/geo/big_polygon_test.cpp
index c0c01abdba7..3ac82b03768 100644
--- a/src/mongo/db/geo/big_polygon_test.cpp
+++ b/src/mongo/db/geo/big_polygon_test.cpp
@@ -34,562 +34,534 @@
namespace {
- using namespace mongo;
- using std::unique_ptr;
- using std::string;
- using std::vector;
-
- // Helper to build a vector of S2Point
- struct PointBuilder {
-
- vector<S2Point> points;
-
- PointBuilder& operator<<(const S2LatLng& LatLng) {
- points.push_back(LatLng.ToPoint());
- return *this;
- }
- };
-
- vector<S2Point> pointVec(const PointBuilder& builder) {
- vector<S2Point> points(builder.points.begin(), builder.points.end());
- return points;
- }
-
- S2Loop* loop(const PointBuilder& builder) {
- return new S2Loop(builder.points);
- }
-
- vector<S2Loop*>* loopVec(const PointBuilder& builder) {
- static vector<S2Loop*> loops;
- loops.clear();
- loops.push_back(loop(builder));
- return &loops;
- }
-
- S2LatLng LatLng(double lat, double lng) {
- return S2LatLng::FromDegrees(lat, lng);
- }
-
- // Syntax sugar for PointBuilder, which can be used to construct
- // - vector<S2Point> pointVec()
- // - S2Loop* loop()
- // - vector<S2Loop*>* loopVec()
- //
- // e.g. points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0) << LatLng(0.0, 0.0))
- typedef PointBuilder points;
-
- TEST(BigSimplePolygon, Basic) {
-
- // A 20x20 square centered at [0,0]
- BigSimplePolygon bigPoly20(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
- << LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
-
- // A 10x10 square centered at [0,0]
- S2Polygon poly10(loopVec(points() << LatLng(5.0, 5.0) << LatLng(5.0, -5.0)
- << LatLng(-5.0, -5.0) << LatLng(-5.0, 5.0)));
-
- ASSERT_LESS_THAN(bigPoly20.GetArea(), 2 * M_PI);
- ASSERT_LESS_THAN(poly10.GetArea(), bigPoly20.GetArea());
- ASSERT(bigPoly20.Contains(poly10));
- ASSERT(bigPoly20.Intersects(poly10));
-
- // A 20x20 square centered at [0,20]
- BigSimplePolygon bigPoly20Offset(loop(points() << LatLng(10.0, 30.0) << LatLng(10.0, 10.0)
- << LatLng(-10.0, 10.0) << LatLng(-10.0, 30.0)));
-
- ASSERT_LESS_THAN(bigPoly20Offset.GetArea(), 2 * M_PI);
- ASSERT_LESS_THAN(poly10.GetArea(), bigPoly20Offset.GetArea());
- ASSERT_FALSE(bigPoly20Offset.Contains(poly10));
- ASSERT_FALSE(bigPoly20Offset.Intersects(poly10));
- }
-
- TEST(BigSimplePolygon, BasicWithHole) {
- // A 30x30 square centered at [0,0] with a 20X20 hole
- vector<S2Loop*> loops;
- loops.push_back(loop(points() << LatLng(15.0, 15.0) << LatLng(15.0, -15.0)
- << LatLng(-15.0, -15.0) << LatLng(-15.0, 15.0)));
- loops.push_back(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
- << LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
-
- S2Polygon holePoly(&loops);
-
- // A 16X16 square centered at [0,0]
- BigSimplePolygon bigPoly16(loop(points() << LatLng(8.0, 8.0) << LatLng(8.0, -8.0)
- << LatLng(-8.0, -8.0) << LatLng(-8.0, 8.0)));
-
- ASSERT_LESS_THAN(bigPoly16.GetArea(), 2 * M_PI);
- ASSERT_FALSE(bigPoly16.Contains(holePoly));
- ASSERT_FALSE(bigPoly16.Intersects(holePoly));
-
- // A big polygon bigger than the hole.
- BigSimplePolygon bigPoly24(loop(points() << LatLng(12.0, 12.0) << LatLng(12.0, -12.0)
- << LatLng(-12.0, -12.0) << LatLng(-12.0, 12.0)));
- ASSERT_LESS_THAN(bigPoly24.GetArea(), 2 * M_PI);
- ASSERT_FALSE(bigPoly24.Contains(holePoly));
- ASSERT_TRUE(bigPoly24.Intersects(holePoly));
- }
-
- TEST(BigSimplePolygon, BasicWithHoleAndShell) {
- // A 30x30 square centered at [0,0] with a 20X20 hole and 10X10 shell
- vector<S2Loop*> loops;
- // Border
- loops.push_back(loop(points() << LatLng(15.0, 15.0) << LatLng(15.0, -15.0)
- << LatLng(-15.0, -15.0) << LatLng(-15.0, 15.0)));
- // Hole
- loops.push_back(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
- << LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
- // Shell
- loops.push_back(loop(points() << LatLng(5.0, 5.0) << LatLng(5.0, -5.0)
- << LatLng(-5.0, -5.0) << LatLng(-5.0, 5.0)));
- S2Polygon shellPoly(&loops);
-
- // A 16X16 square centered at [0,0] containing the shell
- BigSimplePolygon bigPoly16(loop(points() << LatLng(8.0, 8.0) << LatLng(8.0, -8.0)
- << LatLng(-8.0, -8.0) << LatLng(-8.0, 8.0)));
- ASSERT_LESS_THAN(bigPoly16.GetArea(), 2 * M_PI);
- ASSERT_FALSE(bigPoly16.Contains(shellPoly));
- ASSERT_TRUE(bigPoly16.Intersects(shellPoly));
-
- // Try a big polygon bigger than the hole.
- BigSimplePolygon bigPoly24(loop(points() << LatLng(12.0, 12.0) << LatLng(12.0, -12.0)
- << LatLng(-12.0, -12.0) << LatLng(-12.0, 12.0)));
- ASSERT_LESS_THAN(bigPoly24.GetArea(), 2 * M_PI);
- ASSERT_FALSE(bigPoly24.Contains(shellPoly));
- ASSERT_TRUE(bigPoly24.Intersects(shellPoly));
-
- // Try a big polygon smaller than the shell.
- BigSimplePolygon bigPoly8(loop(points() << LatLng(4.0, 4.0) << LatLng(4.0, -4.0)
- << LatLng(-4.0, -4.0) << LatLng(-4.0, 4.0)));
- ASSERT_LESS_THAN(bigPoly8.GetArea(), 2 * M_PI);
- ASSERT_FALSE(bigPoly8.Contains(shellPoly));
- ASSERT_TRUE(bigPoly8.Intersects(shellPoly));
- }
-
- TEST(BigSimplePolygon, BasicComplement) {
-
- // Everything *not* in a 20x20 square centered at [0,0]
- BigSimplePolygon bigPoly20Comp(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
- << LatLng(-10.0, -10.0)
- << LatLng(-10.0, 10.0)));
- bigPoly20Comp.Invert();
-
- // A 10x10 square centered at [0,0]
- S2Polygon poly10(loopVec(points() << LatLng(5.0, 5.0) << LatLng(5.0, -5.0)
- << LatLng(-5.0, -5.0) << LatLng(-5.0, 5.0)));
-
- ASSERT_GREATER_THAN(bigPoly20Comp.GetArea(), 2 * M_PI);
- ASSERT_FALSE(bigPoly20Comp.Contains(poly10));
- ASSERT_FALSE(bigPoly20Comp.Intersects(poly10));
-
- // A 10x10 square centered at [0,20], contained by bigPoly20Comp
- S2Polygon poly10Contained(loopVec(points() << LatLng(25.0, 25.0) << LatLng(25.0, 15.0)
- << LatLng(15.0, 15.0) << LatLng(15.0, 25.0)));
-
- ASSERT_LESS_THAN(poly10Contained.GetArea(), bigPoly20Comp.GetArea());
- ASSERT(bigPoly20Comp.Contains(poly10Contained));
- ASSERT(bigPoly20Comp.Intersects(poly10Contained));
-
- // A 30x30 square centered at [0,0], so that bigPoly20Comp contains its complement entirely,
- // which is not allowed by S2.
- S2Polygon poly30(loopVec(points() << LatLng(15.0, 15.0) << LatLng(15.0, -15.0)
- << LatLng(-15.0, -15.0) << LatLng(-15.0, 15.0)));
- ASSERT_LESS_THAN(poly30.GetArea(), bigPoly20Comp.GetArea());
- ASSERT_FALSE(bigPoly20Comp.Contains(poly30));
- ASSERT_TRUE(bigPoly20Comp.Intersects(poly30));
- }
-
- TEST(BigSimplePolygon, BasicIntersects) {
-
- // Everything *not* in a 20x20 square centered at [0,0]
- BigSimplePolygon bigPoly20(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
- << LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
- bigPoly20.Invert();
-
- // A 10x10 square centered at [10,10] (partial overlap)
- S2Polygon poly10(loopVec(points() << LatLng(15.0, 15.0) << LatLng(15.0, 5.0)
- << LatLng(5.0, 5.0) << LatLng(5.0, 15.0)));
-
- ASSERT_FALSE(bigPoly20.Contains(poly10));
- ASSERT(bigPoly20.Intersects(poly10));
+using namespace mongo;
+using std::unique_ptr;
+using std::string;
+using std::vector;
+
+// Helper to build a vector of S2Point
+struct PointBuilder {
+ vector<S2Point> points;
+
+ PointBuilder& operator<<(const S2LatLng& LatLng) {
+ points.push_back(LatLng.ToPoint());
+ return *this;
}
+};
- TEST(BigSimplePolygon, BasicComplementWithHole) {
- // A 30x30 square centered at [0,0] with a 20X20 hole
- vector<S2Loop*> loops;
- loops.push_back(loop(points() << LatLng(15.0, 15.0) << LatLng(15.0, -15.0)
- << LatLng(-15.0, -15.0) << LatLng(-15.0, 15.0)));
- loops.push_back(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
- << LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
-
- S2Polygon holePoly(&loops);
-
- // 1. BigPolygon doesn't touch holePoly
- // Everything *not* in a 40x40 square centered at [0,0]
- BigSimplePolygon bigPoly40Comp(loop(points() << LatLng(20.0, 20.0) << LatLng(20.0, -20.0)
- << LatLng(-20.0, -20.0)
- << LatLng(-20.0, 20.0)));
- bigPoly40Comp.Invert();
- ASSERT_GREATER_THAN(bigPoly40Comp.GetArea(), 2 * M_PI);
- ASSERT_FALSE(bigPoly40Comp.Contains(holePoly));
- ASSERT_FALSE(bigPoly40Comp.Intersects(holePoly));
-
- // 2. BigPolygon intersects holePoly
- // Everything *not* in a 24X24 square centered at [0,0]
- BigSimplePolygon bigPoly24Comp(loop(points() << LatLng(12.0, 12.0) << LatLng(12.0, -12.0)
- << LatLng(-12.0, -12.0)
- << LatLng(-12.0, 12.0)));
- bigPoly24Comp.Invert();
- ASSERT_GREATER_THAN(bigPoly24Comp.GetArea(), 2 * M_PI);
- ASSERT_FALSE(bigPoly24Comp.Contains(holePoly));
- ASSERT_TRUE(bigPoly24Comp.Intersects(holePoly));
-
- // 3. BigPolygon contains holePoly
- // Everything *not* in a 16X16 square centered at [0,0]
- BigSimplePolygon bigPoly16Comp(loop(points() << LatLng(8.0, 8.0) << LatLng(8.0, -8.0)
- << LatLng(-8.0, -8.0)
- << LatLng(-8.0, 8.0)));
- bigPoly16Comp.Invert();
- ASSERT_GREATER_THAN(bigPoly16Comp.GetArea(), 2 * M_PI);
- ASSERT_TRUE(bigPoly16Comp.Contains(holePoly));
- ASSERT_TRUE(bigPoly16Comp.Intersects(holePoly));
-
- // 4. BigPolygon contains the right half of holePoly
- // Everything *not* in a 40x40 square centered at [0,20]
- BigSimplePolygon bigPoly40CompOffset(loop(points() << LatLng(20.0, 40.0)
- << LatLng(20.0, 0.0)
- << LatLng(-20.0, 0.0)
- << LatLng(-20.0, 40.0)));
- bigPoly40CompOffset.Invert();
- ASSERT_GREATER_THAN(bigPoly40CompOffset.GetArea(), 2 * M_PI);
- ASSERT_FALSE(bigPoly40CompOffset.Contains(holePoly));
- ASSERT_TRUE(bigPoly40CompOffset.Intersects(holePoly));
- }
+vector<S2Point> pointVec(const PointBuilder& builder) {
+ vector<S2Point> points(builder.points.begin(), builder.points.end());
+ return points;
+}
- TEST(BigSimplePolygon, BasicComplementWithHoleAndShell) {
- // A 30x30 square centered at [0,0] with a 20X20 hole and 10X10 shell
- vector<S2Loop*> loops;
- // Border
- loops.push_back(loop(points() << LatLng(15.0, 15.0) << LatLng(15.0, -15.0)
- << LatLng(-15.0, -15.0) << LatLng(-15.0, 15.0)));
- // Hole
- loops.push_back(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
- << LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
- // Shell
- loops.push_back(loop(points() << LatLng(5.0, 5.0) << LatLng(5.0, -5.0)
- << LatLng(-5.0, -5.0) << LatLng(-5.0, 5.0)));
- S2Polygon shellPoly(&loops);
-
- // 1. BigPolygon doesn't touch shellPoly
- // Everything *not* in a 40x40 square centered at [0,0]
- BigSimplePolygon bigPoly40Comp(loop(points() << LatLng(20.0, 20.0) << LatLng(20.0, -20.0)
- << LatLng(-20.0, -20.0)
- << LatLng(-20.0, 20.0)));
- bigPoly40Comp.Invert();
- ASSERT_GREATER_THAN(bigPoly40Comp.GetArea(), 2 * M_PI);
- ASSERT_FALSE(bigPoly40Comp.Contains(shellPoly));
- ASSERT_FALSE(bigPoly40Comp.Intersects(shellPoly));
-
- // 2. BigPolygon intersects shellPoly
- // Everything *not* in a 24X24 square centered at [0,0]
- BigSimplePolygon bigPoly24Comp(loop(points() << LatLng(12.0, 12.0) << LatLng(12.0, -12.0)
- << LatLng(-12.0, -12.0)
- << LatLng(-12.0, 12.0)));
- bigPoly24Comp.Invert();
- ASSERT_GREATER_THAN(bigPoly24Comp.GetArea(), 2 * M_PI);
- ASSERT_FALSE(bigPoly24Comp.Contains(shellPoly));
- ASSERT_TRUE(bigPoly24Comp.Intersects(shellPoly));
-
- // 3. BigPolygon contains shellPoly's outer ring
- // Everything *not* in a 16X16 square centered at [0,0]
- BigSimplePolygon bigPoly16Comp(loop(points() << LatLng(8.0, 8.0) << LatLng(8.0, -8.0)
- << LatLng(-8.0, -8.0)
- << LatLng(-8.0, 8.0)));
- bigPoly16Comp.Invert();
- ASSERT_GREATER_THAN(bigPoly16Comp.GetArea(), 2 * M_PI);
- ASSERT_FALSE(bigPoly16Comp.Contains(shellPoly));
- ASSERT_TRUE(bigPoly16Comp.Intersects(shellPoly));
-
- // 4. BigPolygon contains the right half of shellPoly
- // Everything *not* in a 40x40 square centered at [0,20]
- BigSimplePolygon bigPoly40CompOffset(loop(points() << LatLng(20.0, 40.0)
- << LatLng(20.0, 0.0)
- << LatLng(-20.0, 0.0)
- << LatLng(-20.0, 40.0)));
- bigPoly40CompOffset.Invert();
- ASSERT_GREATER_THAN(bigPoly40CompOffset.GetArea(), 2 * M_PI);
- ASSERT_FALSE(bigPoly40CompOffset.Contains(shellPoly));
- ASSERT_TRUE(bigPoly40CompOffset.Intersects(shellPoly));
-
- // 5. BigPolygon contain shellPoly (CW)
- BigSimplePolygon bigPolyCompOffset(loop(points() << LatLng(6.0, 6.0)
- << LatLng(6.0, 8.0)
- << LatLng(-6.0, 8.0)
- << LatLng(-6.0, 6.0)));
- ASSERT_GREATER_THAN(bigPolyCompOffset.GetArea(), 2 * M_PI);
- ASSERT_TRUE(bigPolyCompOffset.Contains(shellPoly));
- ASSERT_TRUE(bigPolyCompOffset.Intersects(shellPoly));
- }
+S2Loop* loop(const PointBuilder& builder) {
+ return new S2Loop(builder.points);
+}
- TEST(BigSimplePolygon, BasicWinding) {
+vector<S2Loop*>* loopVec(const PointBuilder& builder) {
+ static vector<S2Loop*> loops;
+ loops.clear();
+ loops.push_back(loop(builder));
+ return &loops;
+}
- // A 20x20 square centered at [0,0] (CCW)
- BigSimplePolygon bigPoly20(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
- << LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
+S2LatLng LatLng(double lat, double lng) {
+ return S2LatLng::FromDegrees(lat, lng);
+}
- // Everything *not* in a 20x20 square centered at [0,0] (CW)
- BigSimplePolygon bigPoly20Comp(loop(points() << LatLng(10.0, 10.0) << LatLng(-10.0, 10.0)
- << LatLng(-10.0, -10.0)
- << LatLng(10.0, -10.0)));
+// Syntax sugar for PointBuilder, which can be used to construct
+// - vector<S2Point> pointVec()
+// - S2Loop* loop()
+// - vector<S2Loop*>* loopVec()
+//
+// e.g. points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0) << LatLng(0.0, 0.0)
+typedef PointBuilder points;
+
+TEST(BigSimplePolygon, Basic) {
+ // A 20x20 square centered at [0,0]
+ BigSimplePolygon bigPoly20(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
+ << LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
+
+ // A 10x10 square centered at [0,0]
+ S2Polygon poly10(loopVec(points() << LatLng(5.0, 5.0) << LatLng(5.0, -5.0) << LatLng(-5.0, -5.0)
+ << LatLng(-5.0, 5.0)));
+
+ ASSERT_LESS_THAN(bigPoly20.GetArea(), 2 * M_PI);
+ ASSERT_LESS_THAN(poly10.GetArea(), bigPoly20.GetArea());
+ ASSERT(bigPoly20.Contains(poly10));
+ ASSERT(bigPoly20.Intersects(poly10));
+
+ // A 20x20 square centered at [0,20]
+ BigSimplePolygon bigPoly20Offset(loop(points() << LatLng(10.0, 30.0) << LatLng(10.0, 10.0)
+ << LatLng(-10.0, 10.0) << LatLng(-10.0, 30.0)));
+
+ ASSERT_LESS_THAN(bigPoly20Offset.GetArea(), 2 * M_PI);
+ ASSERT_LESS_THAN(poly10.GetArea(), bigPoly20Offset.GetArea());
+ ASSERT_FALSE(bigPoly20Offset.Contains(poly10));
+ ASSERT_FALSE(bigPoly20Offset.Intersects(poly10));
+}
- ASSERT_LESS_THAN(bigPoly20.GetArea(), 2 * M_PI);
- ASSERT_GREATER_THAN(bigPoly20Comp.GetArea(), 2 * M_PI);
- }
+TEST(BigSimplePolygon, BasicWithHole) {
+ // A 30x30 square centered at [0,0] with a 20X20 hole
+ vector<S2Loop*> loops;
+ loops.push_back(loop(points() << LatLng(15.0, 15.0) << LatLng(15.0, -15.0)
+ << LatLng(-15.0, -15.0) << LatLng(-15.0, 15.0)));
+ loops.push_back(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
+ << LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
+
+ S2Polygon holePoly(&loops);
+
+ // A 16X16 square centered at [0,0]
+ BigSimplePolygon bigPoly16(loop(points() << LatLng(8.0, 8.0) << LatLng(8.0, -8.0)
+ << LatLng(-8.0, -8.0) << LatLng(-8.0, 8.0)));
+
+ ASSERT_LESS_THAN(bigPoly16.GetArea(), 2 * M_PI);
+ ASSERT_FALSE(bigPoly16.Contains(holePoly));
+ ASSERT_FALSE(bigPoly16.Intersects(holePoly));
+
+ // A big polygon bigger than the hole.
+ BigSimplePolygon bigPoly24(loop(points() << LatLng(12.0, 12.0) << LatLng(12.0, -12.0)
+ << LatLng(-12.0, -12.0) << LatLng(-12.0, 12.0)));
+ ASSERT_LESS_THAN(bigPoly24.GetArea(), 2 * M_PI);
+ ASSERT_FALSE(bigPoly24.Contains(holePoly));
+ ASSERT_TRUE(bigPoly24.Intersects(holePoly));
+}
- TEST(BigSimplePolygon, LineRelations) {
+TEST(BigSimplePolygon, BasicWithHoleAndShell) {
+ // A 30x30 square centered at [0,0] with a 20X20 hole and 10X10 shell
+ vector<S2Loop*> loops;
+ // Border
+ loops.push_back(loop(points() << LatLng(15.0, 15.0) << LatLng(15.0, -15.0)
+ << LatLng(-15.0, -15.0) << LatLng(-15.0, 15.0)));
+ // Hole
+ loops.push_back(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
+ << LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
+ // Shell
+ loops.push_back(loop(points() << LatLng(5.0, 5.0) << LatLng(5.0, -5.0) << LatLng(-5.0, -5.0)
+ << LatLng(-5.0, 5.0)));
+ S2Polygon shellPoly(&loops);
+
+ // A 16X16 square centered at [0,0] containing the shell
+ BigSimplePolygon bigPoly16(loop(points() << LatLng(8.0, 8.0) << LatLng(8.0, -8.0)
+ << LatLng(-8.0, -8.0) << LatLng(-8.0, 8.0)));
+ ASSERT_LESS_THAN(bigPoly16.GetArea(), 2 * M_PI);
+ ASSERT_FALSE(bigPoly16.Contains(shellPoly));
+ ASSERT_TRUE(bigPoly16.Intersects(shellPoly));
+
+ // Try a big polygon bigger than the hole.
+ BigSimplePolygon bigPoly24(loop(points() << LatLng(12.0, 12.0) << LatLng(12.0, -12.0)
+ << LatLng(-12.0, -12.0) << LatLng(-12.0, 12.0)));
+ ASSERT_LESS_THAN(bigPoly24.GetArea(), 2 * M_PI);
+ ASSERT_FALSE(bigPoly24.Contains(shellPoly));
+ ASSERT_TRUE(bigPoly24.Intersects(shellPoly));
+
+ // Try a big polygon smaller than the shell.
+ BigSimplePolygon bigPoly8(loop(points() << LatLng(4.0, 4.0) << LatLng(4.0, -4.0)
+ << LatLng(-4.0, -4.0) << LatLng(-4.0, 4.0)));
+ ASSERT_LESS_THAN(bigPoly8.GetArea(), 2 * M_PI);
+ ASSERT_FALSE(bigPoly8.Contains(shellPoly));
+ ASSERT_TRUE(bigPoly8.Intersects(shellPoly));
+}
- // A 20x20 square centered at [0,0]
- BigSimplePolygon bigPoly20(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
+TEST(BigSimplePolygon, BasicComplement) {
+ // Everything *not* in a 20x20 square centered at [0,0]
+ BigSimplePolygon bigPoly20Comp(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
<< LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
+ bigPoly20Comp.Invert();
+
+ // A 10x10 square centered at [0,0]
+ S2Polygon poly10(loopVec(points() << LatLng(5.0, 5.0) << LatLng(5.0, -5.0) << LatLng(-5.0, -5.0)
+ << LatLng(-5.0, 5.0)));
+
+ ASSERT_GREATER_THAN(bigPoly20Comp.GetArea(), 2 * M_PI);
+ ASSERT_FALSE(bigPoly20Comp.Contains(poly10));
+ ASSERT_FALSE(bigPoly20Comp.Intersects(poly10));
+
+ // A 10x10 square centered at [0,20], contained by bigPoly20Comp
+ S2Polygon poly10Contained(loopVec(points() << LatLng(25.0, 25.0) << LatLng(25.0, 15.0)
+ << LatLng(15.0, 15.0) << LatLng(15.0, 25.0)));
+
+ ASSERT_LESS_THAN(poly10Contained.GetArea(), bigPoly20Comp.GetArea());
+ ASSERT(bigPoly20Comp.Contains(poly10Contained));
+ ASSERT(bigPoly20Comp.Intersects(poly10Contained));
+
+ // A 30x30 square centered at [0,0], so that bigPoly20Comp contains its complement entirely,
+ // which is not allowed by S2.
+ S2Polygon poly30(loopVec(points() << LatLng(15.0, 15.0) << LatLng(15.0, -15.0)
+ << LatLng(-15.0, -15.0) << LatLng(-15.0, 15.0)));
+ ASSERT_LESS_THAN(poly30.GetArea(), bigPoly20Comp.GetArea());
+ ASSERT_FALSE(bigPoly20Comp.Contains(poly30));
+ ASSERT_TRUE(bigPoly20Comp.Intersects(poly30));
+}
- // A 10x10 line circling [0,0]
- S2Polyline line10(pointVec(points() << LatLng(5.0, 5.0) << LatLng(5.0, -5.0)
- << LatLng(-5.0, -5.0) << LatLng(-5.0, 5.0)));
-
- ASSERT_LESS_THAN(bigPoly20.GetArea(), 2 * M_PI);
- ASSERT(bigPoly20.Contains(line10));
- ASSERT(bigPoly20.Intersects(line10));
+TEST(BigSimplePolygon, BasicIntersects) {
+ // Everything *not* in a 20x20 square centered at [0,0]
+ BigSimplePolygon bigPoly20(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
+ << LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
+ bigPoly20.Invert();
- // Line segment disjoint from big polygon
- S2Polyline lineDisjoint(pointVec(points() << LatLng(15.0, 5.0) << LatLng(15.0, -5.0)));
- ASSERT_FALSE(bigPoly20.Contains(lineDisjoint));
- ASSERT_FALSE(bigPoly20.Intersects(lineDisjoint));
+ // A 10x10 square centered at [10,10] (partial overlap)
+ S2Polygon poly10(loopVec(points() << LatLng(15.0, 15.0) << LatLng(15.0, 5.0) << LatLng(5.0, 5.0)
+ << LatLng(5.0, 15.0)));
- // Line segment intersects big polygon
- S2Polyline lineIntersect(pointVec(points() << LatLng(0.0, 0.0) << LatLng(15.0, 0.0)));
- ASSERT_FALSE(bigPoly20.Contains(lineIntersect));
- ASSERT_TRUE(bigPoly20.Intersects(lineIntersect));
- }
+ ASSERT_FALSE(bigPoly20.Contains(poly10));
+ ASSERT(bigPoly20.Intersects(poly10));
+}
- TEST(BigSimplePolygon, LineRelationsComplement) {
+TEST(BigSimplePolygon, BasicComplementWithHole) {
+ // A 30x30 square centered at [0,0] with a 20X20 hole
+ vector<S2Loop*> loops;
+ loops.push_back(loop(points() << LatLng(15.0, 15.0) << LatLng(15.0, -15.0)
+ << LatLng(-15.0, -15.0) << LatLng(-15.0, 15.0)));
+ loops.push_back(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
+ << LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
+
+ S2Polygon holePoly(&loops);
+
+ // 1. BigPolygon doesn't touch holePoly
+ // Everything *not* in a 40x40 square centered at [0,0]
+ BigSimplePolygon bigPoly40Comp(loop(points() << LatLng(20.0, 20.0) << LatLng(20.0, -20.0)
+ << LatLng(-20.0, -20.0) << LatLng(-20.0, 20.0)));
+ bigPoly40Comp.Invert();
+ ASSERT_GREATER_THAN(bigPoly40Comp.GetArea(), 2 * M_PI);
+ ASSERT_FALSE(bigPoly40Comp.Contains(holePoly));
+ ASSERT_FALSE(bigPoly40Comp.Intersects(holePoly));
+
+ // 2. BigPolygon intersects holePoly
+ // Everything *not* in a 24X24 square centered at [0,0]
+ BigSimplePolygon bigPoly24Comp(loop(points() << LatLng(12.0, 12.0) << LatLng(12.0, -12.0)
+ << LatLng(-12.0, -12.0) << LatLng(-12.0, 12.0)));
+ bigPoly24Comp.Invert();
+ ASSERT_GREATER_THAN(bigPoly24Comp.GetArea(), 2 * M_PI);
+ ASSERT_FALSE(bigPoly24Comp.Contains(holePoly));
+ ASSERT_TRUE(bigPoly24Comp.Intersects(holePoly));
+
+ // 3. BigPolygon contains holePoly
+ // Everything *not* in a 16X16 square centered at [0,0]
+ BigSimplePolygon bigPoly16Comp(loop(points() << LatLng(8.0, 8.0) << LatLng(8.0, -8.0)
+ << LatLng(-8.0, -8.0) << LatLng(-8.0, 8.0)));
+ bigPoly16Comp.Invert();
+ ASSERT_GREATER_THAN(bigPoly16Comp.GetArea(), 2 * M_PI);
+ ASSERT_TRUE(bigPoly16Comp.Contains(holePoly));
+ ASSERT_TRUE(bigPoly16Comp.Intersects(holePoly));
+
+ // 4. BigPolygon contains the right half of holePoly
+ // Everything *not* in a 40x40 square centered at [0,20]
+ BigSimplePolygon bigPoly40CompOffset(loop(points() << LatLng(20.0, 40.0) << LatLng(20.0, 0.0)
+ << LatLng(-20.0, 0.0)
+ << LatLng(-20.0, 40.0)));
+ bigPoly40CompOffset.Invert();
+ ASSERT_GREATER_THAN(bigPoly40CompOffset.GetArea(), 2 * M_PI);
+ ASSERT_FALSE(bigPoly40CompOffset.Contains(holePoly));
+ ASSERT_TRUE(bigPoly40CompOffset.Intersects(holePoly));
+}
- // A 20x20 square centered at [0,0]
- BigSimplePolygon bigPoly20Comp(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
- << LatLng(-10.0, -10.0)
- << LatLng(-10.0, 10.0)));
- bigPoly20Comp.Invert();
+TEST(BigSimplePolygon, BasicComplementWithHoleAndShell) {
+ // A 30x30 square centered at [0,0] with a 20X20 hole and 10X10 shell
+ vector<S2Loop*> loops;
+ // Border
+ loops.push_back(loop(points() << LatLng(15.0, 15.0) << LatLng(15.0, -15.0)
+ << LatLng(-15.0, -15.0) << LatLng(-15.0, 15.0)));
+ // Hole
+ loops.push_back(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
+ << LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
+ // Shell
+ loops.push_back(loop(points() << LatLng(5.0, 5.0) << LatLng(5.0, -5.0) << LatLng(-5.0, -5.0)
+ << LatLng(-5.0, 5.0)));
+ S2Polygon shellPoly(&loops);
+
+ // 1. BigPolygon doesn't touch shellPoly
+ // Everything *not* in a 40x40 square centered at [0,0]
+ BigSimplePolygon bigPoly40Comp(loop(points() << LatLng(20.0, 20.0) << LatLng(20.0, -20.0)
+ << LatLng(-20.0, -20.0) << LatLng(-20.0, 20.0)));
+ bigPoly40Comp.Invert();
+ ASSERT_GREATER_THAN(bigPoly40Comp.GetArea(), 2 * M_PI);
+ ASSERT_FALSE(bigPoly40Comp.Contains(shellPoly));
+ ASSERT_FALSE(bigPoly40Comp.Intersects(shellPoly));
+
+ // 2. BigPolygon intersects shellPoly
+ // Everything *not* in a 24X24 square centered at [0,0]
+ BigSimplePolygon bigPoly24Comp(loop(points() << LatLng(12.0, 12.0) << LatLng(12.0, -12.0)
+ << LatLng(-12.0, -12.0) << LatLng(-12.0, 12.0)));
+ bigPoly24Comp.Invert();
+ ASSERT_GREATER_THAN(bigPoly24Comp.GetArea(), 2 * M_PI);
+ ASSERT_FALSE(bigPoly24Comp.Contains(shellPoly));
+ ASSERT_TRUE(bigPoly24Comp.Intersects(shellPoly));
+
+ // 3. BigPolygon contains shellPoly's outer ring
+ // Everything *not* in a 16X16 square centered at [0,0]
+ BigSimplePolygon bigPoly16Comp(loop(points() << LatLng(8.0, 8.0) << LatLng(8.0, -8.0)
+ << LatLng(-8.0, -8.0) << LatLng(-8.0, 8.0)));
+ bigPoly16Comp.Invert();
+ ASSERT_GREATER_THAN(bigPoly16Comp.GetArea(), 2 * M_PI);
+ ASSERT_FALSE(bigPoly16Comp.Contains(shellPoly));
+ ASSERT_TRUE(bigPoly16Comp.Intersects(shellPoly));
+
+ // 4. BigPolygon contains the right half of shellPoly
+ // Everything *not* in a 40x40 square centered at [0,20]
+ BigSimplePolygon bigPoly40CompOffset(loop(points() << LatLng(20.0, 40.0) << LatLng(20.0, 0.0)
+ << LatLng(-20.0, 0.0)
+ << LatLng(-20.0, 40.0)));
+ bigPoly40CompOffset.Invert();
+ ASSERT_GREATER_THAN(bigPoly40CompOffset.GetArea(), 2 * M_PI);
+ ASSERT_FALSE(bigPoly40CompOffset.Contains(shellPoly));
+ ASSERT_TRUE(bigPoly40CompOffset.Intersects(shellPoly));
+
+    // 5. BigPolygon contains shellPoly (CW)
+ BigSimplePolygon bigPolyCompOffset(loop(points() << LatLng(6.0, 6.0) << LatLng(6.0, 8.0)
+ << LatLng(-6.0, 8.0) << LatLng(-6.0, 6.0)));
+ ASSERT_GREATER_THAN(bigPolyCompOffset.GetArea(), 2 * M_PI);
+ ASSERT_TRUE(bigPolyCompOffset.Contains(shellPoly));
+ ASSERT_TRUE(bigPolyCompOffset.Intersects(shellPoly));
+}
- // A 10x10 line circling [0,0]
- S2Polyline line10(pointVec(points() << LatLng(5.0, 5.0) << LatLng(5.0, -5.0)
- << LatLng(-5.0, -5.0) << LatLng(-5.0, 5.0)));
+TEST(BigSimplePolygon, BasicWinding) {
+ // A 20x20 square centered at [0,0] (CCW)
+ BigSimplePolygon bigPoly20(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
+ << LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
- ASSERT_GREATER_THAN(bigPoly20Comp.GetArea(), 2 * M_PI);
- ASSERT_FALSE(bigPoly20Comp.Contains(line10));
- ASSERT_FALSE(bigPoly20Comp.Intersects(line10));
+ // Everything *not* in a 20x20 square centered at [0,0] (CW)
+ BigSimplePolygon bigPoly20Comp(loop(points() << LatLng(10.0, 10.0) << LatLng(-10.0, 10.0)
+ << LatLng(-10.0, -10.0) << LatLng(10.0, -10.0)));
- // Line segment (0, 0) -> (0, 15)
- S2Polyline lineIntersect(pointVec(points() << LatLng(0.0, 0.0) << LatLng(0.0, 15.0)));
- ASSERT_FALSE(bigPoly20Comp.Contains(lineIntersect));
- ASSERT_TRUE(bigPoly20Comp.Intersects(lineIntersect));
+ ASSERT_LESS_THAN(bigPoly20.GetArea(), 2 * M_PI);
+ ASSERT_GREATER_THAN(bigPoly20Comp.GetArea(), 2 * M_PI);
+}
- // A 10x10 line circling [0,0]
- S2Polyline line30(pointVec(points() << LatLng(15.0, 15.0) << LatLng(15.0, -15.0)
- << LatLng(-15.0, -15.0) << LatLng(-15.0, 15.0)));
- ASSERT_TRUE(bigPoly20Comp.Contains(line30));
- ASSERT_TRUE(bigPoly20Comp.Intersects(line30));
- }
+TEST(BigSimplePolygon, LineRelations) {
+ // A 20x20 square centered at [0,0]
+ BigSimplePolygon bigPoly20(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
+ << LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
- TEST(BigSimplePolygon, LineRelationsWinding) {
+ // A 10x10 line circling [0,0]
+ S2Polyline line10(pointVec(points() << LatLng(5.0, 5.0) << LatLng(5.0, -5.0)
+ << LatLng(-5.0, -5.0) << LatLng(-5.0, 5.0)));
- // Everything *not* in a 20x20 square centered at [0,0] (CW winding)
- BigSimplePolygon bigPoly20Comp(loop(points() << LatLng(10.0, 10.0) << LatLng(-10.0, 10.0)
- << LatLng(-10.0, -10.0)
- << LatLng(10.0, -10.0)));
+ ASSERT_LESS_THAN(bigPoly20.GetArea(), 2 * M_PI);
+ ASSERT(bigPoly20.Contains(line10));
+ ASSERT(bigPoly20.Intersects(line10));
- // A 10x10 line circling [0,0]
- S2Polyline line10(pointVec(points() << LatLng(5.0, 5.0) << LatLng(5.0, -5.0)
- << LatLng(-5.0, -5.0) << LatLng(-5.0, 5.0)));
+ // Line segment disjoint from big polygon
+ S2Polyline lineDisjoint(pointVec(points() << LatLng(15.0, 5.0) << LatLng(15.0, -5.0)));
+ ASSERT_FALSE(bigPoly20.Contains(lineDisjoint));
+ ASSERT_FALSE(bigPoly20.Intersects(lineDisjoint));
- ASSERT_GREATER_THAN(bigPoly20Comp.GetArea(), 2 * M_PI);
- ASSERT_FALSE(bigPoly20Comp.Contains(line10));
- ASSERT_FALSE(bigPoly20Comp.Intersects(line10));
- }
+ // Line segment intersects big polygon
+ S2Polyline lineIntersect(pointVec(points() << LatLng(0.0, 0.0) << LatLng(15.0, 0.0)));
+ ASSERT_FALSE(bigPoly20.Contains(lineIntersect));
+ ASSERT_TRUE(bigPoly20.Intersects(lineIntersect));
+}
- TEST(BigSimplePolygon, PolarContains) {
+TEST(BigSimplePolygon, LineRelationsComplement) {
+ // A 20x20 square centered at [0,0]
+ BigSimplePolygon bigPoly20Comp(loop(points() << LatLng(10.0, 10.0) << LatLng(10.0, -10.0)
+ << LatLng(-10.0, -10.0) << LatLng(-10.0, 10.0)));
+ bigPoly20Comp.Invert();
- // Square 10 degrees from the north pole [90,0]
- BigSimplePolygon bigNorthPoly(loop(points() << LatLng(80.0, 0.0) << LatLng(80.0, 90.0)
- << LatLng(80.0, 180.0) << LatLng(80.0, -90.0)));
+ // A 10x10 line circling [0,0]
+ S2Polyline line10(pointVec(points() << LatLng(5.0, 5.0) << LatLng(5.0, -5.0)
+ << LatLng(-5.0, -5.0) << LatLng(-5.0, 5.0)));
- // Square 5 degrees from the north pole [90, 0]
- S2Polygon northPoly(loopVec(points() << LatLng(85.0, 0.0) << LatLng(85.0, 90.0)
- << LatLng(85.0, 180.0) << LatLng(85.0, -90.0)));
+ ASSERT_GREATER_THAN(bigPoly20Comp.GetArea(), 2 * M_PI);
+ ASSERT_FALSE(bigPoly20Comp.Contains(line10));
+ ASSERT_FALSE(bigPoly20Comp.Intersects(line10));
- ASSERT_LESS_THAN(bigNorthPoly.GetArea(), 2 * M_PI);
- ASSERT_LESS_THAN(northPoly.GetArea(), bigNorthPoly.GetArea());
- ASSERT(bigNorthPoly.Contains(northPoly));
- ASSERT(bigNorthPoly.Intersects(northPoly));
- }
+ // Line segment (0, 0) -> (0, 15)
+ S2Polyline lineIntersect(pointVec(points() << LatLng(0.0, 0.0) << LatLng(0.0, 15.0)));
+ ASSERT_FALSE(bigPoly20Comp.Contains(lineIntersect));
+ ASSERT_TRUE(bigPoly20Comp.Intersects(lineIntersect));
- TEST(BigSimplePolygon, PolarContainsWithHoles) {
+ // A 10x10 line circling [0,0]
+ S2Polyline line30(pointVec(points() << LatLng(15.0, 15.0) << LatLng(15.0, -15.0)
+ << LatLng(-15.0, -15.0) << LatLng(-15.0, 15.0)));
+ ASSERT_TRUE(bigPoly20Comp.Contains(line30));
+ ASSERT_TRUE(bigPoly20Comp.Intersects(line30));
+}
- // Square 10 degrees from the north pole [90,0]
- BigSimplePolygon bigNorthPoly(loop(points() << LatLng(80.0, 0.0) << LatLng(80.0, 90.0)
- << LatLng(80.0, 180.0) << LatLng(80.0, -90.0)));
+TEST(BigSimplePolygon, LineRelationsWinding) {
+ // Everything *not* in a 20x20 square centered at [0,0] (CW winding)
+ BigSimplePolygon bigPoly20Comp(loop(points() << LatLng(10.0, 10.0) << LatLng(-10.0, 10.0)
+ << LatLng(-10.0, -10.0) << LatLng(10.0, -10.0)));
- // Square 5 degrees from the north pole [90, 0] with a concentric hole 1 degree from the
- // north pole
- vector<S2Loop*> loops;
- loops.push_back(loop(points() << LatLng(85.0, 0.0) << LatLng(85.0, 90.0)
- << LatLng(85.0, 180.0) << LatLng(85.0, -90.0)));
- loops.push_back(loop(points() << LatLng(89.0, 0.0) << LatLng(89.0, 90.0)
- << LatLng(89.0, 180.0) << LatLng(89.0, -90.0)));
- S2Polygon northPolyHole(&loops);
+ // A 10x10 line circling [0,0]
+ S2Polyline line10(pointVec(points() << LatLng(5.0, 5.0) << LatLng(5.0, -5.0)
+ << LatLng(-5.0, -5.0) << LatLng(-5.0, 5.0)));
- ASSERT_LESS_THAN(northPolyHole.GetArea(), bigNorthPoly.GetArea());
- ASSERT(bigNorthPoly.Contains(northPolyHole));
- ASSERT(bigNorthPoly.Intersects(northPolyHole));
- }
+ ASSERT_GREATER_THAN(bigPoly20Comp.GetArea(), 2 * M_PI);
+ ASSERT_FALSE(bigPoly20Comp.Contains(line10));
+ ASSERT_FALSE(bigPoly20Comp.Intersects(line10));
+}
- TEST(BigSimplePolygon, PolarIntersectsWithHoles) {
+TEST(BigSimplePolygon, PolarContains) {
+ // Square 10 degrees from the north pole [90,0]
+ BigSimplePolygon bigNorthPoly(loop(points() << LatLng(80.0, 0.0) << LatLng(80.0, 90.0)
+ << LatLng(80.0, 180.0) << LatLng(80.0, -90.0)));
- // Square 10 degrees from the north pole [90,0]
- BigSimplePolygon bigNorthPoly(loop(points() << LatLng(80.0, 0.0) << LatLng(80.0, 90.0)
- << LatLng(80.0, 180.0) << LatLng(80.0, -90.0)));
+ // Square 5 degrees from the north pole [90, 0]
+ S2Polygon northPoly(loopVec(points() << LatLng(85.0, 0.0) << LatLng(85.0, 90.0)
+ << LatLng(85.0, 180.0) << LatLng(85.0, -90.0)));
- // 5-degree square with 1-degree-wide concentric hole, centered on [80.0, 0.0]
- vector<S2Loop*> loops;
- loops.push_back(loop(points() << LatLng(85.0, 5.0) << LatLng(85.0, -5.0)
- << LatLng(75.0, -5.0) << LatLng(75.0, 5.0)));
- loops.push_back(loop(points() << LatLng(81.0, 1.0) << LatLng(81.0, -1.0)
- << LatLng(79.0, -1.0) << LatLng(79.0, 1.0)));
- S2Polygon northPolyHole(&loops);
+ ASSERT_LESS_THAN(bigNorthPoly.GetArea(), 2 * M_PI);
+ ASSERT_LESS_THAN(northPoly.GetArea(), bigNorthPoly.GetArea());
+ ASSERT(bigNorthPoly.Contains(northPoly));
+ ASSERT(bigNorthPoly.Intersects(northPoly));
+}
- ASSERT_LESS_THAN(northPolyHole.GetArea(), bigNorthPoly.GetArea());
- ASSERT_FALSE(bigNorthPoly.Contains(northPolyHole));
- ASSERT(bigNorthPoly.Intersects(northPolyHole));
- }
+TEST(BigSimplePolygon, PolarContainsWithHoles) {
+ // Square 10 degrees from the north pole [90,0]
+ BigSimplePolygon bigNorthPoly(loop(points() << LatLng(80.0, 0.0) << LatLng(80.0, 90.0)
+ << LatLng(80.0, 180.0) << LatLng(80.0, -90.0)));
+
+ // Square 5 degrees from the north pole [90, 0] with a concentric hole 1 degree from the
+ // north pole
+ vector<S2Loop*> loops;
+ loops.push_back(loop(points() << LatLng(85.0, 0.0) << LatLng(85.0, 90.0) << LatLng(85.0, 180.0)
+ << LatLng(85.0, -90.0)));
+ loops.push_back(loop(points() << LatLng(89.0, 0.0) << LatLng(89.0, 90.0) << LatLng(89.0, 180.0)
+ << LatLng(89.0, -90.0)));
+ S2Polygon northPolyHole(&loops);
+
+ ASSERT_LESS_THAN(northPolyHole.GetArea(), bigNorthPoly.GetArea());
+ ASSERT(bigNorthPoly.Contains(northPolyHole));
+ ASSERT(bigNorthPoly.Intersects(northPolyHole));
+}
- // Edge cases
- //
- // No promise in terms of points on border - they may be inside or outside the big polygon.
- // But we need to ensure the result is consistent:
- // 1. If a polygon/line is contained by a big polygon, they must intersect with each other.
- // 2. Relation doesn't change as long as the touch point doesn't change, no matter the big
- // polygon is larger or less then a hemisphere.
- // 3. Relations for big polygons less than a hemisphere are consistent with ordinary (simple)
- // polygon results.
-
- template <typename TShape>
- void checkConsistency(const BigSimplePolygon& bigPoly,
- const BigSimplePolygon& expandedBigPoly,
- const TShape& shape) {
- // Contain() => Intersects()
- if (bigPoly.Contains(shape)) ASSERT(bigPoly.Intersects(shape));
- if (expandedBigPoly.Contains(shape)) ASSERT(expandedBigPoly.Intersects(shape));
- // Relation doesn't change
- ASSERT_EQUALS(bigPoly.Contains(shape), expandedBigPoly.Contains(shape));
- ASSERT_EQUALS(bigPoly.Intersects(shape), expandedBigPoly.Intersects(shape));
- }
+TEST(BigSimplePolygon, PolarIntersectsWithHoles) {
+ // Square 10 degrees from the north pole [90,0]
+ BigSimplePolygon bigNorthPoly(loop(points() << LatLng(80.0, 0.0) << LatLng(80.0, 90.0)
+ << LatLng(80.0, 180.0) << LatLng(80.0, -90.0)));
+
+ // 5-degree square with 1-degree-wide concentric hole, centered on [80.0, 0.0]
+ vector<S2Loop*> loops;
+ loops.push_back(loop(points() << LatLng(85.0, 5.0) << LatLng(85.0, -5.0) << LatLng(75.0, -5.0)
+ << LatLng(75.0, 5.0)));
+ loops.push_back(loop(points() << LatLng(81.0, 1.0) << LatLng(81.0, -1.0) << LatLng(79.0, -1.0)
+ << LatLng(79.0, 1.0)));
+ S2Polygon northPolyHole(&loops);
+
+ ASSERT_LESS_THAN(northPolyHole.GetArea(), bigNorthPoly.GetArea());
+ ASSERT_FALSE(bigNorthPoly.Contains(northPolyHole));
+ ASSERT(bigNorthPoly.Intersects(northPolyHole));
+}
- // Polygon shares big polygon's edge (disjoint)
- TEST(BigSimplePolygon, ShareEdgeDisjoint) {
- // Big polygon smaller than a hemisphere.
- BigSimplePolygon bigPoly(loop(points() << LatLng(80.0, 0.0) << LatLng(-80.0, 0.0)
- << LatLng(-80.0, 90.0) << LatLng(80.0, 90.0)));
- ASSERT_LESS_THAN(bigPoly.GetArea(), 2 * M_PI);
-
- // Vertex point and collinear point
- S2Point point = LatLng(80.0, 0.0).ToPoint();
- S2Point collinearPoint = LatLng(0.0, 0.0).ToPoint();
-
- // Polygon shares one edge
- S2Polygon poly(loopVec(points() << LatLng(80.0, 0.0) << LatLng(-80.0, 0.0)
- << LatLng(-80.0, -10.0) << LatLng(80.0, -10.0)));
- // Polygon shares a segment of one edge
- S2Polygon collinearPoly(loopVec(points() << LatLng(50.0, 0.0) << LatLng(-50.0, 0.0)
- << LatLng(-50.0, -10.0) << LatLng(50.0, -10.0)));
-
- // Line
- S2Polyline line(pointVec(points() << LatLng(80.0, 0.0) << LatLng(-80.0, 0.0)
- << LatLng(-80.0, -10.0)));
- // Line share a segment of one edge
- S2Polyline collinearLine(pointVec(points() << LatLng(50.0, 0.0) << LatLng(-50.0, 0.0)
- << LatLng(-50.0, -10.0)));
-
- // Big polygon larger than a hemisphere.
- BigSimplePolygon expandedBigPoly(loop(points() << LatLng(80.0, 0.0) << LatLng(-80.0, 0.0)
- << LatLng(-80.0, 90.0)
- << LatLng(-80.0, 180.0)
- << LatLng(-80.0, -90.0)
- << LatLng(80.0, -90.0) << LatLng(80.0, 180.0)
- << LatLng(80.0, 90.0)));
- ASSERT_GREATER_THAN(expandedBigPoly.GetArea(), 2 * M_PI);
-
- checkConsistency(bigPoly, expandedBigPoly, point);
- checkConsistency(bigPoly, expandedBigPoly, collinearPoint);
- checkConsistency(bigPoly, expandedBigPoly, poly);
- checkConsistency(bigPoly, expandedBigPoly, collinearPoly);
- checkConsistency(bigPoly, expandedBigPoly, line);
- checkConsistency(bigPoly, expandedBigPoly, collinearLine);
-
- // Check the complement of big polygon
- bigPoly.Invert();
- ASSERT_GREATER_THAN(bigPoly.GetArea(), 2 * M_PI);
- expandedBigPoly.Invert();
- ASSERT_LESS_THAN(expandedBigPoly.GetArea(), 2 * M_PI);
-
- checkConsistency(bigPoly, expandedBigPoly, point);
- checkConsistency(bigPoly, expandedBigPoly, collinearPoint);
- checkConsistency(bigPoly, expandedBigPoly, poly);
- checkConsistency(bigPoly, expandedBigPoly, collinearPoly);
- checkConsistency(bigPoly, expandedBigPoly, line);
- checkConsistency(bigPoly, expandedBigPoly, collinearLine);
- }
+// Edge cases
+//
+// We make no promise about points on the border - they may be inside or outside the big polygon.
+// But we need to ensure the results are consistent:
+// 1. If a polygon/line is contained by a big polygon, they must intersect with each other.
+// 2. The relation doesn't change as long as the touch point doesn't change, whether the big
+//    polygon is larger or smaller than a hemisphere.
+// 3. Relations for big polygons less than a hemisphere are consistent with ordinary (simple)
+// polygon results.
+
+template <typename TShape>
+void checkConsistency(const BigSimplePolygon& bigPoly,
+ const BigSimplePolygon& expandedBigPoly,
+ const TShape& shape) {
+ // Contain() => Intersects()
+ if (bigPoly.Contains(shape))
+ ASSERT(bigPoly.Intersects(shape));
+ if (expandedBigPoly.Contains(shape))
+ ASSERT(expandedBigPoly.Intersects(shape));
+ // Relation doesn't change
+ ASSERT_EQUALS(bigPoly.Contains(shape), expandedBigPoly.Contains(shape));
+ ASSERT_EQUALS(bigPoly.Intersects(shape), expandedBigPoly.Intersects(shape));
+}
- // Polygon/line shares big polygon's edge (contained by big polygon)
- TEST(BigSimplePolygon, ShareEdgeContained) {
- // Big polygon smaller than a hemisphere.
- BigSimplePolygon bigPoly(loop(points() << LatLng(80.0, 0.0) << LatLng(-80.0, 0.0)
- << LatLng(-80.0, 90.0) << LatLng(80.0, 90.0)));
- ASSERT_LESS_THAN(bigPoly.GetArea(), 2 * M_PI);
-
- // Polygon
- S2Polygon poly(loopVec(points() << LatLng(80.0, 0.0) << LatLng(-80.0, 0.0)
- << LatLng(-80.0, 10.0) << LatLng(80.0, 10.0)));
- // Polygon shares a segment of one edge
- S2Polygon collinearPoly(loopVec(points() << LatLng(50.0, 0.0) << LatLng(-50.0, 0.0)
- << LatLng(-50.0, 10.0) << LatLng(50.0, 10.0)));
- // Line
- S2Polyline line(pointVec(points() << LatLng(80.0, 0.0) << LatLng(-80.0, 0.0)
- << LatLng(0.0, 10.0)));
- // Line shares a segment of one edge
- S2Polyline collinearLine(pointVec(points() << LatLng(50.0, 0.0) << LatLng(-50.0, 0.0)
- << LatLng(-50.0, 10.0)));
-
- // Big polygon larger than a hemisphere.
- BigSimplePolygon expandedBigPoly(loop(points() << LatLng(80.0, 0.0) << LatLng(-80.0, 0.0)
- << LatLng(-80.0, 90.0)
- << LatLng(-80.0, 180.0)
- << LatLng(-80.0, -90.0)
- << LatLng(80.0, -90.0) << LatLng(80.0, 180.0)
- << LatLng(80.0, 90.0)));
- ASSERT_GREATER_THAN(expandedBigPoly.GetArea(), 2 * M_PI);
-
- checkConsistency(bigPoly, expandedBigPoly, poly);
- checkConsistency(bigPoly, expandedBigPoly, collinearPoly);
- checkConsistency(bigPoly, expandedBigPoly, line);
- checkConsistency(bigPoly, expandedBigPoly, collinearLine);
-
- // Check the complement of big polygon
- bigPoly.Invert();
- ASSERT_GREATER_THAN(bigPoly.GetArea(), 2 * M_PI);
- expandedBigPoly.Invert();
- ASSERT_LESS_THAN(expandedBigPoly.GetArea(), 2 * M_PI);
-
- checkConsistency(bigPoly, expandedBigPoly, poly);
- checkConsistency(bigPoly, expandedBigPoly, collinearPoly);
- checkConsistency(bigPoly, expandedBigPoly, line);
- checkConsistency(bigPoly, expandedBigPoly, collinearLine);
- }
+// Polygon shares big polygon's edge (disjoint)
+TEST(BigSimplePolygon, ShareEdgeDisjoint) {
+ // Big polygon smaller than a hemisphere.
+ BigSimplePolygon bigPoly(loop(points() << LatLng(80.0, 0.0) << LatLng(-80.0, 0.0)
+ << LatLng(-80.0, 90.0) << LatLng(80.0, 90.0)));
+ ASSERT_LESS_THAN(bigPoly.GetArea(), 2 * M_PI);
+
+ // Vertex point and collinear point
+ S2Point point = LatLng(80.0, 0.0).ToPoint();
+ S2Point collinearPoint = LatLng(0.0, 0.0).ToPoint();
+
+ // Polygon shares one edge
+ S2Polygon poly(loopVec(points() << LatLng(80.0, 0.0) << LatLng(-80.0, 0.0)
+ << LatLng(-80.0, -10.0) << LatLng(80.0, -10.0)));
+ // Polygon shares a segment of one edge
+ S2Polygon collinearPoly(loopVec(points() << LatLng(50.0, 0.0) << LatLng(-50.0, 0.0)
+ << LatLng(-50.0, -10.0) << LatLng(50.0, -10.0)));
+
+ // Line
+ S2Polyline line(
+ pointVec(points() << LatLng(80.0, 0.0) << LatLng(-80.0, 0.0) << LatLng(-80.0, -10.0)));
+    // Line shares a segment of one edge
+ S2Polyline collinearLine(
+ pointVec(points() << LatLng(50.0, 0.0) << LatLng(-50.0, 0.0) << LatLng(-50.0, -10.0)));
+
+ // Big polygon larger than a hemisphere.
+ BigSimplePolygon expandedBigPoly(loop(points() << LatLng(80.0, 0.0) << LatLng(-80.0, 0.0)
+ << LatLng(-80.0, 90.0) << LatLng(-80.0, 180.0)
+ << LatLng(-80.0, -90.0) << LatLng(80.0, -90.0)
+ << LatLng(80.0, 180.0) << LatLng(80.0, 90.0)));
+ ASSERT_GREATER_THAN(expandedBigPoly.GetArea(), 2 * M_PI);
+
+ checkConsistency(bigPoly, expandedBigPoly, point);
+ checkConsistency(bigPoly, expandedBigPoly, collinearPoint);
+ checkConsistency(bigPoly, expandedBigPoly, poly);
+ checkConsistency(bigPoly, expandedBigPoly, collinearPoly);
+ checkConsistency(bigPoly, expandedBigPoly, line);
+ checkConsistency(bigPoly, expandedBigPoly, collinearLine);
+
+ // Check the complement of big polygon
+ bigPoly.Invert();
+ ASSERT_GREATER_THAN(bigPoly.GetArea(), 2 * M_PI);
+ expandedBigPoly.Invert();
+ ASSERT_LESS_THAN(expandedBigPoly.GetArea(), 2 * M_PI);
+
+ checkConsistency(bigPoly, expandedBigPoly, point);
+ checkConsistency(bigPoly, expandedBigPoly, collinearPoint);
+ checkConsistency(bigPoly, expandedBigPoly, poly);
+ checkConsistency(bigPoly, expandedBigPoly, collinearPoly);
+ checkConsistency(bigPoly, expandedBigPoly, line);
+ checkConsistency(bigPoly, expandedBigPoly, collinearLine);
+}
+// Polygon/line shares big polygon's edge (contained by big polygon)
+TEST(BigSimplePolygon, ShareEdgeContained) {
+ // Big polygon smaller than a hemisphere.
+ BigSimplePolygon bigPoly(loop(points() << LatLng(80.0, 0.0) << LatLng(-80.0, 0.0)
+ << LatLng(-80.0, 90.0) << LatLng(80.0, 90.0)));
+ ASSERT_LESS_THAN(bigPoly.GetArea(), 2 * M_PI);
+
+ // Polygon
+ S2Polygon poly(loopVec(points() << LatLng(80.0, 0.0) << LatLng(-80.0, 0.0)
+ << LatLng(-80.0, 10.0) << LatLng(80.0, 10.0)));
+ // Polygon shares a segment of one edge
+ S2Polygon collinearPoly(loopVec(points() << LatLng(50.0, 0.0) << LatLng(-50.0, 0.0)
+ << LatLng(-50.0, 10.0) << LatLng(50.0, 10.0)));
+ // Line
+ S2Polyline line(
+ pointVec(points() << LatLng(80.0, 0.0) << LatLng(-80.0, 0.0) << LatLng(0.0, 10.0)));
+ // Line shares a segment of one edge
+ S2Polyline collinearLine(
+ pointVec(points() << LatLng(50.0, 0.0) << LatLng(-50.0, 0.0) << LatLng(-50.0, 10.0)));
+
+ // Big polygon larger than a hemisphere.
+ BigSimplePolygon expandedBigPoly(loop(points() << LatLng(80.0, 0.0) << LatLng(-80.0, 0.0)
+ << LatLng(-80.0, 90.0) << LatLng(-80.0, 180.0)
+ << LatLng(-80.0, -90.0) << LatLng(80.0, -90.0)
+ << LatLng(80.0, 180.0) << LatLng(80.0, 90.0)));
+ ASSERT_GREATER_THAN(expandedBigPoly.GetArea(), 2 * M_PI);
+
+ checkConsistency(bigPoly, expandedBigPoly, poly);
+ checkConsistency(bigPoly, expandedBigPoly, collinearPoly);
+ checkConsistency(bigPoly, expandedBigPoly, line);
+ checkConsistency(bigPoly, expandedBigPoly, collinearLine);
+
+ // Check the complement of big polygon
+ bigPoly.Invert();
+ ASSERT_GREATER_THAN(bigPoly.GetArea(), 2 * M_PI);
+ expandedBigPoly.Invert();
+ ASSERT_LESS_THAN(expandedBigPoly.GetArea(), 2 * M_PI);
+
+ checkConsistency(bigPoly, expandedBigPoly, poly);
+ checkConsistency(bigPoly, expandedBigPoly, collinearPoly);
+ checkConsistency(bigPoly, expandedBigPoly, line);
+ checkConsistency(bigPoly, expandedBigPoly, collinearLine);
+}
}
diff --git a/src/mongo/db/geo/geoconstants.h b/src/mongo/db/geo/geoconstants.h
index e97e1d3b233..5883ae0ee02 100644
--- a/src/mongo/db/geo/geoconstants.h
+++ b/src/mongo/db/geo/geoconstants.h
@@ -30,8 +30,8 @@
namespace mongo {
- // Equatorial radius of earth.
- // Source: http://nssdc.gsfc.nasa.gov/planetary/factsheet/earthfact.html
- const double kRadiusOfEarthInMeters = (6378.1 * 1000);
+// Equatorial radius of earth.
+// Source: http://nssdc.gsfc.nasa.gov/planetary/factsheet/earthfact.html
+const double kRadiusOfEarthInMeters = (6378.1 * 1000);
} // namespace mongo
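For orientation, this constant converts angular (great-circle) distances into ground distances: a spherical distance expressed in radians scales by the Earth's radius to give meters. A minimal, hypothetical helper (not part of this patch) would be:

    // Hypothetical sketch: convert a great-circle angle in radians to meters.
    inline double radiansToMeters(double angleRadians) {
        return angleRadians * mongo::kRadiusOfEarthInMeters;  // 0.001 rad ~ 6378.1 m
    }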
diff --git a/src/mongo/db/geo/geometry_container.cpp b/src/mongo/db/geo/geometry_container.cpp
index 55afbe5c021..c74918e40c2 100644
--- a/src/mongo/db/geo/geometry_container.cpp
+++ b/src/mongo/db/geo/geometry_container.cpp
@@ -34,1184 +34,1259 @@
namespace mongo {
- using mongoutils::str::equals;
-
- GeometryContainer::GeometryContainer() {
- }
-
- bool GeometryContainer::isSimpleContainer() const {
- return NULL != _point || NULL != _line || NULL != _polygon;
- }
-
- bool GeometryContainer::supportsContains() const {
- return NULL != _polygon
- || NULL != _box
- || NULL != _cap
- || NULL != _multiPolygon
- || (NULL != _geometryCollection
- && (_geometryCollection->polygons.vector().size() > 0
- || _geometryCollection->multiPolygons.vector().size() > 0));
- }
-
- bool GeometryContainer::hasS2Region() const {
- return (NULL != _point && _point->crs == SPHERE)
- || NULL != _line
- || (NULL != _polygon && (_polygon->crs == SPHERE || _polygon->crs == STRICT_SPHERE))
- || (NULL != _cap && _cap->crs == SPHERE)
- || NULL != _multiPoint
- || NULL != _multiLine
- || NULL != _multiPolygon
- || NULL != _geometryCollection;
- }
-
- const S2Region& GeometryContainer::getS2Region() const {
- if (NULL != _point && SPHERE == _point->crs) {
- return _point->cell;
- } else if (NULL != _line) {
- return _line->line;
- } else if (NULL != _polygon && NULL != _polygon->s2Polygon) {
- return *_polygon->s2Polygon;
- } else if (NULL != _polygon && NULL != _polygon->bigPolygon) {
- return *_polygon->bigPolygon;
- } else if (NULL != _cap && SPHERE == _cap->crs) {
- return _cap->cap;
- } else if (NULL != _multiPoint) {
- return *_s2Region;
- } else if (NULL != _multiLine) {
- return *_s2Region;
- } else if (NULL != _multiPolygon) {
- return *_s2Region;
- } else {
- invariant(NULL != _geometryCollection);
- return *_s2Region;
- }
+using mongoutils::str::equals;
+
+GeometryContainer::GeometryContainer() {}
+
+bool GeometryContainer::isSimpleContainer() const {
+ return NULL != _point || NULL != _line || NULL != _polygon;
+}
+
+bool GeometryContainer::supportsContains() const {
+ return NULL != _polygon || NULL != _box || NULL != _cap || NULL != _multiPolygon ||
+ (NULL != _geometryCollection && (_geometryCollection->polygons.vector().size() > 0 ||
+ _geometryCollection->multiPolygons.vector().size() > 0));
+}
+
+bool GeometryContainer::hasS2Region() const {
+ return (NULL != _point && _point->crs == SPHERE) || NULL != _line ||
+ (NULL != _polygon && (_polygon->crs == SPHERE || _polygon->crs == STRICT_SPHERE)) ||
+ (NULL != _cap && _cap->crs == SPHERE) || NULL != _multiPoint || NULL != _multiLine ||
+ NULL != _multiPolygon || NULL != _geometryCollection;
+}
+
+const S2Region& GeometryContainer::getS2Region() const {
+ if (NULL != _point && SPHERE == _point->crs) {
+ return _point->cell;
+ } else if (NULL != _line) {
+ return _line->line;
+ } else if (NULL != _polygon && NULL != _polygon->s2Polygon) {
+ return *_polygon->s2Polygon;
+ } else if (NULL != _polygon && NULL != _polygon->bigPolygon) {
+ return *_polygon->bigPolygon;
+ } else if (NULL != _cap && SPHERE == _cap->crs) {
+ return _cap->cap;
+ } else if (NULL != _multiPoint) {
+ return *_s2Region;
+ } else if (NULL != _multiLine) {
+ return *_s2Region;
+ } else if (NULL != _multiPolygon) {
+ return *_s2Region;
+ } else {
+ invariant(NULL != _geometryCollection);
+ return *_s2Region;
}
+}
- bool GeometryContainer::hasR2Region() const {
- return _cap || _box || _point || (_polygon && _polygon->crs == FLAT)
- || (_multiPoint && FLAT == _multiPoint->crs);
- }
+bool GeometryContainer::hasR2Region() const {
+ return _cap || _box || _point || (_polygon && _polygon->crs == FLAT) ||
+ (_multiPoint && FLAT == _multiPoint->crs);
+}
- class GeometryContainer::R2BoxRegion : public R2Region {
- public:
+class GeometryContainer::R2BoxRegion : public R2Region {
+public:
+ R2BoxRegion(const GeometryContainer* geometry);
+ virtual ~R2BoxRegion();
- R2BoxRegion(const GeometryContainer* geometry);
- virtual ~R2BoxRegion();
+ Box getR2Bounds() const;
- Box getR2Bounds() const;
+ bool fastContains(const Box& other) const;
- bool fastContains(const Box& other) const;
+ bool fastDisjoint(const Box& other) const;
- bool fastDisjoint(const Box& other) const;
+private:
+ static Box buildBounds(const GeometryContainer& geometry);
- private:
+ // Not owned here
+ const GeometryContainer* _geometry;
- static Box buildBounds(const GeometryContainer& geometry);
+ // TODO: For big complex shapes, may be better to use actual shape from above
+ const Box _bounds;
+};
- // Not owned here
- const GeometryContainer* _geometry;
+GeometryContainer::R2BoxRegion::R2BoxRegion(const GeometryContainer* geometry)
+ : _geometry(geometry), _bounds(buildBounds(*geometry)) {}
- // TODO: For big complex shapes, may be better to use actual shape from above
- const Box _bounds;
- };
+GeometryContainer::R2BoxRegion::~R2BoxRegion() {}
- GeometryContainer::R2BoxRegion::R2BoxRegion(const GeometryContainer* geometry) :
- _geometry(geometry), _bounds(buildBounds(*geometry)) {
- }
+Box GeometryContainer::R2BoxRegion::getR2Bounds() const {
+ return _bounds;
+}
- GeometryContainer::R2BoxRegion::~R2BoxRegion() {
+bool GeometryContainer::R2BoxRegion::fastContains(const Box& other) const {
+ // TODO: Add more cases here to make coverings better
+ if (_geometry->_box && FLAT == _geometry->_box->crs) {
+ const Box& box = _geometry->_box->box;
+ if (box.contains(other))
+ return true;
+ } else if (_geometry->_cap && FLAT == _geometry->_cap->crs) {
+ const Circle& circle = _geometry->_cap->circle;
+ // Exact test
+ return circleContainsBox(circle, other);
}
- Box GeometryContainer::R2BoxRegion::getR2Bounds() const {
- return _bounds;
+ if (_geometry->_polygon && FLAT == _geometry->_polygon->crs) {
+ const Polygon& polygon = _geometry->_polygon->oldPolygon;
+ // Exact test
+ return polygonContainsBox(polygon, other);
}
- bool GeometryContainer::R2BoxRegion::fastContains(const Box& other) const {
+ // Not sure
+ return false;
+}
- // TODO: Add more cases here to make coverings better
- if (_geometry->_box && FLAT == _geometry->_box->crs) {
- const Box& box = _geometry->_box->box;
- if (box.contains(other))
- return true;
- } else if (_geometry->_cap && FLAT == _geometry->_cap->crs) {
- const Circle& circle = _geometry->_cap->circle;
- // Exact test
- return circleContainsBox(circle, other);
- }
-
- if (_geometry->_polygon && FLAT == _geometry->_polygon->crs) {
- const Polygon& polygon = _geometry->_polygon->oldPolygon;
- // Exact test
- return polygonContainsBox(polygon, other);
- }
+bool GeometryContainer::R2BoxRegion::fastDisjoint(const Box& other) const {
+ if (!_bounds.intersects(other))
+ return true;
- // Not sure
- return false;
- }
+ // Not sure
+ return false;
+}
- bool GeometryContainer::R2BoxRegion::fastDisjoint(const Box& other) const {
+static Point toLngLatPoint(const S2Point& s2Point) {
+ Point point;
+ S2LatLng latLng(s2Point);
+ point.x = latLng.lng().degrees();
+ point.y = latLng.lat().degrees();
+ return point;
+}
- if (!_bounds.intersects(other))
- return true;
+static void lineR2Bounds(const S2Polyline& flatLine, Box* flatBounds) {
+ int numVertices = flatLine.num_vertices();
+ verify(flatLine.num_vertices() > 0);
- // Not sure
- return false;
- }
+ flatBounds->init(toLngLatPoint(flatLine.vertex(0)), toLngLatPoint(flatLine.vertex(0)));
- static Point toLngLatPoint(const S2Point& s2Point) {
- Point point;
- S2LatLng latLng(s2Point);
- point.x = latLng.lng().degrees();
- point.y = latLng.lat().degrees();
- return point;
+ for (int i = 1; i < numVertices; ++i) {
+ flatBounds->expandToInclude(toLngLatPoint(flatLine.vertex(i)));
}
+}
- static void lineR2Bounds(const S2Polyline& flatLine, Box* flatBounds) {
+static void circleR2Bounds(const Circle& circle, Box* flatBounds) {
+ flatBounds->init(Point(circle.center.x - circle.radius, circle.center.y - circle.radius),
+ Point(circle.center.x + circle.radius, circle.center.y + circle.radius));
+}
- int numVertices = flatLine.num_vertices();
- verify(flatLine.num_vertices() > 0);
+static void multiPointR2Bounds(const vector<S2Point>& points, Box* flatBounds) {
+ verify(!points.empty());
- flatBounds->init(toLngLatPoint(flatLine.vertex(0)), toLngLatPoint(flatLine.vertex(0)));
+ flatBounds->init(toLngLatPoint(points.front()), toLngLatPoint(points.front()));
- for (int i = 1; i < numVertices; ++i) {
- flatBounds->expandToInclude(toLngLatPoint(flatLine.vertex(i)));
- }
+ vector<S2Point>::const_iterator it = points.begin();
+ for (++it; it != points.end(); ++it) {
+ const S2Point& s2Point = *it;
+ flatBounds->expandToInclude(toLngLatPoint(s2Point));
}
-
- static void circleR2Bounds(const Circle& circle, Box* flatBounds) {
- flatBounds->init(Point(circle.center.x - circle.radius, circle.center.y - circle.radius),
- Point(circle.center.x + circle.radius, circle.center.y + circle.radius));
+}
+
+static void polygonR2Bounds(const Polygon& polygon, Box* flatBounds) {
+ *flatBounds = polygon.bounds();
+}
+
+static void s2RegionR2Bounds(const S2Region& region, Box* flatBounds) {
+ S2LatLngRect s2Bounds = region.GetRectBound();
+ flatBounds->init(Point(s2Bounds.lng_lo().degrees(), s2Bounds.lat_lo().degrees()),
+ Point(s2Bounds.lng_hi().degrees(), s2Bounds.lat_hi().degrees()));
+}
+
+Box GeometryContainer::R2BoxRegion::buildBounds(const GeometryContainer& geometry) {
+ Box bounds;
+
+ if (geometry._point && FLAT == geometry._point->crs) {
+ bounds.init(geometry._point->oldPoint, geometry._point->oldPoint);
+ } else if (geometry._line && FLAT == geometry._line->crs) {
+ lineR2Bounds(geometry._line->line, &bounds);
+ } else if (geometry._cap && FLAT == geometry._cap->crs) {
+ circleR2Bounds(geometry._cap->circle, &bounds);
+ } else if (geometry._box && FLAT == geometry._box->crs) {
+ bounds = geometry._box->box;
+ } else if (geometry._polygon && FLAT == geometry._polygon->crs) {
+ polygonR2Bounds(geometry._polygon->oldPolygon, &bounds);
+ } else if (geometry._multiPoint && FLAT == geometry._multiPoint->crs) {
+ multiPointR2Bounds(geometry._multiPoint->points, &bounds);
+ } else if (geometry._multiLine && FLAT == geometry._multiLine->crs) {
+ verify(false);
+ } else if (geometry._multiPolygon && FLAT == geometry._multiPolygon->crs) {
+ verify(false);
+ } else if (geometry._geometryCollection) {
+ verify(false);
+ } else if (geometry.hasS2Region()) {
+ // For now, just support spherical cap for $centerSphere and GeoJSON points
+ verify((geometry._cap && FLAT != geometry._cap->crs) ||
+ (geometry._point && FLAT != geometry._point->crs));
+ s2RegionR2Bounds(geometry.getS2Region(), &bounds);
}
- static void multiPointR2Bounds(const vector<S2Point>& points, Box* flatBounds) {
+ return bounds;
+}
- verify(!points.empty());
+const R2Region& GeometryContainer::getR2Region() const {
+ return *_r2Region;
+}
- flatBounds->init(toLngLatPoint(points.front()), toLngLatPoint(points.front()));
+bool GeometryContainer::contains(const GeometryContainer& otherContainer) const {
+ // First let's deal with the FLAT cases
- vector<S2Point>::const_iterator it = points.begin();
- for (++it; it != points.end(); ++it) {
- const S2Point& s2Point = *it;
- flatBounds->expandToInclude(toLngLatPoint(s2Point));
- }
+ if (_point && FLAT == _point->crs) {
+ return false;
}
- static void polygonR2Bounds(const Polygon& polygon, Box* flatBounds) {
- *flatBounds = polygon.bounds();
+ if (NULL != _polygon && (FLAT == _polygon->crs)) {
+ if (NULL == otherContainer._point) {
+ return false;
+ }
+ return _polygon->oldPolygon.contains(otherContainer._point->oldPoint);
}
- static void s2RegionR2Bounds(const S2Region& region, Box* flatBounds) {
- S2LatLngRect s2Bounds = region.GetRectBound();
- flatBounds->init(Point(s2Bounds.lng_lo().degrees(), s2Bounds.lat_lo().degrees()),
- Point(s2Bounds.lng_hi().degrees(), s2Bounds.lat_hi().degrees()));
+ if (NULL != _box) {
+ verify(FLAT == _box->crs);
+ if (NULL == otherContainer._point) {
+ return false;
+ }
+ return _box->box.inside(otherContainer._point->oldPoint);
}
- Box GeometryContainer::R2BoxRegion::buildBounds(const GeometryContainer& geometry) {
-
- Box bounds;
-
- if (geometry._point && FLAT == geometry._point->crs) {
- bounds.init(geometry._point->oldPoint, geometry._point->oldPoint);
- }
- else if (geometry._line && FLAT == geometry._line->crs) {
- lineR2Bounds(geometry._line->line, &bounds);
- }
- else if (geometry._cap && FLAT == geometry._cap->crs) {
- circleR2Bounds(geometry._cap->circle, &bounds);
- }
- else if (geometry._box && FLAT == geometry._box->crs) {
- bounds = geometry._box->box;
- }
- else if (geometry._polygon && FLAT == geometry._polygon->crs) {
- polygonR2Bounds(geometry._polygon->oldPolygon, &bounds);
- }
- else if (geometry._multiPoint && FLAT == geometry._multiPoint->crs) {
- multiPointR2Bounds(geometry._multiPoint->points, &bounds);
- }
- else if (geometry._multiLine && FLAT == geometry._multiLine->crs) {
- verify(false);
- }
- else if (geometry._multiPolygon && FLAT == geometry._multiPolygon->crs) {
- verify(false);
- }
- else if (geometry._geometryCollection) {
- verify(false);
- }
- else if (geometry.hasS2Region()) {
- // For now, just support spherical cap for $centerSphere and GeoJSON points
- verify((geometry._cap && FLAT != geometry._cap->crs) ||
- (geometry._point && FLAT != geometry._point->crs));
- s2RegionR2Bounds(geometry.getS2Region(), &bounds);
+ if (NULL != _cap && (FLAT == _cap->crs)) {
+ if (NULL == otherContainer._point) {
+ return false;
}
-
- return bounds;
+    // Let's be as consistent epsilon-wise as we can with the '2d' index type.
+ return distanceWithin(
+ _cap->circle.center, otherContainer._point->oldPoint, _cap->circle.radius);
}
- const R2Region& GeometryContainer::getR2Region() const {
- return *_r2Region;
- }
+ // Now we deal with all the SPHERE stuff.
- bool GeometryContainer::contains(const GeometryContainer& otherContainer) const {
+ // Iterate over the other thing and see if we contain it all.
+ if (NULL != otherContainer._point) {
+ return contains(otherContainer._point->cell, otherContainer._point->point);
+ }
- // First let's deal with the FLAT cases
+ if (NULL != otherContainer._line) {
+ return contains(otherContainer._line->line);
+ }
- if (_point && FLAT == _point->crs) {
- return false;
- }
+ if (NULL != otherContainer._polygon) {
+ invariant(NULL != otherContainer._polygon->s2Polygon);
+ return contains(*otherContainer._polygon->s2Polygon);
+ }
- if (NULL != _polygon && (FLAT == _polygon->crs)) {
- if (NULL == otherContainer._point) { return false; }
- return _polygon->oldPolygon.contains(otherContainer._point->oldPoint);
+ if (NULL != otherContainer._multiPoint) {
+ for (size_t i = 0; i < otherContainer._multiPoint->points.size(); ++i) {
+ if (!contains(otherContainer._multiPoint->cells[i],
+ otherContainer._multiPoint->points[i])) {
+ return false;
+ }
}
+ return true;
+ }
- if (NULL != _box) {
- verify(FLAT == _box->crs);
- if (NULL == otherContainer._point) { return false; }
- return _box->box.inside(otherContainer._point->oldPoint);
+ if (NULL != otherContainer._multiLine) {
+ const vector<S2Polyline*>& lines = otherContainer._multiLine->lines.vector();
+ for (size_t i = 0; i < lines.size(); ++i) {
+ if (!contains(*lines[i])) {
+ return false;
+ }
}
+ return true;
+ }
- if (NULL != _cap && (FLAT == _cap->crs)) {
- if (NULL == otherContainer._point) { return false; }
- // Let's be as consistent epsilon-wise as we can with the '2d' indextype.
- return distanceWithin(_cap->circle.center, otherContainer._point->oldPoint,
- _cap->circle.radius);
+ if (NULL != otherContainer._multiPolygon) {
+ const vector<S2Polygon*>& polys = otherContainer._multiPolygon->polygons.vector();
+ for (size_t i = 0; i < polys.size(); ++i) {
+ if (!contains(*polys[i])) {
+ return false;
+ }
}
+ return true;
+ }
- // Now we deal with all the SPHERE stuff.
+ if (NULL != otherContainer._geometryCollection) {
+ GeometryCollection& c = *otherContainer._geometryCollection;
- // Iterate over the other thing and see if we contain it all.
- if (NULL != otherContainer._point) {
- return contains(otherContainer._point->cell, otherContainer._point->point);
+ for (size_t i = 0; i < c.points.size(); ++i) {
+ if (!contains(c.points[i].cell, c.points[i].point)) {
+ return false;
+ }
}
- if (NULL != otherContainer._line) {
- return contains(otherContainer._line->line);
+ const vector<LineWithCRS*>& lines = c.lines.vector();
+ for (size_t i = 0; i < lines.size(); ++i) {
+ if (!contains(lines[i]->line)) {
+ return false;
+ }
}
- if (NULL != otherContainer._polygon) {
- invariant(NULL != otherContainer._polygon->s2Polygon);
- return contains(*otherContainer._polygon->s2Polygon);
+ const vector<PolygonWithCRS*>& polys = c.polygons.vector();
+ for (size_t i = 0; i < polys.size(); ++i) {
+ if (!contains(*polys[i]->s2Polygon)) {
+ return false;
+ }
}
- if (NULL != otherContainer._multiPoint) {
- for (size_t i = 0; i < otherContainer._multiPoint->points.size(); ++i) {
- if (!contains(otherContainer._multiPoint->cells[i],
- otherContainer._multiPoint->points[i])) {
+ const vector<MultiPointWithCRS*>& multipoints = c.multiPoints.vector();
+ for (size_t i = 0; i < multipoints.size(); ++i) {
+ MultiPointWithCRS* mp = multipoints[i];
+ for (size_t j = 0; j < mp->points.size(); ++j) {
+ if (!contains(mp->cells[j], mp->points[j])) {
return false;
}
}
- return true;
}
- if (NULL != otherContainer._multiLine) {
- const vector<S2Polyline*>& lines = otherContainer._multiLine->lines.vector();
- for (size_t i = 0; i < lines.size(); ++i) {
- if (!contains(*lines[i])) { return false; }
- }
- return true;
- }
-
- if (NULL != otherContainer._multiPolygon) {
- const vector<S2Polygon*>& polys = otherContainer._multiPolygon->polygons.vector();
- for (size_t i = 0; i < polys.size(); ++i) {
- if (!contains(*polys[i])) { return false; }
+ const vector<MultiLineWithCRS*>& multilines = c.multiLines.vector();
+ for (size_t i = 0; i < multilines.size(); ++i) {
+ const vector<S2Polyline*>& lines = multilines[i]->lines.vector();
+ for (size_t j = 0; j < lines.size(); ++j) {
+ if (!contains(*lines[j])) {
+ return false;
+ }
}
- return true;
}
- if (NULL != otherContainer._geometryCollection) {
- GeometryCollection& c = *otherContainer._geometryCollection;
-
- for (size_t i = 0; i < c.points.size(); ++i) {
- if (!contains(c.points[i].cell, c.points[i].point)) {
+ const vector<MultiPolygonWithCRS*>& multipolys = c.multiPolygons.vector();
+ for (size_t i = 0; i < multipolys.size(); ++i) {
+ const vector<S2Polygon*>& polys = multipolys[i]->polygons.vector();
+ for (size_t j = 0; j < polys.size(); ++j) {
+ if (!contains(*polys[j])) {
return false;
}
}
+ }
- const vector<LineWithCRS*>& lines = c.lines.vector();
- for (size_t i = 0; i < lines.size(); ++i) {
- if (!contains(lines[i]->line)) { return false; }
- }
+ return true;
+ }
- const vector<PolygonWithCRS*>& polys = c.polygons.vector();
- for (size_t i = 0; i < polys.size(); ++i) {
- if (!contains(*polys[i]->s2Polygon)) { return false; }
- }
+ return false;
+}
- const vector<MultiPointWithCRS*>& multipoints = c.multiPoints.vector();
- for (size_t i = 0; i < multipoints.size(); ++i) {
- MultiPointWithCRS* mp = multipoints[i];
- for (size_t j = 0; j < mp->points.size(); ++j) {
- if (!contains(mp->cells[j], mp->points[j])) { return false; }
- }
- }
-
- const vector<MultiLineWithCRS*>& multilines = c.multiLines.vector();
- for (size_t i = 0; i < multilines.size(); ++i) {
- const vector<S2Polyline*>& lines = multilines[i]->lines.vector();
- for (size_t j = 0; j < lines.size(); ++j) {
- if (!contains(*lines[j])) { return false; }
- }
- }
+bool containsPoint(const S2Polygon& poly, const S2Cell& otherCell, const S2Point& otherPoint) {
+ // This is much faster for actual containment checking.
+ if (poly.Contains(otherPoint)) {
+ return true;
+ }
+ // This is slower but contains edges/vertices.
+ return poly.MayIntersect(otherCell);
+}
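To make the fast-path/slow-path split concrete: a point lying exactly on the polygon's boundary can fail the exact Contains() test, but the leaf cell built from that point still intersects the polygon, so containsPoint() accepts it. An illustrative sketch (hypothetical helper name, not part of this patch):

    // Sketch only: boundary points are accepted via the cell-intersection fallback.
    void sketchBoundaryPoint(const S2Polygon& poly) {
        S2Point vertex = poly.loop(0)->vertex(0);  // a point on the border
        // The exact test may reject it, but the leaf cell around it intersects the polygon.
        invariant(containsPoint(poly, S2Cell(vertex), vertex));
    }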
- const vector<MultiPolygonWithCRS*>& multipolys = c.multiPolygons.vector();
- for (size_t i = 0; i < multipolys.size(); ++i) {
- const vector<S2Polygon*>& polys = multipolys[i]->polygons.vector();
- for (size_t j = 0; j < polys.size(); ++j) {
- if (!contains(*polys[j])) { return false; }
- }
- }
+bool GeometryContainer::contains(const S2Cell& otherCell, const S2Point& otherPoint) const {
+ if (NULL != _polygon && (NULL != _polygon->s2Polygon)) {
+ return containsPoint(*_polygon->s2Polygon, otherCell, otherPoint);
+ }
+ if (NULL != _polygon && (NULL != _polygon->bigPolygon)) {
+ if (_polygon->bigPolygon->Contains(otherPoint))
return true;
- }
-
- return false;
+ return _polygon->bigPolygon->MayIntersect(otherCell);
}
- bool containsPoint(const S2Polygon& poly, const S2Cell& otherCell, const S2Point& otherPoint) {
- // This is much faster for actual containment checking.
- if (poly.Contains(otherPoint)) { return true; }
- // This is slower but contains edges/vertices.
- return poly.MayIntersect(otherCell);
+ if (NULL != _cap && (_cap->crs == SPHERE)) {
+ return _cap->cap.MayIntersect(otherCell);
}
- bool GeometryContainer::contains(const S2Cell& otherCell, const S2Point& otherPoint) const {
- if (NULL != _polygon && (NULL != _polygon->s2Polygon)) {
- return containsPoint(*_polygon->s2Polygon, otherCell, otherPoint);
- }
-
- if (NULL != _polygon && (NULL != _polygon->bigPolygon)) {
- if (_polygon->bigPolygon->Contains(otherPoint))
+ if (NULL != _multiPolygon) {
+ const vector<S2Polygon*>& polys = _multiPolygon->polygons.vector();
+ for (size_t i = 0; i < polys.size(); ++i) {
+ if (containsPoint(*polys[i], otherCell, otherPoint)) {
return true;
- return _polygon->bigPolygon->MayIntersect(otherCell);
- }
-
- if (NULL != _cap && (_cap->crs == SPHERE)) {
- return _cap->cap.MayIntersect(otherCell);
- }
-
- if (NULL != _multiPolygon) {
- const vector<S2Polygon*>& polys = _multiPolygon->polygons.vector();
- for (size_t i = 0; i < polys.size(); ++i) {
- if (containsPoint(*polys[i], otherCell, otherPoint)) { return true; }
}
}
+ }
- if (NULL != _geometryCollection) {
- const vector<PolygonWithCRS*>& polys = _geometryCollection->polygons.vector();
- for (size_t i = 0; i < polys.size(); ++i) {
- if (containsPoint(*polys[i]->s2Polygon, otherCell, otherPoint)) { return true; }
+ if (NULL != _geometryCollection) {
+ const vector<PolygonWithCRS*>& polys = _geometryCollection->polygons.vector();
+ for (size_t i = 0; i < polys.size(); ++i) {
+ if (containsPoint(*polys[i]->s2Polygon, otherCell, otherPoint)) {
+ return true;
}
+ }
- const vector<MultiPolygonWithCRS*>& multipolys =_geometryCollection->multiPolygons.vector();
- for (size_t i = 0; i < multipolys.size(); ++i) {
- const vector<S2Polygon*>& innerpolys = multipolys[i]->polygons.vector();
- for (size_t j = 0; j < innerpolys.size(); ++j) {
- if (containsPoint(*innerpolys[j], otherCell, otherPoint)) { return true; }
+ const vector<MultiPolygonWithCRS*>& multipolys =
+ _geometryCollection->multiPolygons.vector();
+ for (size_t i = 0; i < multipolys.size(); ++i) {
+ const vector<S2Polygon*>& innerpolys = multipolys[i]->polygons.vector();
+ for (size_t j = 0; j < innerpolys.size(); ++j) {
+ if (containsPoint(*innerpolys[j], otherCell, otherPoint)) {
+ return true;
}
}
}
+ }
+
+ return false;
+}
+
+bool containsLine(const S2Polygon& poly, const S2Polyline& otherLine) {
+ // Kind of a mess. We get a function for clipping the line to the
+ // polygon. We do this and make sure the line is the same as the
+ // line we're clipping against.
+ OwnedPointerVector<S2Polyline> clippedOwned;
+ vector<S2Polyline*>& clipped = clippedOwned.mutableVector();
+ poly.IntersectWithPolyline(&otherLine, &clipped);
+ if (1 != clipped.size()) {
return false;
}
- bool containsLine(const S2Polygon& poly, const S2Polyline& otherLine) {
- // Kind of a mess. We get a function for clipping the line to the
- // polygon. We do this and make sure the line is the same as the
- // line we're clipping against.
- OwnedPointerVector<S2Polyline> clippedOwned;
- vector<S2Polyline*>& clipped = clippedOwned.mutableVector();
+ // If the line is entirely contained within the polygon, we should be
+ // getting it back verbatim, so really there should be no error.
+ bool ret = clipped[0]->NearlyCoversPolyline(otherLine, S1Angle::Degrees(1e-10));
- poly.IntersectWithPolyline(&otherLine, &clipped);
- if (1 != clipped.size()) { return false; }
+ return ret;
+}
- // If the line is entirely contained within the polygon, we should be
- // getting it back verbatim, so really there should be no error.
- bool ret = clipped[0]->NearlyCoversPolyline(otherLine,
- S1Angle::Degrees(1e-10));
-
- return ret;
+bool GeometryContainer::contains(const S2Polyline& otherLine) const {
+ if (NULL != _polygon && NULL != _polygon->s2Polygon) {
+ return containsLine(*_polygon->s2Polygon, otherLine);
}
- bool GeometryContainer::contains(const S2Polyline& otherLine) const {
- if (NULL != _polygon && NULL != _polygon->s2Polygon) {
- return containsLine(*_polygon->s2Polygon, otherLine);
- }
-
- if (NULL != _polygon && NULL != _polygon->bigPolygon) {
- return _polygon->bigPolygon->Contains(otherLine);
- }
+ if (NULL != _polygon && NULL != _polygon->bigPolygon) {
+ return _polygon->bigPolygon->Contains(otherLine);
+ }
- if (NULL != _multiPolygon) {
- const vector<S2Polygon*>& polys = _multiPolygon->polygons.vector();
- for (size_t i = 0; i < polys.size(); ++i) {
- if (containsLine(*polys[i], otherLine)) { return true; }
+ if (NULL != _multiPolygon) {
+ const vector<S2Polygon*>& polys = _multiPolygon->polygons.vector();
+ for (size_t i = 0; i < polys.size(); ++i) {
+ if (containsLine(*polys[i], otherLine)) {
+ return true;
}
}
+ }
- if (NULL != _geometryCollection) {
- const vector<PolygonWithCRS*>& polys = _geometryCollection->polygons.vector();
- for (size_t i = 0; i < polys.size(); ++i) {
- if (containsLine(*polys[i]->s2Polygon, otherLine)) { return true; }
+ if (NULL != _geometryCollection) {
+ const vector<PolygonWithCRS*>& polys = _geometryCollection->polygons.vector();
+ for (size_t i = 0; i < polys.size(); ++i) {
+ if (containsLine(*polys[i]->s2Polygon, otherLine)) {
+ return true;
}
+ }
- const vector<MultiPolygonWithCRS*>& multipolys =_geometryCollection->multiPolygons.vector();
- for (size_t i = 0; i < multipolys.size(); ++i) {
- const vector<S2Polygon*>& innerpolys = multipolys[i]->polygons.vector();
- for (size_t j = 0; j < innerpolys.size(); ++j) {
- if (containsLine(*innerpolys[j], otherLine)) { return true; }
+ const vector<MultiPolygonWithCRS*>& multipolys =
+ _geometryCollection->multiPolygons.vector();
+ for (size_t i = 0; i < multipolys.size(); ++i) {
+ const vector<S2Polygon*>& innerpolys = multipolys[i]->polygons.vector();
+ for (size_t j = 0; j < innerpolys.size(); ++j) {
+ if (containsLine(*innerpolys[j], otherLine)) {
+ return true;
}
}
}
-
- return false;
}
- bool containsPolygon(const S2Polygon& poly, const S2Polygon& otherPoly) {
- return poly.Contains(&otherPoly);
- }
+ return false;
+}
- bool GeometryContainer::contains(const S2Polygon& otherPolygon) const {
- if (NULL != _polygon && NULL != _polygon->s2Polygon) {
- return containsPolygon(*_polygon->s2Polygon, otherPolygon);
- }
+bool containsPolygon(const S2Polygon& poly, const S2Polygon& otherPoly) {
+ return poly.Contains(&otherPoly);
+}
- if (NULL != _polygon && NULL != _polygon->bigPolygon) {
- return _polygon->bigPolygon->Contains(otherPolygon);
- }
+bool GeometryContainer::contains(const S2Polygon& otherPolygon) const {
+ if (NULL != _polygon && NULL != _polygon->s2Polygon) {
+ return containsPolygon(*_polygon->s2Polygon, otherPolygon);
+ }
- if (NULL != _multiPolygon) {
- const vector<S2Polygon*>& polys = _multiPolygon->polygons.vector();
- for (size_t i = 0; i < polys.size(); ++i) {
- if (containsPolygon(*polys[i], otherPolygon)) { return true; }
+ if (NULL != _polygon && NULL != _polygon->bigPolygon) {
+ return _polygon->bigPolygon->Contains(otherPolygon);
+ }
+
+ if (NULL != _multiPolygon) {
+ const vector<S2Polygon*>& polys = _multiPolygon->polygons.vector();
+ for (size_t i = 0; i < polys.size(); ++i) {
+ if (containsPolygon(*polys[i], otherPolygon)) {
+ return true;
}
}
+ }
- if (NULL != _geometryCollection) {
- const vector<PolygonWithCRS*>& polys = _geometryCollection->polygons.vector();
- for (size_t i = 0; i < polys.size(); ++i) {
- if (containsPolygon(*polys[i]->s2Polygon, otherPolygon)) { return true; }
+ if (NULL != _geometryCollection) {
+ const vector<PolygonWithCRS*>& polys = _geometryCollection->polygons.vector();
+ for (size_t i = 0; i < polys.size(); ++i) {
+ if (containsPolygon(*polys[i]->s2Polygon, otherPolygon)) {
+ return true;
}
+ }
- const vector<MultiPolygonWithCRS*>& multipolys =_geometryCollection->multiPolygons.vector();
- for (size_t i = 0; i < multipolys.size(); ++i) {
- const vector<S2Polygon*>& innerpolys = multipolys[i]->polygons.vector();
- for (size_t j = 0; j < innerpolys.size(); ++j) {
- if (containsPolygon(*innerpolys[j], otherPolygon)) { return true; }
+ const vector<MultiPolygonWithCRS*>& multipolys =
+ _geometryCollection->multiPolygons.vector();
+ for (size_t i = 0; i < multipolys.size(); ++i) {
+ const vector<S2Polygon*>& innerpolys = multipolys[i]->polygons.vector();
+ for (size_t j = 0; j < innerpolys.size(); ++j) {
+ if (containsPolygon(*innerpolys[j], otherPolygon)) {
+ return true;
}
}
}
-
- return false;
}
- bool GeometryContainer::intersects(const GeometryContainer& otherContainer) const {
- if (NULL != otherContainer._point) {
- return intersects(otherContainer._point->cell);
- } else if (NULL != otherContainer._line) {
- return intersects(otherContainer._line->line);
- } else if (NULL != otherContainer._polygon) {
- if (NULL == otherContainer._polygon->s2Polygon) { return false; }
- return intersects(*otherContainer._polygon->s2Polygon);
- } else if (NULL != otherContainer._multiPoint) {
- return intersects(*otherContainer._multiPoint);
- } else if (NULL != otherContainer._multiLine) {
- return intersects(*otherContainer._multiLine);
- } else if (NULL != otherContainer._multiPolygon) {
- return intersects(*otherContainer._multiPolygon);
- } else if (NULL != otherContainer._geometryCollection) {
- const GeometryCollection& c = *otherContainer._geometryCollection;
-
- for (size_t i = 0; i < c.points.size(); ++i) {
- if (intersects(c.points[i].cell)) { return true; }
- }
+ return false;
+}
- for (size_t i = 0; i < c.polygons.vector().size(); ++i) {
- if (intersects(*c.polygons.vector()[i]->s2Polygon)) { return true; }
+bool GeometryContainer::intersects(const GeometryContainer& otherContainer) const {
+ if (NULL != otherContainer._point) {
+ return intersects(otherContainer._point->cell);
+ } else if (NULL != otherContainer._line) {
+ return intersects(otherContainer._line->line);
+ } else if (NULL != otherContainer._polygon) {
+ if (NULL == otherContainer._polygon->s2Polygon) {
+ return false;
+ }
+ return intersects(*otherContainer._polygon->s2Polygon);
+ } else if (NULL != otherContainer._multiPoint) {
+ return intersects(*otherContainer._multiPoint);
+ } else if (NULL != otherContainer._multiLine) {
+ return intersects(*otherContainer._multiLine);
+ } else if (NULL != otherContainer._multiPolygon) {
+ return intersects(*otherContainer._multiPolygon);
+ } else if (NULL != otherContainer._geometryCollection) {
+ const GeometryCollection& c = *otherContainer._geometryCollection;
+
+ for (size_t i = 0; i < c.points.size(); ++i) {
+ if (intersects(c.points[i].cell)) {
+ return true;
}
+ }
- for (size_t i = 0; i < c.lines.vector().size(); ++i) {
- if (intersects(c.lines.vector()[i]->line)) { return true; }
+ for (size_t i = 0; i < c.polygons.vector().size(); ++i) {
+ if (intersects(*c.polygons.vector()[i]->s2Polygon)) {
+ return true;
}
+ }
- for (size_t i = 0; i < c.multiPolygons.vector().size(); ++i) {
- if (intersects(*c.multiPolygons.vector()[i])) { return true; }
+ for (size_t i = 0; i < c.lines.vector().size(); ++i) {
+ if (intersects(c.lines.vector()[i]->line)) {
+ return true;
}
+ }
- for (size_t i = 0; i < c.multiLines.vector().size(); ++i) {
- if (intersects(*c.multiLines.vector()[i])) { return true; }
+ for (size_t i = 0; i < c.multiPolygons.vector().size(); ++i) {
+ if (intersects(*c.multiPolygons.vector()[i])) {
+ return true;
}
+ }
- for (size_t i = 0; i < c.multiPoints.vector().size(); ++i) {
- if (intersects(*c.multiPoints.vector()[i])) { return true; }
+ for (size_t i = 0; i < c.multiLines.vector().size(); ++i) {
+ if (intersects(*c.multiLines.vector()[i])) {
+ return true;
}
}
- return false;
+ for (size_t i = 0; i < c.multiPoints.vector().size(); ++i) {
+ if (intersects(*c.multiPoints.vector()[i])) {
+ return true;
+ }
+ }
}
- bool GeometryContainer::intersects(const MultiPointWithCRS& otherMultiPoint) const {
- for (size_t i = 0; i < otherMultiPoint.cells.size(); ++i) {
- if (intersects(otherMultiPoint.cells[i])) { return true; }
+ return false;
+}
+
+bool GeometryContainer::intersects(const MultiPointWithCRS& otherMultiPoint) const {
+ for (size_t i = 0; i < otherMultiPoint.cells.size(); ++i) {
+ if (intersects(otherMultiPoint.cells[i])) {
+ return true;
}
- return false;
}
+ return false;
+}
- bool GeometryContainer::intersects(const MultiLineWithCRS& otherMultiLine) const {
- for (size_t i = 0; i < otherMultiLine.lines.vector().size(); ++i) {
- if (intersects(*otherMultiLine.lines.vector()[i])) { return true; }
+bool GeometryContainer::intersects(const MultiLineWithCRS& otherMultiLine) const {
+ for (size_t i = 0; i < otherMultiLine.lines.vector().size(); ++i) {
+ if (intersects(*otherMultiLine.lines.vector()[i])) {
+ return true;
}
- return false;
}
+ return false;
+}
- bool GeometryContainer::intersects(const MultiPolygonWithCRS& otherMultiPolygon) const {
- for (size_t i = 0; i < otherMultiPolygon.polygons.vector().size(); ++i) {
- if (intersects(*otherMultiPolygon.polygons.vector()[i])) { return true; }
+bool GeometryContainer::intersects(const MultiPolygonWithCRS& otherMultiPolygon) const {
+ for (size_t i = 0; i < otherMultiPolygon.polygons.vector().size(); ++i) {
+ if (intersects(*otherMultiPolygon.polygons.vector()[i])) {
+ return true;
}
- return false;
}
-
- // Does this (GeometryContainer) intersect the provided data?
- bool GeometryContainer::intersects(const S2Cell &otherPoint) const {
- if (NULL != _point) {
- return _point->cell.MayIntersect(otherPoint);
- } else if (NULL != _line) {
- return _line->line.MayIntersect(otherPoint);
- } else if (NULL != _polygon && NULL != _polygon->s2Polygon) {
- return _polygon->s2Polygon->MayIntersect(otherPoint);
- } else if (NULL != _polygon && NULL != _polygon->bigPolygon) {
- return _polygon->bigPolygon->MayIntersect(otherPoint);
- } else if (NULL != _multiPoint) {
- const vector<S2Cell>& cells = _multiPoint->cells;
- for (size_t i = 0; i < cells.size(); ++i) {
- if (cells[i].MayIntersect(otherPoint)) { return true; }
+ return false;
+}
+
+// Does this (GeometryContainer) intersect the provided data?
+bool GeometryContainer::intersects(const S2Cell& otherPoint) const {
+ if (NULL != _point) {
+ return _point->cell.MayIntersect(otherPoint);
+ } else if (NULL != _line) {
+ return _line->line.MayIntersect(otherPoint);
+ } else if (NULL != _polygon && NULL != _polygon->s2Polygon) {
+ return _polygon->s2Polygon->MayIntersect(otherPoint);
+ } else if (NULL != _polygon && NULL != _polygon->bigPolygon) {
+ return _polygon->bigPolygon->MayIntersect(otherPoint);
+ } else if (NULL != _multiPoint) {
+ const vector<S2Cell>& cells = _multiPoint->cells;
+ for (size_t i = 0; i < cells.size(); ++i) {
+ if (cells[i].MayIntersect(otherPoint)) {
+ return true;
}
- } else if (NULL != _multiLine) {
- const vector<S2Polyline*>& lines = _multiLine->lines.vector();
- for (size_t i = 0; i < lines.size(); ++i) {
- if (lines[i]->MayIntersect(otherPoint)) { return true; }
+ }
+ } else if (NULL != _multiLine) {
+ const vector<S2Polyline*>& lines = _multiLine->lines.vector();
+ for (size_t i = 0; i < lines.size(); ++i) {
+ if (lines[i]->MayIntersect(otherPoint)) {
+ return true;
}
- } else if (NULL != _multiPolygon) {
- const vector<S2Polygon*>& polys = _multiPolygon->polygons.vector();
- for (size_t i = 0; i < polys.size(); ++i) {
- if (polys[i]->MayIntersect(otherPoint)) { return true; }
+ }
+ } else if (NULL != _multiPolygon) {
+ const vector<S2Polygon*>& polys = _multiPolygon->polygons.vector();
+ for (size_t i = 0; i < polys.size(); ++i) {
+ if (polys[i]->MayIntersect(otherPoint)) {
+ return true;
}
- } else if (NULL != _geometryCollection) {
- const GeometryCollection& c = *_geometryCollection;
+ }
+ } else if (NULL != _geometryCollection) {
+ const GeometryCollection& c = *_geometryCollection;
- for (size_t i = 0; i < c.points.size(); ++i) {
- if (c.points[i].cell.MayIntersect(otherPoint)) { return true; }
+ for (size_t i = 0; i < c.points.size(); ++i) {
+ if (c.points[i].cell.MayIntersect(otherPoint)) {
+ return true;
}
+ }
- for (size_t i = 0; i < c.polygons.vector().size(); ++i) {
- if (c.polygons.vector()[i]->s2Polygon->MayIntersect(otherPoint)) { return true; }
+ for (size_t i = 0; i < c.polygons.vector().size(); ++i) {
+ if (c.polygons.vector()[i]->s2Polygon->MayIntersect(otherPoint)) {
+ return true;
}
+ }
- for (size_t i = 0; i < c.lines.vector().size(); ++i) {
- if (c.lines.vector()[i]->line.MayIntersect(otherPoint)) { return true; }
+ for (size_t i = 0; i < c.lines.vector().size(); ++i) {
+ if (c.lines.vector()[i]->line.MayIntersect(otherPoint)) {
+ return true;
}
+ }
- for (size_t i = 0; i < c.multiPolygons.vector().size(); ++i) {
- const vector<S2Polygon*>& innerPolys =
- c.multiPolygons.vector()[i]->polygons.vector();
- for (size_t j = 0; j < innerPolys.size(); ++j) {
- if (innerPolys[j]->MayIntersect(otherPoint)) { return true; }
+ for (size_t i = 0; i < c.multiPolygons.vector().size(); ++i) {
+ const vector<S2Polygon*>& innerPolys = c.multiPolygons.vector()[i]->polygons.vector();
+ for (size_t j = 0; j < innerPolys.size(); ++j) {
+ if (innerPolys[j]->MayIntersect(otherPoint)) {
+ return true;
}
}
+ }
- for (size_t i = 0; i < c.multiLines.vector().size(); ++i) {
- const vector<S2Polyline*>& innerLines =
- c.multiLines.vector()[i]->lines.vector();
- for (size_t j = 0; j < innerLines.size(); ++j) {
- if (innerLines[j]->MayIntersect(otherPoint)) { return true; }
+ for (size_t i = 0; i < c.multiLines.vector().size(); ++i) {
+ const vector<S2Polyline*>& innerLines = c.multiLines.vector()[i]->lines.vector();
+ for (size_t j = 0; j < innerLines.size(); ++j) {
+ if (innerLines[j]->MayIntersect(otherPoint)) {
+ return true;
}
}
+ }
- for (size_t i = 0; i < c.multiPoints.vector().size(); ++i) {
- const vector<S2Cell>& innerCells = c.multiPoints.vector()[i]->cells;
- for (size_t j = 0; j < innerCells.size(); ++j) {
- if (innerCells[j].MayIntersect(otherPoint)) { return true; }
+ for (size_t i = 0; i < c.multiPoints.vector().size(); ++i) {
+ const vector<S2Cell>& innerCells = c.multiPoints.vector()[i]->cells;
+ for (size_t j = 0; j < innerCells.size(); ++j) {
+ if (innerCells[j].MayIntersect(otherPoint)) {
+ return true;
}
}
}
-
- return false;
- }
-
- bool polygonLineIntersection(const S2Polyline& line, const S2Polygon& poly) {
- // TODO(hk): modify s2 library to just let us know if it intersected
- // rather than returning all this.
- vector<S2Polyline*> clipped;
- poly.IntersectWithPolyline(&line, &clipped);
- bool ret = clipped.size() > 0;
- for (size_t i = 0; i < clipped.size(); ++i) delete clipped[i];
- return ret;
}
- bool GeometryContainer::intersects(const S2Polyline& otherLine) const {
- if (NULL != _point) {
- return otherLine.MayIntersect(_point->cell);
- } else if (NULL != _line) {
- return otherLine.Intersects(&_line->line);
- } else if (NULL != _polygon && NULL != _polygon->s2Polygon) {
- return polygonLineIntersection(otherLine, *_polygon->s2Polygon);
- } else if (NULL != _polygon && NULL != _polygon->bigPolygon) {
- return _polygon->bigPolygon->Intersects(otherLine);
- } else if (NULL != _multiPoint) {
- for (size_t i = 0; i < _multiPoint->cells.size(); ++i) {
- if (otherLine.MayIntersect(_multiPoint->cells[i])) { return true; }
+ return false;
+}
+
+bool polygonLineIntersection(const S2Polyline& line, const S2Polygon& poly) {
+ // TODO(hk): modify s2 library to just let us know if it intersected
+ // rather than returning all this.
+ vector<S2Polyline*> clipped;
+ poly.IntersectWithPolyline(&line, &clipped);
+ bool ret = clipped.size() > 0;
+ for (size_t i = 0; i < clipped.size(); ++i)
+ delete clipped[i];
+ return ret;
+}
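The manual delete loop above does the same job as the OwnedPointerVector pattern used by containsLine() earlier in this file. A sketch of that alternative (hypothetical function name, not part of this patch; the existing code's behavior is unchanged):

    // Sketch only: same intersection check with automatic cleanup of the clipped polylines.
    bool polygonLineIntersectionSketch(const S2Polyline& line, const S2Polygon& poly) {
        OwnedPointerVector<S2Polyline> clippedOwned;
        poly.IntersectWithPolyline(&line, &clippedOwned.mutableVector());
        return !clippedOwned.vector().empty();
    }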
+
+bool GeometryContainer::intersects(const S2Polyline& otherLine) const {
+ if (NULL != _point) {
+ return otherLine.MayIntersect(_point->cell);
+ } else if (NULL != _line) {
+ return otherLine.Intersects(&_line->line);
+ } else if (NULL != _polygon && NULL != _polygon->s2Polygon) {
+ return polygonLineIntersection(otherLine, *_polygon->s2Polygon);
+ } else if (NULL != _polygon && NULL != _polygon->bigPolygon) {
+ return _polygon->bigPolygon->Intersects(otherLine);
+ } else if (NULL != _multiPoint) {
+ for (size_t i = 0; i < _multiPoint->cells.size(); ++i) {
+ if (otherLine.MayIntersect(_multiPoint->cells[i])) {
+ return true;
}
- } else if (NULL != _multiLine) {
- for (size_t i = 0; i < _multiLine->lines.vector().size(); ++i) {
- if (otherLine.Intersects(_multiLine->lines.vector()[i])) {
- return true;
- }
+ }
+ } else if (NULL != _multiLine) {
+ for (size_t i = 0; i < _multiLine->lines.vector().size(); ++i) {
+ if (otherLine.Intersects(_multiLine->lines.vector()[i])) {
+ return true;
}
- } else if (NULL != _multiPolygon) {
- for (size_t i = 0; i < _multiPolygon->polygons.vector().size(); ++i) {
- if (polygonLineIntersection(otherLine, *_multiPolygon->polygons.vector()[i])) {
- return true;
- }
+ }
+ } else if (NULL != _multiPolygon) {
+ for (size_t i = 0; i < _multiPolygon->polygons.vector().size(); ++i) {
+ if (polygonLineIntersection(otherLine, *_multiPolygon->polygons.vector()[i])) {
+ return true;
}
- } else if (NULL != _geometryCollection) {
- const GeometryCollection& c = *_geometryCollection;
+ }
+ } else if (NULL != _geometryCollection) {
+ const GeometryCollection& c = *_geometryCollection;
- for (size_t i = 0; i < c.points.size(); ++i) {
- if (otherLine.MayIntersect(c.points[i].cell)) { return true; }
+ for (size_t i = 0; i < c.points.size(); ++i) {
+ if (otherLine.MayIntersect(c.points[i].cell)) {
+ return true;
}
+ }
- for (size_t i = 0; i < c.polygons.vector().size(); ++i) {
- if (polygonLineIntersection(otherLine, *c.polygons.vector()[i]->s2Polygon)) {
- return true;
- }
+ for (size_t i = 0; i < c.polygons.vector().size(); ++i) {
+ if (polygonLineIntersection(otherLine, *c.polygons.vector()[i]->s2Polygon)) {
+ return true;
}
+ }
- for (size_t i = 0; i < c.lines.vector().size(); ++i) {
- if (c.lines.vector()[i]->line.Intersects(&otherLine)) { return true; }
+ for (size_t i = 0; i < c.lines.vector().size(); ++i) {
+ if (c.lines.vector()[i]->line.Intersects(&otherLine)) {
+ return true;
}
+ }
- for (size_t i = 0; i < c.multiPolygons.vector().size(); ++i) {
- const vector<S2Polygon*>& innerPolys =
- c.multiPolygons.vector()[i]->polygons.vector();
- for (size_t j = 0; j < innerPolys.size(); ++j) {
- if (polygonLineIntersection(otherLine, *innerPolys[j])) {
- return true;
- }
+ for (size_t i = 0; i < c.multiPolygons.vector().size(); ++i) {
+ const vector<S2Polygon*>& innerPolys = c.multiPolygons.vector()[i]->polygons.vector();
+ for (size_t j = 0; j < innerPolys.size(); ++j) {
+ if (polygonLineIntersection(otherLine, *innerPolys[j])) {
+ return true;
}
}
+ }
- for (size_t i = 0; i < c.multiLines.vector().size(); ++i) {
- const vector<S2Polyline*>& innerLines =
- c.multiLines.vector()[i]->lines.vector();
- for (size_t j = 0; j < innerLines.size(); ++j) {
- if (innerLines[j]->Intersects(&otherLine)) { return true; }
+ for (size_t i = 0; i < c.multiLines.vector().size(); ++i) {
+ const vector<S2Polyline*>& innerLines = c.multiLines.vector()[i]->lines.vector();
+ for (size_t j = 0; j < innerLines.size(); ++j) {
+ if (innerLines[j]->Intersects(&otherLine)) {
+ return true;
}
}
+ }
- for (size_t i = 0; i < c.multiPoints.vector().size(); ++i) {
- const vector<S2Cell>& innerCells = c.multiPoints.vector()[i]->cells;
- for (size_t j = 0; j < innerCells.size(); ++j) {
- if (otherLine.MayIntersect(innerCells[j])) { return true; }
+ for (size_t i = 0; i < c.multiPoints.vector().size(); ++i) {
+ const vector<S2Cell>& innerCells = c.multiPoints.vector()[i]->cells;
+ for (size_t j = 0; j < innerCells.size(); ++j) {
+ if (otherLine.MayIntersect(innerCells[j])) {
+ return true;
}
}
}
-
- return false;
}
- // Does 'this' intersect with the provided polygon?
- bool GeometryContainer::intersects(const S2Polygon& otherPolygon) const {
- if (NULL != _point) {
- return otherPolygon.MayIntersect(_point->cell);
- } else if (NULL != _line) {
- return polygonLineIntersection(_line->line, otherPolygon);
- } else if (NULL != _polygon && NULL != _polygon->s2Polygon) {
- return otherPolygon.Intersects(_polygon->s2Polygon.get());
- } else if (NULL != _polygon && NULL != _polygon->bigPolygon) {
- return _polygon->bigPolygon->Intersects(otherPolygon);
- } else if (NULL != _multiPoint) {
- for (size_t i = 0; i < _multiPoint->cells.size(); ++i) {
- if (otherPolygon.MayIntersect(_multiPoint->cells[i])) { return true; }
+ return false;
+}
+
+// Does 'this' intersect with the provided polygon?
+bool GeometryContainer::intersects(const S2Polygon& otherPolygon) const {
+ if (NULL != _point) {
+ return otherPolygon.MayIntersect(_point->cell);
+ } else if (NULL != _line) {
+ return polygonLineIntersection(_line->line, otherPolygon);
+ } else if (NULL != _polygon && NULL != _polygon->s2Polygon) {
+ return otherPolygon.Intersects(_polygon->s2Polygon.get());
+ } else if (NULL != _polygon && NULL != _polygon->bigPolygon) {
+ return _polygon->bigPolygon->Intersects(otherPolygon);
+ } else if (NULL != _multiPoint) {
+ for (size_t i = 0; i < _multiPoint->cells.size(); ++i) {
+ if (otherPolygon.MayIntersect(_multiPoint->cells[i])) {
+ return true;
}
- } else if (NULL != _multiLine) {
- for (size_t i = 0; i < _multiLine->lines.vector().size(); ++i) {
- if (polygonLineIntersection(*_multiLine->lines.vector()[i], otherPolygon)) {
- return true;
- }
+ }
+ } else if (NULL != _multiLine) {
+ for (size_t i = 0; i < _multiLine->lines.vector().size(); ++i) {
+ if (polygonLineIntersection(*_multiLine->lines.vector()[i], otherPolygon)) {
+ return true;
}
- } else if (NULL != _multiPolygon) {
- for (size_t i = 0; i < _multiPolygon->polygons.vector().size(); ++i) {
- if (otherPolygon.Intersects(_multiPolygon->polygons.vector()[i])) {
- return true;
- }
+ }
+ } else if (NULL != _multiPolygon) {
+ for (size_t i = 0; i < _multiPolygon->polygons.vector().size(); ++i) {
+ if (otherPolygon.Intersects(_multiPolygon->polygons.vector()[i])) {
+ return true;
}
- } else if (NULL != _geometryCollection) {
- const GeometryCollection& c = *_geometryCollection;
+ }
+ } else if (NULL != _geometryCollection) {
+ const GeometryCollection& c = *_geometryCollection;
- for (size_t i = 0; i < c.points.size(); ++i) {
- if (otherPolygon.MayIntersect(c.points[i].cell)) { return true; }
+ for (size_t i = 0; i < c.points.size(); ++i) {
+ if (otherPolygon.MayIntersect(c.points[i].cell)) {
+ return true;
}
+ }
- for (size_t i = 0; i < c.polygons.vector().size(); ++i) {
- if (otherPolygon.Intersects(c.polygons.vector()[i]->s2Polygon.get())) {
- return true;
- }
+ for (size_t i = 0; i < c.polygons.vector().size(); ++i) {
+ if (otherPolygon.Intersects(c.polygons.vector()[i]->s2Polygon.get())) {
+ return true;
}
+ }
- for (size_t i = 0; i < c.lines.vector().size(); ++i) {
- if (polygonLineIntersection(c.lines.vector()[i]->line, otherPolygon)) {
- return true;
- }
+ for (size_t i = 0; i < c.lines.vector().size(); ++i) {
+ if (polygonLineIntersection(c.lines.vector()[i]->line, otherPolygon)) {
+ return true;
}
+ }
- for (size_t i = 0; i < c.multiPolygons.vector().size(); ++i) {
- const vector<S2Polygon*>& innerPolys =
- c.multiPolygons.vector()[i]->polygons.vector();
- for (size_t j = 0; j < innerPolys.size(); ++j) {
- if (otherPolygon.Intersects(innerPolys[j])) {
- return true;
- }
+ for (size_t i = 0; i < c.multiPolygons.vector().size(); ++i) {
+ const vector<S2Polygon*>& innerPolys = c.multiPolygons.vector()[i]->polygons.vector();
+ for (size_t j = 0; j < innerPolys.size(); ++j) {
+ if (otherPolygon.Intersects(innerPolys[j])) {
+ return true;
}
}
+ }
- for (size_t i = 0; i < c.multiLines.vector().size(); ++i) {
- const vector<S2Polyline*>& innerLines =
- c.multiLines.vector()[i]->lines.vector();
- for (size_t j = 0; j < innerLines.size(); ++j) {
- if (polygonLineIntersection(*innerLines[j], otherPolygon)) {
- return true;
- }
+ for (size_t i = 0; i < c.multiLines.vector().size(); ++i) {
+ const vector<S2Polyline*>& innerLines = c.multiLines.vector()[i]->lines.vector();
+ for (size_t j = 0; j < innerLines.size(); ++j) {
+ if (polygonLineIntersection(*innerLines[j], otherPolygon)) {
+ return true;
}
}
+ }
- for (size_t i = 0; i < c.multiPoints.vector().size(); ++i) {
- const vector<S2Cell>& innerCells = c.multiPoints.vector()[i]->cells;
- for (size_t j = 0; j < innerCells.size(); ++j) {
- if (otherPolygon.MayIntersect(innerCells[j])) {
- return true;
- }
+ for (size_t i = 0; i < c.multiPoints.vector().size(); ++i) {
+ const vector<S2Cell>& innerCells = c.multiPoints.vector()[i]->cells;
+ for (size_t j = 0; j < innerCells.size(); ++j) {
+ if (otherPolygon.MayIntersect(innerCells[j])) {
+ return true;
}
}
}
-
- return false;
}
- Status GeometryContainer::parseFromGeoJSON(const BSONObj& obj) {
- GeoParser::GeoJSONType type = GeoParser::parseGeoJSONType(obj);
-
- if (GeoParser::GEOJSON_UNKNOWN == type) {
- return Status(ErrorCodes::BadValue, str::stream() << "unknown GeoJSON type: " << obj);
- }
-
- Status status = Status::OK();
- vector<S2Region*> regions;
-
- if (GeoParser::GEOJSON_POINT == type) {
- _point.reset(new PointWithCRS());
- status = GeoParser::parseGeoJSONPoint(obj, _point.get());
- } else if (GeoParser::GEOJSON_LINESTRING == type) {
- _line.reset(new LineWithCRS());
- status = GeoParser::parseGeoJSONLine(obj, _line.get());
- } else if (GeoParser::GEOJSON_POLYGON == type) {
- _polygon.reset(new PolygonWithCRS());
- status = GeoParser::parseGeoJSONPolygon(obj, _polygon.get());
- } else if (GeoParser::GEOJSON_MULTI_POINT == type) {
- _multiPoint.reset(new MultiPointWithCRS());
- status = GeoParser::parseMultiPoint(obj, _multiPoint.get());
- for (size_t i = 0; i < _multiPoint->cells.size(); ++i) {
- regions.push_back(&_multiPoint->cells[i]);
- }
- } else if (GeoParser::GEOJSON_MULTI_LINESTRING == type) {
- _multiLine.reset(new MultiLineWithCRS());
- status = GeoParser::parseMultiLine(obj, _multiLine.get());
- for (size_t i = 0; i < _multiLine->lines.size(); ++i) {
- regions.push_back(_multiLine->lines[i]);
- }
- } else if (GeoParser::GEOJSON_MULTI_POLYGON == type) {
- _multiPolygon.reset(new MultiPolygonWithCRS());
- status = GeoParser::parseMultiPolygon(obj, _multiPolygon.get());
- for (size_t i = 0; i < _multiPolygon->polygons.size(); ++i) {
- regions.push_back(_multiPolygon->polygons[i]);
- }
- } else if (GeoParser::GEOJSON_GEOMETRY_COLLECTION == type) {
- _geometryCollection.reset(new GeometryCollection());
- status = GeoParser::parseGeometryCollection(obj, _geometryCollection.get());
-
- // Add regions
- for (size_t i = 0; i < _geometryCollection->points.size(); ++i) {
- regions.push_back(&_geometryCollection->points[i].cell);
- }
- for (size_t i = 0; i < _geometryCollection->lines.size(); ++i) {
- regions.push_back(&_geometryCollection->lines[i]->line);
- }
- for (size_t i = 0; i < _geometryCollection->polygons.size(); ++i) {
- regions.push_back(_geometryCollection->polygons[i]->s2Polygon.get());
- }
- for (size_t i = 0; i < _geometryCollection->multiPoints.size(); ++i) {
- MultiPointWithCRS* multiPoint = _geometryCollection->multiPoints[i];
- for (size_t j = 0; j < multiPoint->cells.size(); ++j) {
- regions.push_back(&multiPoint->cells[j]);
- }
- }
- for (size_t i = 0; i < _geometryCollection->multiLines.size(); ++i) {
- const MultiLineWithCRS* multiLine = _geometryCollection->multiLines[i];
- for (size_t j = 0; j < multiLine->lines.size(); ++j) {
- regions.push_back(multiLine->lines[j]);
- }
- }
- for (size_t i = 0; i < _geometryCollection->multiPolygons.size(); ++i) {
- const MultiPolygonWithCRS* multiPolygon = _geometryCollection->multiPolygons[i];
- for (size_t j = 0; j < multiPolygon->polygons.size(); ++j) {
- regions.push_back(multiPolygon->polygons[j]);
- }
- }
- } else {
- // Should not reach here.
- invariant(false);
- }
+ return false;
+}
- // Check parsing result.
- if (!status.isOK()) return status;
+Status GeometryContainer::parseFromGeoJSON(const BSONObj& obj) {
+ GeoParser::GeoJSONType type = GeoParser::parseGeoJSONType(obj);
- if (regions.size() > 0) {
- // S2RegionUnion doesn't take ownership of pointers.
- _s2Region.reset(new S2RegionUnion(&regions));
- }
+ if (GeoParser::GEOJSON_UNKNOWN == type) {
+ return Status(ErrorCodes::BadValue, str::stream() << "unknown GeoJSON type: " << obj);
+ }
- return Status::OK();
+ Status status = Status::OK();
+ vector<S2Region*> regions;
+
+ if (GeoParser::GEOJSON_POINT == type) {
+ _point.reset(new PointWithCRS());
+ status = GeoParser::parseGeoJSONPoint(obj, _point.get());
+ } else if (GeoParser::GEOJSON_LINESTRING == type) {
+ _line.reset(new LineWithCRS());
+ status = GeoParser::parseGeoJSONLine(obj, _line.get());
+ } else if (GeoParser::GEOJSON_POLYGON == type) {
+ _polygon.reset(new PolygonWithCRS());
+ status = GeoParser::parseGeoJSONPolygon(obj, _polygon.get());
+ } else if (GeoParser::GEOJSON_MULTI_POINT == type) {
+ _multiPoint.reset(new MultiPointWithCRS());
+ status = GeoParser::parseMultiPoint(obj, _multiPoint.get());
+ for (size_t i = 0; i < _multiPoint->cells.size(); ++i) {
+ regions.push_back(&_multiPoint->cells[i]);
+ }
+ } else if (GeoParser::GEOJSON_MULTI_LINESTRING == type) {
+ _multiLine.reset(new MultiLineWithCRS());
+ status = GeoParser::parseMultiLine(obj, _multiLine.get());
+ for (size_t i = 0; i < _multiLine->lines.size(); ++i) {
+ regions.push_back(_multiLine->lines[i]);
+ }
+ } else if (GeoParser::GEOJSON_MULTI_POLYGON == type) {
+ _multiPolygon.reset(new MultiPolygonWithCRS());
+ status = GeoParser::parseMultiPolygon(obj, _multiPolygon.get());
+ for (size_t i = 0; i < _multiPolygon->polygons.size(); ++i) {
+ regions.push_back(_multiPolygon->polygons[i]);
+ }
+ } else if (GeoParser::GEOJSON_GEOMETRY_COLLECTION == type) {
+ _geometryCollection.reset(new GeometryCollection());
+ status = GeoParser::parseGeometryCollection(obj, _geometryCollection.get());
+
+ // Add regions
+ for (size_t i = 0; i < _geometryCollection->points.size(); ++i) {
+ regions.push_back(&_geometryCollection->points[i].cell);
+ }
+ for (size_t i = 0; i < _geometryCollection->lines.size(); ++i) {
+ regions.push_back(&_geometryCollection->lines[i]->line);
+ }
+ for (size_t i = 0; i < _geometryCollection->polygons.size(); ++i) {
+ regions.push_back(_geometryCollection->polygons[i]->s2Polygon.get());
+ }
+ for (size_t i = 0; i < _geometryCollection->multiPoints.size(); ++i) {
+ MultiPointWithCRS* multiPoint = _geometryCollection->multiPoints[i];
+ for (size_t j = 0; j < multiPoint->cells.size(); ++j) {
+ regions.push_back(&multiPoint->cells[j]);
+ }
+ }
+ for (size_t i = 0; i < _geometryCollection->multiLines.size(); ++i) {
+ const MultiLineWithCRS* multiLine = _geometryCollection->multiLines[i];
+ for (size_t j = 0; j < multiLine->lines.size(); ++j) {
+ regions.push_back(multiLine->lines[j]);
+ }
+ }
+ for (size_t i = 0; i < _geometryCollection->multiPolygons.size(); ++i) {
+ const MultiPolygonWithCRS* multiPolygon = _geometryCollection->multiPolygons[i];
+ for (size_t j = 0; j < multiPolygon->polygons.size(); ++j) {
+ regions.push_back(multiPolygon->polygons[j]);
+ }
+ }
+ } else {
+ // Should not reach here.
+ invariant(false);
}
- // Examples:
- // { $geoWithin : { $geometry : <GeoJSON> } }
- // { $geoIntersects : { $geometry : <GeoJSON> } }
- // { $geoWithin : { $box : [[x1, y1], [x2, y2]] } }
- // { $geoWithin : { $polygon : [[x1, y1], [x1, y2], [x2, y2], [x2, y1]] } }
- // { $geoWithin : { $center : [[x1, y1], r], } }
- // { $geoWithin : { $centerSphere : [[x, y], radius] } }
- // { $geoIntersects : { $geometry : [1, 2] } }
- //
- // "elem" is the first element of the object after $geoWithin / $geoIntersects predicates.
- // i.e. { $box: ... }, { $geometry: ... }
- Status GeometryContainer::parseFromQuery(const BSONElement& elem) {
- // Check elem is an object and has geo specifier.
- GeoParser::GeoSpecifier specifier = GeoParser::parseGeoSpecifier(elem);
-
- if (GeoParser::UNKNOWN == specifier) {
- // Cannot parse geo specifier.
- return Status(ErrorCodes::BadValue, str::stream() << "unknown geo specifier: " << elem);
- }
-
- Status status = Status::OK();
- BSONObj obj = elem.Obj();
- if (GeoParser::BOX == specifier) {
- _box.reset(new BoxWithCRS());
- status = GeoParser::parseLegacyBox(obj, _box.get());
- } else if (GeoParser::CENTER == specifier) {
- _cap.reset(new CapWithCRS());
- status = GeoParser::parseLegacyCenter(obj, _cap.get());
- } else if (GeoParser::POLYGON == specifier) {
- _polygon.reset(new PolygonWithCRS());
- status = GeoParser::parseLegacyPolygon(obj, _polygon.get());
- } else if (GeoParser::CENTER_SPHERE == specifier) {
- _cap.reset(new CapWithCRS());
- status = GeoParser::parseCenterSphere(obj, _cap.get());
- } else if (GeoParser::GEOMETRY == specifier) {
- // GeoJSON geometry or legacy point
- if (Array == elem.type() || obj.firstElement().isNumber()) {
- // legacy point
- _point.reset(new PointWithCRS());
- status = GeoParser::parseQueryPoint(elem, _point.get());
- } else {
- // GeoJSON geometry
- status = parseFromGeoJSON(obj);
- }
- }
- if (!status.isOK()) return status;
+ // Check parsing result.
+ if (!status.isOK())
+ return status;
- // If we support R2 regions, build the region immediately
- if (hasR2Region()) {
- _r2Region.reset(new R2BoxRegion(this));
- }
+ if (regions.size() > 0) {
+ // S2RegionUnion doesn't take ownership of pointers.
+ _s2Region.reset(new S2RegionUnion(&regions));
+ }
- return status;
+ return Status::OK();
+}
+
+// Examples:
+// { $geoWithin : { $geometry : <GeoJSON> } }
+// { $geoIntersects : { $geometry : <GeoJSON> } }
+// { $geoWithin : { $box : [[x1, y1], [x2, y2]] } }
+// { $geoWithin : { $polygon : [[x1, y1], [x1, y2], [x2, y2], [x2, y1]] } }
+// { $geoWithin : { $center : [[x1, y1], r], } }
+// { $geoWithin : { $centerSphere : [[x, y], radius] } }
+// { $geoIntersects : { $geometry : [1, 2] } }
+//
+// "elem" is the first element of the object after $geoWithin / $geoIntersects predicates.
+// e.g. { $box: ... }, { $geometry: ... }
+Status GeometryContainer::parseFromQuery(const BSONElement& elem) {
+ // Check elem is an object and has geo specifier.
+ GeoParser::GeoSpecifier specifier = GeoParser::parseGeoSpecifier(elem);
+
+ if (GeoParser::UNKNOWN == specifier) {
+ // Cannot parse geo specifier.
+ return Status(ErrorCodes::BadValue, str::stream() << "unknown geo specifier: " << elem);
}
- // Examples:
- // { location: <GeoJSON> }
- // { location: [1, 2] }
- // { location: [1, 2, 3] }
- // { location: {x: 1, y: 2} }
- //
- // "elem" is the element that contains geo data. e.g. "location": [1, 2]
- // We need the type information to determine whether it's legacy point.
- Status GeometryContainer::parseFromStorage(const BSONElement& elem) {
- if (!elem.isABSONObj()) {
- return Status(ErrorCodes::BadValue,
- str::stream() << "geo element must be an array or object: " << elem);
- }
-
- BSONObj geoObj = elem.Obj();
- Status status = Status::OK();
- if (Array == elem.type() || geoObj.firstElement().isNumber()) {
- // Legacy point
- // { location: [1, 2] }
- // { location: [1, 2, 3] }
- // { location: {x: 1, y: 2} }
- // { location: {x: 1, y: 2, type: "Point" } }
+ Status status = Status::OK();
+ BSONObj obj = elem.Obj();
+ if (GeoParser::BOX == specifier) {
+ _box.reset(new BoxWithCRS());
+ status = GeoParser::parseLegacyBox(obj, _box.get());
+ } else if (GeoParser::CENTER == specifier) {
+ _cap.reset(new CapWithCRS());
+ status = GeoParser::parseLegacyCenter(obj, _cap.get());
+ } else if (GeoParser::POLYGON == specifier) {
+ _polygon.reset(new PolygonWithCRS());
+ status = GeoParser::parseLegacyPolygon(obj, _polygon.get());
+ } else if (GeoParser::CENTER_SPHERE == specifier) {
+ _cap.reset(new CapWithCRS());
+ status = GeoParser::parseCenterSphere(obj, _cap.get());
+ } else if (GeoParser::GEOMETRY == specifier) {
+ // GeoJSON geometry or legacy point
+ if (Array == elem.type() || obj.firstElement().isNumber()) {
+ // legacy point
_point.reset(new PointWithCRS());
- // Allow more than two dimensions or extra fields, like [1, 2, 3]
- status = GeoParser::parseLegacyPoint(elem, _point.get(), true);
+ status = GeoParser::parseQueryPoint(elem, _point.get());
} else {
- // GeoJSON
- // { location: { type: "Point", coordinates: [...] } }
- status = parseFromGeoJSON(elem.Obj());
+ // GeoJSON geometry
+ status = parseFromGeoJSON(obj);
}
- if (!status.isOK()) return status;
-
- // If we support R2 regions, build the region immediately
- if (hasR2Region()) _r2Region.reset(new R2BoxRegion(this));
-
- return Status::OK();
}
+ if (!status.isOK())
+ return status;
- string GeometryContainer::getDebugType() const {
- if (NULL != _point) { return "pt"; }
- else if (NULL != _line) { return "ln"; }
- else if (NULL != _box) { return "bx"; }
- else if (NULL != _polygon) { return "pl"; }
- else if (NULL != _cap ) { return "cc"; }
- else if (NULL != _multiPoint) { return "mp"; }
- else if (NULL != _multiLine) { return "ml"; }
- else if (NULL != _multiPolygon) { return "my"; }
- else if (NULL != _geometryCollection) { return "gc"; }
- else {
- invariant(false);
- return "";
- }
+ // If we support R2 regions, build the region immediately
+ if (hasR2Region()) {
+ _r2Region.reset(new R2BoxRegion(this));
}
- CRS GeometryContainer::getNativeCRS() const {
-
- // TODO: Fix geometry collection reporting when/if we support multiple CRSes
-
- if (NULL != _point) { return _point->crs; }
- else if (NULL != _line) { return _line->crs; }
- else if (NULL != _box) { return _box->crs; }
- else if (NULL != _polygon) { return _polygon->crs; }
- else if (NULL != _cap ) { return _cap->crs; }
- else if (NULL != _multiPoint) { return _multiPoint->crs; }
- else if (NULL != _multiLine) { return _multiLine->crs; }
- else if (NULL != _multiPolygon) { return _multiPolygon->crs; }
- else if (NULL != _geometryCollection) { return SPHERE; }
- else {
- invariant(false);
- return FLAT;
- }
+ return status;
+}
+
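For reference, a minimal sketch of driving parseFromQuery() from a $geoWithin-style predicate. The include paths, the fromjson() helper, and the parseWithinOperand() wrapper are assumptions for illustration, not part of this change; the coordinates are arbitrary.

#include "mongo/db/geo/geometry_container.h"
#include "mongo/db/json.h"  // assumed location of fromjson()

using namespace mongo;

// Hypothetical helper: parse the operand of a $geoWithin / $geoIntersects predicate,
// i.e. the object whose first element is $geometry / $box / $center / ...
bool parseWithinOperand(GeometryContainer* container) {
    BSONObj operand = fromjson(
        "{ $geometry: { type: \"Polygon\","
        "  coordinates: [ [ [0, 0], [3, 0], [3, 3], [0, 3], [0, 0] ] ] } }");
    // parseFromQuery() expects the first element after the predicate, here $geometry.
    return container->parseFromQuery(operand.firstElement()).isOK();
}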
+// Examples:
+// { location: <GeoJSON> }
+// { location: [1, 2] }
+// { location: [1, 2, 3] }
+// { location: {x: 1, y: 2} }
+//
+// "elem" is the element that contains geo data. e.g. "location": [1, 2]
+// We need the type information to determine whether it's legacy point.
+Status GeometryContainer::parseFromStorage(const BSONElement& elem) {
+ if (!elem.isABSONObj()) {
+ return Status(ErrorCodes::BadValue,
+ str::stream() << "geo element must be an array or object: " << elem);
}
- bool GeometryContainer::supportsProject(CRS otherCRS) const {
-
- // TODO: Fix geometry collection reporting when/if we support more CRSes
-
- if (NULL != _point) {
- return ShapeProjection::supportsProject(*_point, otherCRS);
- }
- else if (NULL != _line) { return _line->crs == otherCRS; }
- else if (NULL != _box) { return _box->crs == otherCRS; }
- else if (NULL != _polygon) {
- return ShapeProjection::supportsProject(*_polygon, otherCRS);
- }
- else if (NULL != _cap ) { return _cap->crs == otherCRS; }
- else if (NULL != _multiPoint) { return _multiPoint->crs == otherCRS; }
- else if (NULL != _multiLine) { return _multiLine->crs == otherCRS; }
- else if (NULL != _multiPolygon) { return _multiPolygon->crs == otherCRS; }
- else {
- invariant(NULL != _geometryCollection);
- return SPHERE == otherCRS;
- }
+ BSONObj geoObj = elem.Obj();
+ Status status = Status::OK();
+ if (Array == elem.type() || geoObj.firstElement().isNumber()) {
+ // Legacy point
+ // { location: [1, 2] }
+ // { location: [1, 2, 3] }
+ // { location: {x: 1, y: 2} }
+ // { location: {x: 1, y: 2, type: "Point" } }
+ _point.reset(new PointWithCRS());
+ // Allow more than two dimensions or extra fields, like [1, 2, 3]
+ status = GeoParser::parseLegacyPoint(elem, _point.get(), true);
+ } else {
+ // GeoJSON
+ // { location: { type: "Point", coordinates: [...] } }
+ status = parseFromGeoJSON(elem.Obj());
}
+ if (!status.isOK())
+ return status;
- void GeometryContainer::projectInto(CRS otherCRS) {
+ // If we support R2 regions, build the region immediately
+ if (hasR2Region())
+ _r2Region.reset(new R2BoxRegion(this));
+
+ return Status::OK();
+}
+
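As a usage sketch for the storage path above (include paths and the fromjson() helper are assumptions; the field values are illustrative), both the legacy and the GeoJSON encodings land in the same container:

#include "mongo/db/geo/geometry_container.h"
#include "mongo/db/json.h"  // assumed location of fromjson()

using namespace mongo;

void parseStoredExamples() {
    // Legacy point: {x: 1, y: 2} (extra fields/dimensions are tolerated for stored data).
    BSONObj legacyDoc = fromjson("{ location: { x: 1, y: 2 } }");
    GeometryContainer legacy;
    bool legacyOk = legacy.parseFromStorage(legacyDoc["location"]).isOK();  // expected: true

    // GeoJSON point stored under the same field name.
    BSONObj geoJsonDoc = fromjson("{ location: { type: \"Point\", coordinates: [1, 2] } }");
    GeometryContainer geoJson;
    bool geoJsonOk = geoJson.parseFromStorage(geoJsonDoc["location"]).isOK();  // expected: true

    (void)legacyOk;
    (void)geoJsonOk;
}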
+string GeometryContainer::getDebugType() const {
+ if (NULL != _point) {
+ return "pt";
+ } else if (NULL != _line) {
+ return "ln";
+ } else if (NULL != _box) {
+ return "bx";
+ } else if (NULL != _polygon) {
+ return "pl";
+ } else if (NULL != _cap) {
+ return "cc";
+ } else if (NULL != _multiPoint) {
+ return "mp";
+ } else if (NULL != _multiLine) {
+ return "ml";
+ } else if (NULL != _multiPolygon) {
+ return "my";
+ } else if (NULL != _geometryCollection) {
+ return "gc";
+ } else {
+ invariant(false);
+ return "";
+ }
+}
+
+CRS GeometryContainer::getNativeCRS() const {
+ // TODO: Fix geometry collection reporting when/if we support multiple CRSes
+
+ if (NULL != _point) {
+ return _point->crs;
+ } else if (NULL != _line) {
+ return _line->crs;
+ } else if (NULL != _box) {
+ return _box->crs;
+ } else if (NULL != _polygon) {
+ return _polygon->crs;
+ } else if (NULL != _cap) {
+ return _cap->crs;
+ } else if (NULL != _multiPoint) {
+ return _multiPoint->crs;
+ } else if (NULL != _multiLine) {
+ return _multiLine->crs;
+ } else if (NULL != _multiPolygon) {
+ return _multiPolygon->crs;
+ } else if (NULL != _geometryCollection) {
+ return SPHERE;
+ } else {
+ invariant(false);
+ return FLAT;
+ }
+}
+
+bool GeometryContainer::supportsProject(CRS otherCRS) const {
+ // TODO: Fix geometry collection reporting when/if we support more CRSes
+
+ if (NULL != _point) {
+ return ShapeProjection::supportsProject(*_point, otherCRS);
+ } else if (NULL != _line) {
+ return _line->crs == otherCRS;
+ } else if (NULL != _box) {
+ return _box->crs == otherCRS;
+ } else if (NULL != _polygon) {
+ return ShapeProjection::supportsProject(*_polygon, otherCRS);
+ } else if (NULL != _cap) {
+ return _cap->crs == otherCRS;
+ } else if (NULL != _multiPoint) {
+ return _multiPoint->crs == otherCRS;
+ } else if (NULL != _multiLine) {
+ return _multiLine->crs == otherCRS;
+ } else if (NULL != _multiPolygon) {
+ return _multiPolygon->crs == otherCRS;
+ } else {
+ invariant(NULL != _geometryCollection);
+ return SPHERE == otherCRS;
+ }
+}
- if (getNativeCRS() == otherCRS) return;
+void GeometryContainer::projectInto(CRS otherCRS) {
+ if (getNativeCRS() == otherCRS)
+ return;
- if (NULL != _polygon) {
- ShapeProjection::projectInto(_polygon.get(), otherCRS);
- return;
- }
-
- invariant(NULL != _point);
- ShapeProjection::projectInto(_point.get(), otherCRS);
+ if (NULL != _polygon) {
+ ShapeProjection::projectInto(_polygon.get(), otherCRS);
+ return;
}
- static double s2MinDistanceRad(const S2Point& s2Point, const MultiPointWithCRS& s2MultiPoint) {
+ invariant(NULL != _point);
+ ShapeProjection::projectInto(_point.get(), otherCRS);
+}
- double minDistance = -1;
- for (vector<S2Point>::const_iterator it = s2MultiPoint.points.begin();
- it != s2MultiPoint.points.end(); ++it) {
-
- double nextDistance = S2Distance::distanceRad(s2Point, *it);
- if (minDistance < 0 || nextDistance < minDistance) {
- minDistance = nextDistance;
- }
+static double s2MinDistanceRad(const S2Point& s2Point, const MultiPointWithCRS& s2MultiPoint) {
+ double minDistance = -1;
+ for (vector<S2Point>::const_iterator it = s2MultiPoint.points.begin();
+ it != s2MultiPoint.points.end();
+ ++it) {
+ double nextDistance = S2Distance::distanceRad(s2Point, *it);
+ if (minDistance < 0 || nextDistance < minDistance) {
+ minDistance = nextDistance;
}
-
- return minDistance;
}
- static double s2MinDistanceRad(const S2Point& s2Point, const MultiLineWithCRS& s2MultiLine) {
-
- double minDistance = -1;
- for (vector<S2Polyline*>::const_iterator it = s2MultiLine.lines.vector().begin();
- it != s2MultiLine.lines.vector().end(); ++it) {
+ return minDistance;
+}
- double nextDistance = S2Distance::minDistanceRad(s2Point, **it);
- if (minDistance < 0 || nextDistance < minDistance) {
- minDistance = nextDistance;
- }
+static double s2MinDistanceRad(const S2Point& s2Point, const MultiLineWithCRS& s2MultiLine) {
+ double minDistance = -1;
+ for (vector<S2Polyline*>::const_iterator it = s2MultiLine.lines.vector().begin();
+ it != s2MultiLine.lines.vector().end();
+ ++it) {
+ double nextDistance = S2Distance::minDistanceRad(s2Point, **it);
+ if (minDistance < 0 || nextDistance < minDistance) {
+ minDistance = nextDistance;
}
-
- return minDistance;
}
- static double s2MinDistanceRad(const S2Point& s2Point, const MultiPolygonWithCRS& s2MultiPolygon) {
+ return minDistance;
+}
- double minDistance = -1;
- for (vector<S2Polygon*>::const_iterator it = s2MultiPolygon.polygons.vector().begin();
- it != s2MultiPolygon.polygons.vector().end(); ++it) {
-
- double nextDistance = S2Distance::minDistanceRad(s2Point, **it);
- if (minDistance < 0 || nextDistance < minDistance) {
- minDistance = nextDistance;
- }
+static double s2MinDistanceRad(const S2Point& s2Point, const MultiPolygonWithCRS& s2MultiPolygon) {
+ double minDistance = -1;
+ for (vector<S2Polygon*>::const_iterator it = s2MultiPolygon.polygons.vector().begin();
+ it != s2MultiPolygon.polygons.vector().end();
+ ++it) {
+ double nextDistance = S2Distance::minDistanceRad(s2Point, **it);
+ if (minDistance < 0 || nextDistance < minDistance) {
+ minDistance = nextDistance;
}
-
- return minDistance;
}
- static double s2MinDistanceRad(const S2Point& s2Point,
- const GeometryCollection& geometryCollection) {
-
- double minDistance = -1;
- for (vector<PointWithCRS>::const_iterator it = geometryCollection.points.begin();
- it != geometryCollection.points.end(); ++it) {
+ return minDistance;
+}
- invariant(SPHERE == it->crs);
- double nextDistance = S2Distance::distanceRad(s2Point, it->point);
- if (minDistance < 0 || nextDistance < minDistance) {
- minDistance = nextDistance;
- }
+static double s2MinDistanceRad(const S2Point& s2Point,
+ const GeometryCollection& geometryCollection) {
+ double minDistance = -1;
+ for (vector<PointWithCRS>::const_iterator it = geometryCollection.points.begin();
+ it != geometryCollection.points.end();
+ ++it) {
+ invariant(SPHERE == it->crs);
+ double nextDistance = S2Distance::distanceRad(s2Point, it->point);
+ if (minDistance < 0 || nextDistance < minDistance) {
+ minDistance = nextDistance;
}
+ }
- for (vector<LineWithCRS*>::const_iterator it = geometryCollection.lines.vector().begin();
- it != geometryCollection.lines.vector().end(); ++it) {
-
- invariant(SPHERE == (*it)->crs);
- double nextDistance = S2Distance::minDistanceRad(s2Point, (*it)->line);
- if (minDistance < 0 || nextDistance < minDistance) {
- minDistance = nextDistance;
- }
+ for (vector<LineWithCRS*>::const_iterator it = geometryCollection.lines.vector().begin();
+ it != geometryCollection.lines.vector().end();
+ ++it) {
+ invariant(SPHERE == (*it)->crs);
+ double nextDistance = S2Distance::minDistanceRad(s2Point, (*it)->line);
+ if (minDistance < 0 || nextDistance < minDistance) {
+ minDistance = nextDistance;
}
+ }
- for (vector<PolygonWithCRS*>::const_iterator it = geometryCollection.polygons.vector().begin();
- it != geometryCollection.polygons.vector().end(); ++it) {
-
- invariant(SPHERE == (*it)->crs);
- // We don't support distances for big polygons yet.
- invariant(NULL != (*it)->s2Polygon);
- double nextDistance = S2Distance::minDistanceRad(s2Point, *((*it)->s2Polygon));
- if (minDistance < 0 || nextDistance < minDistance) {
- minDistance = nextDistance;
- }
+ for (vector<PolygonWithCRS*>::const_iterator it = geometryCollection.polygons.vector().begin();
+ it != geometryCollection.polygons.vector().end();
+ ++it) {
+ invariant(SPHERE == (*it)->crs);
+ // We don't support distances for big polygons yet.
+ invariant(NULL != (*it)->s2Polygon);
+ double nextDistance = S2Distance::minDistanceRad(s2Point, *((*it)->s2Polygon));
+ if (minDistance < 0 || nextDistance < minDistance) {
+ minDistance = nextDistance;
}
+ }
- for (vector<MultiPointWithCRS*>::const_iterator it = geometryCollection.multiPoints.vector()
- .begin(); it != geometryCollection.multiPoints.vector().end(); ++it) {
-
- double nextDistance = s2MinDistanceRad(s2Point, **it);
- if (minDistance < 0 || nextDistance < minDistance) {
- minDistance = nextDistance;
- }
+ for (vector<MultiPointWithCRS*>::const_iterator it =
+ geometryCollection.multiPoints.vector().begin();
+ it != geometryCollection.multiPoints.vector().end();
+ ++it) {
+ double nextDistance = s2MinDistanceRad(s2Point, **it);
+ if (minDistance < 0 || nextDistance < minDistance) {
+ minDistance = nextDistance;
}
+ }
- for (vector<MultiLineWithCRS*>::const_iterator it = geometryCollection.multiLines.vector()
- .begin(); it != geometryCollection.multiLines.vector().end(); ++it) {
-
- double nextDistance = s2MinDistanceRad(s2Point, **it);
- if (minDistance < 0 || nextDistance < minDistance) {
- minDistance = nextDistance;
- }
+ for (vector<MultiLineWithCRS*>::const_iterator it =
+ geometryCollection.multiLines.vector().begin();
+ it != geometryCollection.multiLines.vector().end();
+ ++it) {
+ double nextDistance = s2MinDistanceRad(s2Point, **it);
+ if (minDistance < 0 || nextDistance < minDistance) {
+ minDistance = nextDistance;
}
+ }
- for (vector<MultiPolygonWithCRS*>::const_iterator it = geometryCollection.multiPolygons
- .vector().begin(); it != geometryCollection.multiPolygons.vector().end(); ++it) {
-
- double nextDistance = s2MinDistanceRad(s2Point, **it);
- if (minDistance < 0 || nextDistance < minDistance) {
- minDistance = nextDistance;
- }
+ for (vector<MultiPolygonWithCRS*>::const_iterator it =
+ geometryCollection.multiPolygons.vector().begin();
+ it != geometryCollection.multiPolygons.vector().end();
+ ++it) {
+ double nextDistance = s2MinDistanceRad(s2Point, **it);
+ if (minDistance < 0 || nextDistance < minDistance) {
+ minDistance = nextDistance;
}
-
- return minDistance;
}
- double GeometryContainer::minDistance(const PointWithCRS& otherPoint) const {
-
- const CRS crs = getNativeCRS();
+ return minDistance;
+}
- if (FLAT == crs) {
+double GeometryContainer::minDistance(const PointWithCRS& otherPoint) const {
+ const CRS crs = getNativeCRS();
- invariant(NULL != _point);
+ if (FLAT == crs) {
+ invariant(NULL != _point);
- if (FLAT == otherPoint.crs) {
- return distance(_point->oldPoint, otherPoint.oldPoint);
- }
- else {
- S2LatLng latLng(otherPoint.point);
- return distance(_point->oldPoint,
- Point(latLng.lng().degrees(), latLng.lat().degrees()));
- }
+ if (FLAT == otherPoint.crs) {
+ return distance(_point->oldPoint, otherPoint.oldPoint);
+ } else {
+ S2LatLng latLng(otherPoint.point);
+ return distance(_point->oldPoint,
+ Point(latLng.lng().degrees(), latLng.lat().degrees()));
}
- else {
- invariant(SPHERE == crs);
-
- double minDistance = -1;
+ } else {
+ invariant(SPHERE == crs);
- if (NULL != _point) {
- minDistance = S2Distance::distanceRad(otherPoint.point, _point->point);
- }
- else if (NULL != _line) {
- minDistance = S2Distance::minDistanceRad(otherPoint.point, _line->line);
- }
- else if (NULL != _polygon) {
- // We don't support distances for big polygons yet.
- invariant(NULL != _polygon->s2Polygon);
- minDistance = S2Distance::minDistanceRad(otherPoint.point, *_polygon->s2Polygon);
- }
- else if (NULL != _cap) {
- minDistance = S2Distance::minDistanceRad(otherPoint.point, _cap->cap);
- }
- else if (NULL != _multiPoint) {
- minDistance = s2MinDistanceRad(otherPoint.point, *_multiPoint);
- }
- else if (NULL != _multiLine) {
- minDistance = s2MinDistanceRad(otherPoint.point, *_multiLine);
- }
- else if (NULL != _multiPolygon) {
- minDistance = s2MinDistanceRad(otherPoint.point, *_multiPolygon);
- }
- else if (NULL != _geometryCollection) {
- minDistance = s2MinDistanceRad(otherPoint.point, *_geometryCollection);
- }
+ double minDistance = -1;
- invariant(minDistance != -1);
- return minDistance * kRadiusOfEarthInMeters;
+ if (NULL != _point) {
+ minDistance = S2Distance::distanceRad(otherPoint.point, _point->point);
+ } else if (NULL != _line) {
+ minDistance = S2Distance::minDistanceRad(otherPoint.point, _line->line);
+ } else if (NULL != _polygon) {
+ // We don't support distances for big polygons yet.
+ invariant(NULL != _polygon->s2Polygon);
+ minDistance = S2Distance::minDistanceRad(otherPoint.point, *_polygon->s2Polygon);
+ } else if (NULL != _cap) {
+ minDistance = S2Distance::minDistanceRad(otherPoint.point, _cap->cap);
+ } else if (NULL != _multiPoint) {
+ minDistance = s2MinDistanceRad(otherPoint.point, *_multiPoint);
+ } else if (NULL != _multiLine) {
+ minDistance = s2MinDistanceRad(otherPoint.point, *_multiLine);
+ } else if (NULL != _multiPolygon) {
+ minDistance = s2MinDistanceRad(otherPoint.point, *_multiPolygon);
+ } else if (NULL != _geometryCollection) {
+ minDistance = s2MinDistanceRad(otherPoint.point, *_geometryCollection);
}
- }
- const CapWithCRS* GeometryContainer::getCapGeometryHack() const {
- return _cap.get();
+ invariant(minDistance != -1);
+ return minDistance * kRadiusOfEarthInMeters;
}
+}
+
+const CapWithCRS* GeometryContainer::getCapGeometryHack() const {
+ return _cap.get();
+}
} // namespace mongo
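A small usage sketch of the distance path above (include paths, the fromjson() helper and the metersToStoredLine() wrapper are assumptions; coordinates are illustrative). For a SPHERE-CRS container the result is already scaled by kRadiusOfEarthInMeters, so the caller gets meters back; for FLAT geometry it is plain coordinate-system distance.

#include "mongo/db/geo/geometry_container.h"
#include "mongo/db/geo/geoparser.h"
#include "mongo/db/json.h"  // assumed location of fromjson()

using namespace mongo;

double metersToStoredLine() {
    GeometryContainer container;
    BSONObj doc = fromjson(
        "{ loc: { type: \"LineString\", coordinates: [ [0, 0], [1, 0] ] } }");
    if (!container.parseFromStorage(doc["loc"]).isOK())
        return -1;

    // A point one degree of latitude away from the stored line.
    PointWithCRS point;
    BSONObj pointDoc = fromjson("{ pt: { type: \"Point\", coordinates: [0, 1] } }");
    if (!GeoParser::parseStoredPoint(pointDoc["pt"], &point).isOK())
        return -1;

    // Both geometries are in the SPHERE CRS, so this is roughly 111 km expressed in meters.
    return container.minDistance(point);
}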
diff --git a/src/mongo/db/geo/geometry_container.h b/src/mongo/db/geo/geometry_container.h
index 05ca1ed2962..95dc8525440 100644
--- a/src/mongo/db/geo/geometry_container.h
+++ b/src/mongo/db/geo/geometry_container.h
@@ -36,126 +36,125 @@
namespace mongo {
- class GeometryContainer {
- MONGO_DISALLOW_COPYING(GeometryContainer);
- public:
-
- /**
- * Creates an empty geometry container which may then be loaded from BSON or directly.
- */
- GeometryContainer();
-
- /**
- * Loads an empty GeometryContainer from query.
- */
- Status parseFromQuery(const BSONElement& elem);
-
- /**
- * Loads an empty GeometryContainer from stored geometry.
- */
- Status parseFromStorage(const BSONElement& elem);
-
- /**
- * Is the geometry any of {Point, Line, Polygon}?
- */
- bool isSimpleContainer() const;
-
- /**
- * Reports the CRS of the contained geometry.
- * TODO: Rework once we have collections of multiple CRSes
- */
- CRS getNativeCRS() const;
-
- /**
- * Whether or not this geometry can be projected into a particular CRS
- */
- bool supportsProject(CRS crs) const;
-
- /**
- * Projects the current geometry into the supplied crs.
- * It is an error to call this function if canProjectInto(crs) is false.
- */
- void projectInto(CRS crs);
-
- /**
- * Minimum distance between this geometry and the supplied point.
- * TODO: Rework and generalize to full GeometryContainer distance
- */
- double minDistance(const PointWithCRS& point) const;
-
- /**
- * Only polygons (and aggregate types thereof) support contains.
- */
- bool supportsContains() const;
-
- /**
- * To check containment, we iterate over the otherContainer's geometries. If we don't
- * contain any sub-geometry of the otherContainer, the otherContainer is not contained
- * within us. If each sub-geometry of the otherContainer is contained within us, we contain
- * the entire otherContainer.
- */
- bool contains(const GeometryContainer& otherContainer) const;
-
- /**
- * To check intersection, we iterate over the otherContainer's geometries, checking each
- * geometry to see if we intersect it. If we intersect one geometry, we intersect the
- * entire other container.
- */
- bool intersects(const GeometryContainer& otherContainer) const;
-
- // Region which can be used to generate a covering of the query object in the S2 space.
- bool hasS2Region() const;
- const S2Region& getS2Region() const;
-
- // Region which can be used to generate a covering of the query object in euclidean space.
- bool hasR2Region() const;
- const R2Region& getR2Region() const;
-
- // Returns a string related to the type of the geometry (for debugging queries)
- std::string getDebugType() const;
-
- // Needed for 2D wrapping check (for now)
- // TODO: Remove these hacks
- const CapWithCRS* getCapGeometryHack() const;
-
- private:
-
- class R2BoxRegion;
-
- Status parseFromGeoJSON(const BSONObj& obj);
-
- // Does 'this' intersect with the provided type?
- bool intersects(const S2Cell& otherPoint) const;
- bool intersects(const S2Polyline& otherLine) const;
- bool intersects(const S2Polygon& otherPolygon) const;
- // These three just iterate over the geometries and call the 3 methods above.
- bool intersects(const MultiPointWithCRS& otherMultiPoint) const;
- bool intersects(const MultiLineWithCRS& otherMultiLine) const;
- bool intersects(const MultiPolygonWithCRS& otherMultiPolygon) const;
-
- // Used when 'this' has a polygon somewhere, either in _polygon or _multiPolygon or
- // _geometryCollection.
- bool contains(const S2Cell& otherCell, const S2Point& otherPoint) const;
- bool contains(const S2Polyline& otherLine) const;
- bool contains(const S2Polygon& otherPolygon) const;
-
- // Only one of these shared_ptrs should be non-NULL. S2Region is a
- // superclass but it only supports testing against S2Cells. We need
- // the most specific class we can get.
- std::unique_ptr<PointWithCRS> _point;
- std::unique_ptr<LineWithCRS> _line;
- std::unique_ptr<BoxWithCRS> _box;
- std::unique_ptr<PolygonWithCRS> _polygon;
- std::unique_ptr<CapWithCRS> _cap;
- std::unique_ptr<MultiPointWithCRS> _multiPoint;
- std::unique_ptr<MultiLineWithCRS> _multiLine;
- std::unique_ptr<MultiPolygonWithCRS> _multiPolygon;
- std::unique_ptr<GeometryCollection> _geometryCollection;
-
- // Cached for use during covering calculations
- // TODO: _s2Region is currently generated immediately - don't necessarily need to do this
- std::unique_ptr<S2RegionUnion> _s2Region;
- std::unique_ptr<R2Region> _r2Region;
- };
-
-} // namespace mongo
+class GeometryContainer {
+ MONGO_DISALLOW_COPYING(GeometryContainer);
+
+public:
+ /**
+ * Creates an empty geometry container which may then be loaded from BSON or directly.
+ */
+ GeometryContainer();
+
+ /**
+ * Loads an empty GeometryContainer from query.
+ */
+ Status parseFromQuery(const BSONElement& elem);
+
+ /**
+ * Loads an empty GeometryContainer from stored geometry.
+ */
+ Status parseFromStorage(const BSONElement& elem);
+
+ /**
+ * Is the geometry any of {Point, Line, Polygon}?
+ */
+ bool isSimpleContainer() const;
+
+ /**
+ * Reports the CRS of the contained geometry.
+ * TODO: Rework once we have collections of multiple CRSes
+ */
+ CRS getNativeCRS() const;
+
+ /**
+ * Whether or not this geometry can be projected into a particular CRS
+ */
+ bool supportsProject(CRS crs) const;
+
+ /**
+ * Projects the current geometry into the supplied crs.
+ * It is an error to call this function if canProjectInto(crs) is false.
+ */
+ void projectInto(CRS crs);
+
+ /**
+ * Minimum distance between this geometry and the supplied point.
+ * TODO: Rework and generalize to full GeometryContainer distance
+ */
+ double minDistance(const PointWithCRS& point) const;
+
+ /**
+ * Only polygons (and aggregate types thereof) support contains.
+ */
+ bool supportsContains() const;
+
+ /**
+ * To check containment, we iterate over the otherContainer's geometries. If any
+ * sub-geometry of the otherContainer is not contained within us, the otherContainer is not
+ * contained within us. If every sub-geometry of the otherContainer is contained within us,
+ * we contain the entire otherContainer.
+ */
+ bool contains(const GeometryContainer& otherContainer) const;
+
+ /**
+ * To check intersection, we iterate over the otherContainer's geometries, checking each
+ * geometry to see if we intersect it. If we intersect one geometry, we intersect the
+ * entire other container.
+ */
+ bool intersects(const GeometryContainer& otherContainer) const;
+
+ // Region which can be used to generate a covering of the query object in the S2 space.
+ bool hasS2Region() const;
+ const S2Region& getS2Region() const;
+
+ // Region which can be used to generate a covering of the query object in euclidean space.
+ bool hasR2Region() const;
+ const R2Region& getR2Region() const;
+
+ // Returns a string related to the type of the geometry (for debugging queries)
+ std::string getDebugType() const;
+
+ // Needed for 2D wrapping check (for now)
+ // TODO: Remove these hacks
+ const CapWithCRS* getCapGeometryHack() const;
+
+private:
+ class R2BoxRegion;
+
+ Status parseFromGeoJSON(const BSONObj& obj);
+
+ // Does 'this' intersect with the provided type?
+ bool intersects(const S2Cell& otherPoint) const;
+ bool intersects(const S2Polyline& otherLine) const;
+ bool intersects(const S2Polygon& otherPolygon) const;
+ // These three just iterate over the geometries and call the 3 methods above.
+ bool intersects(const MultiPointWithCRS& otherMultiPoint) const;
+ bool intersects(const MultiLineWithCRS& otherMultiLine) const;
+ bool intersects(const MultiPolygonWithCRS& otherMultiPolygon) const;
+
+ // Used when 'this' has a polygon somewhere, either in _polygon or _multiPolygon or
+ // _geometryCollection.
+ bool contains(const S2Cell& otherCell, const S2Point& otherPoint) const;
+ bool contains(const S2Polyline& otherLine) const;
+ bool contains(const S2Polygon& otherPolygon) const;
+
+ // Only one of these unique_ptrs should be non-NULL. S2Region is a
+ // superclass but it only supports testing against S2Cells. We need
+ // the most specific class we can get.
+ std::unique_ptr<PointWithCRS> _point;
+ std::unique_ptr<LineWithCRS> _line;
+ std::unique_ptr<BoxWithCRS> _box;
+ std::unique_ptr<PolygonWithCRS> _polygon;
+ std::unique_ptr<CapWithCRS> _cap;
+ std::unique_ptr<MultiPointWithCRS> _multiPoint;
+ std::unique_ptr<MultiLineWithCRS> _multiLine;
+ std::unique_ptr<MultiPolygonWithCRS> _multiPolygon;
+ std::unique_ptr<GeometryCollection> _geometryCollection;
+
+ // Cached for use during covering calculations
+ // TODO: _s2Region is currently generated immediately - don't necessarily need to do this
+ std::unique_ptr<S2RegionUnion> _s2Region;
+ std::unique_ptr<R2Region> _r2Region;
+};
+
+} // namespace mongo
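A usage sketch of the containment/intersection contract documented above (include paths and the fromjson() helper are assumptions; the geometries are illustrative): contains() needs every sub-geometry of the argument to be covered, while intersects() needs only one overlapping sub-geometry.

#include "mongo/db/geo/geometry_container.h"
#include "mongo/db/json.h"  // assumed location of fromjson()

using namespace mongo;

void containsVersusIntersects() {
    GeometryContainer region;
    BSONObj query = fromjson(
        "{ $geometry: { type: \"Polygon\","
        "  coordinates: [ [ [0, 0], [5, 0], [5, 5], [0, 5], [0, 0] ] ] } }");
    if (!region.parseFromQuery(query.firstElement()).isOK())
        return;

    // One point inside the polygon, one outside.
    GeometryContainer points;
    BSONObj stored = fromjson(
        "{ loc: { type: \"MultiPoint\", coordinates: [ [1, 1], [9, 9] ] } }");
    if (!points.parseFromStorage(stored["loc"]).isOK())
        return;

    bool covered = region.contains(points);     // false: [9, 9] falls outside the polygon
    bool overlaps = region.intersects(points);  // true: [1, 1] falls inside it
    (void)covered;
    (void)overlaps;
}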
diff --git a/src/mongo/db/geo/geoparser.cpp b/src/mongo/db/geo/geoparser.cpp
index 95c0f33ac57..1f374f42215 100644
--- a/src/mongo/db/geo/geoparser.cpp
+++ b/src/mongo/db/geo/geoparser.cpp
@@ -44,683 +44,745 @@
namespace mongo {
- using std::unique_ptr;
- using std::stringstream;
-
- // This field must be present, and...
- static const string GEOJSON_TYPE = "type";
- // Have one of these values:
- static const string GEOJSON_TYPE_POINT = "Point";
- static const string GEOJSON_TYPE_LINESTRING = "LineString";
- static const string GEOJSON_TYPE_POLYGON = "Polygon";
- static const string GEOJSON_TYPE_MULTI_POINT = "MultiPoint";
- static const string GEOJSON_TYPE_MULTI_LINESTRING = "MultiLineString";
- static const string GEOJSON_TYPE_MULTI_POLYGON = "MultiPolygon";
- static const string GEOJSON_TYPE_GEOMETRY_COLLECTION = "GeometryCollection";
- // This field must also be present. The value depends on the type.
- static const string GEOJSON_COORDINATES = "coordinates";
- static const string GEOJSON_GEOMETRIES = "geometries";
-
- // Coordinate System Reference
- // see http://portal.opengeospatial.org/files/?artifact_id=24045
- // and http://spatialreference.org/ref/epsg/4326/
- // and http://www.geojson.org/geojson-spec.html#named-crs
- static const string CRS_CRS84 = "urn:ogc:def:crs:OGC:1.3:CRS84";
- static const string CRS_EPSG_4326 = "EPSG:4326";
- static const string CRS_STRICT_WINDING = "urn:x-mongodb:crs:strictwinding:EPSG:4326";
-
- static Status parseFlatPoint(const BSONElement &elem, Point *out, bool allowAddlFields = false) {
- if (!elem.isABSONObj()) return BAD_VALUE("Point must be an array or object");
- BSONObjIterator it(elem.Obj());
- BSONElement x = it.next();
- if (!x.isNumber()) { return BAD_VALUE("Point must only contain numeric elements"); }
- BSONElement y = it.next();
- if (!y.isNumber()) { return BAD_VALUE("Point must only contain numeric elements"); }
- if (!allowAddlFields && it.more()) { return BAD_VALUE("Point must only contain two numeric elements"); }
- out->x = x.number();
- out->y = y.number();
- // Point coordinates must be finite numbers, neither NaN or infinite.
- if (!std::isfinite(out->x) || !std::isfinite(out->y)) {
- return BAD_VALUE("Point coordinates must be finite numbers");
- }
- return Status::OK();
+using std::unique_ptr;
+using std::stringstream;
+
+// This field must be present, and...
+static const string GEOJSON_TYPE = "type";
+// Have one of these values:
+static const string GEOJSON_TYPE_POINT = "Point";
+static const string GEOJSON_TYPE_LINESTRING = "LineString";
+static const string GEOJSON_TYPE_POLYGON = "Polygon";
+static const string GEOJSON_TYPE_MULTI_POINT = "MultiPoint";
+static const string GEOJSON_TYPE_MULTI_LINESTRING = "MultiLineString";
+static const string GEOJSON_TYPE_MULTI_POLYGON = "MultiPolygon";
+static const string GEOJSON_TYPE_GEOMETRY_COLLECTION = "GeometryCollection";
+// This field must also be present. The value depends on the type.
+static const string GEOJSON_COORDINATES = "coordinates";
+static const string GEOJSON_GEOMETRIES = "geometries";
+
+// Coordinate System Reference
+// see http://portal.opengeospatial.org/files/?artifact_id=24045
+// and http://spatialreference.org/ref/epsg/4326/
+// and http://www.geojson.org/geojson-spec.html#named-crs
+static const string CRS_CRS84 = "urn:ogc:def:crs:OGC:1.3:CRS84";
+static const string CRS_EPSG_4326 = "EPSG:4326";
+static const string CRS_STRICT_WINDING = "urn:x-mongodb:crs:strictwinding:EPSG:4326";
+
+static Status parseFlatPoint(const BSONElement& elem, Point* out, bool allowAddlFields = false) {
+ if (!elem.isABSONObj())
+ return BAD_VALUE("Point must be an array or object");
+ BSONObjIterator it(elem.Obj());
+ BSONElement x = it.next();
+ if (!x.isNumber()) {
+ return BAD_VALUE("Point must only contain numeric elements");
}
-
- Status GeoParser::parseLegacyPoint(const BSONElement &elem, PointWithCRS *out, bool allowAddlFields) {
- out->crs = FLAT;
- return parseFlatPoint(elem, &out->oldPoint, allowAddlFields);
+ BSONElement y = it.next();
+ if (!y.isNumber()) {
+ return BAD_VALUE("Point must only contain numeric elements");
+ }
+ if (!allowAddlFields && it.more()) {
+ return BAD_VALUE("Point must only contain two numeric elements");
}
+ out->x = x.number();
+ out->y = y.number();
+ // Point coordinates must be finite numbers, neither NaN nor infinite.
+ if (!std::isfinite(out->x) || !std::isfinite(out->y)) {
+ return BAD_VALUE("Point coordinates must be finite numbers");
+ }
+ return Status::OK();
+}
+
+Status GeoParser::parseLegacyPoint(const BSONElement& elem,
+ PointWithCRS* out,
+ bool allowAddlFields) {
+ out->crs = FLAT;
+ return parseFlatPoint(elem, &out->oldPoint, allowAddlFields);
+}
+
+static Status coordToPoint(double lng, double lat, S2Point* out) {
+ // We don't rely on drem to clean up non-sane points. We just don't let them become
+ // spherical.
+ if (!isValidLngLat(lng, lat))
+ return BAD_VALUE("longitude/latitude is out of bounds, lng: " << lng << " lat: " << lat);
+ // Note that it's (lat, lng) for S2 but (lng, lat) for MongoDB.
+ S2LatLng ll = S2LatLng::FromDegrees(lat, lng).Normalized();
+ // This shouldn't happen since we should only have valid lng/lats.
+ if (!ll.is_valid()) {
+ stringstream ss;
+ ss << "coords invalid after normalization, lng = " << lng << " lat = " << lat << endl;
+ uasserted(17125, ss.str());
+ }
+ *out = ll.ToPoint();
+ return Status::OK();
+}
- static Status coordToPoint(double lng, double lat, S2Point* out) {
- // We don't rely on drem to clean up non-sane points. We just don't let them become
- // spherical.
- if (!isValidLngLat(lng, lat))
- return BAD_VALUE("longitude/latitude is out of bounds, lng: " << lng << " lat: " << lat);
- // Note that it's (lat, lng) for S2 but (lng, lat) for MongoDB.
- S2LatLng ll = S2LatLng::FromDegrees(lat, lng).Normalized();
- // This shouldn't happen since we should only have valid lng/lats.
- if (!ll.is_valid()) {
- stringstream ss;
- ss << "coords invalid after normalization, lng = " << lng << " lat = " << lat << endl;
- uasserted(17125, ss.str());
- }
- *out = ll.ToPoint();
- return Status::OK();
+static Status parseGeoJSONCoordinate(const BSONElement& elem, S2Point* out) {
+ if (Array != elem.type()) {
+ return BAD_VALUE("GeoJSON coordinates must be an array");
}
+ Point p;
+ // GeoJSON allows extra elements, e.g. altitude.
+ Status status = parseFlatPoint(elem, &p, true);
+ if (!status.isOK())
+ return status;
- static Status parseGeoJSONCoordinate(const BSONElement& elem, S2Point* out) {
- if (Array != elem.type()) { return BAD_VALUE("GeoJSON coordinates must be an array"); }
- Point p;
- // GeoJSON allows extra elements, e.g. altitude.
- Status status = parseFlatPoint(elem, &p, true);
- if (!status.isOK()) return status;
+ status = coordToPoint(p.x, p.y, out);
+ return status;
+}
- status = coordToPoint(p.x, p.y, out);
- return status;
+// "coordinates": [ [100.0, 0.0], [101.0, 1.0] ]
+static Status parseArrayOfCoordinates(const BSONElement& elem, vector<S2Point>* out) {
+ if (Array != elem.type()) {
+ return BAD_VALUE("GeoJSON coordinates must be an array of coordinates");
}
-
- // "coordinates": [ [100.0, 0.0], [101.0, 1.0] ]
- static Status parseArrayOfCoordinates(const BSONElement& elem, vector<S2Point>* out) {
- if (Array != elem.type()) { return BAD_VALUE("GeoJSON coordinates must be an array of coordinates"); }
- BSONObjIterator it(elem.Obj());
- // Iterate all coordinates in array
- while (it.more()) {
- S2Point p;
- Status status = parseGeoJSONCoordinate(it.next(), &p);
- if (!status.isOK()) return status;
- out->push_back(p);
- }
- return Status::OK();
+ BSONObjIterator it(elem.Obj());
+ // Iterate all coordinates in array
+ while (it.more()) {
+ S2Point p;
+ Status status = parseGeoJSONCoordinate(it.next(), &p);
+ if (!status.isOK())
+ return status;
+ out->push_back(p);
}
-
- static void eraseDuplicatePoints(vector<S2Point>* vertices) {
- for (size_t i = 1; i < vertices->size(); ++i) {
- if ((*vertices)[i - 1] == (*vertices)[i]) {
- vertices->erase(vertices->begin() + i);
- // We could have > 2 adjacent identical vertices, and must examine i again.
- --i;
- }
+ return Status::OK();
+}
+
+static void eraseDuplicatePoints(vector<S2Point>* vertices) {
+ for (size_t i = 1; i < vertices->size(); ++i) {
+ if ((*vertices)[i - 1] == (*vertices)[i]) {
+ vertices->erase(vertices->begin() + i);
+ // We could have > 2 adjacent identical vertices, and must examine i again.
+ --i;
}
}
+}
- static Status isLoopClosed(const vector<S2Point>& loop, const BSONElement loopElt) {
- if (loop.empty()) {
+static Status isLoopClosed(const vector<S2Point>& loop, const BSONElement loopElt) {
+ if (loop.empty()) {
return BAD_VALUE("Loop has no vertices: " << loopElt.toString(false));
- }
+ }
- if (loop[0] != loop[loop.size() - 1]) {
+ if (loop[0] != loop[loop.size() - 1]) {
return BAD_VALUE("Loop is not closed: " << loopElt.toString(false));
- }
+ }
- return Status::OK();
+ return Status::OK();
+}
+
+static Status parseGeoJSONPolygonCoordinates(const BSONElement& elem, S2Polygon* out) {
+ if (Array != elem.type()) {
+ return BAD_VALUE("Polygon coordinates must be an array");
}
- static Status parseGeoJSONPolygonCoordinates(const BSONElement& elem, S2Polygon *out) {
- if (Array != elem.type()) { return BAD_VALUE("Polygon coordinates must be an array"); }
+ OwnedPointerVector<S2Loop> loops;
+ Status status = Status::OK();
+ string err;
+
+ BSONObjIterator it(elem.Obj());
+ // Iterate all loops of the polygon.
+ while (it.more()) {
+ // Parse the array of vertices of a loop.
+ BSONElement coordinateElt = it.next();
+ vector<S2Point> points;
+ status = parseArrayOfCoordinates(coordinateElt, &points);
+ if (!status.isOK())
+ return status;
+
+ // Check if the loop is closed.
+ status = isLoopClosed(points, coordinateElt);
+ if (!status.isOK())
+ return status;
+
+ eraseDuplicatePoints(&points);
+ // Drop the duplicated last point.
+ points.resize(points.size() - 1);
- OwnedPointerVector<S2Loop> loops;
- Status status = Status::OK();
- string err;
-
- BSONObjIterator it(elem.Obj());
- // Iterate all loops of the polygon.
- while (it.more()) {
- // Parse the array of vertices of a loop.
- BSONElement coordinateElt = it.next();
- vector<S2Point> points;
- status = parseArrayOfCoordinates(coordinateElt, &points);
- if (!status.isOK()) return status;
-
- // Check if the loop is closed.
- status = isLoopClosed(points, coordinateElt);
- if (!status.isOK()) return status;
-
- eraseDuplicatePoints(&points);
- // Drop the duplicated last point.
- points.resize(points.size() - 1);
-
- // At least 3 vertices.
- if (points.size() < 3) {
- return BAD_VALUE("Loop must have at least 3 different vertices: " <<
- coordinateElt.toString(false));
- }
-
- S2Loop* loop = new S2Loop(points);
- loops.push_back(loop);
-
- // Check whether this loop is valid.
- // 1. At least 3 vertices.
- // 2. All vertices must be unit length. Guaranteed by parsePoints().
- // 3. Loops are not allowed to have any duplicate vertices.
- // 4. Non-adjacent edges are not allowed to intersect.
- if (!loop->IsValid(&err)) {
- return BAD_VALUE("Loop is not valid: " << coordinateElt.toString(false) << " "
- << err);
- }
- // If the loop is more than one hemisphere, invert it.
- loop->Normalize();
-
- // Check the first loop must be the exterior ring and any others must be
- // interior rings or holes.
- if (loops.size() > 1 && !loops[0]->Contains(loop)) {
- return BAD_VALUE("Secondary loops not contained by first exterior loop - "
- "secondary loops must be holes: " << coordinateElt.toString(false)
- << " first loop: " << elem.Obj().firstElement().toString(false));
- }
+ // At least 3 vertices.
+ if (points.size() < 3) {
+ return BAD_VALUE(
+ "Loop must have at least 3 different vertices: " << coordinateElt.toString(false));
}
- if (loops.empty()) {
- return BAD_VALUE("Polygon has no loops.");
- }
+ S2Loop* loop = new S2Loop(points);
+ loops.push_back(loop);
- // Check if the given loops form a valid polygon.
- // 1. If a loop contains an edge AB, then no other loop may contain AB or BA.
- // 2. No loop covers more than half of the sphere.
- // 3. No two loops cross.
- if (!S2Polygon::IsValid(loops.vector(), &err))
- return BAD_VALUE("Polygon isn't valid: " << err << " " << elem.toString(false));
-
- // Given all loops are valid / normalized and S2Polygon::IsValid() above returns true.
- // The polygon must be valid. See S2Polygon member function IsValid().
-
- // Transfer ownership of the loops and clears loop vector.
- out->Init(&loops.mutableVector());
-
- // Check if every loop of this polygon shares at most one vertex with
- // its parent loop.
- if (!out->IsNormalized(&err))
- // "err" looks like "Loop 1 shares more than one vertex with its parent loop 0"
- return BAD_VALUE(err << ": " << elem.toString(false));
-
- // S2Polygon contains more than one ring, which is allowed by S2, but not by GeoJSON.
- //
- // Loops are indexed according to a preorder traversal of the nesting hierarchy.
- // GetLastDescendant() returns the index of the last loop that is contained within
- // a given loop. We guarantee that the first loop is the exterior ring.
- if (out->GetLastDescendant(0) < out->num_loops() - 1) {
- return BAD_VALUE("Only one exterior polygon loop is allowed: " << elem.toString(false));
+ // Check whether this loop is valid.
+ // 1. At least 3 vertices.
+ // 2. All vertices must be unit length. Guaranteed by parsePoints().
+ // 3. Loops are not allowed to have any duplicate vertices.
+ // 4. Non-adjacent edges are not allowed to intersect.
+ if (!loop->IsValid(&err)) {
+ return BAD_VALUE("Loop is not valid: " << coordinateElt.toString(false) << " " << err);
}
-
- // In GeoJSON, only one nesting is allowed.
- // The depth of a loop is set by polygon according to the nesting hierarchy of polygon,
- // so the exterior ring's depth is 0, a hole in it is 1, etc.
- for (int i = 0; i < out->num_loops(); i++) {
- if (out->loop(i)->depth() > 1) {
- return BAD_VALUE("Polygon interior loops cannot be nested: "<< elem.toString(false));
- }
+ // If the loop is more than one hemisphere, invert it.
+ loop->Normalize();
+
+ // Check the first loop must be the exterior ring and any others must be
+ // interior rings or holes.
+ if (loops.size() > 1 && !loops[0]->Contains(loop)) {
+ return BAD_VALUE(
+ "Secondary loops not contained by first exterior loop - "
+ "secondary loops must be holes: "
+ << coordinateElt.toString(false)
+ << " first loop: " << elem.Obj().firstElement().toString(false));
}
- return Status::OK();
}
- static Status parseBigSimplePolygonCoordinates(const BSONElement& elem,
- BigSimplePolygon *out) {
- if (Array != elem.type())
- return BAD_VALUE("Coordinates of polygon must be an array");
+ if (loops.empty()) {
+ return BAD_VALUE("Polygon has no loops.");
+ }
+ // Check if the given loops form a valid polygon.
+ // 1. If a loop contains an edge AB, then no other loop may contain AB or BA.
+ // 2. No loop covers more than half of the sphere.
+ // 3. No two loops cross.
+ if (!S2Polygon::IsValid(loops.vector(), &err))
+ return BAD_VALUE("Polygon isn't valid: " << err << " " << elem.toString(false));
+
+ // Given all loops are valid / normalized and S2Polygon::IsValid() above returns true.
+ // The polygon must be valid. See S2Polygon member function IsValid().
+
+ // Transfer ownership of the loops and clears loop vector.
+ out->Init(&loops.mutableVector());
+
+ // Check if every loop of this polygon shares at most one vertex with
+ // its parent loop.
+ if (!out->IsNormalized(&err))
+ // "err" looks like "Loop 1 shares more than one vertex with its parent loop 0"
+ return BAD_VALUE(err << ": " << elem.toString(false));
+
+ // S2Polygon contains more than one ring, which is allowed by S2, but not by GeoJSON.
+ //
+ // Loops are indexed according to a preorder traversal of the nesting hierarchy.
+ // GetLastDescendant() returns the index of the last loop that is contained within
+ // a given loop. We guarantee that the first loop is the exterior ring.
+ if (out->GetLastDescendant(0) < out->num_loops() - 1) {
+ return BAD_VALUE("Only one exterior polygon loop is allowed: " << elem.toString(false));
+ }
- const vector<BSONElement>& coordinates = elem.Array();
- // Only one loop is allowed in a BigSimplePolygon
- if (coordinates.size() != 1) {
- return BAD_VALUE("Only one simple loop is allowed in a big polygon: "
- << elem.toString(false));
+ // In GeoJSON, only one nesting is allowed.
+ // The depth of a loop is set by polygon according to the nesting hierarchy of polygon,
+ // so the exterior ring's depth is 0, a hole in it is 1, etc.
+ for (int i = 0; i < out->num_loops(); i++) {
+ if (out->loop(i)->depth() > 1) {
+ return BAD_VALUE("Polygon interior loops cannot be nested: " << elem.toString(false));
}
+ }
+ return Status::OK();
+}
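For illustration, a polygon that satisfies the rules above: both rings are closed, the exterior ring comes first, and the single hole nests exactly one level deep. The include paths, the fromjson() helper and the wrapper function are assumptions; the coordinates are arbitrary.

#include "mongo/db/geo/geoparser.h"
#include "mongo/db/json.h"  // assumed location of fromjson()

using namespace mongo;

Status parsePolygonWithHole(PolygonWithCRS* out) {
    BSONObj obj = fromjson(
        "{ type: \"Polygon\", coordinates: ["
        "  [ [0, 0], [8, 0], [8, 8], [0, 8], [0, 0] ],"  // exterior ring, depth 0
        "  [ [2, 2], [4, 2], [4, 4], [2, 4], [2, 2] ]"   // hole, depth 1
        "] }");
    return GeoParser::parseGeoJSONPolygon(obj, out);
}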
- vector<S2Point> exteriorVertices;
- Status status = Status::OK();
- string err;
+static Status parseBigSimplePolygonCoordinates(const BSONElement& elem, BigSimplePolygon* out) {
+ if (Array != elem.type())
+ return BAD_VALUE("Coordinates of polygon must be an array");
- status = parseArrayOfCoordinates(coordinates.front(), &exteriorVertices);
- if (!status.isOK()) return status;
- status = isLoopClosed(exteriorVertices, coordinates.front());
- if (!status.isOK()) return status;
+ const vector<BSONElement>& coordinates = elem.Array();
+ // Only one loop is allowed in a BigSimplePolygon
+ if (coordinates.size() != 1) {
+ return BAD_VALUE(
+ "Only one simple loop is allowed in a big polygon: " << elem.toString(false));
+ }
- eraseDuplicatePoints(&exteriorVertices);
+ vector<S2Point> exteriorVertices;
+ Status status = Status::OK();
+ string err;
- // The last point is duplicated. We drop it, since S2Loop expects no
- // duplicate points
- exteriorVertices.resize(exteriorVertices.size() - 1);
+ status = parseArrayOfCoordinates(coordinates.front(), &exteriorVertices);
+ if (!status.isOK())
+ return status;
- // At least 3 vertices.
- if (exteriorVertices.size() < 3) {
- return BAD_VALUE("Loop must have at least 3 different vertices: " <<
- elem.toString(false));
- }
+ status = isLoopClosed(exteriorVertices, coordinates.front());
+ if (!status.isOK())
+ return status;
- unique_ptr<S2Loop> loop(new S2Loop(exteriorVertices));
- // Check whether this loop is valid.
- if (!loop->IsValid(&err)) {
- return BAD_VALUE("Loop is not valid: " << elem.toString(false) << " " << err);
- }
+ eraseDuplicatePoints(&exteriorVertices);
- out->Init(loop.release());
- return Status::OK();
+ // The last point is duplicated. We drop it, since S2Loop expects no
+ // duplicate points
+ exteriorVertices.resize(exteriorVertices.size() - 1);
+
+ // At least 3 vertices.
+ if (exteriorVertices.size() < 3) {
+ return BAD_VALUE("Loop must have at least 3 different vertices: " << elem.toString(false));
}
- // Parse "crs" field of BSON object.
- // "crs": {
- // "type": "name",
- // "properties": {
- // "name": "urn:ogc:def:crs:OGC:1.3:CRS84"
- // }
- // }
- static Status parseGeoJSONCRS(const BSONObj &obj, CRS* crs, bool allowStrictSphere = false) {
- *crs = SPHERE;
+ unique_ptr<S2Loop> loop(new S2Loop(exteriorVertices));
+ // Check whether this loop is valid.
+ if (!loop->IsValid(&err)) {
+ return BAD_VALUE("Loop is not valid: " << elem.toString(false) << " " << err);
+ }
- BSONElement crsElt = obj["crs"];
- // "crs" field doesn't exist, return the default SPHERE
- if (crsElt.eoo()) {
- return Status::OK();
- }
+ out->Init(loop.release());
+ return Status::OK();
+}
+
+// Parse "crs" field of BSON object.
+// "crs": {
+// "type": "name",
+// "properties": {
+// "name": "urn:ogc:def:crs:OGC:1.3:CRS84"
+// }
+// }
+static Status parseGeoJSONCRS(const BSONObj& obj, CRS* crs, bool allowStrictSphere = false) {
+ *crs = SPHERE;
+
+ BSONElement crsElt = obj["crs"];
+ // "crs" field doesn't exist, return the default SPHERE
+ if (crsElt.eoo()) {
+ return Status::OK();
+ }
- if (!crsElt.isABSONObj()) return BAD_VALUE("GeoJSON CRS must be an object");
- BSONObj crsObj = crsElt.embeddedObject();
-
- // "type": "name"
- if (String != crsObj["type"].type() || "name" != crsObj["type"].String())
- return BAD_VALUE("GeoJSON CRS must have field \"type\": \"name\"");
-
- // "properties"
- BSONElement propertiesElt = crsObj["properties"];
- if (!propertiesElt.isABSONObj())
- return BAD_VALUE("CRS must have field \"properties\" which is an object");
- BSONObj propertiesObj = propertiesElt.embeddedObject();
- if (String != propertiesObj["name"].type())
- return BAD_VALUE("In CRS, \"properties.name\" must be a string");
- const string& name = propertiesObj["name"].String();
- if (CRS_CRS84 == name || CRS_EPSG_4326 == name) {
- *crs = SPHERE;
- } else if (CRS_STRICT_WINDING == name) {
- if (!allowStrictSphere) {
- return BAD_VALUE("Strict winding order is only supported by polygon");
- }
- *crs = STRICT_SPHERE;
- } else {
- return BAD_VALUE("Unknown CRS name: " << name);
+ if (!crsElt.isABSONObj())
+ return BAD_VALUE("GeoJSON CRS must be an object");
+ BSONObj crsObj = crsElt.embeddedObject();
+
+ // "type": "name"
+ if (String != crsObj["type"].type() || "name" != crsObj["type"].String())
+ return BAD_VALUE("GeoJSON CRS must have field \"type\": \"name\"");
+
+ // "properties"
+ BSONElement propertiesElt = crsObj["properties"];
+ if (!propertiesElt.isABSONObj())
+ return BAD_VALUE("CRS must have field \"properties\" which is an object");
+ BSONObj propertiesObj = propertiesElt.embeddedObject();
+ if (String != propertiesObj["name"].type())
+ return BAD_VALUE("In CRS, \"properties.name\" must be a string");
+ const string& name = propertiesObj["name"].String();
+ if (CRS_CRS84 == name || CRS_EPSG_4326 == name) {
+ *crs = SPHERE;
+ } else if (CRS_STRICT_WINDING == name) {
+ if (!allowStrictSphere) {
+ return BAD_VALUE("Strict winding order is only supported by polygon");
}
- return Status::OK();
+ *crs = STRICT_SPHERE;
+ } else {
+ return BAD_VALUE("Unknown CRS name: " << name);
}
+ return Status::OK();
+}
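A sketch of the "crs" member this function accepts, assuming (as with the other GeoJSON parsers in this file) that parseGeoJSONPoint consults it; the include paths and the fromjson() helper are also assumptions. Naming CRS84 or EPSG:4326 is equivalent to omitting "crs" entirely, while the strict-winding URN is only legal on polygons.

#include "mongo/db/geo/geoparser.h"
#include "mongo/db/json.h"  // assumed location of fromjson()

using namespace mongo;

Status parsePointWithNamedCRS(PointWithCRS* out) {
    // Explicitly spelling out the default spherical CRS.
    BSONObj obj = fromjson(
        "{ type: \"Point\", coordinates: [100.0, 0.0],"
        "  crs: { type: \"name\","
        "         properties: { name: \"urn:ogc:def:crs:OGC:1.3:CRS84\" } } }");
    return GeoParser::parseGeoJSONPoint(obj, out);
}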
+
+// Parse "coordinates" field of GeoJSON LineString
+// e.g. "coordinates": [ [100.0, 0.0], [101.0, 1.0] ]
+// Or a line in "coordinates" field of GeoJSON MultiLineString
+static Status parseGeoJSONLineCoordinates(const BSONElement& elem, S2Polyline* out) {
+ vector<S2Point> vertices;
+ Status status = parseArrayOfCoordinates(elem, &vertices);
+ if (!status.isOK())
+ return status;
- // Parse "coordinates" field of GeoJSON LineString
- // e.g. "coordinates": [ [100.0, 0.0], [101.0, 1.0] ]
- // Or a line in "coordinates" field of GeoJSON MultiLineString
- static Status parseGeoJSONLineCoordinates(const BSONElement& elem, S2Polyline* out) {
- vector<S2Point> vertices;
- Status status = parseArrayOfCoordinates(elem, &vertices);
- if (!status.isOK()) return status;
+ eraseDuplicatePoints(&vertices);
+ if (vertices.size() < 2)
+ return BAD_VALUE(
+ "GeoJSON LineString must have at least 2 vertices: " << elem.toString(false));
+
+ string err;
+ if (!S2Polyline::IsValid(vertices, &err))
+ return BAD_VALUE("GeoJSON LineString is not valid: " << err << " " << elem.toString(false));
+ out->Init(vertices);
+ return Status::OK();
+}
+
+// Parse legacy point or GeoJSON point, used by geo near.
+// Only stored legacy points allow additional fields.
+Status parsePoint(const BSONElement& elem, PointWithCRS* out, bool allowAddlFields) {
+ if (!elem.isABSONObj())
+ return BAD_VALUE("Point must be an array or object");
+
+ BSONObj obj = elem.Obj();
+ // location: [1, 2] or location: {x: 1, y:2}
+ if (Array == elem.type() || obj.firstElement().isNumber()) {
+ // Legacy point
+ return GeoParser::parseLegacyPoint(elem, out, allowAddlFields);
+ }
- eraseDuplicatePoints(&vertices);
- if (vertices.size() < 2)
- return BAD_VALUE("GeoJSON LineString must have at least 2 vertices: " << elem.toString(false));
+ // GeoJSON point. location: { type: "Point", coordinates: [1, 2] }
+ return GeoParser::parseGeoJSONPoint(obj, out);
+}
- string err;
- if (!S2Polyline::IsValid(vertices, &err))
- return BAD_VALUE("GeoJSON LineString is not valid: " << err << " " << elem.toString(false));
- out->Init(vertices);
- return Status::OK();
- }
+/** exported **/
+Status GeoParser::parseStoredPoint(const BSONElement& elem, PointWithCRS* out) {
+ return parsePoint(elem, out, true);
+}
- // Parse legacy point or GeoJSON point, used by geo near.
- // Only stored legacy points allow additional fields.
- Status parsePoint(const BSONElement &elem, PointWithCRS *out, bool allowAddlFields) {
- if (!elem.isABSONObj()) return BAD_VALUE("Point must be an array or object");
+Status GeoParser::parseQueryPoint(const BSONElement& elem, PointWithCRS* out) {
+ return parsePoint(elem, out, false);
+}
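// A small sketch of how the two wrappers above differ, assuming fromjson() is
// available as in geoparser_test.cpp: stored points tolerate extra fields on the
// legacy object form, query points do not, and both accept the GeoJSON form.
static void exampleStoredVsQueryPoint() {
    PointWithCRS point;
    BSONObj plain = fromjson("{loc: {x: 40, y: 5}}");
    invariant(GeoParser::parseStoredPoint(plain.firstElement(), &point).isOK());
    invariant(GeoParser::parseQueryPoint(plain.firstElement(), &point).isOK());

    BSONObj extra = fromjson("{loc: {x: 40, y: 5, z: 50}}");
    invariant(GeoParser::parseStoredPoint(extra.firstElement(), &point).isOK());
    invariant(!GeoParser::parseQueryPoint(extra.firstElement(), &point).isOK());

    BSONObj geoJSON = fromjson("{loc: {type: 'Point', coordinates: [40, 5]}}");
    invariant(GeoParser::parseQueryPoint(geoJSON.firstElement(), &point).isOK());
}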
- BSONObj obj = elem.Obj();
- // location: [1, 2] or location: {x: 1, y:2}
- if (Array == elem.type() || obj.firstElement().isNumber()) {
- // Legacy point
- return GeoParser::parseLegacyPoint(elem, out, allowAddlFields);
- }
+Status GeoParser::parseLegacyBox(const BSONObj& obj, BoxWithCRS* out) {
+ Point ptA, ptB;
+ Status status = Status::OK();
- // GeoJSON point. location: { type: "Point", coordinates: [1, 2] }
- return GeoParser::parseGeoJSONPoint(obj, out);
+ BSONObjIterator coordIt(obj);
+ status = parseFlatPoint(coordIt.next(), &ptA);
+ if (!status.isOK()) {
+ return status;
}
-
- /** exported **/
- Status GeoParser::parseStoredPoint(const BSONElement &elem, PointWithCRS *out) {
- return parsePoint(elem, out, true);
+ status = parseFlatPoint(coordIt.next(), &ptB);
+ if (!status.isOK()) {
+ return status;
}
+ // XXX: VERIFY AREA >= 0
- Status GeoParser::parseQueryPoint(const BSONElement &elem, PointWithCRS *out) {
- return parsePoint(elem, out, false);
+ out->box.init(ptA, ptB);
+ out->crs = FLAT;
+ return status;
+}
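// A short usage sketch for the $box operand, assuming fromjson() as in the tests
// below: the value of "$box" is two corner points parsed as flat coordinates.
static void exampleLegacyBox() {
    BoxWithCRS box;
    invariant(GeoParser::parseLegacyBox(fromjson("[[1, 2], [3, 4]]"), &box).isOK());
    invariant(box.crs == FLAT);
}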
+
+Status GeoParser::parseLegacyPolygon(const BSONObj& obj, PolygonWithCRS* out) {
+ BSONObjIterator coordIt(obj);
+ vector<Point> points;
+ while (coordIt.more()) {
+ Point p;
+ // A coordinate
+ Status status = parseFlatPoint(coordIt.next(), &p);
+ if (!status.isOK())
+ return status;
+ points.push_back(p);
}
+ if (points.size() < 3)
+ return BAD_VALUE("Polygon must have at least 3 points");
+ out->oldPolygon.init(points);
+ out->crs = FLAT;
+ return Status::OK();
+}
+
+// { "type": "Point", "coordinates": [100.0, 0.0] }
+Status GeoParser::parseGeoJSONPoint(const BSONObj& obj, PointWithCRS* out) {
+ Status status = Status::OK();
+ // "crs"
+ status = parseGeoJSONCRS(obj, &out->crs);
+ if (!status.isOK())
+ return status;
- Status GeoParser::parseLegacyBox(const BSONObj& obj, BoxWithCRS *out) {
- Point ptA, ptB;
- Status status = Status::OK();
+ // "coordinates"
+ status = parseFlatPoint(obj[GEOJSON_COORDINATES], &out->oldPoint, true);
+ if (!status.isOK())
+ return status;
- BSONObjIterator coordIt(obj);
- status = parseFlatPoint(coordIt.next(), &ptA);
- if (!status.isOK()) { return status; }
- status = parseFlatPoint(coordIt.next(), &ptB);
- if (!status.isOK()) { return status; }
- // XXX: VERIFY AREA >= 0
+ // Projection
+ out->crs = FLAT;
+ if (!ShapeProjection::supportsProject(*out, SPHERE))
+ return BAD_VALUE("longitude/latitude is out of bounds, lng: " << out->oldPoint.x << " lat: "
+ << out->oldPoint.y);
+ ShapeProjection::projectInto(out, SPHERE);
+ return Status::OK();
+}
+
+// { "type": "LineString", "coordinates": [ [100.0, 0.0], [101.0, 1.0] ] }
+Status GeoParser::parseGeoJSONLine(const BSONObj& obj, LineWithCRS* out) {
+ Status status = Status::OK();
+ // "crs"
+ status = parseGeoJSONCRS(obj, &out->crs);
+ if (!status.isOK())
+ return status;
- out->box.init(ptA, ptB);
- out->crs = FLAT;
+ // "coordinates"
+ status = parseGeoJSONLineCoordinates(obj[GEOJSON_COORDINATES], &out->line);
+ if (!status.isOK())
return status;
- }
- Status GeoParser::parseLegacyPolygon(const BSONObj& obj, PolygonWithCRS *out) {
- BSONObjIterator coordIt(obj);
- vector<Point> points;
- while (coordIt.more()) {
- Point p;
- // A coordinate
- Status status = parseFlatPoint(coordIt.next(), &p);
- if (!status.isOK()) return status;
- points.push_back(p);
- }
- if (points.size() < 3) return BAD_VALUE("Polygon must have at least 3 points");
- out->oldPolygon.init(points);
- out->crs = FLAT;
- return Status::OK();
- }
+ return Status::OK();
+}
- // { "type": "Point", "coordinates": [100.0, 0.0] }
- Status GeoParser::parseGeoJSONPoint(const BSONObj &obj, PointWithCRS *out) {
- Status status = Status::OK();
- // "crs"
- status = parseGeoJSONCRS(obj, &out->crs);
- if (!status.isOK()) return status;
-
- // "coordinates"
- status = parseFlatPoint(obj[GEOJSON_COORDINATES], &out->oldPoint, true);
- if (!status.isOK()) return status;
-
- // Projection
- out->crs = FLAT;
- if (!ShapeProjection::supportsProject(*out, SPHERE))
- return BAD_VALUE("longitude/latitude is out of bounds, lng: "
- << out->oldPoint.x << " lat: " << out->oldPoint.y);
- ShapeProjection::projectInto(out, SPHERE);
- return Status::OK();
+Status GeoParser::parseGeoJSONPolygon(const BSONObj& obj, PolygonWithCRS* out) {
+ const BSONElement coordinates = obj[GEOJSON_COORDINATES];
+
+ Status status = Status::OK();
+ // "crs", allow strict sphere
+ status = parseGeoJSONCRS(obj, &out->crs, true);
+ if (!status.isOK())
+ return status;
+
+ // "coordinates"
+ if (out->crs == SPHERE) {
+ out->s2Polygon.reset(new S2Polygon());
+ status = parseGeoJSONPolygonCoordinates(coordinates, out->s2Polygon.get());
+ } else if (out->crs == STRICT_SPHERE) {
+ out->bigPolygon.reset(new BigSimplePolygon());
+ status = parseBigSimplePolygonCoordinates(coordinates, out->bigPolygon.get());
}
+ return status;
+}
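// A sketch of the CRS dispatch above, assuming fromjson() as in the tests and
// assuming CRS_STRICT_WINDING names the usual big-polygon CRS string
// "urn:x-mongodb:crs:strictwinding:EPSG:4326".
static void exampleStrictWindingPolygon() {
    PolygonWithCRS polygon;
    invariant(GeoParser::parseGeoJSONPolygon(
                  fromjson("{'type':'Polygon', "
                           "'coordinates':[ [[0,0],[5,0],[5,5],[0,5],[0,0]] ], "
                           "crs:{ type: 'name', properties:"
                           "{name:'urn:x-mongodb:crs:strictwinding:EPSG:4326'}}}"),
                  &polygon)
                  .isOK());
    // With strict winding the loop's orientation is honored as given, and the result
    // lands in bigPolygon rather than s2Polygon.
    invariant(polygon.crs == STRICT_SPHERE);
    invariant(polygon.bigPolygon.get());
}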
- // { "type": "LineString", "coordinates": [ [100.0, 0.0], [101.0, 1.0] ] }
- Status GeoParser::parseGeoJSONLine(const BSONObj& obj, LineWithCRS* out) {
- Status status = Status::OK();
- // "crs"
- status = parseGeoJSONCRS(obj, &out->crs);
- if (!status.isOK()) return status;
+Status GeoParser::parseMultiPoint(const BSONObj& obj, MultiPointWithCRS* out) {
+ Status status = Status::OK();
+ status = parseGeoJSONCRS(obj, &out->crs);
+ if (!status.isOK())
+ return status;
- // "coordinates"
- status = parseGeoJSONLineCoordinates(obj[GEOJSON_COORDINATES], &out->line);
- if (!status.isOK()) return status;
+ out->points.clear();
+ BSONElement coordElt = obj.getFieldDotted(GEOJSON_COORDINATES);
+ status = parseArrayOfCoordinates(coordElt, &out->points);
+ if (!status.isOK())
+ return status;
- return Status::OK();
+ if (0 == out->points.size())
+ return BAD_VALUE("MultiPoint coordinates must have at least 1 element");
+ out->cells.resize(out->points.size());
+ for (size_t i = 0; i < out->points.size(); ++i) {
+ out->cells[i] = S2Cell(out->points[i]);
}
- Status GeoParser::parseGeoJSONPolygon(const BSONObj &obj, PolygonWithCRS *out) {
- const BSONElement coordinates = obj[GEOJSON_COORDINATES];
+ return Status::OK();
+}
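// A brief sketch, assuming fromjson() as in the tests: one S2Cell per parsed point
// is precomputed above, so cells and points stay in lockstep.
static void exampleMultiPointCells() {
    MultiPointWithCRS mp;
    invariant(GeoParser::parseMultiPoint(
                  fromjson("{'type':'MultiPoint','coordinates':[[1,2],[3,4]]}"), &mp)
                  .isOK());
    invariant(mp.points.size() == 2U);
    invariant(mp.cells.size() == mp.points.size());
}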
- Status status = Status::OK();
- // "crs", allow strict sphere
- status = parseGeoJSONCRS(obj, &out->crs, true);
- if (!status.isOK()) return status;
-
- // "coordinates"
- if (out->crs == SPHERE) {
- out->s2Polygon.reset(new S2Polygon());
- status = parseGeoJSONPolygonCoordinates(coordinates, out->s2Polygon.get());
- }
- else if (out->crs == STRICT_SPHERE) {
- out->bigPolygon.reset(new BigSimplePolygon());
- status = parseBigSimplePolygonCoordinates(coordinates, out->bigPolygon.get());
- }
+Status GeoParser::parseMultiLine(const BSONObj& obj, MultiLineWithCRS* out) {
+ Status status = Status::OK();
+ status = parseGeoJSONCRS(obj, &out->crs);
+ if (!status.isOK())
return status;
- }
- Status GeoParser::parseMultiPoint(const BSONObj &obj, MultiPointWithCRS *out) {
- Status status = Status::OK();
- status = parseGeoJSONCRS(obj, &out->crs);
- if (!status.isOK()) return status;
-
- out->points.clear();
- BSONElement coordElt = obj.getFieldDotted(GEOJSON_COORDINATES);
- status = parseArrayOfCoordinates(coordElt, &out->points);
- if (!status.isOK()) return status;
-
- if (0 == out->points.size())
- return BAD_VALUE("MultiPoint coordinates must have at least 1 element");
- out->cells.resize(out->points.size());
- for (size_t i = 0; i < out->points.size(); ++i) {
- out->cells[i] = S2Cell(out->points[i]);
- }
+ BSONElement coordElt = obj.getFieldDotted(GEOJSON_COORDINATES);
+ if (Array != coordElt.type())
+ return BAD_VALUE("MultiLineString coordinates must be an array");
- return Status::OK();
- }
+ out->lines.clear();
+ vector<S2Polyline*>& lines = out->lines.mutableVector();
- Status GeoParser::parseMultiLine(const BSONObj &obj, MultiLineWithCRS *out) {
- Status status = Status::OK();
- status = parseGeoJSONCRS(obj, &out->crs);
- if (!status.isOK()) return status;
+ BSONObjIterator it(coordElt.Obj());
- BSONElement coordElt = obj.getFieldDotted(GEOJSON_COORDINATES);
- if (Array != coordElt.type())
- return BAD_VALUE("MultiLineString coordinates must be an array");
+ // Iterate array
+ while (it.more()) {
+ lines.push_back(new S2Polyline());
+ status = parseGeoJSONLineCoordinates(it.next(), lines.back());
+ if (!status.isOK())
+ return status;
+ }
+ if (0 == lines.size())
+ return BAD_VALUE("MultiLineString coordinates must have at least 1 element");
- out->lines.clear();
- vector<S2Polyline*>& lines = out->lines.mutableVector();
+ return Status::OK();
+}
- BSONObjIterator it(coordElt.Obj());
+Status GeoParser::parseMultiPolygon(const BSONObj& obj, MultiPolygonWithCRS* out) {
+ Status status = Status::OK();
+ status = parseGeoJSONCRS(obj, &out->crs);
+ if (!status.isOK())
+ return status;
- // Iterate array
- while (it.more()) {
- lines.push_back(new S2Polyline());
- status = parseGeoJSONLineCoordinates(it.next(), lines.back());
- if (!status.isOK()) return status;
- }
- if (0 == lines.size())
- return BAD_VALUE("MultiLineString coordinates must have at least 1 element");
+ BSONElement coordElt = obj.getFieldDotted(GEOJSON_COORDINATES);
+ if (Array != coordElt.type())
+ return BAD_VALUE("MultiPolygon coordinates must be an array");
- return Status::OK();
+ out->polygons.clear();
+ vector<S2Polygon*>& polygons = out->polygons.mutableVector();
+
+ BSONObjIterator it(coordElt.Obj());
+ // Iterate array
+ while (it.more()) {
+ polygons.push_back(new S2Polygon());
+ status = parseGeoJSONPolygonCoordinates(it.next(), polygons.back());
+ if (!status.isOK())
+ return status;
}
+ if (0 == polygons.size())
+ return BAD_VALUE("MultiPolygon coordinates must have at least 1 element");
- Status GeoParser::parseMultiPolygon(const BSONObj &obj, MultiPolygonWithCRS *out) {
- Status status = Status::OK();
- status = parseGeoJSONCRS(obj, &out->crs);
- if (!status.isOK()) return status;
-
- BSONElement coordElt = obj.getFieldDotted(GEOJSON_COORDINATES);
- if (Array != coordElt.type())
- return BAD_VALUE("MultiPolygon coordinates must be an array");
-
- out->polygons.clear();
- vector<S2Polygon*>& polygons = out->polygons.mutableVector();
-
- BSONObjIterator it(coordElt.Obj());
- // Iterate array
- while (it.more()) {
- polygons.push_back(new S2Polygon());
- status = parseGeoJSONPolygonCoordinates(it.next(), polygons.back());
- if (!status.isOK()) return status;
- }
- if (0 == polygons.size())
- return BAD_VALUE("MultiPolygon coordinates must have at least 1 element");
+ return Status::OK();
+}
- return Status::OK();
- }
+Status GeoParser::parseLegacyCenter(const BSONObj& obj, CapWithCRS* out) {
+ BSONObjIterator objIt(obj);
+
+ // Center
+ BSONElement center = objIt.next();
+ Status status = parseFlatPoint(center, &out->circle.center);
+ if (!status.isOK())
+ return status;
- Status GeoParser::parseLegacyCenter(const BSONObj& obj, CapWithCRS *out) {
- BSONObjIterator objIt(obj);
+ // Radius
+ BSONElement radius = objIt.next();
+ // radius >= 0 and is not NaN
+ if (!radius.isNumber() || !(radius.number() >= 0))
+ return BAD_VALUE("radius must be a non-negative number");
+
+ // No more
+ if (objIt.more())
+ return BAD_VALUE("Only 2 fields allowed for circular region");
+
+ out->circle.radius = radius.number();
+ out->crs = FLAT;
+ return Status::OK();
+}
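// A short usage sketch for the $center operand, assuming fromjson() as in the tests:
// a flat center point plus a non-negative radius, and nothing else.
static void exampleLegacyCenter() {
    CapWithCRS cap;
    invariant(GeoParser::parseLegacyCenter(fromjson("[[0, 0], 4]"), &cap).isOK());
    invariant(cap.crs == FLAT);
    invariant(!GeoParser::parseLegacyCenter(fromjson("[[0, 0], -1]"), &cap).isOK());
    invariant(!GeoParser::parseLegacyCenter(fromjson("[[0, 0], 4, 'x']"), &cap).isOK());
}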
+
+Status GeoParser::parseCenterSphere(const BSONObj& obj, CapWithCRS* out) {
+ BSONObjIterator objIt(obj);
+
+ // Center
+ BSONElement center = objIt.next();
+ Point p;
+    // Check that the object contains exactly 2 numbers.
+ Status status = parseFlatPoint(center, &p);
+ if (!status.isOK())
+ return status;
- // Center
- BSONElement center = objIt.next();
- Status status = parseFlatPoint(center, &out->circle.center);
- if (!status.isOK()) return status;
+ S2Point centerPoint;
+ status = coordToPoint(p.x, p.y, &centerPoint);
+ if (!status.isOK())
+ return status;
- // Radius
- BSONElement radius = objIt.next();
- // radius >= 0 and is not NaN
- if (!radius.isNumber() || !(radius.number() >= 0))
- return BAD_VALUE("radius must be a non-negative number");
+ // Radius
+ BSONElement radiusElt = objIt.next();
+ // radius >= 0 and is not NaN
+ if (!radiusElt.isNumber() || !(radiusElt.number() >= 0))
+ return BAD_VALUE("radius must be a non-negative number");
+ double radius = radiusElt.number();
+
+ // No more elements
+ if (objIt.more())
+ return BAD_VALUE("Only 2 fields allowed for circular region");
+
+ out->cap = S2Cap::FromAxisAngle(centerPoint, S1Angle::Radians(radius));
+ out->circle.radius = radius;
+ out->circle.center = p;
+ out->crs = SPHERE;
+ return Status::OK();
+}
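// A short usage sketch for the $centerSphere operand, assuming fromjson() as in the
// tests. Note that the radius is an angle in radians (see S1Angle::Radians above),
// not a distance in degrees or meters.
static void exampleCenterSphere() {
    CapWithCRS cap;
    invariant(GeoParser::parseCenterSphere(fromjson("[[0, 0], 0.1]"), &cap).isOK());
    invariant(cap.crs == SPHERE);
    invariant(!GeoParser::parseCenterSphere(fromjson("[[0, 0], -0.1]"), &cap).isOK());
}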
+
+// { "type": "GeometryCollection",
+// "geometries": [
+// { "type": "Point",
+// "coordinates": [100.0, 0.0]
+// },
+// { "type": "LineString",
+// "coordinates": [ [101.0, 0.0], [102.0, 1.0] ]
+// }
+// ]
+// }
+Status GeoParser::parseGeometryCollection(const BSONObj& obj, GeometryCollection* out) {
+ BSONElement coordElt = obj.getFieldDotted(GEOJSON_GEOMETRIES);
+ if (Array != coordElt.type())
+ return BAD_VALUE("GeometryCollection geometries must be an array");
+
+ const vector<BSONElement>& geometries = coordElt.Array();
+ if (0 == geometries.size())
+ return BAD_VALUE("GeometryCollection geometries must have at least 1 element");
+
+ for (size_t i = 0; i < geometries.size(); ++i) {
+ if (Object != geometries[i].type())
+ return BAD_VALUE("Element " << i << " of \"geometries\" is not an object");
+
+ const BSONObj& geoObj = geometries[i].Obj();
+ GeoJSONType type = parseGeoJSONType(geoObj);
+
+ if (GEOJSON_UNKNOWN == type)
+ return BAD_VALUE("Unknown GeoJSON type: " << geometries[i].toString(false));
+
+ if (GEOJSON_GEOMETRY_COLLECTION == type)
+ return BAD_VALUE(
+ "GeometryCollections cannot be nested: " << geometries[i].toString(false));
- // No more
- if (objIt.more())
- return BAD_VALUE("Only 2 fields allowed for circular region");
+ Status status = Status::OK();
+ if (GEOJSON_POINT == type) {
+ out->points.resize(out->points.size() + 1);
+ status = parseGeoJSONPoint(geoObj, &out->points.back());
+ } else if (GEOJSON_LINESTRING == type) {
+ out->lines.mutableVector().push_back(new LineWithCRS());
+ status = parseGeoJSONLine(geoObj, out->lines.vector().back());
+ } else if (GEOJSON_POLYGON == type) {
+ out->polygons.mutableVector().push_back(new PolygonWithCRS());
+ status = parseGeoJSONPolygon(geoObj, out->polygons.vector().back());
+ } else if (GEOJSON_MULTI_POINT == type) {
+ out->multiPoints.mutableVector().push_back(new MultiPointWithCRS());
+ status = parseMultiPoint(geoObj, out->multiPoints.mutableVector().back());
+ } else if (GEOJSON_MULTI_LINESTRING == type) {
+ out->multiLines.mutableVector().push_back(new MultiLineWithCRS());
+ status = parseMultiLine(geoObj, out->multiLines.mutableVector().back());
+ } else if (GEOJSON_MULTI_POLYGON == type) {
+ out->multiPolygons.mutableVector().push_back(new MultiPolygonWithCRS());
+ status = parseMultiPolygon(geoObj, out->multiPolygons.mutableVector().back());
+ } else {
+ // Should not reach here.
+ invariant(false);
+ }
- out->circle.radius = radius.number();
- out->crs = FLAT;
- return Status::OK();
+ // Check parsing result.
+ if (!status.isOK())
+ return status;
}
- Status GeoParser::parseCenterSphere(const BSONObj& obj, CapWithCRS *out) {
- BSONObjIterator objIt(obj);
+ return Status::OK();
+}
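// A small sketch, assuming fromjson() as in the tests: nesting one GeometryCollection
// inside another is rejected by the type check above.
static void exampleNestedGeometryCollection() {
    GeometryCollection gc;
    invariant(!GeoParser::parseGeometryCollection(
                   fromjson("{type: 'GeometryCollection', geometries: ["
                            "{type: 'GeometryCollection', geometries: ["
                            "{type: 'Point', coordinates: [0, 0]}]}]}"),
                   &gc)
                   .isOK());
}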
- // Center
- BSONElement center = objIt.next();
- Point p;
- // Check the object has and only has 2 numbers.
- Status status = parseFlatPoint(center, &p);
- if (!status.isOK()) return status;
-
- S2Point centerPoint;
- status = coordToPoint(p.x, p.y, &centerPoint);
- if (!status.isOK()) return status;
-
- // Radius
- BSONElement radiusElt = objIt.next();
- // radius >= 0 and is not NaN
- if (!radiusElt.isNumber() || !(radiusElt.number() >= 0))
- return BAD_VALUE("radius must be a non-negative number");
- double radius = radiusElt.number();
-
- // No more elements
- if (objIt.more())
- return BAD_VALUE("Only 2 fields allowed for circular region");
-
- out->cap = S2Cap::FromAxisAngle(centerPoint, S1Angle::Radians(radius));
- out->circle.radius = radius;
- out->circle.center = p;
- out->crs = SPHERE;
- return Status::OK();
+bool GeoParser::parsePointWithMaxDistance(const BSONObj& obj, PointWithCRS* out, double* maxOut) {
+ BSONObjIterator it(obj);
+ if (!it.more()) {
+ return false;
}
- // { "type": "GeometryCollection",
- // "geometries": [
- // { "type": "Point",
- // "coordinates": [100.0, 0.0]
- // },
- // { "type": "LineString",
- // "coordinates": [ [101.0, 0.0], [102.0, 1.0] ]
- // }
- // ]
- // }
- Status GeoParser::parseGeometryCollection(const BSONObj &obj, GeometryCollection *out) {
- BSONElement coordElt = obj.getFieldDotted(GEOJSON_GEOMETRIES);
- if (Array != coordElt.type())
- return BAD_VALUE("GeometryCollection geometries must be an array");
-
- const vector<BSONElement>& geometries = coordElt.Array();
- if (0 == geometries.size())
- return BAD_VALUE("GeometryCollection geometries must have at least 1 element");
-
- for (size_t i = 0; i < geometries.size(); ++i) {
- if (Object != geometries[i].type())
- return BAD_VALUE("Element " << i << " of \"geometries\" is not an object");
-
- const BSONObj& geoObj = geometries[i].Obj();
- GeoJSONType type = parseGeoJSONType(geoObj);
-
- if (GEOJSON_UNKNOWN == type)
- return BAD_VALUE("Unknown GeoJSON type: " << geometries[i].toString(false));
-
- if (GEOJSON_GEOMETRY_COLLECTION == type)
- return BAD_VALUE("GeometryCollections cannot be nested: "
- << geometries[i].toString(false));
-
- Status status = Status::OK();
- if (GEOJSON_POINT == type) {
- out->points.resize(out->points.size() + 1);
- status = parseGeoJSONPoint(geoObj, &out->points.back());
- } else if (GEOJSON_LINESTRING == type) {
- out->lines.mutableVector().push_back(new LineWithCRS());
- status = parseGeoJSONLine(geoObj, out->lines.vector().back());
- } else if (GEOJSON_POLYGON == type) {
- out->polygons.mutableVector().push_back(new PolygonWithCRS());
- status = parseGeoJSONPolygon(geoObj, out->polygons.vector().back());
- } else if (GEOJSON_MULTI_POINT == type) {
- out->multiPoints.mutableVector().push_back(new MultiPointWithCRS());
- status = parseMultiPoint(geoObj, out->multiPoints.mutableVector().back());
- } else if (GEOJSON_MULTI_LINESTRING == type) {
- out->multiLines.mutableVector().push_back(new MultiLineWithCRS());
- status = parseMultiLine(geoObj, out->multiLines.mutableVector().back());
- } else if (GEOJSON_MULTI_POLYGON == type) {
- out->multiPolygons.mutableVector().push_back(new MultiPolygonWithCRS());
- status = parseMultiPolygon(geoObj, out->multiPolygons.mutableVector().back());
- } else {
- // Should not reach here.
- invariant(false);
- }
-
- // Check parsing result.
- if (!status.isOK()) return status;
- }
+ BSONElement lng = it.next();
+ if (!lng.isNumber()) {
+ return false;
+ }
+ if (!it.more()) {
+ return false;
+ }
- return Status::OK();
+ BSONElement lat = it.next();
+ if (!lat.isNumber()) {
+ return false;
+ }
+ if (!it.more()) {
+ return false;
}
- bool GeoParser::parsePointWithMaxDistance(const BSONObj& obj, PointWithCRS* out, double* maxOut) {
- BSONObjIterator it(obj);
- if (!it.more()) { return false; }
-
- BSONElement lng = it.next();
- if (!lng.isNumber()) { return false; }
- if (!it.more()) { return false; }
-
- BSONElement lat = it.next();
- if (!lat.isNumber()) { return false; }
- if (!it.more()) { return false; }
-
- BSONElement dist = it.next();
- if (!dist.isNumber()) { return false; }
- if (it.more()) { return false; }
-
- out->oldPoint.x = lng.number();
- out->oldPoint.y = lat.number();
- out->crs = FLAT;
- *maxOut = dist.number();
- return true;
- }
-
- GeoParser::GeoSpecifier GeoParser::parseGeoSpecifier(const BSONElement& type) {
- if (!type.isABSONObj()) { return GeoParser::UNKNOWN; }
- const char* fieldName = type.fieldName();
- if (mongoutils::str::equals(fieldName, "$box")) {
- return GeoParser::BOX;
- } else if (mongoutils::str::equals(fieldName, "$center")) {
- return GeoParser::CENTER;
- } else if (mongoutils::str::equals(fieldName, "$polygon")) {
- return GeoParser::POLYGON;
- } else if (mongoutils::str::equals(fieldName, "$centerSphere")) {
- return GeoParser::CENTER_SPHERE;
- } else if (mongoutils::str::equals(fieldName, "$geometry")) {
- return GeoParser::GEOMETRY;
- }
+ BSONElement dist = it.next();
+ if (!dist.isNumber()) {
+ return false;
+ }
+ if (it.more()) {
+ return false;
+ }
+
+ out->oldPoint.x = lng.number();
+ out->oldPoint.y = lat.number();
+ out->crs = FLAT;
+ *maxOut = dist.number();
+ return true;
+}
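// A short sketch of the [lng, lat, maxDistance] triple this helper expects, assuming
// fromjson() as in the tests; anything other than exactly three numbers is rejected.
static void examplePointWithMaxDistance() {
    PointWithCRS point;
    double maxDist = 0.0;
    invariant(GeoParser::parsePointWithMaxDistance(
        fromjson("[-74.0, 40.7, 0.05]"), &point, &maxDist));
    invariant(point.crs == FLAT);
    invariant(maxDist == 0.05);
    invariant(!GeoParser::parsePointWithMaxDistance(fromjson("[-74.0, 40.7]"), &point, &maxDist));
}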
+
+GeoParser::GeoSpecifier GeoParser::parseGeoSpecifier(const BSONElement& type) {
+ if (!type.isABSONObj()) {
return GeoParser::UNKNOWN;
}
+ const char* fieldName = type.fieldName();
+ if (mongoutils::str::equals(fieldName, "$box")) {
+ return GeoParser::BOX;
+ } else if (mongoutils::str::equals(fieldName, "$center")) {
+ return GeoParser::CENTER;
+ } else if (mongoutils::str::equals(fieldName, "$polygon")) {
+ return GeoParser::POLYGON;
+ } else if (mongoutils::str::equals(fieldName, "$centerSphere")) {
+ return GeoParser::CENTER_SPHERE;
+ } else if (mongoutils::str::equals(fieldName, "$geometry")) {
+ return GeoParser::GEOMETRY;
+ }
+ return GeoParser::UNKNOWN;
+}
- GeoParser::GeoJSONType GeoParser::parseGeoJSONType(const BSONObj& obj) {
- BSONElement type = obj.getFieldDotted(GEOJSON_TYPE);
- if (String != type.type()) { return GeoParser::GEOJSON_UNKNOWN; }
- const string& typeString = type.String();
- if (GEOJSON_TYPE_POINT == typeString) {
- return GeoParser::GEOJSON_POINT;
- } else if (GEOJSON_TYPE_LINESTRING == typeString) {
- return GeoParser::GEOJSON_LINESTRING;
- } else if (GEOJSON_TYPE_POLYGON == typeString) {
- return GeoParser::GEOJSON_POLYGON;
- } else if (GEOJSON_TYPE_MULTI_POINT == typeString) {
- return GeoParser::GEOJSON_MULTI_POINT;
- } else if (GEOJSON_TYPE_MULTI_LINESTRING == typeString) {
- return GeoParser::GEOJSON_MULTI_LINESTRING;
- } else if (GEOJSON_TYPE_MULTI_POLYGON == typeString) {
- return GeoParser::GEOJSON_MULTI_POLYGON;
- } else if (GEOJSON_TYPE_GEOMETRY_COLLECTION == typeString) {
- return GeoParser::GEOJSON_GEOMETRY_COLLECTION;
- }
+GeoParser::GeoJSONType GeoParser::parseGeoJSONType(const BSONObj& obj) {
+ BSONElement type = obj.getFieldDotted(GEOJSON_TYPE);
+ if (String != type.type()) {
return GeoParser::GEOJSON_UNKNOWN;
}
+ const string& typeString = type.String();
+ if (GEOJSON_TYPE_POINT == typeString) {
+ return GeoParser::GEOJSON_POINT;
+ } else if (GEOJSON_TYPE_LINESTRING == typeString) {
+ return GeoParser::GEOJSON_LINESTRING;
+ } else if (GEOJSON_TYPE_POLYGON == typeString) {
+ return GeoParser::GEOJSON_POLYGON;
+ } else if (GEOJSON_TYPE_MULTI_POINT == typeString) {
+ return GeoParser::GEOJSON_MULTI_POINT;
+ } else if (GEOJSON_TYPE_MULTI_LINESTRING == typeString) {
+ return GeoParser::GEOJSON_MULTI_LINESTRING;
+ } else if (GEOJSON_TYPE_MULTI_POLYGON == typeString) {
+ return GeoParser::GEOJSON_MULTI_POLYGON;
+ } else if (GEOJSON_TYPE_GEOMETRY_COLLECTION == typeString) {
+ return GeoParser::GEOJSON_GEOMETRY_COLLECTION;
+ }
+ return GeoParser::GEOJSON_UNKNOWN;
+}
} // namespace mongo
diff --git a/src/mongo/db/geo/geoparser.h b/src/mongo/db/geo/geoparser.h
index 796db6b087f..91cdb6649ea 100644
--- a/src/mongo/db/geo/geoparser.h
+++ b/src/mongo/db/geo/geoparser.h
@@ -33,64 +33,65 @@
namespace mongo {
- // This class parses geographic data.
- // It parses a subset of GeoJSON and creates S2 shapes from it.
- // See http://geojson.org/geojson-spec.html for the spec.
- //
- // This class also parses the ad-hoc geo formats that MongoDB introduced.
- //
- // parse* methods may do some more validation than the is* methods; they return false if they
- // encounter invalid geometry and true if the geometry is parsed successfully.
- class GeoParser {
- public:
-
- // Geospatial specifier after $geoWithin / $geoIntersects predicates.
- // i.e. "$box" in { $box: [[1, 2], [3, 4]] }
- enum GeoSpecifier {
- UNKNOWN = 0,
- BOX, // $box
- CENTER, // $center
- POLYGON, // $polygon
- CENTER_SPHERE, // $centerSphere
- GEOMETRY // GeoJSON geometry, $geometry
- };
+// This class parses geographic data.
+// It parses a subset of GeoJSON and creates S2 shapes from it.
+// See http://geojson.org/geojson-spec.html for the spec.
+//
+// This class also parses the ad-hoc geo formats that MongoDB introduced.
+//
+// Most parse* methods validate their input and return a non-OK Status if they
+// encounter invalid geometry; they return Status::OK() when parsing succeeds.
+class GeoParser {
+public:
+ // Geospatial specifier after $geoWithin / $geoIntersects predicates.
+ // i.e. "$box" in { $box: [[1, 2], [3, 4]] }
+ enum GeoSpecifier {
+ UNKNOWN = 0,
+ BOX, // $box
+ CENTER, // $center
+ POLYGON, // $polygon
+ CENTER_SPHERE, // $centerSphere
+ GEOMETRY // GeoJSON geometry, $geometry
+ };
- // GeoJSON type defined in GeoJSON document.
- // i.e. "Point" in { type: "Point", coordinates: [1, 2] }
- enum GeoJSONType {
- GEOJSON_UNKNOWN = 0,
- GEOJSON_POINT,
- GEOJSON_LINESTRING,
- GEOJSON_POLYGON,
- GEOJSON_MULTI_POINT,
- GEOJSON_MULTI_LINESTRING,
- GEOJSON_MULTI_POLYGON,
- GEOJSON_GEOMETRY_COLLECTION
- };
+ // GeoJSON type defined in GeoJSON document.
+ // i.e. "Point" in { type: "Point", coordinates: [1, 2] }
+ enum GeoJSONType {
+ GEOJSON_UNKNOWN = 0,
+ GEOJSON_POINT,
+ GEOJSON_LINESTRING,
+ GEOJSON_POLYGON,
+ GEOJSON_MULTI_POINT,
+ GEOJSON_MULTI_LINESTRING,
+ GEOJSON_MULTI_POLYGON,
+ GEOJSON_GEOMETRY_COLLECTION
+ };
- static GeoSpecifier parseGeoSpecifier(const BSONElement& elem);
- static GeoJSONType parseGeoJSONType(const BSONObj& obj);
+ static GeoSpecifier parseGeoSpecifier(const BSONElement& elem);
+ static GeoJSONType parseGeoJSONType(const BSONObj& obj);
- // Legacy points can contain extra data as extra fields - these are valid to index
- // e.g. { x: 1, y: 1, z: 1 }
- static Status parseLegacyPoint(const BSONElement &elem, PointWithCRS *out, bool allowAddlFields = false);
- // Parse the BSON object after $box, $center, etc.
- static Status parseLegacyBox(const BSONObj& obj, BoxWithCRS *out);
- static Status parseLegacyCenter(const BSONObj& obj, CapWithCRS *out);
- static Status parseLegacyPolygon(const BSONObj& obj, PolygonWithCRS *out);
- static Status parseCenterSphere(const BSONObj& obj, CapWithCRS *out);
- static Status parseGeoJSONPolygon(const BSONObj &obj, PolygonWithCRS *out);
- static Status parseGeoJSONPoint(const BSONObj &obj, PointWithCRS *out);
- static Status parseGeoJSONLine(const BSONObj& obj, LineWithCRS* out);
- static Status parseMultiPoint(const BSONObj &obj, MultiPointWithCRS *out);
- static Status parseMultiLine(const BSONObj &obj, MultiLineWithCRS *out);
- static Status parseMultiPolygon(const BSONObj &obj, MultiPolygonWithCRS *out);
- static Status parseGeometryCollection(const BSONObj &obj, GeometryCollection *out);
+ // Legacy points can contain extra data as extra fields - these are valid to index
+ // e.g. { x: 1, y: 1, z: 1 }
+ static Status parseLegacyPoint(const BSONElement& elem,
+ PointWithCRS* out,
+ bool allowAddlFields = false);
+ // Parse the BSON object after $box, $center, etc.
+ static Status parseLegacyBox(const BSONObj& obj, BoxWithCRS* out);
+ static Status parseLegacyCenter(const BSONObj& obj, CapWithCRS* out);
+ static Status parseLegacyPolygon(const BSONObj& obj, PolygonWithCRS* out);
+ static Status parseCenterSphere(const BSONObj& obj, CapWithCRS* out);
+ static Status parseGeoJSONPolygon(const BSONObj& obj, PolygonWithCRS* out);
+ static Status parseGeoJSONPoint(const BSONObj& obj, PointWithCRS* out);
+ static Status parseGeoJSONLine(const BSONObj& obj, LineWithCRS* out);
+ static Status parseMultiPoint(const BSONObj& obj, MultiPointWithCRS* out);
+ static Status parseMultiLine(const BSONObj& obj, MultiLineWithCRS* out);
+ static Status parseMultiPolygon(const BSONObj& obj, MultiPolygonWithCRS* out);
+ static Status parseGeometryCollection(const BSONObj& obj, GeometryCollection* out);
- // For geo near
- static Status parseQueryPoint(const BSONElement &elem, PointWithCRS *out);
- static Status parseStoredPoint(const BSONElement &elem, PointWithCRS *out);
- static bool parsePointWithMaxDistance(const BSONObj& obj, PointWithCRS* out, double* maxOut);
- };
+ // For geo near
+ static Status parseQueryPoint(const BSONElement& elem, PointWithCRS* out);
+ static Status parseStoredPoint(const BSONElement& elem, PointWithCRS* out);
+ static bool parsePointWithMaxDistance(const BSONObj& obj, PointWithCRS* out, double* maxOut);
+};
} // namespace mongo
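// A consolidated usage sketch for the class above. The function name and error
// handling here are illustrative only; the point is the typical pattern of
// dispatching on parseGeoSpecifier() and then calling the matching parse* method.
#include "mongo/db/geo/geoparser.h"

namespace mongo {

bool parseGeoWithinShape(const BSONElement& specifier, CapWithCRS* cap, PolygonWithCRS* poly) {
    switch (GeoParser::parseGeoSpecifier(specifier)) {
        case GeoParser::CENTER:
            return GeoParser::parseLegacyCenter(specifier.Obj(), cap).isOK();
        case GeoParser::CENTER_SPHERE:
            return GeoParser::parseCenterSphere(specifier.Obj(), cap).isOK();
        case GeoParser::POLYGON:
            return GeoParser::parseLegacyPolygon(specifier.Obj(), poly).isOK();
        case GeoParser::GEOMETRY:
            // For $geoWithin the $geometry operand is typically a (Multi)Polygon.
            return GeoParser::parseGeoJSONPolygon(specifier.Obj(), poly).isOK();
        default:
            // $box, unknown specifiers, etc. are left out of this sketch.
            return false;
    }
}

}  // namespace mongo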
diff --git a/src/mongo/db/geo/geoparser_test.cpp b/src/mongo/db/geo/geoparser_test.cpp
index 2387696b7be..201416e076b 100644
--- a/src/mongo/db/geo/geoparser_test.cpp
+++ b/src/mongo/db/geo/geoparser_test.cpp
@@ -47,361 +47,379 @@ using namespace mongo;
namespace {
- TEST(GeoParser, parseGeoSpecifier) {
- ASSERT_EQUALS(GeoParser::parseGeoSpecifier(
- fromjson("{$box : [[1, 2], [3, 4]]}").firstElement()),
- GeoParser::BOX);
- ASSERT_EQUALS(GeoParser::parseGeoSpecifier(
- fromjson("{$center : [[0, 0], 4]}").firstElement()),
- GeoParser::CENTER);
- ASSERT_EQUALS(GeoParser::parseGeoSpecifier(
- fromjson("{$centerSphere : [[0, 0], 1]}").firstElement()),
- GeoParser::CENTER_SPHERE);
- ASSERT_EQUALS(GeoParser::parseGeoSpecifier(
- fromjson("{$geometry : {'type':'Point', 'coordinates': [40, 5]}}").firstElement()),
- GeoParser::GEOMETRY);
- }
+TEST(GeoParser, parseGeoSpecifier) {
+ ASSERT_EQUALS(
+ GeoParser::parseGeoSpecifier(fromjson("{$box : [[1, 2], [3, 4]]}").firstElement()),
+ GeoParser::BOX);
+ ASSERT_EQUALS(GeoParser::parseGeoSpecifier(fromjson("{$center : [[0, 0], 4]}").firstElement()),
+ GeoParser::CENTER);
+ ASSERT_EQUALS(
+ GeoParser::parseGeoSpecifier(fromjson("{$centerSphere : [[0, 0], 1]}").firstElement()),
+ GeoParser::CENTER_SPHERE);
+ ASSERT_EQUALS(
+ GeoParser::parseGeoSpecifier(
+ fromjson("{$geometry : {'type':'Point', 'coordinates': [40, 5]}}").firstElement()),
+ GeoParser::GEOMETRY);
+}
- TEST(GeoParser, parseGeoJSONPoint) {
- PointWithCRS point;
-
- ASSERT_OK(GeoParser::parseGeoJSONPoint(
- fromjson("{'type':'Point', 'coordinates': [40, 5]}"), &point));
- ASSERT_OK(GeoParser::parseGeoJSONPoint(
- fromjson("{'type':'Point', 'coordinates': [-40.3, -5.0]}"), &point));
- ASSERT_NOT_OK(GeoParser::parseGeoJSONPoint(
- fromjson("{'type':'Point', 'coordhats': [40, -5]}"), &point));
- ASSERT_NOT_OK(GeoParser::parseGeoJSONPoint(
- fromjson("{'type':'Point', 'coordinates': 40}"), &point));
- ASSERT_OK(GeoParser::parseGeoJSONPoint(
- fromjson("{'type':'Point', 'coordinates': [40, -5, 7]}"), &point));
-
- // Make sure lat is in range
- ASSERT_OK(GeoParser::parseGeoJSONPoint(
- fromjson("{'type':'Point', 'coordinates': [0, 90.0]}"), &point));
- ASSERT_OK(GeoParser::parseGeoJSONPoint(
- fromjson("{'type':'Point', 'coordinates': [0, -90.0]}"), &point));
- ASSERT_OK(GeoParser::parseGeoJSONPoint(
- fromjson("{'type':'Point', 'coordinates': [180, 90.0]}"), &point));
- ASSERT_OK(GeoParser::parseGeoJSONPoint(
- fromjson("{'type':'Point', 'coordinates': [-180, -90.0]}"), &point));
- ASSERT_NOT_OK(GeoParser::parseGeoJSONPoint(
- fromjson("{'type':'Point', 'coordinates': [180.01, 90.0]}"), &point));
- ASSERT_NOT_OK(GeoParser::parseGeoJSONPoint(
- fromjson("{'type':'Point', 'coordinates': [-180.01, -90.0]}"), &point));
- ASSERT_NOT_OK(GeoParser::parseGeoJSONPoint(
- fromjson("{'type':'Point', 'coordinates': [0, 90.1]}"), &point));
- ASSERT_NOT_OK(GeoParser::parseGeoJSONPoint(
- fromjson("{'type':'Point', 'coordinates': [0, -90.1]}"), &point));
- }
+TEST(GeoParser, parseGeoJSONPoint) {
+ PointWithCRS point;
+
+ ASSERT_OK(
+ GeoParser::parseGeoJSONPoint(fromjson("{'type':'Point', 'coordinates': [40, 5]}"), &point));
+ ASSERT_OK(GeoParser::parseGeoJSONPoint(
+ fromjson("{'type':'Point', 'coordinates': [-40.3, -5.0]}"), &point));
+ ASSERT_NOT_OK(
+ GeoParser::parseGeoJSONPoint(fromjson("{'type':'Point', 'coordhats': [40, -5]}"), &point));
+ ASSERT_NOT_OK(
+ GeoParser::parseGeoJSONPoint(fromjson("{'type':'Point', 'coordinates': 40}"), &point));
+ ASSERT_OK(GeoParser::parseGeoJSONPoint(fromjson("{'type':'Point', 'coordinates': [40, -5, 7]}"),
+ &point));
+
+ // Make sure lat is in range
+ ASSERT_OK(GeoParser::parseGeoJSONPoint(fromjson("{'type':'Point', 'coordinates': [0, 90.0]}"),
+ &point));
+ ASSERT_OK(GeoParser::parseGeoJSONPoint(fromjson("{'type':'Point', 'coordinates': [0, -90.0]}"),
+ &point));
+ ASSERT_OK(GeoParser::parseGeoJSONPoint(fromjson("{'type':'Point', 'coordinates': [180, 90.0]}"),
+ &point));
+ ASSERT_OK(GeoParser::parseGeoJSONPoint(
+ fromjson("{'type':'Point', 'coordinates': [-180, -90.0]}"), &point));
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONPoint(
+ fromjson("{'type':'Point', 'coordinates': [180.01, 90.0]}"), &point));
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONPoint(
+ fromjson("{'type':'Point', 'coordinates': [-180.01, -90.0]}"), &point));
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONPoint(
+ fromjson("{'type':'Point', 'coordinates': [0, 90.1]}"), &point));
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONPoint(
+ fromjson("{'type':'Point', 'coordinates': [0, -90.1]}"), &point));
+}
- TEST(GeoParser, parseGeoJSONLine) {
- LineWithCRS polyline;
-
- ASSERT_OK(GeoParser::parseGeoJSONLine(
- fromjson("{'type':'LineString', 'coordinates':[[1,2], [3,4]]}"), &polyline));
- ASSERT_OK(GeoParser::parseGeoJSONLine(
- fromjson("{'type':'LineString', 'coordinates':[[0,-90], [0,90]]}"), &polyline));
- ASSERT_OK(GeoParser::parseGeoJSONLine(
- fromjson("{'type':'LineString', 'coordinates':[[180,-90], [-180,90]]}"), &polyline));
- ASSERT_NOT_OK(GeoParser::parseGeoJSONLine(
- fromjson("{'type':'LineString', 'coordinates':[[180.1,-90], [-180.1,90]]}"),
- &polyline));
- ASSERT_NOT_OK(GeoParser::parseGeoJSONLine(
- fromjson("{'type':'LineString', 'coordinates':[[0,-91], [0,90]]}"), &polyline));
- ASSERT_NOT_OK(GeoParser::parseGeoJSONLine(
- fromjson("{'type':'LineString', 'coordinates':[[0,-90], [0,91]]}"), &polyline));
- ASSERT_OK(GeoParser::parseGeoJSONLine(
- fromjson("{'type':'LineString', 'coordinates':[[1,2], [3,4], [5,6]]}"), &polyline));
- ASSERT_NOT_OK(GeoParser::parseGeoJSONLine(
- fromjson("{'type':'LineString', 'coordinates':[[1,2]]}"), &polyline));
- ASSERT_NOT_OK(GeoParser::parseGeoJSONLine(
- fromjson("{'type':'LineString', 'coordinates':[['chicken','little']]}"), &polyline));
- ASSERT_NOT_OK(GeoParser::parseGeoJSONLine(
- fromjson("{'type':'LineString', 'coordinates':[1,2, 3, 4]}"), &polyline));
- ASSERT_OK(GeoParser::parseGeoJSONLine(
- fromjson("{'type':'LineString', 'coordinates':[[1,2, 3], [3,4, 5], [5,6]]}"),
- &polyline));
- }
+TEST(GeoParser, parseGeoJSONLine) {
+ LineWithCRS polyline;
+
+ ASSERT_OK(GeoParser::parseGeoJSONLine(
+ fromjson("{'type':'LineString', 'coordinates':[[1,2], [3,4]]}"), &polyline));
+ ASSERT_OK(GeoParser::parseGeoJSONLine(
+ fromjson("{'type':'LineString', 'coordinates':[[0,-90], [0,90]]}"), &polyline));
+ ASSERT_OK(GeoParser::parseGeoJSONLine(
+ fromjson("{'type':'LineString', 'coordinates':[[180,-90], [-180,90]]}"), &polyline));
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONLine(
+ fromjson("{'type':'LineString', 'coordinates':[[180.1,-90], [-180.1,90]]}"), &polyline));
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONLine(
+ fromjson("{'type':'LineString', 'coordinates':[[0,-91], [0,90]]}"), &polyline));
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONLine(
+ fromjson("{'type':'LineString', 'coordinates':[[0,-90], [0,91]]}"), &polyline));
+ ASSERT_OK(GeoParser::parseGeoJSONLine(
+ fromjson("{'type':'LineString', 'coordinates':[[1,2], [3,4], [5,6]]}"), &polyline));
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONLine(
+ fromjson("{'type':'LineString', 'coordinates':[[1,2]]}"), &polyline));
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONLine(
+ fromjson("{'type':'LineString', 'coordinates':[['chicken','little']]}"), &polyline));
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONLine(
+ fromjson("{'type':'LineString', 'coordinates':[1,2, 3, 4]}"), &polyline));
+ ASSERT_OK(GeoParser::parseGeoJSONLine(
+ fromjson("{'type':'LineString', 'coordinates':[[1,2, 3], [3,4, 5], [5,6]]}"), &polyline));
+}
- TEST(GeoParser, parseGeoJSONPolygon) {
- PolygonWithCRS polygon;
-
- ASSERT_OK(GeoParser::parseGeoJSONPolygon(
- fromjson("{'type':'Polygon', 'coordinates':[ [[0,0],[5,0],[5,5],[0,5],[0,0]] ]}"),
- &polygon));
- // No out of bounds points
- ASSERT_NOT_OK(GeoParser::parseGeoJSONPolygon(
- fromjson("{'type':'Polygon', 'coordinates':[ [[0,0],[5,0],[5,91],[0,5],[0,0]] ]}"),
- &polygon));
- ASSERT_OK(GeoParser::parseGeoJSONPolygon(
- fromjson("{'type':'Polygon', 'coordinates':[ [[0,0],[180,0],[5,5],[0,5],[0,0]] ]}"),
- &polygon));
- ASSERT_NOT_OK(GeoParser::parseGeoJSONPolygon(
- fromjson("{'type':'Polygon', 'coordinates':[ [[0,0],[181,0],[5,5],[0,5],[0,0]] ]}"),
- &polygon));
- // And one with a hole.
- ASSERT_OK(GeoParser::parseGeoJSONPolygon(
- fromjson("{'type':'Polygon', 'coordinates':[ [[0,0],[5,0],[5,5],[0,5],[0,0]],"
- " [[1,1],[4,1],[4,4],[1,4],[1,1]] ]}"), &polygon));
- // Latitudes must be OK
- ASSERT_NOT_OK(GeoParser::parseGeoJSONPolygon(
- fromjson("{'type':'Polygon', 'coordinates':[ [[0,0],[5,0],[5,91],[0,91],[0,0]],"
- " [[1,1],[4,1],[4,4],[1,4],[1,1]] ]}"), &polygon));
- // First point must be the same as the last.
- ASSERT_NOT_OK(GeoParser::parseGeoJSONPolygon(
- fromjson("{'type':'Polygon', 'coordinates':[ [[1,2],[3,4],[5,6]] ]}"), &polygon));
- // Extra elements are allowed
- ASSERT_OK(GeoParser::parseGeoJSONPolygon(
- fromjson("{'type':'Polygon', 'coordinates':[ [[0,0,0,0],[5,0,0],[5,5,1],"
- " [0,5],[0,0]] ]}"), &polygon));
-
- // Test functionality of polygon
- PointWithCRS point;
- ASSERT_OK(GeoParser::parseGeoJSONPoint(
- fromjson("{'type':'Point', 'coordinates': [2, 2]}"), &point));
-
- PolygonWithCRS polygonA;
- ASSERT_OK(GeoParser::parseGeoJSONPolygon(
- fromjson("{'type':'Polygon', 'coordinates':[ [[0,0],[5,0],[5,5],[0,5],[0,0]] ]}"),
- &polygonA));
- ASSERT_TRUE(polygonA.s2Polygon->Contains(point.point));
-
- PolygonWithCRS polygonB;
- ASSERT_OK(GeoParser::parseGeoJSONPolygon(
- fromjson("{'type':'Polygon', 'coordinates':[ [[0,0],[5,0],[5,5],[0,5],[0,0]],"
- " [[1,1],[1,4],[4,4],[4,1],[1,1]] ]}"),
- &polygonB));
- // We removed this in the hole.
- ASSERT_FALSE(polygonB.s2Polygon->Contains(point.point));
-
- // Now we reverse the orientations and verify that the code fixes it up
- // (outer loop must be CCW, inner CW).
- PolygonWithCRS polygonC;
- ASSERT_OK(GeoParser::parseGeoJSONPolygon(
- fromjson("{'type':'Polygon', 'coordinates':[ [[0,0],[0,5],[5,5],[5,0],[0,0]] ]}"),
- &polygonC));
- ASSERT_TRUE(polygonC.s2Polygon->Contains(point.point));
-
- PolygonWithCRS polygonD;
- ASSERT_OK(GeoParser::parseGeoJSONPolygon(
- fromjson("{'type':'Polygon', 'coordinates':[ [[0,0],[0,5],[5,5],[5,0],[0,0]],"
- " [[1,1],[1,4],[4,4],[4,1],[1,1]] ]}"),
- &polygonD));
- // Also removed in the loop.
- ASSERT_FALSE(polygonD.s2Polygon->Contains(point.point));
-
- //
- // Bad polygon examples
- //
-
- // Polygon with not enough points, because some are duplicated
- PolygonWithCRS polygonBad;
- ASSERT_NOT_OK(GeoParser::parseGeoJSONPolygon(
- fromjson("{'type':'Polygon', 'coordinates':[[ [0,0], [0,0], [5,5], [5,5], [0,0] ]]}"),
- &polygonBad));
- }
+TEST(GeoParser, parseGeoJSONPolygon) {
+ PolygonWithCRS polygon;
+
+ ASSERT_OK(GeoParser::parseGeoJSONPolygon(
+ fromjson("{'type':'Polygon', 'coordinates':[ [[0,0],[5,0],[5,5],[0,5],[0,0]] ]}"),
+ &polygon));
+ // No out of bounds points
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONPolygon(
+ fromjson("{'type':'Polygon', 'coordinates':[ [[0,0],[5,0],[5,91],[0,5],[0,0]] ]}"),
+ &polygon));
+ ASSERT_OK(GeoParser::parseGeoJSONPolygon(
+ fromjson("{'type':'Polygon', 'coordinates':[ [[0,0],[180,0],[5,5],[0,5],[0,0]] ]}"),
+ &polygon));
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONPolygon(
+ fromjson("{'type':'Polygon', 'coordinates':[ [[0,0],[181,0],[5,5],[0,5],[0,0]] ]}"),
+ &polygon));
+ // And one with a hole.
+ ASSERT_OK(GeoParser::parseGeoJSONPolygon(
+ fromjson(
+ "{'type':'Polygon', 'coordinates':[ [[0,0],[5,0],[5,5],[0,5],[0,0]],"
+ " [[1,1],[4,1],[4,4],[1,4],[1,1]] ]}"),
+ &polygon));
+ // Latitudes must be OK
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONPolygon(
+ fromjson(
+ "{'type':'Polygon', 'coordinates':[ [[0,0],[5,0],[5,91],[0,91],[0,0]],"
+ " [[1,1],[4,1],[4,4],[1,4],[1,1]] ]}"),
+ &polygon));
+ // First point must be the same as the last.
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONPolygon(
+ fromjson("{'type':'Polygon', 'coordinates':[ [[1,2],[3,4],[5,6]] ]}"), &polygon));
+ // Extra elements are allowed
+ ASSERT_OK(GeoParser::parseGeoJSONPolygon(
+ fromjson(
+ "{'type':'Polygon', 'coordinates':[ [[0,0,0,0],[5,0,0],[5,5,1],"
+ " [0,5],[0,0]] ]}"),
+ &polygon));
+
+ // Test functionality of polygon
+ PointWithCRS point;
+ ASSERT_OK(
+ GeoParser::parseGeoJSONPoint(fromjson("{'type':'Point', 'coordinates': [2, 2]}"), &point));
+
+ PolygonWithCRS polygonA;
+ ASSERT_OK(GeoParser::parseGeoJSONPolygon(
+ fromjson("{'type':'Polygon', 'coordinates':[ [[0,0],[5,0],[5,5],[0,5],[0,0]] ]}"),
+ &polygonA));
+ ASSERT_TRUE(polygonA.s2Polygon->Contains(point.point));
+
+ PolygonWithCRS polygonB;
+ ASSERT_OK(GeoParser::parseGeoJSONPolygon(
+ fromjson(
+ "{'type':'Polygon', 'coordinates':[ [[0,0],[5,0],[5,5],[0,5],[0,0]],"
+ " [[1,1],[1,4],[4,4],[4,1],[1,1]] ]}"),
+ &polygonB));
+ // We removed this in the hole.
+ ASSERT_FALSE(polygonB.s2Polygon->Contains(point.point));
+
+ // Now we reverse the orientations and verify that the code fixes it up
+ // (outer loop must be CCW, inner CW).
+ PolygonWithCRS polygonC;
+ ASSERT_OK(GeoParser::parseGeoJSONPolygon(
+ fromjson("{'type':'Polygon', 'coordinates':[ [[0,0],[0,5],[5,5],[5,0],[0,0]] ]}"),
+ &polygonC));
+ ASSERT_TRUE(polygonC.s2Polygon->Contains(point.point));
+
+ PolygonWithCRS polygonD;
+ ASSERT_OK(GeoParser::parseGeoJSONPolygon(
+ fromjson(
+ "{'type':'Polygon', 'coordinates':[ [[0,0],[0,5],[5,5],[5,0],[0,0]],"
+ " [[1,1],[1,4],[4,4],[4,1],[1,1]] ]}"),
+ &polygonD));
+ // Also removed in the loop.
+ ASSERT_FALSE(polygonD.s2Polygon->Contains(point.point));
+
+ //
+ // Bad polygon examples
+ //
+
+ // Polygon with not enough points, because some are duplicated
+ PolygonWithCRS polygonBad;
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONPolygon(
+ fromjson("{'type':'Polygon', 'coordinates':[[ [0,0], [0,0], [5,5], [5,5], [0,0] ]]}"),
+ &polygonBad));
+}
- TEST(GeoParser, parseGeoJSONCRS) {
- string goodCRS1 = "crs:{ type: 'name', properties:{name:'EPSG:4326'}}";
- string goodCRS2 = "crs:{ type: 'name', properties:{name:'urn:ogc:def:crs:OGC:1.3:CRS84'}}";
- string badCRS1 = "crs:{ type: 'name', properties:{name:'EPSG:2000'}}";
- string badCRS2 = "crs:{ type: 'name', properties:{name:'urn:ogc:def:crs:OGC:1.3:CRS83'}}";
-
- BSONObj point1 = fromjson("{'type':'Point', 'coordinates': [40, 5], " + goodCRS1 + "}");
- BSONObj point2 = fromjson("{'type':'Point', 'coordinates': [40, 5], " + goodCRS2 + "}");
- PointWithCRS point;
- ASSERT_OK(GeoParser::parseGeoJSONPoint(point1, &point));
- ASSERT_OK(GeoParser::parseGeoJSONPoint(point2, &point));
- BSONObj point3 = fromjson("{'type':'Point', 'coordinates': [40, 5], " + badCRS1 + "}");
- BSONObj point4 = fromjson("{'type':'Point', 'coordinates': [40, 5], " + badCRS2 + "}");
- ASSERT_NOT_OK(GeoParser::parseGeoJSONPoint(point3, &point));
- ASSERT_NOT_OK(GeoParser::parseGeoJSONPoint(point4, &point));
-
- PolygonWithCRS polygon;
- BSONObj polygon1 = fromjson("{'type':'Polygon', 'coordinates':[ "
- "[[0,0],[5,0],[5,5],[0,5],[0,0]],"
- " [[1,1],[1,4],[4,4],[4,1],[1,1]] ]," + goodCRS1 + "}");
- ASSERT_OK(GeoParser::parseGeoJSONPolygon(polygon1, &polygon));
- BSONObj polygon2 = fromjson("{'type':'Polygon', 'coordinates':[ "
- "[[0,0],[5,0],[5,5],[0,5],[0,0]],"
- " [[1,1],[1,4],[4,4],[4,1],[1,1]] ]," + badCRS2 + "}");
- ASSERT_NOT_OK(GeoParser::parseGeoJSONPolygon(polygon2, &polygon));
-
- LineWithCRS line;
- BSONObj line1 = fromjson("{'type':'LineString', 'coordinates':[[1,2], [3,4], [5,6]]," +
- goodCRS2 + "}");
- ASSERT_OK(GeoParser::parseGeoJSONLine(line1, &line));
- BSONObj line2 = fromjson("{'type':'LineString', 'coordinates':[[1,2], [3,4], [5,6]]," +
- badCRS1 + "}");
- ASSERT_NOT_OK(GeoParser::parseGeoJSONLine(line2, &line));
- }
+TEST(GeoParser, parseGeoJSONCRS) {
+ string goodCRS1 = "crs:{ type: 'name', properties:{name:'EPSG:4326'}}";
+ string goodCRS2 = "crs:{ type: 'name', properties:{name:'urn:ogc:def:crs:OGC:1.3:CRS84'}}";
+ string badCRS1 = "crs:{ type: 'name', properties:{name:'EPSG:2000'}}";
+ string badCRS2 = "crs:{ type: 'name', properties:{name:'urn:ogc:def:crs:OGC:1.3:CRS83'}}";
+
+ BSONObj point1 = fromjson("{'type':'Point', 'coordinates': [40, 5], " + goodCRS1 + "}");
+ BSONObj point2 = fromjson("{'type':'Point', 'coordinates': [40, 5], " + goodCRS2 + "}");
+ PointWithCRS point;
+ ASSERT_OK(GeoParser::parseGeoJSONPoint(point1, &point));
+ ASSERT_OK(GeoParser::parseGeoJSONPoint(point2, &point));
+ BSONObj point3 = fromjson("{'type':'Point', 'coordinates': [40, 5], " + badCRS1 + "}");
+ BSONObj point4 = fromjson("{'type':'Point', 'coordinates': [40, 5], " + badCRS2 + "}");
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONPoint(point3, &point));
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONPoint(point4, &point));
+
+ PolygonWithCRS polygon;
+ BSONObj polygon1 = fromjson(
+ "{'type':'Polygon', 'coordinates':[ "
+ "[[0,0],[5,0],[5,5],[0,5],[0,0]],"
+ " [[1,1],[1,4],[4,4],[4,1],[1,1]] ]," +
+ goodCRS1 + "}");
+ ASSERT_OK(GeoParser::parseGeoJSONPolygon(polygon1, &polygon));
+ BSONObj polygon2 = fromjson(
+ "{'type':'Polygon', 'coordinates':[ "
+ "[[0,0],[5,0],[5,5],[0,5],[0,0]],"
+ " [[1,1],[1,4],[4,4],[4,1],[1,1]] ]," +
+ badCRS2 + "}");
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONPolygon(polygon2, &polygon));
+
+ LineWithCRS line;
+ BSONObj line1 =
+ fromjson("{'type':'LineString', 'coordinates':[[1,2], [3,4], [5,6]]," + goodCRS2 + "}");
+ ASSERT_OK(GeoParser::parseGeoJSONLine(line1, &line));
+ BSONObj line2 =
+ fromjson("{'type':'LineString', 'coordinates':[[1,2], [3,4], [5,6]]," + badCRS1 + "}");
+ ASSERT_NOT_OK(GeoParser::parseGeoJSONLine(line2, &line));
+}
- TEST(GeoParser, parseLegacyPoint) {
- PointWithCRS point;
- ASSERT_OK(GeoParser::parseLegacyPoint(BSON_ELT(BSON_ARRAY(0 << 1)), &point));
- ASSERT_NOT_OK(GeoParser::parseLegacyPoint(BSON_ELT(BSON_ARRAY(0)), &point));
- ASSERT_NOT_OK(GeoParser::parseLegacyPoint(BSON_ELT(BSON_ARRAY(0 << 1 << 2)), &point));
- ASSERT_OK(GeoParser::parseLegacyPoint(BSON_ELT(fromjson("{x: 50, y:40}")), &point));
- ASSERT_NOT_OK(GeoParser::parseLegacyPoint(BSON_ELT(fromjson("{x: '50', y:40}")), &point));
- ASSERT_NOT_OK(GeoParser::parseLegacyPoint(BSON_ELT(fromjson("{x: 5, y:40, z:50}")), &point));
- ASSERT_NOT_OK(GeoParser::parseLegacyPoint(BSON_ELT(fromjson("{x: 5}")), &point));
- }
+TEST(GeoParser, parseLegacyPoint) {
+ PointWithCRS point;
+ ASSERT_OK(GeoParser::parseLegacyPoint(BSON_ELT(BSON_ARRAY(0 << 1)), &point));
+ ASSERT_NOT_OK(GeoParser::parseLegacyPoint(BSON_ELT(BSON_ARRAY(0)), &point));
+ ASSERT_NOT_OK(GeoParser::parseLegacyPoint(BSON_ELT(BSON_ARRAY(0 << 1 << 2)), &point));
+ ASSERT_OK(GeoParser::parseLegacyPoint(BSON_ELT(fromjson("{x: 50, y:40}")), &point));
+ ASSERT_NOT_OK(GeoParser::parseLegacyPoint(BSON_ELT(fromjson("{x: '50', y:40}")), &point));
+ ASSERT_NOT_OK(GeoParser::parseLegacyPoint(BSON_ELT(fromjson("{x: 5, y:40, z:50}")), &point));
+ ASSERT_NOT_OK(GeoParser::parseLegacyPoint(BSON_ELT(fromjson("{x: 5}")), &point));
+}
- TEST(GeoParser, parseLegacyPolygon) {
- PolygonWithCRS polygon;
-
- // Parse the object after field name "$polygon"
- ASSERT_OK(GeoParser::parseLegacyPolygon(
- fromjson("[[10,20],[10,40],[30,40],[30,20]]"), &polygon));
- ASSERT(polygon.crs == FLAT);
-
- ASSERT_OK(GeoParser::parseLegacyPolygon(
- fromjson("[[10,20], [10,40], [30,40]]"), &polygon));
- ASSERT(polygon.crs == FLAT);
-
- ASSERT_NOT_OK(GeoParser::parseLegacyPolygon(
- fromjson("[[10,20],[10,40]]"), &polygon));
- ASSERT_NOT_OK(GeoParser::parseLegacyPolygon(
- fromjson("[['10',20],[10,40],[30,40],[30,20]]"), &polygon));
- ASSERT_NOT_OK(GeoParser::parseLegacyPolygon(
- fromjson("[[10,20,30],[10,40],[30,40],[30,20]]"), &polygon));
- ASSERT_OK(GeoParser::parseLegacyPolygon(
- fromjson("{a:{x:40,y:5},b:{x:40,y:6},c:{x:41,y:6},d:{x:41,y:5}}"), &polygon));
- }
+TEST(GeoParser, parseLegacyPolygon) {
+ PolygonWithCRS polygon;
- TEST(GeoParser, parseMultiPoint) {
- mongo::MultiPointWithCRS mp;
-
- ASSERT_OK(GeoParser::parseMultiPoint(
- fromjson("{'type':'MultiPoint','coordinates':[[1,2],[3,4]]}"), &mp));
- ASSERT_EQUALS(mp.points.size(), (size_t)2);
-
- ASSERT_OK(GeoParser::parseMultiPoint(
- fromjson("{'type':'MultiPoint','coordinates':[[3,4]]}"), &mp));
- ASSERT_EQUALS(mp.points.size(), (size_t)1);
-
- ASSERT_OK(GeoParser::parseMultiPoint(
- fromjson("{'type':'MultiPoint','coordinates':[[1,2],[3,4],[5,6],[7,8]]}"), &mp));
- ASSERT_EQUALS(mp.points.size(), (size_t)4);
-
- ASSERT_NOT_OK(GeoParser::parseMultiPoint(
- fromjson("{'type':'MultiPoint','coordinates':[]}"), &mp));
- ASSERT_NOT_OK(GeoParser::parseMultiPoint(
- fromjson("{'type':'MultiPoint','coordinates':[[181,2],[3,4]]}"), &mp));
- ASSERT_NOT_OK(GeoParser::parseMultiPoint(
- fromjson("{'type':'MultiPoint','coordinates':[[1,-91],[3,4]]}"), &mp));
- ASSERT_NOT_OK(GeoParser::parseMultiPoint(
- fromjson("{'type':'MultiPoint','coordinates':[[181,2],[3,'chicken']]}"), &mp));
- }
+ // Parse the object after field name "$polygon"
+ ASSERT_OK(
+ GeoParser::parseLegacyPolygon(fromjson("[[10,20],[10,40],[30,40],[30,20]]"), &polygon));
+ ASSERT(polygon.crs == FLAT);
- TEST(GeoParser, parseMultiLine) {
- mongo::MultiLineWithCRS ml;
-
- ASSERT_OK(GeoParser::parseMultiLine(
- fromjson("{'type':'MultiLineString','coordinates':[ [[1,1],[2,2],[3,3]],"
- "[[4,5],[6,7]]]}"), &ml));
- ASSERT_EQUALS(ml.lines.size(), (size_t)2);
-
- ASSERT_OK(GeoParser::parseMultiLine(
- fromjson("{'type':'MultiLineString','coordinates':[ [[1,1],[2,2]],"
- "[[4,5],[6,7]]]}"), &ml));
- ASSERT_EQUALS(ml.lines.size(), (size_t)2);
-
- ASSERT_OK(GeoParser::parseMultiLine(
- fromjson("{'type':'MultiLineString','coordinates':[ [[1,1],[2,2]]]}"), &ml));
- ASSERT_EQUALS(ml.lines.size(), (size_t)1);
-
- ASSERT_OK(GeoParser::parseMultiLine(
- fromjson("{'type':'MultiLineString','coordinates':[ [[1,1],[2,2]],"
- "[[2,2],[1,1]]]}"), &ml));
- ASSERT_EQUALS(ml.lines.size(), (size_t)2);
-
- ASSERT_NOT_OK(GeoParser::parseMultiLine(
- fromjson("{'type':'MultiLineString','coordinates':[ [[1,1]]]}"), &ml));
- ASSERT_NOT_OK(GeoParser::parseMultiLine(
- fromjson("{'type':'MultiLineString','coordinates':[ [[1,1]],[[1,2],[3,4]]]}"), &ml));
- ASSERT_NOT_OK(GeoParser::parseMultiLine(
- fromjson("{'type':'MultiLineString','coordinates':[ [[181,1],[2,2]]]}"), &ml));
- ASSERT_NOT_OK(GeoParser::parseMultiLine(
- fromjson("{'type':'MultiLineString','coordinates':[ [[181,1],[2,-91]]]}"), &ml));
- }
+ ASSERT_OK(GeoParser::parseLegacyPolygon(fromjson("[[10,20], [10,40], [30,40]]"), &polygon));
+ ASSERT(polygon.crs == FLAT);
- TEST(GeoParser, parseMultiPolygon) {
- mongo::MultiPolygonWithCRS mp;
-
- ASSERT_OK(GeoParser::parseMultiPolygon(
- fromjson("{'type':'MultiPolygon','coordinates':["
- "[[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],"
- "[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],"
- "[[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]"
- "]}"), &mp));
- ASSERT_EQUALS(mp.polygons.size(), (size_t)2);
-
- ASSERT_OK(GeoParser::parseMultiPolygon(
- fromjson("{'type':'MultiPolygon','coordinates':["
- "[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],"
- "[[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]"
- "]}"), &mp));
- ASSERT_EQUALS(mp.polygons.size(), (size_t)1);
- }
+ ASSERT_NOT_OK(GeoParser::parseLegacyPolygon(fromjson("[[10,20],[10,40]]"), &polygon));
+ ASSERT_NOT_OK(
+ GeoParser::parseLegacyPolygon(fromjson("[['10',20],[10,40],[30,40],[30,20]]"), &polygon));
+ ASSERT_NOT_OK(
+ GeoParser::parseLegacyPolygon(fromjson("[[10,20,30],[10,40],[30,40],[30,20]]"), &polygon));
+ ASSERT_OK(GeoParser::parseLegacyPolygon(
+ fromjson("{a:{x:40,y:5},b:{x:40,y:6},c:{x:41,y:6},d:{x:41,y:5}}"), &polygon));
+}
- TEST(GeoParser, parseGeometryCollection) {
- {
- mongo::GeometryCollection gc;
- BSONObj obj = fromjson(
+TEST(GeoParser, parseMultiPoint) {
+ mongo::MultiPointWithCRS mp;
+
+ ASSERT_OK(GeoParser::parseMultiPoint(
+ fromjson("{'type':'MultiPoint','coordinates':[[1,2],[3,4]]}"), &mp));
+ ASSERT_EQUALS(mp.points.size(), (size_t)2);
+
+ ASSERT_OK(
+ GeoParser::parseMultiPoint(fromjson("{'type':'MultiPoint','coordinates':[[3,4]]}"), &mp));
+ ASSERT_EQUALS(mp.points.size(), (size_t)1);
+
+ ASSERT_OK(GeoParser::parseMultiPoint(
+ fromjson("{'type':'MultiPoint','coordinates':[[1,2],[3,4],[5,6],[7,8]]}"), &mp));
+ ASSERT_EQUALS(mp.points.size(), (size_t)4);
+
+ ASSERT_NOT_OK(
+ GeoParser::parseMultiPoint(fromjson("{'type':'MultiPoint','coordinates':[]}"), &mp));
+ ASSERT_NOT_OK(GeoParser::parseMultiPoint(
+ fromjson("{'type':'MultiPoint','coordinates':[[181,2],[3,4]]}"), &mp));
+ ASSERT_NOT_OK(GeoParser::parseMultiPoint(
+ fromjson("{'type':'MultiPoint','coordinates':[[1,-91],[3,4]]}"), &mp));
+ ASSERT_NOT_OK(GeoParser::parseMultiPoint(
+ fromjson("{'type':'MultiPoint','coordinates':[[181,2],[3,'chicken']]}"), &mp));
+}
+
+TEST(GeoParser, parseMultiLine) {
+ mongo::MultiLineWithCRS ml;
+
+ ASSERT_OK(GeoParser::parseMultiLine(
+ fromjson(
+ "{'type':'MultiLineString','coordinates':[ [[1,1],[2,2],[3,3]],"
+ "[[4,5],[6,7]]]}"),
+ &ml));
+ ASSERT_EQUALS(ml.lines.size(), (size_t)2);
+
+ ASSERT_OK(
+ GeoParser::parseMultiLine(fromjson(
+ "{'type':'MultiLineString','coordinates':[ [[1,1],[2,2]],"
+ "[[4,5],[6,7]]]}"),
+ &ml));
+ ASSERT_EQUALS(ml.lines.size(), (size_t)2);
+
+ ASSERT_OK(GeoParser::parseMultiLine(
+ fromjson("{'type':'MultiLineString','coordinates':[ [[1,1],[2,2]]]}"), &ml));
+ ASSERT_EQUALS(ml.lines.size(), (size_t)1);
+
+ ASSERT_OK(
+ GeoParser::parseMultiLine(fromjson(
+ "{'type':'MultiLineString','coordinates':[ [[1,1],[2,2]],"
+ "[[2,2],[1,1]]]}"),
+ &ml));
+ ASSERT_EQUALS(ml.lines.size(), (size_t)2);
+
+ ASSERT_NOT_OK(GeoParser::parseMultiLine(
+ fromjson("{'type':'MultiLineString','coordinates':[ [[1,1]]]}"), &ml));
+ ASSERT_NOT_OK(GeoParser::parseMultiLine(
+ fromjson("{'type':'MultiLineString','coordinates':[ [[1,1]],[[1,2],[3,4]]]}"), &ml));
+ ASSERT_NOT_OK(GeoParser::parseMultiLine(
+ fromjson("{'type':'MultiLineString','coordinates':[ [[181,1],[2,2]]]}"), &ml));
+ ASSERT_NOT_OK(GeoParser::parseMultiLine(
+ fromjson("{'type':'MultiLineString','coordinates':[ [[181,1],[2,-91]]]}"), &ml));
+}
+
+TEST(GeoParser, parseMultiPolygon) {
+ mongo::MultiPolygonWithCRS mp;
+
+ ASSERT_OK(GeoParser::parseMultiPolygon(
+ fromjson(
+ "{'type':'MultiPolygon','coordinates':["
+ "[[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],"
+ "[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],"
+ "[[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]"
+ "]}"),
+ &mp));
+ ASSERT_EQUALS(mp.polygons.size(), (size_t)2);
+
+ ASSERT_OK(GeoParser::parseMultiPolygon(
+ fromjson(
+ "{'type':'MultiPolygon','coordinates':["
+ "[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],"
+ "[[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]"
+ "]}"),
+ &mp));
+ ASSERT_EQUALS(mp.polygons.size(), (size_t)1);
+}
+
+TEST(GeoParser, parseGeometryCollection) {
+ {
+ mongo::GeometryCollection gc;
+ BSONObj obj = fromjson(
"{ 'type': 'GeometryCollection', 'geometries': ["
- "{ 'type': 'Point','coordinates': [100.0,0.0]},"
- "{ 'type': 'LineString', 'coordinates': [ [101.0, 0.0], [102.0, 1.0] ]}"
- "]}");
- ASSERT_OK(GeoParser::parseGeometryCollection(obj, &gc));
- ASSERT_FALSE(gc.supportsContains());
- }
-
- {
- BSONObj obj = fromjson(
+ "{ 'type': 'Point','coordinates': [100.0,0.0]},"
+ "{ 'type': 'LineString', 'coordinates': [ [101.0, 0.0], [102.0, 1.0] ]}"
+ "]}");
+ ASSERT_OK(GeoParser::parseGeometryCollection(obj, &gc));
+ ASSERT_FALSE(gc.supportsContains());
+ }
+
+ {
+ BSONObj obj = fromjson(
"{ 'type': 'GeometryCollection', 'geometries': ["
"{'type':'MultiPolygon','coordinates':["
- "[[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],"
- "[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],"
- "[[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]"
- "]}"
- "]}");
-
- mongo::GeometryCollection gc;
- ASSERT_OK(GeoParser::parseGeometryCollection(obj, &gc));
- ASSERT_TRUE(gc.supportsContains());
- }
-
- {
- BSONObj obj = fromjson(
+ "[[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],"
+ "[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],"
+ "[[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]"
+ "]}"
+ "]}");
+
+ mongo::GeometryCollection gc;
+ ASSERT_OK(GeoParser::parseGeometryCollection(obj, &gc));
+ ASSERT_TRUE(gc.supportsContains());
+ }
+
+ {
+ BSONObj obj = fromjson(
"{ 'type': 'GeometryCollection', 'geometries': ["
"{'type':'Polygon', 'coordinates':[ [[0,0],[0,91],[5,5],[5,0],[0,0]] ]},"
"{'type':'MultiPolygon','coordinates':["
- "[[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],"
- "[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],"
- "[[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]"
- "]}"
- "]}");
- mongo::GeometryCollection gc;
- ASSERT_NOT_OK(GeoParser::parseGeometryCollection(obj, &gc));
- }
-
- {
- BSONObj obj = fromjson(
+ "[[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],"
+ "[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],"
+ "[[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]"
+ "]}"
+ "]}");
+ mongo::GeometryCollection gc;
+ ASSERT_NOT_OK(GeoParser::parseGeometryCollection(obj, &gc));
+ }
+
+ {
+ BSONObj obj = fromjson(
"{ 'type': 'GeometryCollection', 'geometries': ["
"{'type':'Polygon', 'coordinates':[ [[0,0],[0,5],[5,5],[5,0],[0,0]] ]},"
"{'type':'MultiPolygon','coordinates':["
- "[[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],"
- "[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],"
- "[[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]"
- "]}"
- "]}");
-
- mongo::GeometryCollection gc;
- ASSERT_OK(GeoParser::parseGeometryCollection(obj, &gc));
- ASSERT_TRUE(gc.supportsContains());
- }
+ "[[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],"
+ "[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],"
+ "[[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]"
+ "]}"
+ "]}");
+
+ mongo::GeometryCollection gc;
+ ASSERT_OK(GeoParser::parseGeometryCollection(obj, &gc));
+ ASSERT_TRUE(gc.supportsContains());
}
}
+}
diff --git a/src/mongo/db/geo/hash.cpp b/src/mongo/db/geo/hash.cpp
index 318a1cfdcab..60d435812c1 100644
--- a/src/mongo/db/geo/hash.cpp
+++ b/src/mongo/db/geo/hash.cpp
@@ -32,7 +32,7 @@
#include "mongo/db/geo/shapes.h"
#include "mongo/util/mongoutils/str.h"
-#include <algorithm> // for max()
+#include <algorithm> // for max()
#include <iostream>
// So we can get at the str namespace.
@@ -40,791 +40,788 @@ using namespace mongoutils;
namespace mongo {
- using std::stringstream;
+using std::stringstream;
- std::ostream& operator<<(std::ostream &s, const GeoHash &h) {
- return s << h.toString();
- }
-
- /*
- * GeoBitSets fills out various bit patterns that are used by GeoHash.
- * What patterns? Look at the comments next to the fields.
- * TODO(hk): hashedToNormal is still a bit of a mystery.
- */
- class GeoBitSets {
- public:
- GeoBitSets() {
- for (unsigned i = 0; i < 16; i++) {
- unsigned fixed = 0;
- for (int j = 0; j < 4; j++) {
- if (i & (1 << j))
- fixed |= (1 << (j * 2));
- }
- hashedToNormal[fixed] = i;
- }
-
- // Generate all 32 + 1 all-on bit patterns by repeatedly shifting the next bit to the
- // correct position
-
- long long currAllX = 0, currAllY = 0;
- for (int i = 0; i < 64 + 2; i++){
-
- long long thisBit = 1LL << (63 >= i ? 63 - i : 0);
+std::ostream& operator<<(std::ostream& s, const GeoHash& h) {
+ return s << h.toString();
+}
- if (i % 2 == 0) {
- allX[i / 2] = currAllX;
- currAllX |= thisBit;
- } else{
- allY[i / 2] = currAllY;
- currAllY |= thisBit;
- }
+/*
+ * GeoBitSets fills out various bit patterns that are used by GeoHash.
+ * What patterns? Look at the comments next to the fields.
+ * TODO(hk): hashedToNormal is still a bit of a mystery.
+ */
+class GeoBitSets {
+public:
+ GeoBitSets() {
+ for (unsigned i = 0; i < 16; i++) {
+ unsigned fixed = 0;
+ for (int j = 0; j < 4; j++) {
+ if (i & (1 << j))
+ fixed |= (1 << (j * 2));
}
+ hashedToNormal[fixed] = i;
}
- // The 0-th entries of each all[XY] is 0.
- // The i-th entry of allX has i alternating bits turned on starting
- // with the most significant. Example:
- // allX[1] = 8000000000000000
- // allX[2] = a000000000000000
- // allX[3] = a800000000000000
- // Note that 32 + 1 entries are needed, since 0 and 32 are both valid numbers of bits.
- long long allX[33];
- // Same alternating bits but starting with one from the MSB:
- // allY[1] = 4000000000000000
- // allY[2] = 5000000000000000
- // allY[3] = 5400000000000000
- long long allY[33];
-
- unsigned hashedToNormal[256];
- };
-
- // Oh global variables.
- GeoBitSets geoBitSets;
-
- // For i return the i-th most significant bit.
- // masks(0) = 80000..000
- // masks(1) = 40000..000
- // etc.
- // Number of 0s depends on 32 vs. 64 bit.
- inline static int mask32For(const int i) {
- return 1 << (31 - i);
- }
-
- inline static long long mask64For(const int i) {
- return 1LL << (63 - i);
- }
-
- // Binary data is stored in some particular byte ordering that requires this.
- static void copyAndReverse(char *dst, const char *src) {
- for (unsigned a = 0; a < 8; a++) {
- dst[a] = src[7 - a];
- }
- }
-
- // Definition
- unsigned int const GeoHash::kMaxBits = 32;
-
- /* This class maps an x,y coordinate pair to a hash value.
- * This should probably be renamed/generalized so that it's more of a planar hash,
- * and we also have a spherical hash, etc.
- */
- GeoHash::GeoHash() : _hash(0), _bits(0) { }
-
- GeoHash::GeoHash(const string& hash) {
- initFromString(hash.c_str());
- }
-
- GeoHash::GeoHash(const char *s) {
- initFromString(s);
- }
-
- void GeoHash::initFromString(const char *s) {
- int length = strlen(s);
- uassert(16457, "initFromString passed a too-long string", length <= 64);
- uassert(16458, "initFromString passed an odd length string ", 0 == (length % 2));
- _hash = 0;
- // _bits is how many bits for X or Y, not both, so we divide by 2.
- _bits = length / 2;
- for (int i = 0; s[i] != '\0'; ++i)
- if (s[i] == '1')
- setBit(i, 1);
- }
+ // Generate all 32 + 1 all-on bit patterns by repeatedly shifting the next bit to the
+ // correct position
- // This only works if e is BinData.
- GeoHash::GeoHash(const BSONElement& e, unsigned bits) {
- _bits = bits;
- if (e.type() == BinData) {
- int len = 0;
- copyAndReverse((char*)&_hash, e.binData(len));
- verify(len == 8);
- } else {
- cout << "GeoHash bad element: " << e << endl;
- uassert(13047, "wrong type for geo index. if you're using a pre-release version,"
- " need to rebuild index", 0);
- }
- clearUnusedBits();
- }
+ long long currAllX = 0, currAllY = 0;
+ for (int i = 0; i < 64 + 2; i++) {
+ long long thisBit = 1LL << (63 >= i ? 63 - i : 0);
- GeoHash::GeoHash(unsigned x, unsigned y, unsigned bits) {
- verify(bits <= 32);
- _hash = 0;
- _bits = bits;
- for (unsigned i = 0; i < bits; i++) {
- if (isBitSet(x, i)) _hash |= mask64For(i * 2);
- if (isBitSet(y, i)) _hash |= mask64For((i * 2) + 1);
- }
- }
-
- GeoHash::GeoHash(const GeoHash& old) {
- _hash = old._hash;
- _bits = old._bits;
- }
-
- GeoHash::GeoHash(long long hash, unsigned bits) : _hash(hash) , _bits(bits) {
- clearUnusedBits();
- }
-
- // TODO(hk): This is nasty and has no examples.
- void GeoHash::unhash_fast(unsigned *x, unsigned *y) const {
- *x = 0;
- *y = 0;
- const char *c = reinterpret_cast<const char*>(&_hash);
- for (int i = 0; i < 8; i++) {
- unsigned t = (unsigned)(c[i]) & 0x55;
- *y |= (geoBitSets.hashedToNormal[t] << (4 * i));
-
- t = ((unsigned)(c[i]) >> 1) & 0x55;
- *x |= (geoBitSets.hashedToNormal[t] << (4 * i));
- }
- }
-
- void GeoHash::unhash_slow(unsigned *x, unsigned *y) const {
- *x = 0;
- *y = 0;
- for (unsigned i = 0; i < _bits; i++) {
- if (getBitX(i))
- *x |= mask32For(i);
- if (getBitY(i))
- *y |= mask32For(i);
+ if (i % 2 == 0) {
+ allX[i / 2] = currAllX;
+ currAllX |= thisBit;
+ } else {
+ allY[i / 2] = currAllY;
+ currAllY |= thisBit;
+ }
}
}
- void GeoHash::unhash(unsigned *x, unsigned *y) const {
- unhash_fast(x, y);
- }
-
- /** Is the 'bit'-th most significant bit set? (NOT the least significant) */
- bool GeoHash::isBitSet(unsigned val, unsigned bit) {
- return mask32For(bit) & val;
- }
-
- /** Return a GeoHash with one bit of precision lost. */
- GeoHash GeoHash::up() const {
- return GeoHash(_hash, _bits - 1);
- }
-
- bool GeoHash::hasPrefix(const GeoHash& other) const {
- verify(other._bits <= _bits);
- if (other._bits == 0)
- return true;
-
- long long x = other._hash ^ _hash;
- // We only care about the leftmost other._bits (well, really _bits*2 since we have x and
- // y)
- x = x >> (64 - (other._bits * 2));
- return x == 0;
- }
-
- string GeoHash::toString() const {
- StringBuilder buf;
- for (unsigned x = 0; x < _bits * 2; x++)
- buf.append((_hash & mask64For(x)) ? "1" : "0");
- return buf.str();
- }
-
- string GeoHash::toStringHex1() const {
- stringstream ss;
- ss << std::hex << _hash;
- return ss.str();
- }
-
- void GeoHash::setBit(unsigned pos, bool value) {
- verify(pos < _bits * 2);
- const long long mask = mask64For(pos);
- if (value)
- _hash |= mask;
- else // if (_hash & mask)
- _hash &= ~mask;
- }
-
- bool GeoHash::getBit(unsigned pos) const {
- return _hash & mask64For(pos);
- }
-
- bool GeoHash::getBitX(unsigned pos) const {
- verify(pos < 32);
- return getBit(pos * 2);
- }
-
- bool GeoHash::getBitY(unsigned pos) const {
- verify(pos < 32);
- return getBit((pos * 2) + 1);
- }
-
- // TODO(hk): Comment this.
- BSONObj GeoHash::wrap(const char* name) const {
- BSONObjBuilder b(20);
- appendHashMin(&b, name);
- BSONObj o = b.obj();
- if ('\0' == name[0]) verify(o.objsize() == 20);
- return o;
- }
-
- // Do we have a non-trivial GeoHash?
- bool GeoHash::constrains() const {
- return _bits > 0;
- }
-
- // Could our GeoHash have higher precision?
- bool GeoHash::canRefine() const {
- return _bits < 32;
- }
-
- /**
- * Hashing works like this:
- * Divide the world into 4 buckets. Label each one as such:
- * -----------------
- * | | |
- * | | |
- * | 0,1 | 1,1 |
- * -----------------
- * | | |
- * | | |
- * | 0,0 | 1,0 |
- * -----------------
- * We recursively divide each cell, furthermore.
- * The functions below tell us what quadrant we're in *at the finest level
- * of the subdivision.*
- */
- bool GeoHash::atMinX() const {
- return (_hash & geoBitSets.allX[_bits]) == 0;
- }
- bool GeoHash::atMinY() const {
- return (_hash & geoBitSets.allY[_bits]) == 0;
- }
- bool GeoHash::atMaxX() const {
- return (_hash & geoBitSets.allX[_bits]) == geoBitSets.allX[_bits];
- }
- bool GeoHash::atMaxY() const {
- return (_hash & geoBitSets.allY[_bits]) == geoBitSets.allY[_bits];
- }
+    // The 0-th entry of each all[XY] is 0.
+ // The i-th entry of allX has i alternating bits turned on starting
+ // with the most significant. Example:
+ // allX[1] = 8000000000000000
+ // allX[2] = a000000000000000
+ // allX[3] = a800000000000000
+ // Note that 32 + 1 entries are needed, since 0 and 32 are both valid numbers of bits.
+ long long allX[33];
+ // Same alternating bits but starting with one from the MSB:
+ // allY[1] = 4000000000000000
+ // allY[2] = 5000000000000000
+ // allY[3] = 5400000000000000
+ long long allY[33];
+
+ unsigned hashedToNormal[256];
+};
+
+// Oh global variables.
+GeoBitSets geoBitSets;
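A minimal standalone sketch (an editorial aside, not part of this change) of the allX pattern the comments above describe: the i-th entry has i alternating bits set, starting at the most significant bit, which reproduces the hex values listed in the comment.

    #include <cassert>
    #include <cstdint>

    int main() {
        uint64_t allX[33];
        uint64_t cur = 0;
        for (int i = 0; i <= 32; i++) {
            allX[i] = cur;                    // i alternating bits, MSB first
            if (i < 32)
                cur |= 1ULL << (63 - 2 * i);  // next X bit: positions 63, 61, 59, ...
        }
        assert(allX[1] == 0x8000000000000000ULL);
        assert(allX[2] == 0xa000000000000000ULL);
        assert(allX[3] == 0xa800000000000000ULL);
        return 0;
    }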
+
+// For i return the i-th most significant bit.
+// masks(0) = 80000..000
+// masks(1) = 40000..000
+// etc.
+// Number of 0s depends on 32 vs. 64 bit.
+inline static int mask32For(const int i) {
+ return 1 << (31 - i);
+}
+
+inline static long long mask64For(const int i) {
+ return 1LL << (63 - i);
+}
+
+// Binary data is stored in some particular byte ordering that requires this.
+static void copyAndReverse(char* dst, const char* src) {
+ for (unsigned a = 0; a < 8; a++) {
+ dst[a] = src[7 - a];
+ }
+}
+
+// Definition
+unsigned int const GeoHash::kMaxBits = 32;
+
+/* This class maps an x,y coordinate pair to a hash value.
+ * This should probably be renamed/generalized so that it's more of a planar hash,
+ * and we also have a spherical hash, etc.
+ */
+GeoHash::GeoHash() : _hash(0), _bits(0) {}
+
+GeoHash::GeoHash(const string& hash) {
+ initFromString(hash.c_str());
+}
+
+GeoHash::GeoHash(const char* s) {
+ initFromString(s);
+}
+
+void GeoHash::initFromString(const char* s) {
+ int length = strlen(s);
+ uassert(16457, "initFromString passed a too-long string", length <= 64);
+ uassert(16458, "initFromString passed an odd length string ", 0 == (length % 2));
+ _hash = 0;
+ // _bits is how many bits for X or Y, not both, so we divide by 2.
+ _bits = length / 2;
+ for (int i = 0; s[i] != '\0'; ++i)
+ if (s[i] == '1')
+ setBit(i, 1);
+}
+
+// This only works if e is BinData.
+GeoHash::GeoHash(const BSONElement& e, unsigned bits) {
+ _bits = bits;
+ if (e.type() == BinData) {
+ int len = 0;
+ copyAndReverse((char*)&_hash, e.binData(len));
+ verify(len == 8);
+ } else {
+ cout << "GeoHash bad element: " << e << endl;
+ uassert(13047,
+ "wrong type for geo index. if you're using a pre-release version,"
+ " need to rebuild index",
+ 0);
+ }
+ clearUnusedBits();
+}
+
+GeoHash::GeoHash(unsigned x, unsigned y, unsigned bits) {
+ verify(bits <= 32);
+ _hash = 0;
+ _bits = bits;
+ for (unsigned i = 0; i < bits; i++) {
+ if (isBitSet(x, i))
+ _hash |= mask64For(i * 2);
+ if (isBitSet(y, i))
+ _hash |= mask64For((i * 2) + 1);
+ }
+}
+
+GeoHash::GeoHash(const GeoHash& old) {
+ _hash = old._hash;
+ _bits = old._bits;
+}
+
+GeoHash::GeoHash(long long hash, unsigned bits) : _hash(hash), _bits(bits) {
+ clearUnusedBits();
+}
+
+// TODO(hk): This is nasty and has no examples.
+void GeoHash::unhash_fast(unsigned* x, unsigned* y) const {
+ *x = 0;
+ *y = 0;
+ const char* c = reinterpret_cast<const char*>(&_hash);
+ for (int i = 0; i < 8; i++) {
+ unsigned t = (unsigned)(c[i]) & 0x55;
+ *y |= (geoBitSets.hashedToNormal[t] << (4 * i));
+
+ t = ((unsigned)(c[i]) >> 1) & 0x55;
+ *x |= (geoBitSets.hashedToNormal[t] << (4 * i));
+ }
+}
+
+void GeoHash::unhash_slow(unsigned* x, unsigned* y) const {
+ *x = 0;
+ *y = 0;
+ for (unsigned i = 0; i < _bits; i++) {
+ if (getBitX(i))
+ *x |= mask32For(i);
+ if (getBitY(i))
+ *y |= mask32For(i);
+ }
+}
+
+void GeoHash::unhash(unsigned* x, unsigned* y) const {
+ unhash_fast(x, y);
+}
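A short usage sketch (editorial, assuming only the GeoHash API in this file): the constructor interleaves the X bits into the even positions of the 64-bit hash (counting from the most significant bit) and the Y bits into the odd positions, and unhash() recovers the originals at full precision.

    unsigned x = 123456789u, y = 987654321u;
    GeoHash h(x, y, 32);    // interleave: x -> even hash bits, y -> odd hash bits
    unsigned rx = 0, ry = 0;
    h.unhash(&rx, &ry);     // de-interleave via the hashedToNormal lookup table
    // rx == x and ry == y; with fewer bits, only the top 'bits' bits round-trip.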
+
+/** Is the 'bit'-th most significant bit set? (NOT the least significant) */
+bool GeoHash::isBitSet(unsigned val, unsigned bit) {
+ return mask32For(bit) & val;
+}
+
+/** Return a GeoHash with one bit of precision lost. */
+GeoHash GeoHash::up() const {
+ return GeoHash(_hash, _bits - 1);
+}
+
+bool GeoHash::hasPrefix(const GeoHash& other) const {
+ verify(other._bits <= _bits);
+ if (other._bits == 0)
+ return true;
- // TODO(hk): comment better
- void GeoHash::move(int x, int y) {
- verify(_bits);
- _move(0, x);
- _move(1, y);
- }
+ long long x = other._hash ^ _hash;
+    // We only care about the leftmost other._bits bits (well, really other._bits * 2,
+    // since the hash interleaves x and y).
+ x = x >> (64 - (other._bits * 2));
+ return x == 0;
+}
+
+string GeoHash::toString() const {
+ StringBuilder buf;
+ for (unsigned x = 0; x < _bits * 2; x++)
+ buf.append((_hash & mask64For(x)) ? "1" : "0");
+ return buf.str();
+}
+
+string GeoHash::toStringHex1() const {
+ stringstream ss;
+ ss << std::hex << _hash;
+ return ss.str();
+}
+
+void GeoHash::setBit(unsigned pos, bool value) {
+ verify(pos < _bits * 2);
+ const long long mask = mask64For(pos);
+ if (value)
+ _hash |= mask;
+ else // if (_hash & mask)
+ _hash &= ~mask;
+}
+
+bool GeoHash::getBit(unsigned pos) const {
+ return _hash & mask64For(pos);
+}
+
+bool GeoHash::getBitX(unsigned pos) const {
+ verify(pos < 32);
+ return getBit(pos * 2);
+}
+
+bool GeoHash::getBitY(unsigned pos) const {
+ verify(pos < 32);
+ return getBit((pos * 2) + 1);
+}
+
+// TODO(hk): Comment this.
+BSONObj GeoHash::wrap(const char* name) const {
+ BSONObjBuilder b(20);
+ appendHashMin(&b, name);
+ BSONObj o = b.obj();
+ if ('\0' == name[0])
+ verify(o.objsize() == 20);
+ return o;
+}
+
+// Do we have a non-trivial GeoHash?
+bool GeoHash::constrains() const {
+ return _bits > 0;
+}
+
+// Could our GeoHash have higher precision?
+bool GeoHash::canRefine() const {
+ return _bits < 32;
+}
- // TODO(hk): comment much better
- void GeoHash::_move(unsigned offset, int d) {
- if (d == 0)
+/**
+ * Hashing works like this:
+ * Divide the world into 4 buckets. Label each one as such:
+ * -----------------
+ * | | |
+ * | | |
+ * | 0,1 | 1,1 |
+ * -----------------
+ * | | |
+ * | | |
+ * | 0,0 | 1,0 |
+ * -----------------
+ * Each cell is then subdivided recursively in the same way.
+ * The functions below tell us what quadrant we're in *at the finest level
+ * of the subdivision.*
+ */
+bool GeoHash::atMinX() const {
+ return (_hash & geoBitSets.allX[_bits]) == 0;
+}
+bool GeoHash::atMinY() const {
+ return (_hash & geoBitSets.allY[_bits]) == 0;
+}
+bool GeoHash::atMaxX() const {
+ return (_hash & geoBitSets.allX[_bits]) == geoBitSets.allX[_bits];
+}
+bool GeoHash::atMaxY() const {
+ return (_hash & geoBitSets.allY[_bits]) == geoBitSets.allY[_bits];
+}
+
+// TODO(hk): comment better
+void GeoHash::move(int x, int y) {
+ verify(_bits);
+ _move(0, x);
+ _move(1, y);
+}
+
+// TODO(hk): comment much better
+void GeoHash::_move(unsigned offset, int d) {
+ if (d == 0)
+ return;
+ verify(d <= 1 && d >= -1); // TEMP
+
+ bool from, to;
+ if (d > 0) {
+ from = 0;
+ to = 1;
+ } else {
+ from = 1;
+ to = 0;
+ }
+
+ unsigned pos = (_bits * 2) - 1;
+ if (offset == 0)
+ pos--;
+ while (true) {
+ if (getBit(pos) == from) {
+ setBit(pos, to);
return;
- verify(d <= 1 && d>= -1); // TEMP
-
- bool from, to;
- if (d > 0) {
- from = 0;
- to = 1;
- }
- else {
- from = 1;
- to = 0;
}
- unsigned pos = (_bits * 2) - 1;
- if (offset == 0)
- pos--;
- while (true) {
- if (getBit(pos) == from) {
- setBit(pos , to);
- return;
+ if (pos < 2) {
+ // overflow
+ for (; pos < (_bits * 2); pos += 2) {
+ setBit(pos, from);
}
-
- if (pos < 2) {
- // overflow
- for (; pos < (_bits * 2) ; pos += 2) {
- setBit(pos , from);
- }
- return;
- }
-
- setBit(pos , from);
- pos -= 2;
- }
-
- verify(0);
- }
-
- GeoHash& GeoHash::operator=(const GeoHash& h) {
- _hash = h._hash;
- _bits = h._bits;
- return *this;
- }
-
- bool GeoHash::operator==(const GeoHash& h) const {
- return _hash == h._hash && _bits == h._bits;
- }
-
- bool GeoHash::operator!=(const GeoHash& h) const {
- return !(*this == h);
- }
-
- bool GeoHash::operator<(const GeoHash& h) const {
-
- if (_hash != h._hash) {
- return static_cast<unsigned long long>(_hash) <
- static_cast<unsigned long long>(h._hash);
- }
-
- return _bits < h._bits;
- }
-
- // Append the hash in s to our current hash. We expect s to be '0' or '1' or '\0',
- // though we also treat non-'1' values as '0'.
- GeoHash& GeoHash::operator+=(const char* s) {
- unsigned pos = _bits * 2;
- _bits += strlen(s) / 2;
- verify(_bits <= 32);
- while ('\0' != s[0]) {
- if (s[0] == '1')
- setBit(pos , 1);
- pos++;
- s++;
- }
- return *this;
- }
-
- GeoHash GeoHash::operator+(const char *s) const {
- GeoHash n = *this;
- n += s;
- return n;
- }
-
- GeoHash GeoHash::operator+(const std::string& s) const {
- return operator+(s.c_str());
- }
-
- /*
- * Keep the upper _bits*2 bits of _hash, clear the lower bits.
- * Maybe there's junk in there? Not sure why this is done.
- */
- void GeoHash::clearUnusedBits() {
- // Left shift count should be less than 64
- if (_bits == 0) {
- _hash = 0;
return;
}
- static long long FULL = 0xFFFFFFFFFFFFFFFFLL;
- long long mask = FULL << (64 - (_bits * 2));
- _hash &= mask;
- }
-
- static void appendHashToBuilder(long long hash,
- BSONObjBuilder* builder,
- const char* fieldName) {
- char buf[8];
- copyAndReverse(buf, (char*) &hash);
- builder->appendBinData(fieldName, 8, bdtCustom, buf);
- }
-
- void GeoHash::appendHashMin(BSONObjBuilder* builder, const char* fieldName) const {
- // The min bound of a GeoHash region has all the unused suffix bits set to 0
- appendHashToBuilder(_hash, builder, fieldName);
- }
-
- void GeoHash::appendHashMax(BSONObjBuilder* builder, const char* fieldName) const {
- // The max bound of a GeoHash region has all the unused suffix bits set to 1
- long long suffixMax = ~(geoBitSets.allX[_bits] | geoBitSets.allY[_bits]);
- long long hashMax = _hash | suffixMax;
-
- appendHashToBuilder(hashMax, builder, fieldName);
- }
-
- long long GeoHash::getHash() const {
- return _hash;
- }
-
- unsigned GeoHash::getBits() const {
- return _bits;
- }
-
- GeoHash GeoHash::commonPrefix(const GeoHash& other) const {
- unsigned i = 0;
- for (; i < _bits && i < other._bits; i++) {
- if (getBitX(i) == other.getBitX(i) && getBitY(i) == other.getBitY(i))
- continue;
- break;
- }
- // i is how many bits match between this and other.
- return GeoHash(_hash, i);
- }
-
-
- bool GeoHash::subdivide( GeoHash children[4] ) const {
- if ( _bits == 32 ) {
- return false;
- }
-
- children[0] = GeoHash( _hash, _bits + 1 ); // (0, 0)
- children[1] = children[0];
- children[1].setBit( _bits * 2 + 1, 1 ); // (0, 1)
- children[2] = children[0];
- children[2].setBit( _bits * 2, 1 ); // (1, 0)
- children[3] = GeoHash(children[1]._hash | children[2]._hash, _bits + 1); // (1, 1)
- return true;
- }
-
- bool GeoHash::contains(const GeoHash& other) const {
- return _bits <= other._bits && other.hasPrefix(*this);
- }
-
- GeoHash GeoHash::parent(unsigned int level) const {
- return GeoHash(_hash, level);
- }
-
- GeoHash GeoHash::parent() const {
- verify(_bits > 0);
- return GeoHash(_hash, _bits - 1);
- }
-
-
- void GeoHash::appendVertexNeighbors(unsigned level, vector<GeoHash>* output) const {
- invariant(level >= 0 && level < _bits);
-
- // Parent at the given level.
- GeoHash parentHash = parent(level);
- output->push_back(parentHash);
-
- // Generate the neighbors of parent that are closest to me.
- unsigned px, py, parentBits;
- parentHash.unhash(&px, &py);
- parentBits = parentHash.getBits();
-
- // No Neighbors for the top level.
- if (parentBits == 0U) return;
-
- // Position in parent
- // Y
- // ^
- // | 01, 11
- // | 00, 10
- // +----------> X
- // We can guarantee _bits > 0.
- long long posInParent = (_hash >> (64 - 2 * (parentBits + 1))) & 3LL;
-
- // 1 bit at parent's level, the least significant bit of parent.
- unsigned parentMask = 1U << (32 - parentBits);
-
- // Along X Axis
- if ((posInParent & 2LL) == 0LL) {
- // Left side of parent, X - 1
- if (!parentHash.atMinX()) output->push_back(GeoHash(px - parentMask, py, parentBits));
- } else {
- // Right side of parent, X + 1
- if (!parentHash.atMaxX()) output->push_back(GeoHash(px + parentMask, py, parentBits));
- }
-
- // Along Y Axis
- if ((posInParent & 1LL) == 0LL) {
- // Bottom of parent, Y - 1
- if (!parentHash.atMinY()) output->push_back(GeoHash(px, py - parentMask, parentBits));
- } else {
- // Top of parent, Y + 1
- if (!parentHash.atMaxY()) output->push_back(GeoHash(px, py + parentMask, parentBits));
- }
-
- // Four corners
- if (posInParent == 0LL) {
- if (!parentHash.atMinX() && !parentHash.atMinY())
- output->push_back(GeoHash(px - parentMask, py - parentMask, parentBits));
- } else if (posInParent == 1LL) {
- if (!parentHash.atMinX() && !parentHash.atMaxY())
- output->push_back(GeoHash(px - parentMask, py + parentMask, parentBits));
- } else if (posInParent == 2LL) {
- if (!parentHash.atMaxX() && !parentHash.atMinY())
- output->push_back(GeoHash(px + parentMask, py - parentMask, parentBits));
- } else {
- // PosInParent == 3LL
- if (!parentHash.atMaxX() && !parentHash.atMaxY())
- output->push_back(GeoHash(px + parentMask, py + parentMask, parentBits));
- }
- }
-
- static BSONField<int> bitsField("bits", 26);
- static BSONField<double> maxField("max", 180.0);
- static BSONField<double> minField("min", -180.0);
-
- // a x b
- // | | |
- // -----|---o-----|---------|-- "|" is a representable double number.
- //
- // In the above figure, b is the next representable double number after a, so
- // |a - b|/|a| = epsilon (ULP) ~= 2.22E-16.
- //
- // An exact number x will be represented as the nearest representable double, which is a.
- // |x - a|/|a| <= 0.5 ULP ~= 1.11e-16
- //
- // IEEE floating-point operations have a maximum error of 0.5 ULPS (units in
- // the last place). For double-precision numbers, this works out to 2**-53
- // (about 1.11e-16) times the magnitude of the result.
- double const GeoHashConverter::kMachinePrecision = 0.5 * std::numeric_limits<double>::epsilon();
-
- Status GeoHashConverter::parseParameters(const BSONObj& paramDoc,
- GeoHashConverter::Parameters* params) {
-
- string errMsg;
-
- if (FieldParser::FIELD_INVALID
- == FieldParser::extractNumber(paramDoc, bitsField, &params->bits, &errMsg)) {
- return Status(ErrorCodes::InvalidOptions, errMsg);
- }
-
- if (FieldParser::FIELD_INVALID
- == FieldParser::extractNumber(paramDoc, maxField, &params->max, &errMsg)) {
- return Status(ErrorCodes::InvalidOptions, errMsg);
- }
-
- if (FieldParser::FIELD_INVALID
- == FieldParser::extractNumber(paramDoc, minField, &params->min, &errMsg)) {
- return Status(ErrorCodes::InvalidOptions, errMsg);
- }
-
- if (params->bits < 1 || params->bits > 32) {
- return Status(ErrorCodes::InvalidOptions,
- str::stream() << "bits for hash must be > 0 and <= 32, "
- << "but " << params->bits << " bits were specified");
- }
-
- if (params->min >= params->max) {
- return Status(ErrorCodes::InvalidOptions,
- str::stream() << "region for hash must be valid and have positive area, "
- << "but [" << params->min << ", " << params->max << "] "
- << "was specified");
- }
-
- double numBuckets = (1024 * 1024 * 1024 * 4.0);
- params->scaling = numBuckets / (params->max - params->min);
-
- return Status::OK();
- }
-
- GeoHashConverter::GeoHashConverter(const Parameters& params) : _params(params) {
- init();
- }
-
- void GeoHashConverter::init() {
- // TODO(hk): What do we require of the values in params?
-
- // Compute how much error there is so it can be used as a fudge factor.
- GeoHash a(0, 0, _params.bits);
- GeoHash b = a;
- b.move(1, 1);
-
- // Epsilon is 1/100th of a bucket size
- // TODO: Can we actually find error bounds for the sqrt function?
- double epsilon = 0.001 / _params.scaling;
- _error = distanceBetweenHashes(a, b) + epsilon;
-
- // Error in radians
- _errorSphere = deg2rad(_error);
-
- // 8 * max(|max|, |min|) * u
- _errorUnhashToBox = calcUnhashToBoxError(_params);
+ setBit(pos, from);
+ pos -= 2;
}
- double GeoHashConverter::distanceBetweenHashes(const GeoHash& a, const GeoHash& b) const {
- double ax, ay, bx, by;
- unhash(a, &ax, &ay);
- unhash(b, &bx, &by);
-
- double dx = bx - ax;
- double dy = by - ay;
-
- return sqrt((dx * dx) + (dy * dy));
- }
+ verify(0);
+}
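A small sketch (editorial) of what move() does at the bit level: stepping +1 in X is a binary increment carried through the interleaved X bits only, leaving the Y bits untouched.

    GeoHash h(0u, 0u, 2);    // two bits per axis, all zero: "0000"
    h.move(1, 0);            // step one cell in +X at the finest level
    // h.toString() == "0010": only the least significant X bit flipped.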
- /**
- * Hashing functions. Convert the following types (which have a double precision point)
- * to a GeoHash:
- * BSONElement
- * BSONObj
- * Point
- * double, double
- */
-
- GeoHash GeoHashConverter::hash(const Point &p) const {
- return hash(p.x, p.y);
- }
+GeoHash& GeoHash::operator=(const GeoHash& h) {
+ _hash = h._hash;
+ _bits = h._bits;
+ return *this;
+}
- GeoHash GeoHashConverter::hash(const BSONElement& e) const {
- if (e.isABSONObj())
- return hash(e.embeddedObject());
- return GeoHash(e, _params.bits);
- }
+bool GeoHash::operator==(const GeoHash& h) const {
+ return _hash == h._hash && _bits == h._bits;
+}
- GeoHash GeoHashConverter::hash(const BSONObj& o) const {
- return hash(o, NULL);
- }
+bool GeoHash::operator!=(const GeoHash& h) const {
+ return !(*this == h);
+}
- // src is printed out as debugging information. Maybe it is actually somehow the 'source' of o?
- GeoHash GeoHashConverter::hash(const BSONObj& o, const BSONObj* src) const {
- BSONObjIterator i(o);
- uassert(13067,
- str::stream() << "geo field is empty"
- << (src ? causedBy((*src).toString()) : ""),
- i.more());
-
- BSONElement x = i.next();
- uassert(13068,
- str::stream() << "geo field only has 1 element"
- << causedBy(src ? (*src).toString() : x.toString()),
- i.more());
-
- BSONElement y = i.next();
- uassert(13026,
- str::stream() << "geo values must be 'legacy coordinate pairs' for 2d indexes"
- << causedBy(src ? (*src).toString() :
- BSON_ARRAY(x << y).toString()),
- x.isNumber() && y.isNumber());
-
- uassert(13027,
- str::stream() << "point not in interval of [ " << _params.min << ", "
- << _params.max << " ]"
- << causedBy(src ? (*src).toString() :
- BSON_ARRAY(x.number() << y.number()).toString()),
- x.number() <= _params.max && x.number() >= _params.min &&
- y.number() <= _params.max && y.number() >= _params.min);
-
- return GeoHash(convertToHashScale(x.number()), convertToHashScale(y.number()),
- _params.bits);
+bool GeoHash::operator<(const GeoHash& h) const {
+ if (_hash != h._hash) {
+ return static_cast<unsigned long long>(_hash) < static_cast<unsigned long long>(h._hash);
}
- GeoHash GeoHashConverter::hash(double x, double y) const {
- uassert(16433,
- str::stream() << "point not in interval of [ " << _params.min << ", "
- << _params.max << " ]"
- << causedBy(BSON_ARRAY(x << y).toString()),
- x <= _params.max && x >= _params.min &&
- y <= _params.max && y >= _params.min);
-
- return GeoHash(convertToHashScale(x), convertToHashScale(y) , _params.bits);
- }
+ return _bits < h._bits;
+}
- /**
- * Unhashing functions. These convert from a "discretized" GeoHash to the "continuous"
- * doubles according to our scaling parameters.
- *
- * Possible outputs:
- * double, double
- * Point
- * BSONObj
- */
- // TODO(hk): these should have consistent naming
- Point GeoHashConverter::unhashToPoint(const GeoHash &h) const {
- Point point;
- unhash(h, &point.x, &point.y);
- return point;
+// Append the hash in s to our current hash. We expect s to be '0' or '1' or '\0',
+// though we also treat non-'1' values as '0'.
+GeoHash& GeoHash::operator+=(const char* s) {
+ unsigned pos = _bits * 2;
+ _bits += strlen(s) / 2;
+ verify(_bits <= 32);
+ while ('\0' != s[0]) {
+ if (s[0] == '1')
+ setBit(pos, 1);
+ pos++;
+ s++;
}
+ return *this;
+}
- Point GeoHashConverter::unhashToPoint(const BSONElement &e) const {
- return unhashToPoint(hash(e));
- }
+GeoHash GeoHash::operator+(const char* s) const {
+ GeoHash n = *this;
+ n += s;
+ return n;
+}
- BSONObj GeoHashConverter::unhashToBSONObj(const GeoHash& h) const {
- unsigned x, y;
- h.unhash(&x, &y);
- BSONObjBuilder b;
- b.append("x", convertFromHashScale(x));
- b.append("y", convertFromHashScale(y));
- return b.obj();
- }
+GeoHash GeoHash::operator+(const std::string& s) const {
+ return operator+(s.c_str());
+}
- void GeoHashConverter::unhash(const GeoHash &h, double *x, double *y) const {
- unsigned a, b;
- h.unhash(&a, &b);
- *x = convertFromHashScale(a);
- *y = convertFromHashScale(b);
- }
-
- Box GeoHashConverter::unhashToBoxCovering(const GeoHash &h) const {
- if (h.getBits() == 0) {
- // Return the result without any error.
- return Box(Point(_params.min, _params.min), Point(_params.max, _params.max));
- }
-
- double sizeEdgeBox = sizeEdge(h.getBits());
- Point min(unhashToPoint(h));
- Point max(min.x + sizeEdgeBox, min.y + sizeEdgeBox);
-
- // Expand the box by the error bound
- Box box(min, max);
- box.fudge(_errorUnhashToBox);
- return box;
- }
-
- double GeoHashConverter::calcUnhashToBoxError(const GeoHashConverter::Parameters& params) {
- return std::max(fabs(params.min), fabs(params.max))
- * GeoHashConverter::kMachinePrecision* 8;
- }
+/*
+ * Keep the upper _bits*2 bits of _hash, clear the lower bits.
+ * Maybe there's junk in there? Not sure why this is done.
+ */
+void GeoHash::clearUnusedBits() {
+ // Left shift count should be less than 64
+ if (_bits == 0) {
+ _hash = 0;
+ return;
+ }
- double GeoHashConverter::sizeOfDiag(const GeoHash& a) const {
- GeoHash b = a;
- b.move(1, 1);
- return distanceBetweenHashes(a, b);
- }
+ static long long FULL = 0xFFFFFFFFFFFFFFFFLL;
+ long long mask = FULL << (64 - (_bits * 2));
+ _hash &= mask;
+}
+
+static void appendHashToBuilder(long long hash, BSONObjBuilder* builder, const char* fieldName) {
+ char buf[8];
+ copyAndReverse(buf, (char*)&hash);
+ builder->appendBinData(fieldName, 8, bdtCustom, buf);
+}
+
+void GeoHash::appendHashMin(BSONObjBuilder* builder, const char* fieldName) const {
+ // The min bound of a GeoHash region has all the unused suffix bits set to 0
+ appendHashToBuilder(_hash, builder, fieldName);
+}
+
+void GeoHash::appendHashMax(BSONObjBuilder* builder, const char* fieldName) const {
+ // The max bound of a GeoHash region has all the unused suffix bits set to 1
+ long long suffixMax = ~(geoBitSets.allX[_bits] | geoBitSets.allY[_bits]);
+ long long hashMax = _hash | suffixMax;
+
+ appendHashToBuilder(hashMax, builder, fieldName);
+}
+
+long long GeoHash::getHash() const {
+ return _hash;
+}
+
+unsigned GeoHash::getBits() const {
+ return _bits;
+}
+
+GeoHash GeoHash::commonPrefix(const GeoHash& other) const {
+ unsigned i = 0;
+ for (; i < _bits && i < other._bits; i++) {
+ if (getBitX(i) == other.getBitX(i) && getBitY(i) == other.getBitY(i))
+ continue;
+ break;
+ }
+ // i is how many bits match between this and other.
+ return GeoHash(_hash, i);
+}
+
+
+bool GeoHash::subdivide(GeoHash children[4]) const {
+ if (_bits == 32) {
+ return false;
+ }
+
+ children[0] = GeoHash(_hash, _bits + 1); // (0, 0)
+ children[1] = children[0];
+ children[1].setBit(_bits * 2 + 1, 1); // (0, 1)
+ children[2] = children[0];
+ children[2].setBit(_bits * 2, 1); // (1, 0)
+ children[3] = GeoHash(children[1]._hash | children[2]._hash, _bits + 1); // (1, 1)
+ return true;
+}
+
+bool GeoHash::contains(const GeoHash& other) const {
+ return _bits <= other._bits && other.hasPrefix(*this);
+}
+
+GeoHash GeoHash::parent(unsigned int level) const {
+ return GeoHash(_hash, level);
+}
+
+GeoHash GeoHash::parent() const {
+ verify(_bits > 0);
+ return GeoHash(_hash, _bits - 1);
+}
+
+
+void GeoHash::appendVertexNeighbors(unsigned level, vector<GeoHash>* output) const {
+ invariant(level >= 0 && level < _bits);
+
+ // Parent at the given level.
+ GeoHash parentHash = parent(level);
+ output->push_back(parentHash);
+
+ // Generate the neighbors of parent that are closest to me.
+ unsigned px, py, parentBits;
+ parentHash.unhash(&px, &py);
+ parentBits = parentHash.getBits();
+
+    // No neighbors for the top level.
+ if (parentBits == 0U)
+ return;
+
+ // Position in parent
+ // Y
+ // ^
+ // | 01, 11
+ // | 00, 10
+ // +----------> X
+ // We can guarantee _bits > 0.
+ long long posInParent = (_hash >> (64 - 2 * (parentBits + 1))) & 3LL;
+
+ // 1 bit at parent's level, the least significant bit of parent.
+ unsigned parentMask = 1U << (32 - parentBits);
+
+ // Along X Axis
+ if ((posInParent & 2LL) == 0LL) {
+ // Left side of parent, X - 1
+ if (!parentHash.atMinX())
+ output->push_back(GeoHash(px - parentMask, py, parentBits));
+ } else {
+ // Right side of parent, X + 1
+ if (!parentHash.atMaxX())
+ output->push_back(GeoHash(px + parentMask, py, parentBits));
+ }
+
+ // Along Y Axis
+ if ((posInParent & 1LL) == 0LL) {
+ // Bottom of parent, Y - 1
+ if (!parentHash.atMinY())
+ output->push_back(GeoHash(px, py - parentMask, parentBits));
+ } else {
+ // Top of parent, Y + 1
+ if (!parentHash.atMaxY())
+ output->push_back(GeoHash(px, py + parentMask, parentBits));
+ }
+
+ // Four corners
+ if (posInParent == 0LL) {
+ if (!parentHash.atMinX() && !parentHash.atMinY())
+ output->push_back(GeoHash(px - parentMask, py - parentMask, parentBits));
+ } else if (posInParent == 1LL) {
+ if (!parentHash.atMinX() && !parentHash.atMaxY())
+ output->push_back(GeoHash(px - parentMask, py + parentMask, parentBits));
+ } else if (posInParent == 2LL) {
+ if (!parentHash.atMaxX() && !parentHash.atMinY())
+ output->push_back(GeoHash(px + parentMask, py - parentMask, parentBits));
+ } else {
+ // PosInParent == 3LL
+ if (!parentHash.atMaxX() && !parentHash.atMaxY())
+ output->push_back(GeoHash(px + parentMask, py + parentMask, parentBits));
+ }
+}
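An illustrative sketch (editorial) of the logic above for an interior cell: "1100" is the lower-left child of its parent "11", so the closest vertex is the parent's lower-left corner, and the function emits the parent plus the three cells across that vertex.

    std::vector<GeoHash> out;
    GeoHash("1100").appendVertexNeighbors(1 /* level */, &out);
    // out holds "11" (the parent), "01" (X - 1), "10" (Y - 1), "00" (the diagonal).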
+
+static BSONField<int> bitsField("bits", 26);
+static BSONField<double> maxField("max", 180.0);
+static BSONField<double> minField("min", -180.0);
+
+// a x b
+// | | |
+// -----|---o-----|---------|-- "|" is a representable double number.
+//
+// In the above figure, b is the next representable double number after a, so
+// |a - b|/|a| = epsilon (ULP) ~= 2.22E-16.
+//
+// An exact number x will be represented as the nearest representable double, which is a.
+// |x - a|/|a| <= 0.5 ULP ~= 1.11e-16
+//
+// IEEE floating-point operations have a maximum error of 0.5 ULPS (units in
+// the last place). For double-precision numbers, this works out to 2**-53
+// (about 1.11e-16) times the magnitude of the result.
+double const GeoHashConverter::kMachinePrecision = 0.5 * std::numeric_limits<double>::epsilon();
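A worked example (editorial) of the bound this constant feeds into, for the default [-180, 180] range:

    double u = 0.5 * std::numeric_limits<double>::epsilon();        // ~1.11e-16
    double boxError = 8 * std::max(fabs(-180.0), fabs(180.0)) * u;  // ~1.6e-13
    // This is the fudge factor calcUnhashToBoxError() computes below and init()
    // stores in _errorUnhashToBox.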
+
+Status GeoHashConverter::parseParameters(const BSONObj& paramDoc,
+ GeoHashConverter::Parameters* params) {
+ string errMsg;
+
+ if (FieldParser::FIELD_INVALID ==
+ FieldParser::extractNumber(paramDoc, bitsField, &params->bits, &errMsg)) {
+ return Status(ErrorCodes::InvalidOptions, errMsg);
+ }
+
+ if (FieldParser::FIELD_INVALID ==
+ FieldParser::extractNumber(paramDoc, maxField, &params->max, &errMsg)) {
+ return Status(ErrorCodes::InvalidOptions, errMsg);
+ }
+
+ if (FieldParser::FIELD_INVALID ==
+ FieldParser::extractNumber(paramDoc, minField, &params->min, &errMsg)) {
+ return Status(ErrorCodes::InvalidOptions, errMsg);
+ }
+
+ if (params->bits < 1 || params->bits > 32) {
+ return Status(ErrorCodes::InvalidOptions,
+ str::stream() << "bits for hash must be > 0 and <= 32, "
+ << "but " << params->bits << " bits were specified");
+ }
+
+ if (params->min >= params->max) {
+ return Status(ErrorCodes::InvalidOptions,
+ str::stream() << "region for hash must be valid and have positive area, "
+ << "but [" << params->min << ", " << params->max << "] "
+ << "was specified");
+ }
+
+ double numBuckets = (1024 * 1024 * 1024 * 4.0);
+ params->scaling = numBuckets / (params->max - params->min);
+
+ return Status::OK();
+}
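A worked example (editorial) of the scaling computed above, for the default 2d bounds of min = -180 and max = 180:

    double numBuckets = 1024 * 1024 * 1024 * 4.0;      // 2^32 hash buckets per axis
    double scaling = numBuckets / (180.0 - (-180.0));  // ~11,930,464.7 buckets per degree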
+
+GeoHashConverter::GeoHashConverter(const Parameters& params) : _params(params) {
+ init();
+}
+
+void GeoHashConverter::init() {
+ // TODO(hk): What do we require of the values in params?
+
+ // Compute how much error there is so it can be used as a fudge factor.
+ GeoHash a(0, 0, _params.bits);
+ GeoHash b = a;
+ b.move(1, 1);
+
+ // Epsilon is 1/100th of a bucket size
+ // TODO: Can we actually find error bounds for the sqrt function?
+ double epsilon = 0.001 / _params.scaling;
+ _error = distanceBetweenHashes(a, b) + epsilon;
+
+ // Error in radians
+ _errorSphere = deg2rad(_error);
+
+ // 8 * max(|max|, |min|) * u
+ _errorUnhashToBox = calcUnhashToBoxError(_params);
+}
+double GeoHashConverter::distanceBetweenHashes(const GeoHash& a, const GeoHash& b) const {
+ double ax, ay, bx, by;
+ unhash(a, &ax, &ay);
+ unhash(b, &bx, &by);
- // Relative error = epsilon_(max-min). ldexp() is just a direct translation to
- // floating point exponent, and should be exact.
- double GeoHashConverter::sizeEdge(unsigned level) const {
- invariant(level >= 0);
- invariant((int)level <= _params.bits);
- return ldexp(_params.max - _params.min, -level);
- }
+ double dx = bx - ax;
+ double dy = by - ay;
- // Convert from a double in [0, (max-min)*scaling] to [min, max]
- double GeoHashConverter::convertDoubleFromHashScale(double x) const {
- x /= _params.scaling;
- x += _params.min;
- return x;
- }
+ return sqrt((dx * dx) + (dy * dy));
+}
- // Convert from an unsigned in [0, (max-min)*scaling] to [min, max]
- double GeoHashConverter::convertFromHashScale(unsigned in) const {
- return convertDoubleFromHashScale((double)in);
- }
-
- // Convert from a double that is [min, max] to a double in [0, (max-min)*scaling]
- double GeoHashConverter::convertToDoubleHashScale(double in) const {
- verify(in <= _params.max && in >= _params.min);
+/**
+ * Hashing functions. Convert the following types (which have a double precision point)
+ * to a GeoHash:
+ * BSONElement
+ * BSONObj
+ * Point
+ * double, double
+ */
- if (in == _params.max) {
- // prevent aliasing with _min by moving inside the "box"
- // makes 180 == 179.999 (roughly)
- in -= _error / 2;
- }
+GeoHash GeoHashConverter::hash(const Point& p) const {
+ return hash(p.x, p.y);
+}
+
+GeoHash GeoHashConverter::hash(const BSONElement& e) const {
+ if (e.isABSONObj())
+ return hash(e.embeddedObject());
+ return GeoHash(e, _params.bits);
+}
+
+GeoHash GeoHashConverter::hash(const BSONObj& o) const {
+ return hash(o, NULL);
+}
+
+// src is printed out as debugging information. Maybe it is actually somehow the 'source' of o?
+GeoHash GeoHashConverter::hash(const BSONObj& o, const BSONObj* src) const {
+ BSONObjIterator i(o);
+ uassert(13067,
+ str::stream() << "geo field is empty" << (src ? causedBy((*src).toString()) : ""),
+ i.more());
+
+ BSONElement x = i.next();
+ uassert(13068,
+ str::stream() << "geo field only has 1 element"
+ << causedBy(src ? (*src).toString() : x.toString()),
+ i.more());
+
+ BSONElement y = i.next();
+ uassert(13026,
+ str::stream() << "geo values must be 'legacy coordinate pairs' for 2d indexes"
+ << causedBy(src ? (*src).toString() : BSON_ARRAY(x << y).toString()),
+ x.isNumber() && y.isNumber());
+
+ uassert(13027,
+ str::stream() << "point not in interval of [ " << _params.min << ", " << _params.max
+ << " ]"
+ << causedBy(src ? (*src).toString()
+ : BSON_ARRAY(x.number() << y.number()).toString()),
+ x.number() <= _params.max && x.number() >= _params.min && y.number() <= _params.max &&
+ y.number() >= _params.min);
+
+ return GeoHash(convertToHashScale(x.number()), convertToHashScale(y.number()), _params.bits);
+}
+
+GeoHash GeoHashConverter::hash(double x, double y) const {
+ uassert(16433,
+ str::stream() << "point not in interval of [ " << _params.min << ", " << _params.max
+ << " ]" << causedBy(BSON_ARRAY(x << y).toString()),
+ x <= _params.max && x >= _params.min && y <= _params.max && y >= _params.min);
+
+ return GeoHash(convertToHashScale(x), convertToHashScale(y), _params.bits);
+}
- in -= _params.min;
- verify(in >= 0);
- return in * _params.scaling;
- }
-
- // Convert from a double that is [min, max] to an unsigned in [0, (max-min)*scaling]
- unsigned GeoHashConverter::convertToHashScale(double in) const {
- return static_cast<unsigned>(convertToDoubleHashScale(in));
- }
+/**
+ * Unhashing functions. These convert from a "discretized" GeoHash to the "continuous"
+ * doubles according to our scaling parameters.
+ *
+ * Possible outputs:
+ * double, double
+ * Point
+ * BSONObj
+ */
+// TODO(hk): these should have consistent naming
+Point GeoHashConverter::unhashToPoint(const GeoHash& h) const {
+ Point point;
+ unhash(h, &point.x, &point.y);
+ return point;
+}
+
+Point GeoHashConverter::unhashToPoint(const BSONElement& e) const {
+ return unhashToPoint(hash(e));
+}
+
+BSONObj GeoHashConverter::unhashToBSONObj(const GeoHash& h) const {
+ unsigned x, y;
+ h.unhash(&x, &y);
+ BSONObjBuilder b;
+ b.append("x", convertFromHashScale(x));
+ b.append("y", convertFromHashScale(y));
+ return b.obj();
+}
+
+void GeoHashConverter::unhash(const GeoHash& h, double* x, double* y) const {
+ unsigned a, b;
+ h.unhash(&a, &b);
+ *x = convertFromHashScale(a);
+ *y = convertFromHashScale(b);
+}
+
+Box GeoHashConverter::unhashToBoxCovering(const GeoHash& h) const {
+ if (h.getBits() == 0) {
+ // Return the result without any error.
+ return Box(Point(_params.min, _params.min), Point(_params.max, _params.max));
+ }
+
+ double sizeEdgeBox = sizeEdge(h.getBits());
+ Point min(unhashToPoint(h));
+ Point max(min.x + sizeEdgeBox, min.y + sizeEdgeBox);
+
+ // Expand the box by the error bound
+ Box box(min, max);
+ box.fudge(_errorUnhashToBox);
+ return box;
+}
+
+double GeoHashConverter::calcUnhashToBoxError(const GeoHashConverter::Parameters& params) {
+ return std::max(fabs(params.min), fabs(params.max)) * GeoHashConverter::kMachinePrecision * 8;
+}
+
+double GeoHashConverter::sizeOfDiag(const GeoHash& a) const {
+ GeoHash b = a;
+ b.move(1, 1);
+ return distanceBetweenHashes(a, b);
+}
+
+
+// Relative error = epsilon_(max-min). ldexp() is just a direct translation to
+// floating point exponent, and should be exact.
+double GeoHashConverter::sizeEdge(unsigned level) const {
+ invariant(level >= 0);
+ invariant((int)level <= _params.bits);
+ return ldexp(_params.max - _params.min, -level);
+}
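A worked example (editorial) for the default [-180, 180] range: ldexp(w, -level) is exactly w / 2^level, so the edge length halves with every additional level of subdivision.

    double w = 180.0 - (-180.0);    // 360 degrees across the whole range
    double edge0 = ldexp(w, 0);     // level 0: 360, one cell covers everything
    double edge26 = ldexp(w, -26);  // level 26: ~5.36e-6 degrees per cell edge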
+
+// Convert from a double in [0, (max-min)*scaling] to [min, max]
+double GeoHashConverter::convertDoubleFromHashScale(double x) const {
+ x /= _params.scaling;
+ x += _params.min;
+ return x;
+}
+
+// Convert from an unsigned in [0, (max-min)*scaling] to [min, max]
+double GeoHashConverter::convertFromHashScale(unsigned in) const {
+ return convertDoubleFromHashScale((double)in);
+}
+
+// Convert from a double that is [min, max] to a double in [0, (max-min)*scaling]
+double GeoHashConverter::convertToDoubleHashScale(double in) const {
+ verify(in <= _params.max && in >= _params.min);
+
+ if (in == _params.max) {
+ // prevent aliasing with _min by moving inside the "box"
+ // makes 180 == 179.999 (roughly)
+ in -= _error / 2;
+ }
+
+ in -= _params.min;
+ verify(in >= 0);
+ return in * _params.scaling;
+}
+
+// Convert from a double that is [min, max] to an unsigned in [0, (max-min)*scaling]
+unsigned GeoHashConverter::convertToHashScale(double in) const {
+ return static_cast<unsigned>(convertToDoubleHashScale(in));
+}
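A round-trip sketch (editorial) of the two scale conversions above, using the public test hooks and assuming min = -180, max = 180, bits = 26, hence scaling = 2^32 / 360:

    GeoHashConverter::Parameters p;
    p.bits = 26;
    p.min = -180;
    p.max = 180;
    p.scaling = (1024 * 1024 * 1024 * 4.0) / (p.max - p.min);
    GeoHashConverter conv(p);
    // conv.convertToDoubleHashScale(0.0)          ~= 2147483648.0 (2^31, middle of the range)
    // conv.convertDoubleFromHashScale(2147483648.0) ~= 0.0 (back, up to floating-point rounding)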
} // namespace mongo
diff --git a/src/mongo/db/geo/hash.h b/src/mongo/db/geo/hash.h
index b50ac57d3d1..1c5f6d1717a 100644
--- a/src/mongo/db/geo/hash.h
+++ b/src/mongo/db/geo/hash.h
@@ -33,229 +33,240 @@
namespace mongo {
- class GeoHash;
- class Box;
- struct Point;
- std::ostream& operator<<(std::ostream &s, const GeoHash &h);
-
- /* This class maps an unsigned x,y coordinate pair to a hash value.
- * To hash values more interesting than unsigned, use the GeoHashConverter,
- * which maps doubles to unsigned values.
+class GeoHash;
+class Box;
+struct Point;
+std::ostream& operator<<(std::ostream& s, const GeoHash& h);
+
+/* This class maps an unsigned x,y coordinate pair to a hash value.
+ * To hash values more interesting than unsigned, use the GeoHashConverter,
+ * which maps doubles to unsigned values.
+ */
+class GeoHash {
+public:
+ static unsigned int const kMaxBits; // = 32;
+
+ GeoHash();
+ // The strings are binary values of length <= 64,
+ // examples: 1001010100101, 1
+ explicit GeoHash(const std::string& hash);
+ explicit GeoHash(const char* s);
+ // bits is how many bits are used to hash each of x and y.
+ GeoHash(unsigned x, unsigned y, unsigned bits = 32);
+ GeoHash(const GeoHash& old);
+ // hash is a raw hash value. we just copy these into our private fields.
+ GeoHash(long long hash, unsigned bits);
+ // This only works if e is BinData. To get a GeoHash from other BSONElements,
+ // use the converter class.
+ explicit GeoHash(const BSONElement& e, unsigned bits = 32);
+
+ // Convert from the hashed value to unsigned.
+ void unhash(unsigned* x, unsigned* y) const;
+
+ /** Is the 'bit'-th most significant bit set? (NOT the least significant) */
+ static bool isBitSet(unsigned val, unsigned bit);
+
+ /** Return a GeoHash with one bit of precision lost. */
+ GeoHash up() const;
+
+ bool hasPrefix(const GeoHash& other) const;
+
+ std::string toString() const;
+ std::string toStringHex1() const;
+
+ void setBit(unsigned pos, bool value);
+ bool getBit(unsigned pos) const;
+
+ bool getBitX(unsigned pos) const;
+ bool getBitY(unsigned pos) const;
+
+ // XXX: what does this really do?
+ BSONObj wrap(const char* name = "") const;
+
+ // XXX what does this do
+ bool constrains() const;
+ bool canRefine() const;
+
+ // XXX comment better
+ bool atMinX() const;
+ bool atMinY() const;
+
+ // XXX comment better
+ bool atMaxX() const;
+ bool atMaxY() const;
+
+ // XXX: what does this do
+ void move(int x, int y);
+
+ GeoHash& operator=(const GeoHash& h);
+ bool operator==(const GeoHash& h) const;
+ bool operator!=(const GeoHash& h) const;
+ bool operator<(const GeoHash& h) const;
+ // Append the hash in s to our current hash. We expect s to be '0' or '1' or '\0',
+ // though we also treat non-'1' values as '0'.
+ GeoHash& operator+=(const char* s);
+ GeoHash operator+(const char* s) const;
+ GeoHash operator+(const std::string& s) const;
+
+ // Append the minimum range of the hash to the builder provided (inclusive)
+ void appendHashMin(BSONObjBuilder* builder, const char* fieldName) const;
+ // Append the maximum range of the hash to the builder provided (inclusive)
+ void appendHashMax(BSONObjBuilder* builder, const char* fieldName) const;
+
+ long long getHash() const;
+ unsigned getBits() const;
+
+ GeoHash commonPrefix(const GeoHash& other) const;
+
+ // If this is not a leaf cell, set children[0..3] to the four children of
+ // this cell (in traversal order) and return true. Otherwise returns false.
+ bool subdivide(GeoHash children[4]) const;
+ // Return true if the given cell is contained within this one.
+ bool contains(const GeoHash& other) const;
+ // Return the parent at given level.
+ GeoHash parent(unsigned int level) const;
+ GeoHash parent() const;
+
+    // Return the neighbors of the closest vertex to this cell at the given level,
+ // by appending them to "output". Normally there are four neighbors, but
+ // the closest vertex may only have two or one neighbor if it is next to the
+ // boundary.
+ //
+ // Requires: level < this->_bits, so that we can determine which vertex is
+ // closest (in particular, level == kMaxBits is not allowed).
+ void appendVertexNeighbors(unsigned level, std::vector<GeoHash>* output) const;
+
+private:
+ // Create a hash from the provided string. Used by the std::string and char* cons.
+ void initFromString(const char* s);
+ /* Keep the upper _bits*2 bits of _hash, clear the lower bits.
+ * Maybe there's junk in there? XXX Not sure why this is done.
*/
- class GeoHash {
- public:
- static unsigned int const kMaxBits; // = 32;
-
- GeoHash();
- // The strings are binary values of length <= 64,
- // examples: 1001010100101, 1
- explicit GeoHash(const std::string& hash);
- explicit GeoHash(const char *s);
- // bits is how many bits are used to hash each of x and y.
- GeoHash(unsigned x, unsigned y, unsigned bits = 32);
- GeoHash(const GeoHash& old);
- // hash is a raw hash value. we just copy these into our private fields.
- GeoHash(long long hash, unsigned bits);
- // This only works if e is BinData. To get a GeoHash from other BSONElements,
- // use the converter class.
- explicit GeoHash(const BSONElement& e, unsigned bits = 32);
-
- // Convert from the hashed value to unsigned.
- void unhash(unsigned *x, unsigned *y) const;
-
- /** Is the 'bit'-th most significant bit set? (NOT the least significant) */
- static bool isBitSet(unsigned val, unsigned bit);
-
- /** Return a GeoHash with one bit of precision lost. */
- GeoHash up() const;
-
- bool hasPrefix(const GeoHash& other) const;
-
- std::string toString() const;
- std::string toStringHex1() const;
-
- void setBit(unsigned pos, bool value);
- bool getBit(unsigned pos) const;
-
- bool getBitX(unsigned pos) const;
- bool getBitY(unsigned pos) const;
-
- // XXX: what does this really do?
- BSONObj wrap(const char* name = "") const;
-
- // XXX what does this do
- bool constrains() const;
- bool canRefine() const;
-
- // XXX comment better
- bool atMinX() const;
- bool atMinY() const;
-
- // XXX comment better
- bool atMaxX() const;
- bool atMaxY() const;
-
- // XXX: what does this do
- void move(int x, int y);
-
- GeoHash& operator=(const GeoHash& h);
- bool operator==(const GeoHash& h) const;
- bool operator!=(const GeoHash& h) const;
- bool operator<(const GeoHash& h) const;
- // Append the hash in s to our current hash. We expect s to be '0' or '1' or '\0',
- // though we also treat non-'1' values as '0'.
- GeoHash& operator+=(const char* s);
- GeoHash operator+(const char *s) const;
- GeoHash operator+(const std::string& s) const;
-
- // Append the minimum range of the hash to the builder provided (inclusive)
- void appendHashMin(BSONObjBuilder* builder, const char* fieldName) const;
- // Append the maximum range of the hash to the builder provided (inclusive)
- void appendHashMax(BSONObjBuilder* builder, const char* fieldName) const;
-
- long long getHash() const;
- unsigned getBits() const;
-
- GeoHash commonPrefix(const GeoHash& other) const;
-
- // If this is not a leaf cell, set children[0..3] to the four children of
- // this cell (in traversal order) and return true. Otherwise returns false.
- bool subdivide(GeoHash children[4]) const;
- // Return true if the given cell is contained within this one.
- bool contains(const GeoHash& other) const;
- // Return the parent at given level.
- GeoHash parent(unsigned int level) const;
- GeoHash parent() const;
-
- // Return the neighbors of closest vertex to this cell at the given level,
- // by appending them to "output". Normally there are four neighbors, but
- // the closest vertex may only have two or one neighbor if it is next to the
- // boundary.
- //
- // Requires: level < this->_bits, so that we can determine which vertex is
- // closest (in particular, level == kMaxBits is not allowed).
- void appendVertexNeighbors(unsigned level, std::vector<GeoHash>* output) const;
-
- private:
-
- // Create a hash from the provided string. Used by the std::string and char* cons.
- void initFromString(const char *s);
- /* Keep the upper _bits*2 bits of _hash, clear the lower bits.
- * Maybe there's junk in there? XXX Not sure why this is done.
- */
- void clearUnusedBits();
- // XXX: what does this do
- void _move(unsigned offset, int d);
- // XXX: this is nasty and has no example
- void unhash_fast(unsigned *x, unsigned *y) const;
- void unhash_slow(unsigned *x, unsigned *y) const;
-
- long long _hash;
- // Bits per field. Our hash is 64 bits, and we have an X and a Y field,
- // so this is 1 to 32.
- unsigned _bits;
+ void clearUnusedBits();
+ // XXX: what does this do
+ void _move(unsigned offset, int d);
+ // XXX: this is nasty and has no example
+ void unhash_fast(unsigned* x, unsigned* y) const;
+ void unhash_slow(unsigned* x, unsigned* y) const;
+
+ long long _hash;
+ // Bits per field. Our hash is 64 bits, and we have an X and a Y field,
+ // so this is 1 to 32.
+ unsigned _bits;
+};
+
+/* Convert between various types and the GeoHash. We need additional information (scaling etc.)
+ * to convert to/from GeoHash. The additional information doesn't change often and is the same
+ * for all conversions, so we stick all the conversion methods here with their associated
+ * data.
+ */
+class GeoHashConverter {
+public:
+ static double const kMachinePrecision; // = 1.1e-16
+
+ struct Parameters {
+ // How many bits to use for the hash?
+ int bits;
+ // X/Y values must be [min, max]
+ double min;
+ double max;
+ // Values are scaled by this when converted to/from hash scale.
+ double scaling;
};
- /* Convert between various types and the GeoHash. We need additional information (scaling etc.)
- * to convert to/from GeoHash. The additional information doesn't change often and is the same
- * for all conversions, so we stick all the conversion methods here with their associated
- * data.
+ GeoHashConverter(const Parameters& params);
+
+ /**
+ * Returns hashing parameters parsed from a BSONObj
*/
- class GeoHashConverter {
- public:
- static double const kMachinePrecision; // = 1.1e-16
-
- struct Parameters {
- // How many bits to use for the hash?
- int bits;
- // X/Y values must be [min, max]
- double min;
- double max;
- // Values are scaled by this when converted to/from hash scale.
- double scaling;
- };
-
- GeoHashConverter(const Parameters &params);
-
- /**
- * Returns hashing parameters parsed from a BSONObj
- */
- static Status parseParameters(const BSONObj& paramDoc, Parameters* params);
-
- static double calcUnhashToBoxError(const GeoHashConverter::Parameters& params);
-
- /**
- * Return converter parameterss which can be used to
- * construct an copy of this converter.
- */
- const Parameters& getParams() const { return _params; }
-
- int getBits() const { return _params.bits; }
- double getError() const { return _error; }
- double getErrorSphere() const { return _errorSphere ;}
- double getMin() const { return _params.min; }
- double getMax() const { return _params.max; }
-
- double distanceBetweenHashes(const GeoHash& a, const GeoHash& b) const;
-
- /**
- * Hashing functions. Convert the following types to a GeoHash:
- * BSONElement
- * BSONObj
- * Point
- * double, double
- */
- GeoHash hash(const Point &p) const;
- GeoHash hash(const BSONElement& e) const;
- GeoHash hash(const BSONObj& o) const;
- // src is printed out as debugging information. I'm not sure if it's actually
- // somehow the 'source' of o? Anyway, this is nasty, very nasty. XXX
- GeoHash hash(const BSONObj& o, const BSONObj* src) const;
- GeoHash hash(double x, double y) const;
-
- /** Unhashing functions.
- * Convert from a hash to the following types:
- * double, double
- * Point
- * Box
- * BSONObj
- */
- // XXX: these should have consistent naming
- Point unhashToPoint(const GeoHash &h) const;
- Point unhashToPoint(const BSONElement &e) const;
- BSONObj unhashToBSONObj(const GeoHash& h) const;
- void unhash(const GeoHash &h, double *x, double *y) const;
-
- /**
- * Generates bounding box from geohash, expanded by the error bound
- */
- Box unhashToBoxCovering(const GeoHash &h) const;
-
- double sizeOfDiag(const GeoHash& a) const;
-
- // Return the sizeEdge of a cell at a given level.
- double sizeEdge(unsigned level) const;
-
- // Used by test.
- double convertDoubleFromHashScale(double in) const;
- double convertToDoubleHashScale(double in) const;
- private:
-
- void init();
-
- // Convert from an unsigned in [0, (max-min)*scaling] to [min, max]
- double convertFromHashScale(unsigned in) const;
-
- // Convert from a double that is [min, max] to an unsigned in [0, (max-min)*scaling]
- unsigned convertToHashScale(double in) const;
-
- Parameters _params;
- // We compute these based on the _params:
- double _error;
- double _errorSphere;
-
- // Error bound of unhashToBox, see hash_test.cpp for its proof.
- // 8 * max(|max|, |min|) * u
- double _errorUnhashToBox;
- };
+ static Status parseParameters(const BSONObj& paramDoc, Parameters* params);
+
+ static double calcUnhashToBoxError(const GeoHashConverter::Parameters& params);
+
+ /**
+     * Returns converter parameters which can be used to
+     * construct a copy of this converter.
+ */
+ const Parameters& getParams() const {
+ return _params;
+ }
+
+ int getBits() const {
+ return _params.bits;
+ }
+ double getError() const {
+ return _error;
+ }
+ double getErrorSphere() const {
+ return _errorSphere;
+ }
+ double getMin() const {
+ return _params.min;
+ }
+ double getMax() const {
+ return _params.max;
+ }
+
+ double distanceBetweenHashes(const GeoHash& a, const GeoHash& b) const;
+
+ /**
+ * Hashing functions. Convert the following types to a GeoHash:
+ * BSONElement
+ * BSONObj
+ * Point
+ * double, double
+ */
+ GeoHash hash(const Point& p) const;
+ GeoHash hash(const BSONElement& e) const;
+ GeoHash hash(const BSONObj& o) const;
+ // src is printed out as debugging information. I'm not sure if it's actually
+ // somehow the 'source' of o? Anyway, this is nasty, very nasty. XXX
+ GeoHash hash(const BSONObj& o, const BSONObj* src) const;
+ GeoHash hash(double x, double y) const;
+
+ /** Unhashing functions.
+ * Convert from a hash to the following types:
+ * double, double
+ * Point
+ * Box
+ * BSONObj
+ */
+ // XXX: these should have consistent naming
+ Point unhashToPoint(const GeoHash& h) const;
+ Point unhashToPoint(const BSONElement& e) const;
+ BSONObj unhashToBSONObj(const GeoHash& h) const;
+ void unhash(const GeoHash& h, double* x, double* y) const;
+
+ /**
+ * Generates bounding box from geohash, expanded by the error bound
+ */
+ Box unhashToBoxCovering(const GeoHash& h) const;
+
+ double sizeOfDiag(const GeoHash& a) const;
+
+ // Return the sizeEdge of a cell at a given level.
+ double sizeEdge(unsigned level) const;
+
+ // Used by test.
+ double convertDoubleFromHashScale(double in) const;
+ double convertToDoubleHashScale(double in) const;
+
+private:
+ void init();
+
+ // Convert from an unsigned in [0, (max-min)*scaling] to [min, max]
+ double convertFromHashScale(unsigned in) const;
+
+ // Convert from a double that is [min, max] to an unsigned in [0, (max-min)*scaling]
+ unsigned convertToHashScale(double in) const;
+
+ Parameters _params;
+ // We compute these based on the _params:
+ double _error;
+ double _errorSphere;
+
+ // Error bound of unhashToBox, see hash_test.cpp for its proof.
+ // 8 * max(|max|, |min|) * u
+ double _errorUnhashToBox;
+};
} // namespace mongo
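The reformatted hash.h above leaves the GeoHashConverter API unchanged. Below is a minimal,
hypothetical usage sketch (not part of this change): the parameter values are invented and simply
mirror the bits/scaling setup used in the tests that follow.

#include "mongo/db/geo/hash.h"
#include "mongo/db/geo/shapes.h"

namespace mongo {

// Round-trip a point through the converter declared above.
void geoHashConverterSketch() {
    GeoHashConverter::Parameters params;
    params.bits = 32;                              // bits per axis
    params.min = -180.0;                           // hypothetical [min, max] range
    params.max = 180.0;
    double numBuckets = 1024 * 1024 * 1024 * 4.0;  // 2^32 buckets per axis
    params.scaling = numBuckets / (params.max - params.min);

    GeoHashConverter converter(params);

    Point p(12.5, -7.25);
    GeoHash h = converter.hash(p);                 // point to interleaved 64-bit hash
    Point corner = converter.unhashToPoint(h);     // lower-left corner of the cell
    Box cover = converter.unhashToBoxCovering(h);  // cell box expanded by the error bound
    (void)corner;
    (void)cover;
}

}  // namespace mongo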
diff --git a/src/mongo/db/geo/hash_test.cpp b/src/mongo/db/geo/hash_test.cpp
index 6b64e11ebe0..725c0254cc0 100644
--- a/src/mongo/db/geo/hash_test.cpp
+++ b/src/mongo/db/geo/hash_test.cpp
@@ -34,7 +34,7 @@
#include <sstream>
#include <iomanip>
#include <cmath>
-#include <algorithm> // For max()
+#include <algorithm> // For max()
#include "mongo/db/geo/hash.h"
#include "mongo/db/geo/shapes.h"
@@ -48,416 +48,418 @@ using std::string;
using std::stringstream;
namespace {
- TEST(GeoHash, MakeZeroHash) {
- unsigned x = 0, y = 0;
- GeoHash hash(x, y);
- }
+TEST(GeoHash, MakeZeroHash) {
+ unsigned x = 0, y = 0;
+ GeoHash hash(x, y);
+}
- static string makeRandomBitString(int length) {
- stringstream ss;
- mongo::PseudoRandom random(31337);
- for (int i = 0; i < length; ++i) {
- if (random.nextInt32() & 1) {
- ss << "1";
- } else {
- ss << "0";
- }
+static string makeRandomBitString(int length) {
+ stringstream ss;
+ mongo::PseudoRandom random(31337);
+ for (int i = 0; i < length; ++i) {
+ if (random.nextInt32() & 1) {
+ ss << "1";
+ } else {
+ ss << "0";
}
- return ss.str();
}
+ return ss.str();
+}
- TEST(GeoHash, MakeRandomValidHashes) {
- int maxStringLength = 64;
- for (int i = 0; i < maxStringLength; i += 2) {
- string a = makeRandomBitString(i);
- GeoHash hashA = GeoHash(a);
- (void)hashA.isBitSet(i, 0);
- (void)hashA.isBitSet(i, 1);
- }
+TEST(GeoHash, MakeRandomValidHashes) {
+ int maxStringLength = 64;
+ for (int i = 0; i < maxStringLength; i += 2) {
+ string a = makeRandomBitString(i);
+ GeoHash hashA = GeoHash(a);
+ (void)hashA.isBitSet(i, 0);
+ (void)hashA.isBitSet(i, 1);
}
+}
- // ASSERT_THROWS does not work if we try to put GeoHash(a) in the macro.
- static GeoHash makeHash(const string& a) { return GeoHash(a); }
+// ASSERT_THROWS does not work if we try to put GeoHash(a) in the macro.
+static GeoHash makeHash(const string& a) {
+ return GeoHash(a);
+}
- TEST(GeoHash, MakeTooLongHash) {
- string a = makeRandomBitString(100);
- ASSERT_THROWS(makeHash(a), mongo::UserException);
- }
+TEST(GeoHash, MakeTooLongHash) {
+ string a = makeRandomBitString(100);
+ ASSERT_THROWS(makeHash(a), mongo::UserException);
+}
- TEST(GeoHash, MakeOddHash) {
- string a = makeRandomBitString(13);
- ASSERT_THROWS(makeHash(a), mongo::UserException);
- }
+TEST(GeoHash, MakeOddHash) {
+ string a = makeRandomBitString(13);
+ ASSERT_THROWS(makeHash(a), mongo::UserException);
+}
+
+TEST(GeoHashConvertor, EdgeLength) {
+ const double kError = 10E-15;
+ GeoHashConverter::Parameters params;
+ params.max = 200.0;
+ params.min = 100.0;
+ params.bits = 32;
+ double numBuckets = (1024 * 1024 * 1024 * 4.0);
+ params.scaling = numBuckets / (params.max - params.min);
- TEST(GeoHashConvertor, EdgeLength) {
- const double kError = 10E-15;
- GeoHashConverter::Parameters params;
- params.max = 200.0;
- params.min = 100.0;
+ GeoHashConverter converter(params);
+
+ ASSERT_APPROX_EQUAL(100.0, converter.sizeEdge(0), kError);
+ ASSERT_APPROX_EQUAL(50.0, converter.sizeEdge(1), kError);
+ ASSERT_APPROX_EQUAL(25.0, converter.sizeEdge(2), kError);
+}
+
+/**
+ * ==========================
+ * Error Bound of UnhashToBox
+ * ==========================
+ *
+ * Compute the absolute error when unhashing a GeoHash to a box, so that expanding
+ * the box by this absolute error can guarantee a point is always contained by the box
+ * of its GeoHash. Thus, the absolute error of box should consist of 3 components:
+ *
+ * 1) The error introduced by hashing x to GeoHash. The extreme example is a point
+ *    close to the boundary of a cell that is hashed to an adjacent box.
+ *
+ *    For hash/unhash functions h(x)/uh(x) and computed functions h'(x), uh'(x):
+ *
+ * x uh(h'(x))
+ * |--------|----|--------------------> min-max scale
+ * min \
+ * \
+ * \
+ * \
+ * |--------|--|-|--------------------> hash scale for cells c
+ * 0 h(x) c h'(x)
+ *
+ * 2) The error introduced by unhashing an (int) GeoHash to its lower left corner in x-y
+ * space.
+ *
+ * uh(c)
+ * x | uh'(c)
+ * |--------|--|----|-----------------> min-max scale
+ * min \ /
+ * \ /
+ * \ /
+ * X
+ * |--------|--|-|--------------------> hash scale for cells c
+ * 0 h(x) c h'(x)
+ *
+ * 3) The error introduced by adding the edge length to get the top-right corner of box.
+ * Instead of directly computing uh'(c+1), we add the computed box edge length to the computed
+ * value uh(c), giving us an extra error.
+ *
+ * |edge(min,max)|
+ * | |
+ * | uh(c)+edge
+ * uh(c) |
+ * |-------------|------[uh(c)+edge']-----------> min-max scale
+ * min
+ *
+ * |-------------|-------------|----------------> hash scale
+ * 0 c c+1
+ * Hash and unhash definitions
+ * -------------------------
+ * h(x) = (x - min) * scaling = 2^32 * (x - min) / (max - min)
+ * uh(h) = h / scaling + min,
+ * where
+ * scaling = 2^32 / (max - min)
+ *
+ * Again, h(x)/uh(x) are the exact hash functions and h'(x)/uh'(x) are the computational hash
+ * functions which have small rounding errors.
+ *
+ * | h'(x) - h(x) | == | delta_h(x; max, min) |
+ * where delta_fn = the absolute difference between the computed and actual value of a
+ * function.
+ *
+ * Restating the problem, we're looking for:
+ * |delta_box| = | delta_x_{h'(x)=H} + delta_uh(h) + delta_edge_length |
+ * <= | delta_x_{h'(x)=H} | + | delta_uh(h) | + | delta_edge_length |
+ *
+ * 1. Error bounds calculation
+ * ---------------------------
+ *
+ * 1.1 Error: | delta_x_{h'(x)=H} |
+ * --------------------------------
+ * The first error | delta_x_{h'(x)=H} | means, given GeoHash H, we can find
+ * the range of x and only the range of x that may be mapped to H.
+ * In other words, given H, for any x that is far enough from uh(H) by at least d,
+ * it is impossible for x to be mapped to H.
+ * Mathematically, find d such that for any x satisfying |x - uh(H)| > d,
+ * |h(x) - H| >= | delta_h(x) |
+ * => |h(x) - H| - | delta_h(x) | >= 0
+ * => |h(x) - H + delta_h(x) | >= 0 (|a + b| >= |a| - |b|)
+ * => |h'(x) - H| >= 0 (h'(x) = h(x) + delta_h(x))
+ * which guarantees h'(x) != H.
+ *
+ *
+ * uh(H)-d
+ * |
+ * x | uh(H)
+ * |--------|---[----|----]-----------> min-max scale
+ * min / \ \ /
+ * / \ \ /
+ * / \ \ /
+ * / \ \ /
+ * |---[----|--|-]---|----------------> hash scale for cells c
+ * 0 h(x) | H
+ * h'(x)
+ * =h(x)+delta_h(x)
+ *
+ *
+ * Let's consider one case of the above inequality. We need to find the d,
+ * such that, when
+ * x < uh(H) - d, (1)
+ * we have
+ * h(x) + |delta_h(x)| <= H. (2)
+ *
+ * Due to the monotonicity of h(x), applying h(x) to both sides of inequality (1),
+ * we have
+ * h(x) < h(uh(H) - d) <= H - |delta_h(x)| (from (2))
+ *
+ * By solving it, we have
+ * d = |delta_h(x)| / scaling
+ * <= 2Mu * (1 + |x-min|/|max-min|) (see calculation for |delta_h(x)| below)
+ * <= 4Mu
+ *
+ * | delta_x_{h'(x)=H} | <= d <= 4Mu
+ * A similar calculation applies to the other side of the above inequality.
+ *
+ * 1.2 Error of h(x)
+ * -----------------
+ *
+ * Rules of error propagation
+ * --------------------------
+ * Absolute error of x is |delta_x|
+ * Relative error of x is epsilon_x = |delta_x| / |x|
+ * For any double number x, the relative error of x is bounded by "u". We assume all inputs
+ * have this error to keep the deduction clear.
+ * epsilon_x <= u = 0.5 * unit of least precision (ULP) ~= 1.1 * 10^-16
+ *
+ * |delta_(x + y)| <= |delta_x| + |delta_y|
+ * |delta_(x - y)| <= |delta_x| + |delta_y|
+ * epsilon_(x * y) <= epsilon_x + epsilon_y
+ * epsilon_(x / y) <= epsilon_x + epsilon_y
+ *
+ * For a given min, max scale, the maximum delta in a computation is bounded by the maximum
+ * value in the scale times u, i.e. M * u = max(|max|, |min|) * u.
+ *
+ * For the hash function h(x)
+ * --------------------------
+ *
+ * epsilon_h(x) = epsilon_(x-min) + epsilon_scaling
+ *
+ * epsilon_(x-min) = (|delta_x| + |delta_min|) / |x - min|
+ * <= 2Mu / |x - min|
+ *
+ * epsilon_scaling = epsilon_(2^32) + epsilon_(max - min)
+ * = 0 + epsilon_(max - min)
+ * <= 2Mu / |max - min|
+ *
+ * Hence, epsilon_h(x) <= 2Mu * (1/|x - min| + 1/|max - min|)
+ *
+ * |delta_h(x)| = 2Mu * (1 + |x-min|/|max-min|) * 2^32 / |max - min|
+ * <= 4Mu * 2^32 / |max-min|
+ *
+ * 2. Error: unhashing GeoHash to point
+ * ------------------------------------
+ * Similarly, we can calculate the error for the uh(h) function, assuming h is exactly
+ * represented in the form of a GeoHash, since integers are represented exactly.
+ *
+ * |delta_uh(h)| = epsilon_(h/scaling) * |h/scaling| + delta_min
+ * = epsilon_(scaling) * |h/scaling| + delta_min
+ * <= 2Mu / |max-min| * |max-min| + |min| * u
+ * <= 3Mu
+ *
+ * Thus, the second error |delta_uh(h)| <= 3Mu
+ * In total, the absolute error we need to add when unhashing to a point is <= 4Mu + 3Mu = 7Mu
+ *
+ * 3. Error: edge length
+ * ---------------------
+ * The third part is easy to compute, since ldexp() doesn't introduce extra
+ * relative error.
+ *
+ * edge_length = ldexp(max - min, -level)
+ *
+ * epsilon_edge = epsilon_(max - min) <= 2 * M * u / |max - min|
+ *
+ * | delta_edge | = epsilon_edge * (max - min) * 2^(-level)
+ * = 2Mu * 2^(-level) <= Mu (level >= 1)
+ *
+ * This error is negligible when level >> 0.
+ *
+ * In conclusion, | delta_box | <= 8Mu
+ *
+ *
+ * Test
+ * ====
+ * The first two component errors can be simulated by uh'(h'(x)).
+ * Let h = h'(x)
+ * |delta_(uh'(h'(x)))|
+ * = epsilon_(h/scaling) * |h/scaling| + delta_min
+ * = (epsilon_(h) + epsilon_(scaling)) * |h/scaling| + delta_min
+ * = epsilon_(h) * h/scaling + epsilon_(scaling) * |h/scaling| + delta_min
+ * = |delta_h|/scaling + |delta_uh(h)|
+ * ~= |delta_box| when level = 32
+ *
+ * Another way to think about it is that the error of uh'(h'(x)) also consists of
+ * the same two components that constitute the error of unhashing to a point,
+ * by substituting c with h'(x).
+ *
+ * | delta_(uh'(h'(x))) | = | x - uh'(h(x)) |
+ *
+ * uh(h'(x))
+ * |
+ * x | uh'(h(x))
+ * |--------|---|---|----------------> min-max scale
+ * min \ /
+ * \ /
+ * \ /
+ * |--------|---|--------------------> hash scale for cells c
+ * 0 h(x) h'(x)
+ *
+ *
+ * We can get the maximum of the error by making max very large, min = -max, and x -> max
+ */
+TEST(GeoHashConverter, UnhashToBoxError) {
+ GeoHashConverter::Parameters params;
+ // Test max from 2^-20 to 2^20
+ for (int times = -20; times <= 20; times += 2) {
+ // Construct parameters
+ params.max = ldexp(1 + 0.01 * times, times);
+ params.min = -params.max;
params.bits = 32;
double numBuckets = (1024 * 1024 * 1024 * 4.0);
params.scaling = numBuckets / (params.max - params.min);
GeoHashConverter converter(params);
+ // Assume level == 32, so we ignore the error of edge length here.
+ double delta_box = 7.0 / 8.0 * GeoHashConverter::calcUnhashToBoxError(params);
+ double cellEdge = 1 / params.scaling;
+ double x;
- ASSERT_APPROX_EQUAL(100.0, converter.sizeEdge(0), kError);
- ASSERT_APPROX_EQUAL(50.0, converter.sizeEdge(1), kError);
- ASSERT_APPROX_EQUAL(25.0, converter.sizeEdge(2), kError);
- }
-
- /**
- * ==========================
- * Error Bound of UnhashToBox
- * ==========================
- *
- * Compute the absolute error when unhashing a GeoHash to a box, so that expanding
- * the box by this absolute error can guarantee a point is always contained by the box
- * of its GeoHash. Thus, the absolute error of box should consist of 3 components:
- *
- * 1) The error introduced by hashing x to GeoHash. The extreme example would be a point
- * close to the boundary of a cell is hashed to an adjacent box.
- *
- * For a hash/unhash functions h(x)/uh(x) and computed functions h'(x),uh'(x):
- *
- * x uh(h'(x))
- * |--------|----|--------------------> min-max scale
- * min \
- * \
- * \
- * \
- * |--------|--|-|--------------------> hash scale for cells c
- * 0 h(x) c h'(x)
- *
- * 2) The error introduced by unhashing an (int) GeoHash to its lower left corner in x-y
- * space.
- *
- * uh(c)
- * x | uh'(c)
- * |--------|--|----|-----------------> min-max scale
- * min \ /
- * \ /
- * \ /
- * X
- * |--------|--|-|--------------------> hash scale for cells c
- * 0 h(x) c h'(x)
- *
- * 3) The error introduced by adding the edge length to get the top-right corner of box.
- * Instead of directly computing uh'(c+1), we add the computed box edge length to the computed
- * value uh(c), giving us an extra error.
- *
- * |edge(min,max)|
- * | |
- * | uh(c)+edge
- * uh(c) |
- * |-------------|------[uh(c)+edge']-----------> min-max scale
- * min
- *
- * |-------------|-------------|----------------> hash scale
- * 0 c c+1
- * Hash and unhash definitions
- * -------------------------
- * h(x) = (x - min) * scaling = 2^32 * (x - min) / (max - min)
- * uh(h) = h / scaling + min,
- * where
- * scaling = 2^32 / (max - min)
- *
- * Again, h(x)/uh(x) are the exact hash functions and h'(x)/uh'(x) are the computational hash
- * functions which have small rounding errors.
- *
- * | h'(x) - h(x) | == | delta_h(x; max, min) |
- * where delta_fn = the absolute difference between the computed and actual value of a
- * function.
- *
- * Restating the problem, we're looking for:
- * |delta_box| = | delta_x_{h'(x)=H} + delta_uh(h) + delta_edge_length |
- * <= | delta_x_{h'(x)=H} | + | delta_uh(h) | + | delta_edge_length |
- *
- * 1. Error bounds calculation
- * ---------------------------
- *
- * 1.1 Error: | delta_x_{h'(x)=H} |
- * --------------------------------
- * The first error | delta_x_{h'(x)=H} | means, given GeoHash H, we can find
- * the range of x and only the range of x that may be mapped to H.
- * In other words, given H, for any x that is far enough from uh(H) by at least d,
- * it is impossible for x to be mapped to H.
- * Mathematical, find d, such that for any x satisfying |x - uh(H)| > d,
- * |h(x) - H| >= | delta_h(x) |
- * => |h(x) - H| - | delta_h(x) | >= 0
- * => |h(x) - H + delta_h(x) | >= 0 (|a + b| >= |a| - |b|)
- * => |h'(x) - H| >= 0 (h'(x) = h(x) + delta_h(x))
- * which guarantees h'(x) != H.
- *
- *
- * uh(H)-d
- * |
- * x | uh(H)
- * |--------|---[----|----]-----------> min-max scale
- * min / \ \ /
- * / \ \ /
- * / \ \ /
- * / \ \ /
- * |---[----|--|-]---|----------------> hash scale for cells c
- * 0 h(x) | H
- * h'(x)
- * =h(x)+delta_h(x)
- *
- *
- * Let's consider one case of the above inequality. We need to find the d,
- * such that, when
- * x < uh(H) - d, (1)
- * we have
- * h(x) + |delta_h(x)| <= H. (2)
- *
- * Due to the monotonicity of h(x), apply h(x) to both side of inequality (1),
- * we have
- * h(x) < h(uh(H) - d) <= H - |delta_h(x)| (from (2))
- *
- * By solving it, we have
- * d = |delta_h(x)| / scaling
- * <= 2Mu * (1 + |x-min|/|max-min|) (see calculation for |delta_h(x)| below)
- * <= 4Mu
- *
- * | delta_x_{h'(x)=H} | <= d <= 4Mu
- * The similar calculation applies for the other side of the above inequality.
- *
- * 1.2 Error of h(x)
- * -----------------
- *
- * Rules of error propagation
- * --------------------------
- * Absolute error of x is |delta_x|
- * Relative error of x is epsilon_x = |delta_x| / |x|
- * For any double number x, the relative error of x is bounded by "u". We assume all inputs
- * have this error to make deduction clear.
- * epsilon_x <= u = 0.5 * unit of least precision(ULP) ~= 1.1 * 10E-16
- *
- * |delta_(x + y)| <= |delta_x| + |delta_y|
- * |delta_(x - y)| <= |delta_x| + |delta_y|
- * epsilon_(x * y) <= epsilon_x + epsilon_y
- * epsilon_(x / y) <= epsilon_x + epsilon_y
- *
- * For a given min, max scale, the maximum delta in a computation is bounded by the maximum
- * value in the scale - M * u = max(|max|, |min|) * u.
- *
- * For the hash function h(x)
- * --------------------------
- *
- * epsilon_h(x) = epsilon_(x-min) + epsilon_scaling
- *
- * epsilon_(x-min) = (|delta_x| + |delta_min|) / |x - min|
- * <= 2Mu / |x - min|
- *
- * epsilon_scaling = epsilon_(2^32) + epsilon_(max - min)
- * = 0 + epsilon_(max - min)
- * <= 2Mu / |max - min|
- *
- * Hence, epsilon_h(x) <= 2Mu * (1/|x - min| + 1/|max - min|)
- *
- * |delta_h(x)| = 2Mu * (1 + |x-min|/|max-min|) * 2^32 / |max - min|
- * <= 4Mu * 2^32 / |max-min|
- *
- * 2. Error: unhashing GeoHash to point
- * ------------------------------------
- * Similarly, we can calculate the error for uh(h) function, assuming h is exactly
- * represented in form of GeoHash, since integer is represented exactly.
- *
- * |delta_uh(h)| = epsilon_(h/scaling) * |h/scaling| + delta_min
- * = epsilon_(scaling) * |h/scaling| + delta_min
- * <= 2Mu / |max-min| * |max-min| + |min| * u
- * <= 3Mu
- *
- * Thus, the second error |delta_uh(h)| <= 3Mu
- * Totally, the absolute error we need to add to unhashing to a point <= 4Mu + 3Mu = 7Mu
- *
- * 3. Error: edge length
- * ---------------------
- * The third part is easy to compute, since ldexp() doesn't introduce extra
- * relative error.
- *
- * edge_length = ldexp(max - min, -level)
- *
- * epsilon_edge = epsilon_(max - min) <= 2 * M * u / |max - min|
- *
- * | delta_edge | = epsilon_edge * (max - min) * 2^(-level)
- * = 2Mu * 2^(-level) <= Mu (level >= 1)
- *
- * This error is neglectable when level >> 0.
- *
- * In conclusion, | delta_box | <= 8Mu
- *
- *
- * Test
- * ====
- * This first two component errors can be simulated by uh'(h'(x)).
- * Let h = h'(x)
- * |delta_(uh'(h'(x)))|
- * = epsilon_(h/scaling) * |h/scaling| + delta_min
- * = (epsilon_(h) + epsilon_(scaling)) * |h/scaling| + delta_min
- * = epsilon_(h) * h/scaling + epsilon_(scaling) * |h/scaling| + delta_min
- * = |delta_h|/scaling + |delta_uh(h)|
- * ~= |delta_box| when level = 32
- *
- * Another way to think about it is the error of uh'(h'(x)) also consists of
- * the same two components that constitute the error of unhashing to a point,
- * by substituting c with h'(x).
- *
- * | delta_(uh'(h'(x))) | = | x - uh'(h(x)) |
- *
- * uh(h'(x))
- * |
- * x | uh'(h(x))
- * |--------|---|---|----------------> min-max scale
- * min \ /
- * \ /
- * \ /
- * |--------|---|--------------------> hash scale for cells c
- * 0 h(x) h'(x)
- *
- *
- * We can get the maximum of the error by making max very large and min = -min, x -> max
- */
- TEST(GeoHashConverter, UnhashToBoxError) {
- GeoHashConverter::Parameters params;
- // Test max from 2^-20 to 2^20
- for (int times = -20; times <= 20; times += 2) {
- // Construct parameters
- params.max = ldexp(1 + 0.01 * times, times);
- params.min = -params.max;
- params.bits = 32;
- double numBuckets = (1024 * 1024 * 1024 * 4.0);
- params.scaling = numBuckets / (params.max - params.min);
-
- GeoHashConverter converter(params);
- // Assume level == 32, so we ignore the error of edge length here.
- double delta_box = 7.0 / 8.0 * GeoHashConverter::calcUnhashToBoxError(params);
- double cellEdge = 1 / params.scaling;
- double x;
-
- // We are not able to test all the FP numbers to verify the error bound by design,
- // so we consider the numbers in the cell near the point we are interested in.
- //
- // FP numbers starting at max, working downward in minimal increments
- x = params.max;
- while (x > params.max - cellEdge) {
- x = nextafter(x, params.min);
- double x_prime = converter.convertDoubleFromHashScale(
- converter.convertToDoubleHashScale(x));
- double delta = fabs(x - x_prime);
- ASSERT_LESS_THAN(delta, delta_box);
- }
+ // We are not able to test all the FP numbers to verify the error bound by design,
+ // so we consider the numbers in the cell near the point we are interested in.
+ //
+ // FP numbers starting at max, working downward in minimal increments
+ x = params.max;
+ while (x > params.max - cellEdge) {
+ x = nextafter(x, params.min);
+ double x_prime =
+ converter.convertDoubleFromHashScale(converter.convertToDoubleHashScale(x));
+ double delta = fabs(x - x_prime);
+ ASSERT_LESS_THAN(delta, delta_box);
+ }
- // FP numbers starting between first and second cell, working downward to min
- x = params.min + cellEdge;
- while (x > params.min) {
- x = nextafter(x, params.min);
- double x_prime = converter.convertDoubleFromHashScale(
- converter.convertToDoubleHashScale(x));
- double delta = fabs(x - x_prime);
- ASSERT_LESS_THAN(delta, delta_box);
- }
+ // FP numbers starting between first and second cell, working downward to min
+ x = params.min + cellEdge;
+ while (x > params.min) {
+ x = nextafter(x, params.min);
+ double x_prime =
+ converter.convertDoubleFromHashScale(converter.convertToDoubleHashScale(x));
+ double delta = fabs(x - x_prime);
+ ASSERT_LESS_THAN(delta, delta_box);
}
}
+}
- // SERVER-15576 Verify a point is contained by its GeoHash box.
- TEST(GeoHashConverter, GeoHashBox) {
- GeoHashConverter::Parameters params;
- params.max = 100000000.3;
- params.min = -params.max;
- params.bits = 32;
- double numBuckets = (1024 * 1024 * 1024 * 4.0);
- params.scaling = numBuckets / (params.max - params.min);
+// SERVER-15576 Verify a point is contained by its GeoHash box.
+TEST(GeoHashConverter, GeoHashBox) {
+ GeoHashConverter::Parameters params;
+ params.max = 100000000.3;
+ params.min = -params.max;
+ params.bits = 32;
+ double numBuckets = (1024 * 1024 * 1024 * 4.0);
+ params.scaling = numBuckets / (params.max - params.min);
- GeoHashConverter converter(params);
+ GeoHashConverter converter(params);
- // Without expanding the box, the following point is not contained by its GeoHash box.
- mongo::Point p(-7201198.6497758823, -0.1);
- mongo::GeoHash hash = converter.hash(p);
- mongo::Box box = converter.unhashToBoxCovering(hash);
- ASSERT(box.inside(p));
- }
+ // Without expanding the box, the following point is not contained by its GeoHash box.
+ mongo::Point p(-7201198.6497758823, -0.1);
+ mongo::GeoHash hash = converter.hash(p);
+ mongo::Box box = converter.unhashToBoxCovering(hash);
+ ASSERT(box.inside(p));
+}
- TEST(GeoHash, NeighborsBasic) {
- vector<GeoHash> neighbors;
+TEST(GeoHash, NeighborsBasic) {
+ vector<GeoHash> neighbors;
- // Top level
- GeoHash hashAtLevel3("100001");
- hashAtLevel3.appendVertexNeighbors(0u, &neighbors);
- ASSERT_EQUALS(neighbors.size(), (size_t)1);
- ASSERT_EQUALS(neighbors.front(), GeoHash(""));
+ // Top level
+ GeoHash hashAtLevel3("100001");
+ hashAtLevel3.appendVertexNeighbors(0u, &neighbors);
+ ASSERT_EQUALS(neighbors.size(), (size_t)1);
+ ASSERT_EQUALS(neighbors.front(), GeoHash(""));
- // Level 1
- neighbors.clear();
- hashAtLevel3.appendVertexNeighbors(1u, &neighbors);
- ASSERT_EQUALS(neighbors.size(), (size_t)2);
- std::sort(neighbors.begin(), neighbors.end());
- ASSERT_EQUALS(neighbors[0], GeoHash("00"));
- ASSERT_EQUALS(neighbors[1], GeoHash("10"));
+ // Level 1
+ neighbors.clear();
+ hashAtLevel3.appendVertexNeighbors(1u, &neighbors);
+ ASSERT_EQUALS(neighbors.size(), (size_t)2);
+ std::sort(neighbors.begin(), neighbors.end());
+ ASSERT_EQUALS(neighbors[0], GeoHash("00"));
+ ASSERT_EQUALS(neighbors[1], GeoHash("10"));
- // Level 2
- neighbors.clear();
- hashAtLevel3.appendVertexNeighbors(2u, &neighbors);
- ASSERT_EQUALS(neighbors.size(), (size_t)4);
- std::sort(neighbors.begin(), neighbors.end());
- ASSERT_EQUALS(neighbors[0], GeoHash("0010"));
- ASSERT_EQUALS(neighbors[1], GeoHash("0011"));
- ASSERT_EQUALS(neighbors[2], GeoHash("1000"));
- ASSERT_EQUALS(neighbors[3], GeoHash("1001"));
- }
+ // Level 2
+ neighbors.clear();
+ hashAtLevel3.appendVertexNeighbors(2u, &neighbors);
+ ASSERT_EQUALS(neighbors.size(), (size_t)4);
+ std::sort(neighbors.begin(), neighbors.end());
+ ASSERT_EQUALS(neighbors[0], GeoHash("0010"));
+ ASSERT_EQUALS(neighbors[1], GeoHash("0011"));
+ ASSERT_EQUALS(neighbors[2], GeoHash("1000"));
+ ASSERT_EQUALS(neighbors[3], GeoHash("1001"));
+}
- TEST(GeoHash, NeighborsAtFinestLevel) {
- std::vector<GeoHash> neighbors;
+TEST(GeoHash, NeighborsAtFinestLevel) {
+ std::vector<GeoHash> neighbors;
- std::string zeroBase = "00000000000000000000000000000000000000000000000000000000";
- // At finest level
- GeoHash cellHash(zeroBase + "00011110");
- neighbors.clear();
- cellHash.appendVertexNeighbors(31u, &neighbors);
- ASSERT_EQUALS(neighbors.size(), (size_t)4);
- std::sort(neighbors.begin(), neighbors.end());
- ASSERT_EQUALS(neighbors[0], GeoHash(zeroBase + "000110"));
- ASSERT_EQUALS(neighbors[1], GeoHash(zeroBase + "000111"));
- ASSERT_EQUALS(neighbors[2], GeoHash(zeroBase + "001100"));
- ASSERT_EQUALS(neighbors[3], GeoHash(zeroBase + "001101"));
+ std::string zeroBase = "00000000000000000000000000000000000000000000000000000000";
+ // At finest level
+ GeoHash cellHash(zeroBase + "00011110");
+ neighbors.clear();
+ cellHash.appendVertexNeighbors(31u, &neighbors);
+ ASSERT_EQUALS(neighbors.size(), (size_t)4);
+ std::sort(neighbors.begin(), neighbors.end());
+ ASSERT_EQUALS(neighbors[0], GeoHash(zeroBase + "000110"));
+ ASSERT_EQUALS(neighbors[1], GeoHash(zeroBase + "000111"));
+ ASSERT_EQUALS(neighbors[2], GeoHash(zeroBase + "001100"));
+ ASSERT_EQUALS(neighbors[3], GeoHash(zeroBase + "001101"));
- // Level 30
- neighbors.clear();
- cellHash.appendVertexNeighbors(30u, &neighbors);
- ASSERT_EQUALS(neighbors.size(), (size_t)4);
- std::sort(neighbors.begin(), neighbors.end());
- ASSERT_EQUALS(neighbors[0], GeoHash(zeroBase + "0001"));
- ASSERT_EQUALS(neighbors[1], GeoHash(zeroBase + "0011"));
- ASSERT_EQUALS(neighbors[2], GeoHash(zeroBase + "0100"));
- ASSERT_EQUALS(neighbors[3], GeoHash(zeroBase + "0110"));
+ // Level 30
+ neighbors.clear();
+ cellHash.appendVertexNeighbors(30u, &neighbors);
+ ASSERT_EQUALS(neighbors.size(), (size_t)4);
+ std::sort(neighbors.begin(), neighbors.end());
+ ASSERT_EQUALS(neighbors[0], GeoHash(zeroBase + "0001"));
+ ASSERT_EQUALS(neighbors[1], GeoHash(zeroBase + "0011"));
+ ASSERT_EQUALS(neighbors[2], GeoHash(zeroBase + "0100"));
+ ASSERT_EQUALS(neighbors[3], GeoHash(zeroBase + "0110"));
- // Level 29, only two neighbors including the parent.
- // ^
- // |
- // +-+
- // +-+
- // +-+-------> x
- neighbors.clear();
- cellHash.appendVertexNeighbors(29u, &neighbors);
- ASSERT_EQUALS(neighbors.size(), (size_t)2);
- std::sort(neighbors.begin(), neighbors.end());
- ASSERT_EQUALS(neighbors[0], GeoHash(zeroBase + "00"));
- ASSERT_EQUALS(neighbors[1], GeoHash(zeroBase + "01"));
+ // Level 29, only two neighbors including the parent.
+ // ^
+ // |
+ // +-+
+ // +-+
+ // +-+-------> x
+ neighbors.clear();
+ cellHash.appendVertexNeighbors(29u, &neighbors);
+ ASSERT_EQUALS(neighbors.size(), (size_t)2);
+ std::sort(neighbors.begin(), neighbors.end());
+ ASSERT_EQUALS(neighbors[0], GeoHash(zeroBase + "00"));
+ ASSERT_EQUALS(neighbors[1], GeoHash(zeroBase + "01"));
- // Level 28, only one neighbor (the parent) at the left bottom corner.
- // ^
- // |
- // +---+
- // | |
- // +---+-----> x
- neighbors.clear();
- cellHash.appendVertexNeighbors(28u, &neighbors);
- ASSERT_EQUALS(neighbors.size(), (size_t)1);
- ASSERT_EQUALS(neighbors[0], GeoHash(zeroBase));
+ // Level 28, only one neighbor (the parent) at the left bottom corner.
+ // ^
+ // |
+ // +---+
+ // | |
+ // +---+-----> x
+ neighbors.clear();
+ cellHash.appendVertexNeighbors(28u, &neighbors);
+ ASSERT_EQUALS(neighbors.size(), (size_t)1);
+ ASSERT_EQUALS(neighbors[0], GeoHash(zeroBase));
- // Level 1
- neighbors.clear();
- cellHash.appendVertexNeighbors(1u, &neighbors);
- ASSERT_EQUALS(neighbors.size(), (size_t)1);
- ASSERT_EQUALS(neighbors[0], GeoHash("00"));
- }
+ // Level 1
+ neighbors.clear();
+ cellHash.appendVertexNeighbors(1u, &neighbors);
+ ASSERT_EQUALS(neighbors.size(), (size_t)1);
+ ASSERT_EQUALS(neighbors[0], GeoHash("00"));
+}
}
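The derivation above concludes |delta_box| <= 8Mu, the bound that
GeoHashConverter::calcUnhashToBoxError encodes (see the _errorUnhashToBox comment in hash.h).
As a standalone illustration of the arithmetic only, not the library function itself:

#include <algorithm>
#include <cmath>

// |delta_box| <= 8 * M * u, with M = max(|max|, |min|) and u ~= 1.1e-16 (kMachinePrecision).
double unhashToBoxErrorSketch(double min, double max) {
    const double u = 1.1e-16;  // roughly 0.5 ULP for IEEE doubles
    double M = std::max(std::fabs(min), std::fabs(max));
    return 8.0 * M * u;        // expand each side of the unhashed box by this amount
}

For the GeoHashBox test above (max = 100000000.3), this bound is roughly 8.8e-8, which is the
kind of margin that keeps the SERVER-15576 point inside its covering box.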
diff --git a/src/mongo/db/geo/haystack.cpp b/src/mongo/db/geo/haystack.cpp
index 531595d8719..4c294aba9e9 100644
--- a/src/mongo/db/geo/haystack.cpp
+++ b/src/mongo/db/geo/haystack.cpp
@@ -54,71 +54,82 @@
*/
namespace mongo {
- using std::string;
- using std::vector;
-
- class GeoHaystackSearchCommand : public Command {
- public:
- GeoHaystackSearchCommand() : Command("geoSearch") {}
-
- virtual bool isWriteCommandForConfigServer() const { return false; }
- bool slaveOk() const { return true; }
- bool slaveOverrideOk() const { return true; }
-
- virtual void addRequiredPrivileges(const std::string& dbname,
- const BSONObj& cmdObj,
- std::vector<Privilege>* out) {
- ActionSet actions;
- actions.addAction(ActionType::find);
- out->push_back(Privilege(parseResourcePattern(dbname, cmdObj), actions));
+using std::string;
+using std::vector;
+
+class GeoHaystackSearchCommand : public Command {
+public:
+ GeoHaystackSearchCommand() : Command("geoSearch") {}
+
+ virtual bool isWriteCommandForConfigServer() const {
+ return false;
+ }
+ bool slaveOk() const {
+ return true;
+ }
+ bool slaveOverrideOk() const {
+ return true;
+ }
+
+ virtual void addRequiredPrivileges(const std::string& dbname,
+ const BSONObj& cmdObj,
+ std::vector<Privilege>* out) {
+ ActionSet actions;
+ actions.addAction(ActionType::find);
+ out->push_back(Privilege(parseResourcePattern(dbname, cmdObj), actions));
+ }
+
+ bool run(OperationContext* txn,
+ const string& dbname,
+ BSONObj& cmdObj,
+ int,
+ string& errmsg,
+ BSONObjBuilder& result) {
+ const std::string ns = parseNsCollectionRequired(dbname, cmdObj);
+
+ AutoGetCollectionForRead ctx(txn, ns);
+
+ Collection* collection = ctx.getCollection();
+ if (!collection) {
+ errmsg = "can't find ns";
+ return false;
}
- bool run(OperationContext* txn,
- const string& dbname,
- BSONObj& cmdObj,
- int,
- string& errmsg,
- BSONObjBuilder& result) {
- const std::string ns = parseNsCollectionRequired(dbname, cmdObj);
-
- AutoGetCollectionForRead ctx(txn, ns);
-
- Collection* collection = ctx.getCollection();
- if ( !collection ) {
- errmsg = "can't find ns";
- return false;
- }
-
- vector<IndexDescriptor*> idxs;
- collection->getIndexCatalog()->findIndexByType(txn, IndexNames::GEO_HAYSTACK, idxs);
- if (idxs.size() == 0) {
- errmsg = "no geoSearch index";
- return false;
- }
- if (idxs.size() > 1) {
- errmsg = "more than 1 geosearch index";
- return false;
- }
-
- BSONElement nearElt = cmdObj["near"];
- BSONElement maxDistance = cmdObj["maxDistance"];
- BSONElement search = cmdObj["search"];
-
- uassert(13318, "near needs to be an array", nearElt.isABSONObj());
- uassert(13319, "maxDistance needs a number", maxDistance.isNumber());
- uassert(13320, "search needs to be an object", search.type() == Object);
-
- unsigned limit = 50;
- if (cmdObj["limit"].isNumber())
- limit = static_cast<unsigned>(cmdObj["limit"].numberInt());
-
- IndexDescriptor* desc = idxs[0];
- HaystackAccessMethod* ham =
- static_cast<HaystackAccessMethod*>( collection->getIndexCatalog()->getIndex(desc) );
- ham->searchCommand(txn, collection, nearElt.Obj(), maxDistance.numberDouble(), search.Obj(),
- &result, limit);
- return 1;
+ vector<IndexDescriptor*> idxs;
+ collection->getIndexCatalog()->findIndexByType(txn, IndexNames::GEO_HAYSTACK, idxs);
+ if (idxs.size() == 0) {
+ errmsg = "no geoSearch index";
+ return false;
}
- } nameSearchCommand;
+ if (idxs.size() > 1) {
+ errmsg = "more than 1 geosearch index";
+ return false;
+ }
+
+ BSONElement nearElt = cmdObj["near"];
+ BSONElement maxDistance = cmdObj["maxDistance"];
+ BSONElement search = cmdObj["search"];
+
+ uassert(13318, "near needs to be an array", nearElt.isABSONObj());
+ uassert(13319, "maxDistance needs a number", maxDistance.isNumber());
+ uassert(13320, "search needs to be an object", search.type() == Object);
+
+ unsigned limit = 50;
+ if (cmdObj["limit"].isNumber())
+ limit = static_cast<unsigned>(cmdObj["limit"].numberInt());
+
+ IndexDescriptor* desc = idxs[0];
+ HaystackAccessMethod* ham =
+ static_cast<HaystackAccessMethod*>(collection->getIndexCatalog()->getIndex(desc));
+ ham->searchCommand(txn,
+ collection,
+ nearElt.Obj(),
+ maxDistance.numberDouble(),
+ search.Obj(),
+ &result,
+ limit);
+ return 1;
+ }
+} nameSearchCommand;
} // namespace mongo
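For reference, the command document that run() above expects has "near" as an array,
"maxDistance" as a number, "search" as an object, and an optional numeric "limit" (defaulting
to 50). A hypothetical sketch of building such a document in C++; the collection name and
field values here are invented:

#include "mongo/db/jsobj.h"

namespace mongo {

BSONObj makeGeoSearchCommand() {
    return BSON("geoSearch" << "places"  // collection with a geoHaystack index (hypothetical)
                            << "near" << BSON_ARRAY(-73.97 << 40.77)
                            << "maxDistance" << 10
                            << "search" << BSON("type" << "restaurant")
                            << "limit" << 30);
}

}  // namespace mongo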
diff --git a/src/mongo/db/geo/r2_region_coverer.cpp b/src/mongo/db/geo/r2_region_coverer.cpp
index b43db9665ad..3e49ab97099 100644
--- a/src/mongo/db/geo/r2_region_coverer.cpp
+++ b/src/mongo/db/geo/r2_region_coverer.cpp
@@ -36,250 +36,246 @@
namespace mongo {
- // Definition
- int const R2RegionCoverer::kDefaultMaxCells = 8;
-
- // Doesn't take ownership of "hashConverter". The caller should guarantee its life cycle
- // is longer than this coverer.
- R2RegionCoverer::R2RegionCoverer( GeoHashConverter* hashConverter ) :
- _hashConverter( hashConverter ),
- _minLevel( 0u ),
- _maxLevel( GeoHash::kMaxBits ),
- _maxCells( kDefaultMaxCells ),
- _region( NULL ),
- _candidateQueue( new CandidateQueue ),
- _results( new vector<GeoHash> )
- {
- }
-
- // Need to declare explicitly because of scoped pointers.
- R2RegionCoverer::~R2RegionCoverer() { }
-
- void R2RegionCoverer::setMinLevel( unsigned int minLevel ) {
- dassert(minLevel >= 0);
- dassert(minLevel <= GeoHash::kMaxBits);
- _minLevel = max(0u, min(GeoHash::kMaxBits, minLevel));
- }
-
- void R2RegionCoverer::setMaxLevel( unsigned int maxLevel ) {
- dassert(maxLevel >= 0);
- dassert(maxLevel <= GeoHash::kMaxBits);
- _maxLevel = max(0u, min(GeoHash::kMaxBits, maxLevel));
- }
-
- void R2RegionCoverer::setMaxCells( int maxCells ) {
- _maxCells = maxCells;
- }
-
- void R2RegionCoverer::getCovering( const R2Region& region, vector<GeoHash>* cover ) {
- // Strategy: Start with the full plane. Discard any
- // that do not intersect the shape. Then repeatedly choose the
- // largest cell that intersects the shape and subdivide it.
- //
- // _result contains the cells that will be part of the output, while the
- // queue contains cells that we may still subdivide further. Cells that
- // are entirely contained within the region are immediately added to the
- // output, while cells that do not intersect the region are immediately
- // discarded. Therefore the queue only contains cells that partially
- // intersect the region. Candidates are prioritized first according to
- // cell size (larger cells first), then by the number of intersecting
- // children they have (fewest children first), and then by the number of
- // fully contained children (fewest children first).
-
- verify(_minLevel <= _maxLevel);
- dassert(_candidateQueue->empty());
- dassert(_results->empty());
- _region = &region;
-
- getInitialCandidates();
-
- while(!_candidateQueue->empty()) {
- Candidate* candidate = _candidateQueue->top().second; // Owned
- _candidateQueue->pop();
- LOG(3) << "Pop: " << candidate->cell;
-
- // Try to expand this cell into its children
- if (candidate->cell.getBits() < _minLevel ||
- candidate->numChildren == 1 ||
- (int)_results->size() +
- (int)_candidateQueue->size() +
- candidate->numChildren <= _maxCells) {
-
- for (int i = 0; i < candidate->numChildren; i++) {
- addCandidate(candidate->children[i]);
- }
- deleteCandidate(candidate, false);
- } else {
- // Reached max cells. Move all candidates from the queue into results.
- candidate->isTerminal = true;
- addCandidate(candidate);
+// Definition
+int const R2RegionCoverer::kDefaultMaxCells = 8;
+
+// Doesn't take ownership of "hashConverter". The caller should guarantee its life cycle
+// is longer than this coverer.
+R2RegionCoverer::R2RegionCoverer(GeoHashConverter* hashConverter)
+ : _hashConverter(hashConverter),
+ _minLevel(0u),
+ _maxLevel(GeoHash::kMaxBits),
+ _maxCells(kDefaultMaxCells),
+ _region(NULL),
+ _candidateQueue(new CandidateQueue),
+ _results(new vector<GeoHash>) {}
+
+// Need to declare explicitly because of scoped pointers.
+R2RegionCoverer::~R2RegionCoverer() {}
+
+void R2RegionCoverer::setMinLevel(unsigned int minLevel) {
+ dassert(minLevel >= 0);
+ dassert(minLevel <= GeoHash::kMaxBits);
+ _minLevel = max(0u, min(GeoHash::kMaxBits, minLevel));
+}
+
+void R2RegionCoverer::setMaxLevel(unsigned int maxLevel) {
+ dassert(maxLevel >= 0);
+ dassert(maxLevel <= GeoHash::kMaxBits);
+ _maxLevel = max(0u, min(GeoHash::kMaxBits, maxLevel));
+}
+
+void R2RegionCoverer::setMaxCells(int maxCells) {
+ _maxCells = maxCells;
+}
+
+void R2RegionCoverer::getCovering(const R2Region& region, vector<GeoHash>* cover) {
+    // Strategy: Start with the full plane. Discard any cells
+    // that do not intersect the shape. Then repeatedly choose the
+ // largest cell that intersects the shape and subdivide it.
+ //
+ // _result contains the cells that will be part of the output, while the
+ // queue contains cells that we may still subdivide further. Cells that
+ // are entirely contained within the region are immediately added to the
+ // output, while cells that do not intersect the region are immediately
+ // discarded. Therefore the queue only contains cells that partially
+ // intersect the region. Candidates are prioritized first according to
+ // cell size (larger cells first), then by the number of intersecting
+ // children they have (fewest children first), and then by the number of
+ // fully contained children (fewest children first).
+
+ verify(_minLevel <= _maxLevel);
+ dassert(_candidateQueue->empty());
+ dassert(_results->empty());
+ _region = &region;
+
+ getInitialCandidates();
+
+ while (!_candidateQueue->empty()) {
+ Candidate* candidate = _candidateQueue->top().second; // Owned
+ _candidateQueue->pop();
+ LOG(3) << "Pop: " << candidate->cell;
+
+ // Try to expand this cell into its children
+ if (candidate->cell.getBits() < _minLevel || candidate->numChildren == 1 ||
+ (int)_results->size() + (int)_candidateQueue->size() + candidate->numChildren <=
+ _maxCells) {
+ for (int i = 0; i < candidate->numChildren; i++) {
+ addCandidate(candidate->children[i]);
}
- LOG(3) << "Queue: " << _candidateQueue->size();
- }
-
- _region = NULL;
- cover->swap(*_results);
- }
-
- // Caller owns the returned pointer
- R2RegionCoverer::Candidate* R2RegionCoverer::newCandidate( const GeoHash& cell ) {
- // Exclude the cell that doesn't intersect with the geometry.
- Box box = _hashConverter->unhashToBoxCovering(cell);
-
- if (_region->fastDisjoint(box)) {
- return NULL;
- }
-
- Candidate* candidate = new Candidate();
- candidate->cell = cell;
- candidate->numChildren = 0;
- // Stop subdivision when we reach the max level or there is no need to do so.
- // Don't stop if we haven't reach min level.
- candidate->isTerminal = cell.getBits() >= _minLevel &&
- (cell.getBits() >= _maxLevel || _region->fastContains(box));
-
- return candidate;
- }
-
- // Takes ownership of "candidate"
- void R2RegionCoverer::addCandidate( Candidate* candidate ) {
- if (candidate == NULL) return;
-
- if (candidate->isTerminal) {
- _results->push_back(candidate->cell);
- deleteCandidate(candidate, true);
- return;
- }
-
- verify(candidate->numChildren == 0);
-
- // Expand children
- int numTerminals = expandChildren(candidate);
-
- if (candidate->numChildren == 0) {
- deleteCandidate(candidate, true);
- } else if (numTerminals == 4 && candidate->cell.getBits() >= _minLevel) {
- // Optimization: add the parent cell rather than all of its children.
+ deleteCandidate(candidate, false);
+ } else {
+ // Reached max cells. Move all candidates from the queue into results.
candidate->isTerminal = true;
addCandidate(candidate);
- } else {
- // Add the cell into the priority queue for further subdivision.
- //
- // We negate the priority so that smaller absolute priorities are returned
- // first. The heuristic is designed to refine the largest cells first,
- // since those are where we have the largest potential gain. Among cells
- // at the same level, we prefer the cells with the smallest number of
- // intersecting children. Finally, we prefer cells that have the smallest
- // number of children that cannot be refined any further.
- int priority = -(((((int)candidate->cell.getBits() << 4)
- + candidate->numChildren) << 4)
- + numTerminals);
- _candidateQueue->push(make_pair(priority, candidate)); // queue owns candidate
- LOG(3) << "Push: " << candidate->cell << " (" << priority << ") ";
}
+ LOG(3) << "Queue: " << _candidateQueue->size();
}
- // Dones't take ownership of "candidate"
- int R2RegionCoverer::expandChildren( Candidate* candidate ) {
- GeoHash childCells[4];
- invariant(candidate->cell.subdivide(childCells));
-
- int numTerminals = 0;
- for (int i = 0; i < 4; ++i) {
- Candidate* child = newCandidate(childCells[i]);
- if (child) {
- candidate->children[candidate->numChildren++] = child;
- if (child->isTerminal) ++numTerminals;
- }
- }
- return numTerminals;
- }
+ _region = NULL;
+ cover->swap(*_results);
+}
- // Takes ownership of "candidate"
- void R2RegionCoverer::deleteCandidate( Candidate* candidate, bool freeChildren ) {
- if (freeChildren) {
- for (int i = 0; i < candidate->numChildren; i++) {
- deleteCandidate(candidate->children[i], true);
- }
- }
+// Caller owns the returned pointer
+R2RegionCoverer::Candidate* R2RegionCoverer::newCandidate(const GeoHash& cell) {
+ // Exclude the cell that doesn't intersect with the geometry.
+ Box box = _hashConverter->unhashToBoxCovering(cell);
- delete candidate;
+ if (_region->fastDisjoint(box)) {
+ return NULL;
}
- void R2RegionCoverer::getInitialCandidates() {
- // Add the full plane
- // TODO a better initialization.
- addCandidate(newCandidate(GeoHash()));
+ Candidate* candidate = new Candidate();
+ candidate->cell = cell;
+ candidate->numChildren = 0;
+ // Stop subdivision when we reach the max level or there is no need to do so.
+    // Don't stop if we haven't reached the min level.
+ candidate->isTerminal =
+ cell.getBits() >= _minLevel && (cell.getBits() >= _maxLevel || _region->fastContains(box));
+
+ return candidate;
+}
+
+// Takes ownership of "candidate"
+void R2RegionCoverer::addCandidate(Candidate* candidate) {
+ if (candidate == NULL)
+ return;
+
+ if (candidate->isTerminal) {
+ _results->push_back(candidate->cell);
+ deleteCandidate(candidate, true);
+ return;
}
- //
- // R2CellUnion
- //
- void R2CellUnion::init(const vector<GeoHash>& cellIds) {
- _cellIds = cellIds;
- normalize();
- }
+ verify(candidate->numChildren == 0);
- bool R2CellUnion::contains(const GeoHash cellId) const {
- // Since all cells are ordered, if an ancestor of id exists, it must be the previous one.
- vector<GeoHash>::const_iterator it;
- it = upper_bound(_cellIds.begin(), _cellIds.end(), cellId); // it > cellId
- return it != _cellIds.begin() && (--it)->contains(cellId); // --it <= cellId
- }
+ // Expand children
+ int numTerminals = expandChildren(candidate);
- bool R2CellUnion::normalize() {
- vector<GeoHash> output;
- output.reserve(_cellIds.size());
- sort(_cellIds.begin(), _cellIds.end());
-
- for (size_t i = 0; i < _cellIds.size(); i++) {
- GeoHash id = _cellIds[i];
-
- // Parent is less than children. If an ancestor of id exists, it must be the last one.
- //
- // Invariant: output doesn't contain intersected cells (ancestor and its descendants)
- // Proof: Assume another cell "c" exists between ancestor "p" and the current "id",
- // i.e. p < c < id, then "c" has "p" as its prefix, since id share the same prefix "p",
- // so "p" contains "c", which conflicts with the invariant.
- if (!output.empty() && output.back().contains(id)) continue;
-
- // Check whether the last 3 elements of "output" plus "id" can be
- // collapsed into a single parent cell.
- while (output.size() >= 3) {
- // A necessary (but not sufficient) condition is that the XOR of the
- // four cells must be zero. This is also very fast to test.
- if ((output.end()[-3].getHash() ^ output.end()[-2].getHash() ^ output.back().getHash())
- != id.getHash())
- break;
-
- // Now we do a slightly more expensive but exact test.
- GeoHash parent = id.parent();
- if (parent != output.end()[-3].parent() ||
- parent != output.end()[-2].parent() ||
- parent != output.end()[-1].parent())
- break;
-
- // Replace four children by their parent cell.
- output.erase(output.end() - 3, output.end());
- id = parent;
- }
- output.push_back(id);
+ if (candidate->numChildren == 0) {
+ deleteCandidate(candidate, true);
+ } else if (numTerminals == 4 && candidate->cell.getBits() >= _minLevel) {
+ // Optimization: add the parent cell rather than all of its children.
+ candidate->isTerminal = true;
+ addCandidate(candidate);
+ } else {
+ // Add the cell into the priority queue for further subdivision.
+ //
+ // We negate the priority so that smaller absolute priorities are returned
+ // first. The heuristic is designed to refine the largest cells first,
+ // since those are where we have the largest potential gain. Among cells
+ // at the same level, we prefer the cells with the smallest number of
+ // intersecting children. Finally, we prefer cells that have the smallest
+ // number of children that cannot be refined any further.
+ int priority = -(((((int)candidate->cell.getBits() << 4) + candidate->numChildren) << 4) +
+ numTerminals);
+ _candidateQueue->push(make_pair(priority, candidate)); // queue owns candidate
+ LOG(3) << "Push: " << candidate->cell << " (" << priority << ") ";
+ }
+}
+
+// Doesn't take ownership of "candidate"
+int R2RegionCoverer::expandChildren(Candidate* candidate) {
+ GeoHash childCells[4];
+ invariant(candidate->cell.subdivide(childCells));
+
+ int numTerminals = 0;
+ for (int i = 0; i < 4; ++i) {
+ Candidate* child = newCandidate(childCells[i]);
+ if (child) {
+ candidate->children[candidate->numChildren++] = child;
+ if (child->isTerminal)
+ ++numTerminals;
}
- if (output.size() < _cellIds.size()) {
- _cellIds.swap(output);
- return true;
+ }
+ return numTerminals;
+}
+
+// Takes ownership of "candidate"
+void R2RegionCoverer::deleteCandidate(Candidate* candidate, bool freeChildren) {
+ if (freeChildren) {
+ for (int i = 0; i < candidate->numChildren; i++) {
+ deleteCandidate(candidate->children[i], true);
}
- return false;
}
- string R2CellUnion::toString() const {
- std::stringstream ss;
- ss << "[ ";
- for (size_t i = 0; i < _cellIds.size(); i++) {
- ss << _cellIds[i] << " ";
+ delete candidate;
+}
+
+void R2RegionCoverer::getInitialCandidates() {
+ // Add the full plane
+ // TODO a better initialization.
+ addCandidate(newCandidate(GeoHash()));
+}
+
+//
+// R2CellUnion
+//
+void R2CellUnion::init(const vector<GeoHash>& cellIds) {
+ _cellIds = cellIds;
+ normalize();
+}
+
+bool R2CellUnion::contains(const GeoHash cellId) const {
+ // Since all cells are ordered, if an ancestor of id exists, it must be the previous one.
+ vector<GeoHash>::const_iterator it;
+ it = upper_bound(_cellIds.begin(), _cellIds.end(), cellId); // it > cellId
+ return it != _cellIds.begin() && (--it)->contains(cellId); // --it <= cellId
+}
+
+bool R2CellUnion::normalize() {
+ vector<GeoHash> output;
+ output.reserve(_cellIds.size());
+ sort(_cellIds.begin(), _cellIds.end());
+
+ for (size_t i = 0; i < _cellIds.size(); i++) {
+ GeoHash id = _cellIds[i];
+
+ // Parent is less than children. If an ancestor of id exists, it must be the last one.
+ //
+ // Invariant: output doesn't contain intersected cells (ancestor and its descendants)
+ // Proof: Assume another cell "c" exists between ancestor "p" and the current "id",
+        // i.e. p < c < id, then "c" has "p" as its prefix, since id shares the same prefix "p",
+ // so "p" contains "c", which conflicts with the invariant.
+ if (!output.empty() && output.back().contains(id))
+ continue;
+
+ // Check whether the last 3 elements of "output" plus "id" can be
+ // collapsed into a single parent cell.
+ while (output.size() >= 3) {
+ // A necessary (but not sufficient) condition is that the XOR of the
+ // four cells must be zero. This is also very fast to test.
+ if ((output.end()[-3].getHash() ^ output.end()[-2].getHash() ^
+ output.back().getHash()) != id.getHash())
+ break;
+
+ // Now we do a slightly more expensive but exact test.
+ GeoHash parent = id.parent();
+ if (parent != output.end()[-3].parent() || parent != output.end()[-2].parent() ||
+ parent != output.end()[-1].parent())
+ break;
+
+ // Replace four children by their parent cell.
+ output.erase(output.end() - 3, output.end());
+ id = parent;
}
- ss << "]";
- return ss.str();
+ output.push_back(id);
+ }
+ if (output.size() < _cellIds.size()) {
+ _cellIds.swap(output);
+ return true;
+ }
+ return false;
+}
+
+string R2CellUnion::toString() const {
+ std::stringstream ss;
+ ss << "[ ";
+ for (size_t i = 0; i < _cellIds.size(); i++) {
+ ss << _cellIds[i] << " ";
}
+ ss << "]";
+ return ss.str();
+}
} /* namespace mongo */
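The covering loop above is driven entirely through the public R2RegionCoverer interface. A
hypothetical sketch of how a caller might use it together with R2CellUnion (the level and cell
limits are invented; the converter must outlive the coverer, as noted above):

#include <vector>

#include "mongo/db/geo/r2_region_coverer.h"

namespace mongo {

std::vector<GeoHash> coverRegionSketch(GeoHashConverter* converter, const R2Region& region) {
    R2RegionCoverer coverer(converter);  // does not take ownership of the converter
    coverer.setMaxLevel(16u);            // finest cell level to use
    coverer.setMaxCells(8);              // kDefaultMaxCells
    std::vector<GeoHash> cover;
    coverer.getCovering(region, &cover);

    R2CellUnion cellUnion;
    cellUnion.init(cover);  // sorts the cells and merges groups of four siblings
    // cellUnion.contains(someCell) can now answer coverage queries.
    return cover;
}

}  // namespace mongo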
diff --git a/src/mongo/db/geo/r2_region_coverer.h b/src/mongo/db/geo/r2_region_coverer.h
index db0aa69c8d9..ebd60e4997f 100644
--- a/src/mongo/db/geo/r2_region_coverer.h
+++ b/src/mongo/db/geo/r2_region_coverer.h
@@ -35,113 +35,120 @@
namespace mongo {
- class R2Region;
-
- class R2RegionCoverer {
- MONGO_DISALLOW_COPYING(R2RegionCoverer);
-
- // By default, the covering uses at most 8 cells at any level.
- static const int kDefaultMaxCells; // = 8;
-
- public:
- R2RegionCoverer() = default;
- R2RegionCoverer(GeoHashConverter* hashConverter);
- ~R2RegionCoverer();
-
- // Set the minimum and maximum cell level to be used. The default is to use
- // all cell levels. Requires: max_level() >= min_level().
- void setMinLevel(unsigned int minLevel);
- void setMaxLevel(unsigned int maxLevel);
- unsigned int minLevel() const { return _minLevel; }
- unsigned int maxLevel() const { return _maxLevel; }
-
- // Sets the maximum desired number of cells in the approximation (defaults
- // to kDefaultMaxCells).
- //
- // For any setting of max_cells(), an arbitrary number of cells may be
- // returned if min_level() is too high for the region being approximated.
- //
- // TODO(sz): accuracy experiments similar to S2RegionCoverer.
- void setMaxCells(int maxCells);
- int maxCells() const { return _maxCells; }
-
- void getCovering(const R2Region& region, std::vector<GeoHash>* cover);
-
- private:
- struct Candidate {
- GeoHash cell;
- bool isTerminal; // Cell should not be expanded further.
- int numChildren; // Number of children that intersect the region.
- Candidate* children[4];
- };
-
- // If the cell intersects the given region, return a new candidate with no
- // children, otherwise return NULL. Also marks the candidate as "terminal"
- // if it should not be expanded further.
- Candidate* newCandidate(GeoHash const& cell);
-
- // Process a candidate by either adding it to the result_ vector or
- // expanding its children and inserting it into the priority queue.
- // Passing an argument of NULL does nothing.
- void addCandidate(Candidate* candidate);
-
- // Free the memory associated with a candidate.
- void deleteCandidate( Candidate* candidate, bool freeChildren );
-
- // Populate the children of "candidate" by expanding from the given cell.
- // Returns the number of children that were marked "terminal".
- int expandChildren(Candidate* candidate);
-
- // Computes a set of initial candidates that cover the given region.
- void getInitialCandidates();
-
- GeoHashConverter* _hashConverter; // Not owned.
- // min / max level as unsigned so as to be consistent with GeoHash
- unsigned int _minLevel;
- unsigned int _maxLevel;
- int _maxCells;
-
- // Save the copy of pointer temporarily to avoid passing this parameter internally.
- // Only valid for the duration of a single getCovering() call.
- R2Region const* _region;
-
- // We keep the candidates that may intersect with this region in a priority queue.
- typedef std::pair<int, Candidate*> QueueEntry;
-
- // We define our own own comparison function on QueueEntries in order to
- // make the results deterministic. Using the default less<QueueEntry>,
- // entries of equal priority would be sorted according to the memory address
- // of the candidate.
- struct CompareQueueEntries {
- bool operator()(QueueEntry const& x, QueueEntry const& y) const {
- return x.first < y.first;
- }
- };
-
- typedef std::priority_queue<QueueEntry, std::vector<QueueEntry>,
- CompareQueueEntries> CandidateQueue;
- std::unique_ptr<CandidateQueue> _candidateQueue; // Priority queue owns candidate pointers.
- std::unique_ptr<std::vector<GeoHash> > _results;
+class R2Region;
+
+class R2RegionCoverer {
+ MONGO_DISALLOW_COPYING(R2RegionCoverer);
+
+ // By default, the covering uses at most 8 cells at any level.
+ static const int kDefaultMaxCells; // = 8;
+
+public:
+ R2RegionCoverer() = default;
+ R2RegionCoverer(GeoHashConverter* hashConverter);
+ ~R2RegionCoverer();
+
+ // Set the minimum and maximum cell level to be used. The default is to use
+ // all cell levels. Requires: max_level() >= min_level().
+ void setMinLevel(unsigned int minLevel);
+ void setMaxLevel(unsigned int maxLevel);
+ unsigned int minLevel() const {
+ return _minLevel;
+ }
+ unsigned int maxLevel() const {
+ return _maxLevel;
+ }
+
+ // Sets the maximum desired number of cells in the approximation (defaults
+ // to kDefaultMaxCells).
+ //
+ // For any setting of max_cells(), an arbitrary number of cells may be
+ // returned if min_level() is too high for the region being approximated.
+ //
+ // TODO(sz): accuracy experiments similar to S2RegionCoverer.
+ void setMaxCells(int maxCells);
+ int maxCells() const {
+ return _maxCells;
+ }
+
+ void getCovering(const R2Region& region, std::vector<GeoHash>* cover);
+
+private:
+ struct Candidate {
+ GeoHash cell;
+ bool isTerminal; // Cell should not be expanded further.
+ int numChildren; // Number of children that intersect the region.
+ Candidate* children[4];
};
-
- // An R2CellUnion is a region consisting of cells of various sizes.
- class R2CellUnion {
- MONGO_DISALLOW_COPYING(R2CellUnion);
- public:
- R2CellUnion() = default;
-
- void init(const std::vector<GeoHash>& cellIds);
- bool contains(const GeoHash cellId) const;
- std::string toString() const;
- private:
- // Normalizes the cell union by discarding cells that are contained by other
- // cells, replacing groups of 4 child cells by their parent cell whenever
- // possible, and sorting all the cell ids in increasing order. Returns true
- // if the number of cells was reduced.
- bool normalize();
- std::vector<GeoHash> _cellIds;
+ // If the cell intersects the given region, return a new candidate with no
+ // children, otherwise return NULL. Also marks the candidate as "terminal"
+ // if it should not be expanded further.
+ Candidate* newCandidate(GeoHash const& cell);
+
+ // Process a candidate by either adding it to the result_ vector or
+ // expanding its children and inserting it into the priority queue.
+ // Passing an argument of NULL does nothing.
+ void addCandidate(Candidate* candidate);
+
+ // Free the memory associated with a candidate.
+ void deleteCandidate(Candidate* candidate, bool freeChildren);
+
+ // Populate the children of "candidate" by expanding from the given cell.
+ // Returns the number of children that were marked "terminal".
+ int expandChildren(Candidate* candidate);
+
+ // Computes a set of initial candidates that cover the given region.
+ void getInitialCandidates();
+
+ GeoHashConverter* _hashConverter; // Not owned.
+ // min / max level as unsigned so as to be consistent with GeoHash
+ unsigned int _minLevel;
+ unsigned int _maxLevel;
+ int _maxCells;
+
+    // Temporarily saves a copy of the region pointer to avoid passing it through internal calls.
+ // Only valid for the duration of a single getCovering() call.
+ R2Region const* _region;
+
+ // We keep the candidates that may intersect with this region in a priority queue.
+ typedef std::pair<int, Candidate*> QueueEntry;
+
+    // We define our own comparison function on QueueEntries in order to
+ // make the results deterministic. Using the default less<QueueEntry>,
+ // entries of equal priority would be sorted according to the memory address
+ // of the candidate.
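+    // (std::priority_queue with this comparator is a max-heap on the first element, so entries
+    // with the largest priority value are popped first.)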
+ struct CompareQueueEntries {
+ bool operator()(QueueEntry const& x, QueueEntry const& y) const {
+ return x.first < y.first;
+ }
};
-} /* namespace mongo */
+ typedef std::priority_queue<QueueEntry, std::vector<QueueEntry>, CompareQueueEntries>
+ CandidateQueue;
+ std::unique_ptr<CandidateQueue> _candidateQueue; // Priority queue owns candidate pointers.
+ std::unique_ptr<std::vector<GeoHash>> _results;
+};
+
+
+// An R2CellUnion is a region consisting of cells of various sizes.
+class R2CellUnion {
+ MONGO_DISALLOW_COPYING(R2CellUnion);
+public:
+ R2CellUnion() = default;
+
+ void init(const std::vector<GeoHash>& cellIds);
+ bool contains(const GeoHash cellId) const;
+ std::string toString() const;
+
+private:
+ // Normalizes the cell union by discarding cells that are contained by other
+ // cells, replacing groups of 4 child cells by their parent cell whenever
+ // possible, and sorting all the cell ids in increasing order. Returns true
+ // if the number of cells was reduced.
+ bool normalize();
+ std::vector<GeoHash> _cellIds;
+};
+
+} /* namespace mongo */
diff --git a/src/mongo/db/geo/r2_region_coverer_test.cpp b/src/mongo/db/geo/r2_region_coverer_test.cpp
index e1df6edea77..7fc0f9192f5 100644
--- a/src/mongo/db/geo/r2_region_coverer_test.cpp
+++ b/src/mongo/db/geo/r2_region_coverer_test.cpp
@@ -38,588 +38,587 @@
namespace {
- using std::unique_ptr;
- using namespace mongo;
- using mongo::Polygon; // "windows.h" has another Polygon for Windows GDI.
+using std::unique_ptr;
+using namespace mongo;
+using mongo::Polygon; // "windows.h" has another Polygon for Windows GDI.
+
+//
+// GeoHash
+//
+TEST(R2RegionCoverer, GeoHashSubdivide) {
+ GeoHash children[4];
+
+ // Full plane -> 4 quadrants
+ GeoHash fullPlane;
+ ASSERT_TRUE(fullPlane.subdivide(children));
+ ASSERT_EQUALS(children[0], GeoHash(0LL, 1u)); // (x, y) : (0, 0)
+ ASSERT_EQUALS(children[1], GeoHash(1LL << 62, 1u)); // (x, y) : (0, 1)
+ ASSERT_EQUALS(children[2], GeoHash(2LL << 62, 1u)); // (x, y) : (1, 0)
+ ASSERT_EQUALS(children[3], GeoHash(3LL << 62, 1u)); // (x, y) : (1, 1)
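+    // (At level 1 the two most significant bits of the 64-bit hash select the quadrant.)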
+
+ // Small cell: 0...11XX -> 0...11[0-3]
+ const long long cellHash = 3LL << 2;
+ GeoHash cell(cellHash, 31u);
+ ASSERT_TRUE(cell.subdivide(children));
+ ASSERT_EQUALS(children[0], GeoHash(cellHash, 32u)); // (x, y) : (0, 0)
+ ASSERT_EQUALS(children[1], GeoHash(cellHash + 1, 32u)); // (x, y) : (0, 1)
+ ASSERT_EQUALS(children[2], GeoHash(cellHash + 2, 32u)); // (x, y) : (1, 0)
+ ASSERT_EQUALS(children[3], GeoHash(cellHash + 3, 32u)); // (x, y) : (1, 1)
+
+ // Smallest cell at finest level cannot subdivide
+ GeoHash leafCell(1LL, 32u);
+ ASSERT_FALSE(leafCell.subdivide(children));
+}
+
+TEST(R2RegionCoverer, GeoHashUnusedBits) {
+ GeoHash geoHash(5566154225580586776LL, 0u);
+ GeoHash entirePlane;
+ ASSERT_EQUALS(geoHash, entirePlane);
+}
+
+TEST(R2RegionCoverer, GeoHashContains) {
+ GeoHash entirePlane;
+ GeoHash geoHash(5566154225580586776LL, 32u); // An arbitrary random cell
+ // GeoHash contains itself
+ ASSERT_TRUE(entirePlane.contains(entirePlane));
+ ASSERT_TRUE(geoHash.contains(geoHash));
+ // Entire plane contains everything
+ ASSERT_TRUE(entirePlane.contains(geoHash));
+ ASSERT_FALSE(geoHash.contains(entirePlane));
+
+ // Positive cases
+ GeoHash parent("0010");
+ GeoHash child("00100101");
+ ASSERT_TRUE(parent.contains(parent));
+ ASSERT_TRUE(parent.contains(child));
+ ASSERT_TRUE(entirePlane.contains(geoHash));
+
+ // Negative cases
+ GeoHash other("01");
+ ASSERT_FALSE(parent.contains(other));
+ ASSERT_FALSE(other.contains(parent));
+}
+
+
+//
+// R2RegionCoverer
+//
+
+// Plane boundary, x: [0.0, 100.0], y: [0.0, 100.0]
+const double MAXBOUND = 100.0;
+
+// Global random number generator, repeatable.
+mongo::PseudoRandom rand(0);
+
+GeoHashConverter::Parameters getConverterParams() {
+ GeoHashConverter::Parameters params;
+ params.bits = 32;
+ params.min = 0.0;
+ params.max = MAXBOUND;
+ const double numBuckets = (1024 * 1024 * 1024 * 4.0);
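+    // With 32 bits per dimension there are 2^32 buckets, so scaling maps [min, max] onto them.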
+ params.scaling = numBuckets / (params.max - params.min);
+ return params;
+}
- //
- // GeoHash
- //
- TEST(R2RegionCoverer, GeoHashSubdivide) {
- GeoHash children[4];
-
- // Full plane -> 4 quadrants
- GeoHash fullPlane;
- ASSERT_TRUE( fullPlane.subdivide( children ) );
- ASSERT_EQUALS( children[0], GeoHash( 0LL, 1u ) ); // (x, y) : (0, 0)
- ASSERT_EQUALS( children[1], GeoHash( 1LL << 62, 1u ) ); // (x, y) : (0, 1)
- ASSERT_EQUALS( children[2], GeoHash( 2LL << 62, 1u ) ); // (x, y) : (1, 0)
- ASSERT_EQUALS( children[3], GeoHash( 3LL << 62, 1u ) ); // (x, y) : (1, 1)
-
- // Small cell: 0...11XX -> 0...11[0-3]
- const long long cellHash = 3LL << 2;
- GeoHash cell( cellHash, 31u );
- ASSERT_TRUE( cell.subdivide( children ) );
- ASSERT_EQUALS( children[0], GeoHash( cellHash, 32u ) ); // (x, y) : (0, 0)
- ASSERT_EQUALS( children[1], GeoHash( cellHash + 1, 32u ) ); // (x, y) : (0, 1)
- ASSERT_EQUALS( children[2], GeoHash( cellHash + 2, 32u ) ); // (x, y) : (1, 0)
- ASSERT_EQUALS( children[3], GeoHash( cellHash + 3, 32u ) ); // (x, y) : (1, 1)
-
- // Smallest cell at finest level cannot subdivide
- GeoHash leafCell(1LL, 32u);
- ASSERT_FALSE( leafCell.subdivide( children ) );
- }
-
- TEST(R2RegionCoverer, GeoHashUnusedBits) {
- GeoHash geoHash(5566154225580586776LL, 0u);
- GeoHash entirePlane;
- ASSERT_EQUALS(geoHash, entirePlane);
+/**
+ * Test region which mimics the region of a geohash cell.
+ * NOTE: Technically this is not 100% correct, since geohash cells are inclusive on lower and
+ * exclusive on upper edges. For now, this region is just exclusive on all edges.
+ * TODO: Create an explicit HashCell which correctly encapsulates this behavior, push to the
+ * R2Region interface.
+ */
+class HashBoxRegion : public R2Region {
+public:
+ HashBoxRegion(Box box) : _box(box) {}
+ Box getR2Bounds() const {
+ return _box;
}
- TEST(R2RegionCoverer, GeoHashContains) {
- GeoHash entirePlane;
- GeoHash geoHash(5566154225580586776LL, 32u); // An arbitrary random cell
- // GeoHash contains itself
- ASSERT_TRUE(entirePlane.contains(entirePlane));
- ASSERT_TRUE(geoHash.contains(geoHash));
- // Entire plane contains everything
- ASSERT_TRUE(entirePlane.contains(geoHash));
- ASSERT_FALSE(geoHash.contains(entirePlane));
-
- // Positive cases
- GeoHash parent("0010");
- GeoHash child("00100101");
- ASSERT_TRUE(parent.contains(parent));
- ASSERT_TRUE(parent.contains(child));
- ASSERT_TRUE(entirePlane.contains(geoHash));
-
- // Negative cases
- GeoHash other("01");
- ASSERT_FALSE(parent.contains(other));
- ASSERT_FALSE(other.contains(parent));
+ bool fastContains(const Box& other) const {
+ return _box.contains(other);
}
+ bool fastDisjoint(const Box& other) const {
+ if (!_box.intersects(other))
+ return true;
- //
- // R2RegionCoverer
- //
-
- // Plane boundary, x: [0.0, 100.0], y: [0.0, 100.0]
- const double MAXBOUND = 100.0;
-
- // Global random number generator, repeatable.
- mongo::PseudoRandom rand(0);
+ // Make outer edges exclusive
+ if (_box._max.x == other._min.x || _box._min.x == other._max.x ||
+ _box._max.y == other._min.y || _box._min.y == other._max.y)
+ return true;
- GeoHashConverter::Parameters getConverterParams() {
- GeoHashConverter::Parameters params;
- params.bits = 32;
- params.min = 0.0;
- params.max = MAXBOUND;
- const double numBuckets = (1024 * 1024 * 1024 * 4.0);
- params.scaling = numBuckets / (params.max - params.min);
- return params;
+ return false;
}
- /**
- * Test region which mimics the region of a geohash cell.
- * NOTE: Technically this is not 100% correct, since geohash cells are inclusive on lower and
- * exclusive on upper edges. For now, this region is just exclusive on all edges.
- * TODO: Create an explicit HashCell which correctly encapsulates this behavior, push to the
- * R2Region interface.
- */
- class HashBoxRegion : public R2Region {
- public:
-
- HashBoxRegion(Box box) : _box(box) {}
- Box getR2Bounds() const { return _box; }
-
- bool fastContains(const Box& other) const {
- return _box.contains(other);
- }
-
- bool fastDisjoint(const Box& other) const {
- if (!_box.intersects(other))
- return true;
-
- // Make outer edges exclusive
- if (_box._max.x == other._min.x || _box._min.x == other._max.x
- || _box._max.y == other._min.y || _box._min.y == other._max.y)
- return true;
-
- return false;
- }
-
- private:
- Box _box;
- };
-
- TEST(R2RegionCoverer, RandomCells) {
- GeoHashConverter converter(getConverterParams());
- R2RegionCoverer coverer(&converter);
- coverer.setMaxCells( 1 );
- // Test random cell ids at all levels.
- for ( int i = 0; i < 10000; ++i ) {
- GeoHash id( (long long) rand.nextInt64(),
- (unsigned) rand.nextInt32( GeoHash::kMaxBits + 1 ) );
- vector<GeoHash> covering;
- Box box = converter.unhashToBoxCovering(id);
- // Since the unhashed box is expanded by the error 8Mu, we need to shrink it.
- box.fudge(-GeoHashConverter::kMachinePrecision * MAXBOUND * 20);
- HashBoxRegion region(box);
- coverer.getCovering(region, &covering);
- ASSERT_EQUALS( covering.size(), (size_t)1 );
- ASSERT_EQUALS( covering[0], id );
+private:
+ Box _box;
+};
+
+TEST(R2RegionCoverer, RandomCells) {
+ GeoHashConverter converter(getConverterParams());
+ R2RegionCoverer coverer(&converter);
+ coverer.setMaxCells(1);
+ // Test random cell ids at all levels.
+ for (int i = 0; i < 10000; ++i) {
+ GeoHash id((long long)rand.nextInt64(), (unsigned)rand.nextInt32(GeoHash::kMaxBits + 1));
+ vector<GeoHash> covering;
+ Box box = converter.unhashToBoxCovering(id);
+ // Since the unhashed box is expanded by the error 8Mu, we need to shrink it.
+ box.fudge(-GeoHashConverter::kMachinePrecision * MAXBOUND * 20);
+ HashBoxRegion region(box);
+ coverer.getCovering(region, &covering);
+ ASSERT_EQUALS(covering.size(), (size_t)1);
+ ASSERT_EQUALS(covering[0], id);
+ }
+}
+
+double randDouble(double lowerBound, double upperBound) {
+ verify(lowerBound <= upperBound);
+ const int NUMBITS = 53;
+ // Random double in [0, 1)
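+    // Mask to the low 53 bits (a double's mantissa width) and scale by 2^-53 via ldexp.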
+ double r = ldexp((double)(rand.nextInt64() & ((1ULL << NUMBITS) - 1ULL)), -NUMBITS);
+ return lowerBound + r * (upperBound - lowerBound);
+}
+
+// Check that the given region is completely covered by the covering.
+// cellId is used internally.
+void checkCellIdCovering(const GeoHashConverter& converter,
+ const R2Region& region,
+ const R2CellUnion& covering,
+ const GeoHash cellId = GeoHash()) {
+ Box cell = converter.unhashToBoxCovering(cellId);
+
+ // The covering may or may not contain this disjoint cell, we don't care.
+ if (region.fastDisjoint(cell))
+ return;
+
+ // If the covering contains this id, that's fine.
+ if (covering.contains(cellId))
+ return;
+
+ // The covering doesn't contain this cell, so the region shouldn't contain this cell.
+ if (region.fastContains(cell)) {
+ log() << "covering " << covering.toString();
+ log() << "cellId " << cellId;
+ }
+ ASSERT_FALSE(region.fastContains(cell));
+
+    // The region intersects this cell, so the covering should intersect it too.
+    // We need to go deeper until a leaf. Every leaf must be caught by one of the cases above:
+    // - disjoint from the region, which we don't care about, or
+    // - intersecting the region, and therefore contained in the covering.
+    // We can guarantee the disjoint/intersection test is exact, since the region is a circle.
+ GeoHash children[4];
+ ASSERT_TRUE(cellId.subdivide(children)); // Not a leaf
+ for (int i = 0; i < 4; i++) {
+ checkCellIdCovering(converter, region, covering, children[i]);
+ }
+}
+
+void checkCovering(const GeoHashConverter& converter,
+ const R2Region& region,
+ const R2RegionCoverer& coverer,
+ const vector<GeoHash> covering) {
+ // Keep track of how many cells have the same coverer.minLevel() ancestor.
+ map<GeoHash, int> minLevelCells;
+ // Check covering's minLevel and maxLevel.
+ for (size_t i = 0; i < covering.size(); ++i) {
+ unsigned int level = covering[i].getBits();
+ ASSERT_NOT_LESS_THAN(level, coverer.minLevel());
+ ASSERT_NOT_GREATER_THAN(level, coverer.maxLevel());
+ minLevelCells[covering[i].parent(coverer.minLevel())] += 1;
+ }
+ if (covering.size() > (unsigned int)coverer.maxCells()) {
+ // If the covering has more than the requested number of cells, then check
+ // that the cell count cannot be reduced by using the parent of some cell.
+ for (map<GeoHash, int>::const_iterator i = minLevelCells.begin(); i != minLevelCells.end();
+ ++i) {
+ ASSERT_EQUALS(i->second, 1);
}
}
- double randDouble(double lowerBound, double upperBound) {
- verify(lowerBound <= upperBound);
- const int NUMBITS = 53;
- // Random double in [0, 1)
- double r = ldexp((double)(rand.nextInt64() & ((1ULL << NUMBITS) - 1ULL)), -NUMBITS);
- return lowerBound + r * ( upperBound - lowerBound );
+ R2CellUnion cellUnion;
+ cellUnion.init(covering);
+ checkCellIdCovering(converter, region, cellUnion);
+}
+
+// Generate a circle within [0, MAXBOUND]
+GeometryContainer* getRandomCircle(double radius) {
+ ASSERT_LESS_THAN(radius, MAXBOUND / 2);
+
+ // Format: { $center : [ [-74, 40.74], 10 ] }
+ GeometryContainer* container = new GeometryContainer();
+ container->parseFromQuery(
+ BSON("$center" << BSON_ARRAY(BSON_ARRAY(randDouble(radius, MAXBOUND - radius)
+ << randDouble(radius, MAXBOUND - radius))
+ << radius)).firstElement());
+ return container;
+}
+
+// Test the covering for arbitrary random circle.
+TEST(R2RegionCoverer, RandomCircles) {
+ GeoHashConverter converter(getConverterParams());
+ R2RegionCoverer coverer(&converter);
+ coverer.setMaxCells(8);
+
+ for (int i = 0; i < 1000; i++) {
+        // With R2BoxRegion, the disjoint test against the circle gives poor results near
+        // the corners, so many small cells are treated as intersecting in the priority
+        // queue, which is very slow for a larger minLevel (smaller cells). So we limit
+        // minLevel to [0, 6].
+ coverer.setMinLevel(rand.nextInt32(7));
+ coverer.setMaxLevel(coverer.minLevel() + 4);
+
+ double radius = randDouble(0.0, MAXBOUND / 2);
+ unique_ptr<GeometryContainer> geometry(getRandomCircle(radius));
+ const R2Region& region = geometry->getR2Region();
+
+ vector<GeoHash> covering;
+ coverer.getCovering(region, &covering);
+ checkCovering(converter, region, coverer, covering);
}
+}
+
+// Test the covering for very small circles, since the above test doesn't cover finest cells.
+TEST(R2RegionCoverer, RandomTinyCircles) {
+ GeoHashConverter converter(getConverterParams());
+ R2RegionCoverer coverer(&converter);
+ coverer.setMaxCells(rand.nextInt32(20) + 1); // [1, 20]
+
+ for (int i = 0; i < 10000; i++) {
+ do {
+ coverer.setMinLevel(rand.nextInt32(GeoHash::kMaxBits + 1));
+ coverer.setMaxLevel(rand.nextInt32(GeoHash::kMaxBits + 1));
+ } while (coverer.minLevel() > coverer.maxLevel());
+
+ // 100 * 2 ^ -32 ~= 2.3E-8 (cell edge length)
+ double radius = randDouble(1E-15, ldexp(100.0, -32) * 10);
+ unique_ptr<GeometryContainer> geometry(getRandomCircle(radius));
+ const R2Region& region = geometry->getR2Region();
+
+ vector<GeoHash> covering;
+ coverer.getCovering(region, &covering);
+ checkCovering(converter, region, coverer, covering);
+ }
+}
+
+//
+// Shape Intersection
+//
+TEST(ShapeIntersection, Lines) {
+ /*
+ * E |D
+ * A___B |C G
+ * F
+ */
+ Point a(0, 0), b(1, 0), c(2, 0), d(2, 1);
+ Point e(0.5, 1), f(0.5, -0.5), g(3, 0);
- // Check the given region is covered by the covering completely.
- // cellId is used internally.
- void checkCellIdCovering(const GeoHashConverter& converter,
- const R2Region& region,
- const R2CellUnion& covering,
- const GeoHash cellId = GeoHash()) {
-
- Box cell = converter.unhashToBoxCovering(cellId);
+ /*
+ * Basic disjoint
+ * / |
+ * / |
+ */
+ ASSERT_FALSE(linesIntersect(a, d, c, b));
+ ASSERT_FALSE(linesIntersect(c, b, a, d)); // commutative
- // The covering may or may not contain this disjoint cell, we don't care.
- if (region.fastDisjoint(cell)) return;
+ /*
+ * Basic disjoint (axis aligned)
+ * |
+ * ___ |
+ */
+ ASSERT_FALSE(linesIntersect(a, b, c, d));
+ ASSERT_FALSE(linesIntersect(c, d, a, b)); // commutative
- // If the covering contains this id, that's fine.
- if (covering.contains(cellId)) return;
+ /*
+ * Basic intersection
+ * \/
+ * /\
+ */
+ ASSERT_TRUE(linesIntersect(e, c, f, d));
+ ASSERT_TRUE(linesIntersect(f, d, e, c)); // commutative
- // The covering doesn't contain this cell, so the region shouldn't contain this cell.
- if (region.fastContains(cell)) {
- log() << "covering " << covering.toString();
- log() << "cellId " << cellId;
- }
- ASSERT_FALSE(region.fastContains(cell));
-
- // The region intersects with this cell. So the covering should intersect with it too.
- // We need to go deeper until a leaf. When we reach a leaf, it must be caught above
- // - disjoint with the region, we don't care.
- // - intersected with the region, contained in the covering.
- // We can guarantee the disjoint/intersection test is exact, since it's a circle.
- GeoHash children[4];
- ASSERT_TRUE(cellId.subdivide( children )); // Not a leaf
- for ( int i = 0; i < 4; i++ ) {
- checkCellIdCovering( converter, region, covering, children[i] );
- }
- }
+ /*
+ * Basic intersection (axis aligned)
+ * _|_
+ * |
+ */
+ ASSERT_TRUE(linesIntersect(a, b, e, f));
+ ASSERT_TRUE(linesIntersect(f, e, b, a)); // commutative
- void checkCovering(const GeoHashConverter& converter,
- const R2Region& region,
- const R2RegionCoverer& coverer,
- const vector<GeoHash> covering) {
-
- // Keep track of how many cells have the same coverer.minLevel() ancestor.
- map<GeoHash, int> minLevelCells;
- // Check covering's minLevel and maxLevel.
- for (size_t i = 0; i < covering.size(); ++i) {
- unsigned int level = covering[i].getBits();
- ASSERT_NOT_LESS_THAN(level, coverer.minLevel());
- ASSERT_NOT_GREATER_THAN(level, coverer.maxLevel());
- minLevelCells[covering[i].parent(coverer.minLevel())] += 1;
- }
- if (covering.size() > (unsigned int)coverer.maxCells()) {
- // If the covering has more than the requested number of cells, then check
- // that the cell count cannot be reduced by using the parent of some cell.
- for (map<GeoHash, int>::const_iterator i = minLevelCells.begin();
- i != minLevelCells.end(); ++i) {
- ASSERT_EQUALS(i->second, 1);
- }
- }
+ /*
+ * One vertex on the line
+ * \
+ * ____ \
+ */
+ ASSERT_FALSE(linesIntersect(a, b, e, c));
+ ASSERT_FALSE(linesIntersect(e, c, a, b)); // commutative
- R2CellUnion cellUnion;
- cellUnion.init(covering);
- checkCellIdCovering(converter, region, cellUnion);
- }
+ /*
+ * One vertex on the segment
+ * \
+ * ___\___
+ */
+ ASSERT_TRUE(linesIntersect(a, c, b, e));
+ ASSERT_TRUE(linesIntersect(e, b, a, c)); // commutative
- // Generate a circle within [0, MAXBOUND]
- GeometryContainer* getRandomCircle(double radius) {
- ASSERT_LESS_THAN(radius, MAXBOUND / 2);
-
- // Format: { $center : [ [-74, 40.74], 10 ] }
- GeometryContainer* container = new GeometryContainer();
- container->parseFromQuery(BSON("$center"
- << BSON_ARRAY(
- BSON_ARRAY(randDouble(radius, MAXBOUND - radius)
- << randDouble(radius, MAXBOUND - radius))
- << radius)).firstElement());
- return container;
- }
+ /*
+ * Two segments share one vertex
+ * /
+ * /____
+ */
+ ASSERT_TRUE(linesIntersect(a, c, a, e));
+ ASSERT_TRUE(linesIntersect(a, e, a, c)); // commutative
- // Test the covering for arbitrary random circle.
- TEST(R2RegionCoverer, RandomCircles) {
- GeoHashConverter converter(getConverterParams());
- R2RegionCoverer coverer(&converter);
- coverer.setMaxCells( 8 );
-
- for (int i = 0; i < 1000; i++) {
- // Using R2BoxRegion, the disjoint with circle gives poor results around the corner,
- // so many small cells are considered as intersected in the priority queue, which is
- // very slow for larger minLevel (smaller cell). So we limit minLevels in [0, 6].
- coverer.setMinLevel( rand.nextInt32( 7 ) );
- coverer.setMaxLevel( coverer.minLevel() + 4 );
-
- double radius = randDouble(0.0, MAXBOUND / 2);
- unique_ptr<GeometryContainer> geometry(getRandomCircle(radius));
- const R2Region& region = geometry->getR2Region();
-
- vector<GeoHash> covering;
- coverer.getCovering(region, &covering);
- checkCovering(converter, region, coverer, covering);
- }
- }
+ /*
+ * Intersected segments on the same line
+ * A___B===C---G
+ */
+ ASSERT_TRUE(linesIntersect(a, c, b, g));
+ ASSERT_TRUE(linesIntersect(b, g, c, a)); // commutative
- // Test the covering for very small circles, since the above test doesn't cover finest cells.
- TEST(R2RegionCoverer, RandomTinyCircles) {
- GeoHashConverter converter(getConverterParams());
- R2RegionCoverer coverer(&converter);
- coverer.setMaxCells( rand.nextInt32(20) + 1 ); // [1, 20]
-
- for (int i = 0; i < 10000; i++) {
- do {
- coverer.setMinLevel( rand.nextInt32( GeoHash::kMaxBits + 1 ) );
- coverer.setMaxLevel( rand.nextInt32( GeoHash::kMaxBits + 1 ) );
- } while (coverer.minLevel() > coverer.maxLevel());
-
- // 100 * 2 ^ -32 ~= 2.3E-8 (cell edge length)
- double radius = randDouble(1E-15, ldexp(100.0, -32) * 10);
- unique_ptr<GeometryContainer> geometry(getRandomCircle(radius));
- const R2Region& region = geometry->getR2Region();
-
- vector<GeoHash> covering;
- coverer.getCovering(region, &covering);
- checkCovering(converter, region, coverer, covering);
- }
- }
+ /*
+ * Disjoint segments on the same line
+ * A___B C---G
+ */
+ ASSERT_FALSE(linesIntersect(a, b, c, g));
+ ASSERT_FALSE(linesIntersect(c, g, a, b)); // commutative
+
+ /*
+ * Segments on the same line share one vertex.
+ * /D
+ * /B
+ * F/
+ */
+ ASSERT_TRUE(linesIntersect(d, b, b, f));
+ ASSERT_TRUE(linesIntersect(f, b, d, b)); // commutative
+ // axis aligned
+ ASSERT_TRUE(linesIntersect(a, c, g, c));
+ ASSERT_TRUE(linesIntersect(c, g, a, c)); // commutative
+}
+
+TEST(ShapeIntersection, Polygons) {
+ // Convex polygon (triangle)
+
+ /*
+ * Disjoint, bounds disjoint
+ * /|
+ * / | []
+ * /__|
+ */
+ vector<Point> triangleVetices;
+ triangleVetices.push_back(Point(0, 0));
+ triangleVetices.push_back(Point(1, 0));
+ triangleVetices.push_back(Point(1, 4));
+ Polygon triangle(triangleVetices);
+ Box box;
+
+ box = Box(1.5, 1.5, 1);
+ ASSERT_FALSE(edgesIntersectsWithBox(triangle.points(), box));
+ ASSERT_FALSE(polygonIntersectsWithBox(triangle, box));
+ ASSERT_FALSE(polygonContainsBox(triangle, box));
+
+ /*
+ * Disjoint, bounds intersect
+ * [] /|
+ * / |
+ * /__|
+ */
+ box = Box(-0.5, 3.5, 1);
+ ASSERT_FALSE(edgesIntersectsWithBox(triangle.points(), box));
+ ASSERT_FALSE(polygonIntersectsWithBox(triangle, box));
+ ASSERT_FALSE(polygonContainsBox(triangle, box));
+
+ /*
+ * Intersect on one polygon vertex
+ * _____
+ * | |
+ * |_ /|_|
+ * / |
+ * /__|
+ */
+ box = Box(0, 3, 2);
+ ASSERT_TRUE(edgesIntersectsWithBox(triangle.points(), box));
+ ASSERT_TRUE(polygonIntersectsWithBox(triangle, box));
+ ASSERT_FALSE(polygonContainsBox(triangle, box));
+
+ /*
+ * Box contains polygon
+ * __________
+ * | |
+ * | /| |
+ * | / | |
+ * | /__| |
+ * |__________|
+ */
+ box = Box(-1, -1, 6);
+ ASSERT_FALSE(edgesIntersectsWithBox(triangle.points(), box));
+ ASSERT_TRUE(polygonIntersectsWithBox(triangle, box));
+ ASSERT_FALSE(polygonContainsBox(triangle, box));
+
+ /*
+ * Polygon contains box
+ * /|
+ * / |
+ * / |
+ * / []|
+ * /____|
+ */
+ box = Box(0.1, 0.1, 0.2);
+ ASSERT_FALSE(edgesIntersectsWithBox(triangle.points(), box));
+ ASSERT_TRUE(polygonIntersectsWithBox(triangle, box));
+ ASSERT_TRUE(polygonContainsBox(triangle, box));
+
+ /*
+ * Intersect, but no vertex is contained by the other shape.
+ * ___ /|_
+ * | / | |
+ * | / | |
+ * |_/___|_|
+ * /____|
+ */
+ box = Box(0, 1, 2);
+ ASSERT_TRUE(edgesIntersectsWithBox(triangle.points(), box));
+ ASSERT_TRUE(polygonIntersectsWithBox(triangle, box));
+ ASSERT_FALSE(polygonContainsBox(triangle, box));
+
+ // Concave polygon
+
+ /*
+ * (0,4)
+ * |\
+ * | \(1,1)
+ * | `.
+ * |____`. (4,0)
+ * (0,0)
+ */
+ vector<Point> concaveVetices;
+ concaveVetices.push_back(Point(0, 0));
+ concaveVetices.push_back(Point(4, 0));
+ concaveVetices.push_back(Point(1, 1));
+ concaveVetices.push_back(Point(0, 4));
+ Polygon concave(concaveVetices);
+
+ /*
+ * Disjoint
+ * |\
+ * | \
+ * | `.
+ * |____`.
+ * []
+ */
+ box = Box(1, -1, 0.9);
+ ASSERT_FALSE(edgesIntersectsWithBox(concave.points(), box));
+ ASSERT_FALSE(polygonIntersectsWithBox(concave, box));
+ ASSERT_FALSE(polygonContainsBox(concave, box));
+
+ /*
+ * Disjoint, bounds intersect
+ * |\
+ * | \[]
+ * | `.
+ * |____`.
+ */
+ box = Box(1.1, 1.1, 0.2);
+ ASSERT_FALSE(edgesIntersectsWithBox(concave.points(), box));
+ ASSERT_FALSE(polygonIntersectsWithBox(concave, box));
+ ASSERT_FALSE(polygonContainsBox(concave, box));
+
+ /*
+ * Intersect, one box vertex is contained by the polygon.
+ * |\
+ * |+\+ (1.5, 1.5)
+ * |+-`.
+ * |____`.
+ */
+ box = Box(0.5, 0.5, 1);
+ ASSERT_TRUE(edgesIntersectsWithBox(concave.points(), box));
+ ASSERT_TRUE(polygonIntersectsWithBox(concave, box));
+ ASSERT_FALSE(polygonContainsBox(concave, box));
+
+ /*
+ * Intersect, no vertex is contained by the other shape.
+ * |\
+ * +| \--+
+ * || `.|
+ * ||____`.
+ * +-----+
+ */
+ box = Box(-0.5, -0.5, 3);
+ ASSERT_TRUE(edgesIntersectsWithBox(concave.points(), box));
+ ASSERT_TRUE(polygonIntersectsWithBox(concave, box));
+ ASSERT_FALSE(polygonContainsBox(concave, box));
+}
+
+TEST(ShapeIntersection, Annulus) {
+ R2Annulus annulus(Point(0.0, 0.0), 1, 5);
+ Box box;
+
+ // Disjoint, out of outer circle
+ box = Box(4, 4, 1);
+ ASSERT_TRUE(annulus.fastDisjoint(box));
+ ASSERT_FALSE(annulus.fastContains(box));
+
+ // Box contains outer circle
+ box = Box(-6, -5.5, 12);
+ ASSERT_FALSE(annulus.fastDisjoint(box));
+ ASSERT_FALSE(annulus.fastContains(box));
+
+ // Box intersects with the outer circle, but not the inner circle
+ box = Box(3, 3, 4);
+ ASSERT_FALSE(annulus.fastDisjoint(box));
+ ASSERT_FALSE(annulus.fastContains(box));
+
+ // Box is contained by the annulus
+ box = Box(2, 2, 1);
+ ASSERT_FALSE(annulus.fastDisjoint(box));
+ ASSERT_TRUE(annulus.fastContains(box));
+
+ // Box is contained by the outer circle and intersects with the inner circle
+ box = Box(0.4, 0.5, 3);
+ ASSERT_FALSE(annulus.fastDisjoint(box));
+ ASSERT_FALSE(annulus.fastContains(box));
+
+ // Box intersects with both outer and inner circle
+ box = Box(-4, -4, 4.5);
+ ASSERT_FALSE(annulus.fastDisjoint(box));
+ ASSERT_FALSE(annulus.fastContains(box));
+
+ // Box is inside the inner circle
+ box = Box(-0.1, -0.2, 0.5);
+ ASSERT_TRUE(annulus.fastDisjoint(box));
+ ASSERT_FALSE(annulus.fastContains(box));
+
+ // Box contains the inner circle, but intersects with the outer circle
+ box = Box(-2, -2, 7);
+ ASSERT_FALSE(annulus.fastDisjoint(box));
+ ASSERT_FALSE(annulus.fastContains(box));
//
- // Shape Intersection
+ // Annulus contains both inner and outer circles as boundaries.
//
- TEST(ShapeIntersection, Lines) {
- /*
- * E |D
- * A___B |C G
- * F
- */
- Point a(0, 0), b(1, 0), c(2, 0), d(2, 1);
- Point e(0.5, 1), f(0.5, -0.5), g(3, 0);
-
- /*
- * Basic disjoint
- * / |
- * / |
- */
- ASSERT_FALSE(linesIntersect(a, d, c, b));
- ASSERT_FALSE(linesIntersect(c, b, a, d)); // commutative
-
- /*
- * Basic disjoint (axis aligned)
- * |
- * ___ |
- */
- ASSERT_FALSE(linesIntersect(a, b, c, d));
- ASSERT_FALSE(linesIntersect(c, d, a, b)); // commutative
-
- /*
- * Basic intersection
- * \/
- * /\
- */
- ASSERT_TRUE(linesIntersect(e, c, f, d));
- ASSERT_TRUE(linesIntersect(f, d, e, c)); // commutative
-
- /*
- * Basic intersection (axis aligned)
- * _|_
- * |
- */
- ASSERT_TRUE(linesIntersect(a, b, e, f));
- ASSERT_TRUE(linesIntersect(f, e, b, a)); // commutative
-
- /*
- * One vertex on the line
- * \
- * ____ \
- */
- ASSERT_FALSE(linesIntersect(a, b, e, c));
- ASSERT_FALSE(linesIntersect(e, c, a, b)); // commutative
-
- /*
- * One vertex on the segment
- * \
- * ___\___
- */
- ASSERT_TRUE(linesIntersect(a, c, b, e));
- ASSERT_TRUE(linesIntersect(e, b, a, c)); // commutative
-
- /*
- * Two segments share one vertex
- * /
- * /____
- */
- ASSERT_TRUE(linesIntersect(a, c, a, e));
- ASSERT_TRUE(linesIntersect(a, e, a, c)); // commutative
-
- /*
- * Intersected segments on the same line
- * A___B===C---G
- */
- ASSERT_TRUE(linesIntersect(a, c, b, g));
- ASSERT_TRUE(linesIntersect(b, g, c, a)); // commutative
-
- /*
- * Disjoint segments on the same line
- * A___B C---G
- */
- ASSERT_FALSE(linesIntersect(a, b, c, g));
- ASSERT_FALSE(linesIntersect(c, g, a, b)); // commutative
-
- /*
- * Segments on the same line share one vertex.
- * /D
- * /B
- * F/
- */
- ASSERT_TRUE(linesIntersect(d, b, b, f));
- ASSERT_TRUE(linesIntersect(f, b, d, b)); // commutative
- // axis aligned
- ASSERT_TRUE(linesIntersect(a, c, g, c));
- ASSERT_TRUE(linesIntersect(c, g, a, c)); // commutative
- }
-
- TEST(ShapeIntersection, Polygons) {
- // Convex polygon (triangle)
-
- /*
- * Disjoint, bounds disjoint
- * /|
- * / | []
- * /__|
- */
- vector<Point> triangleVetices;
- triangleVetices.push_back(Point(0, 0));
- triangleVetices.push_back(Point(1, 0));
- triangleVetices.push_back(Point(1, 4));
- Polygon triangle(triangleVetices);
- Box box;
-
- box = Box(1.5, 1.5, 1);
- ASSERT_FALSE(edgesIntersectsWithBox(triangle.points(), box));
- ASSERT_FALSE(polygonIntersectsWithBox(triangle, box));
- ASSERT_FALSE(polygonContainsBox(triangle, box));
-
- /*
- * Disjoint, bounds intersect
- * [] /|
- * / |
- * /__|
- */
- box = Box(-0.5, 3.5, 1);
- ASSERT_FALSE(edgesIntersectsWithBox(triangle.points(), box));
- ASSERT_FALSE(polygonIntersectsWithBox(triangle, box));
- ASSERT_FALSE(polygonContainsBox(triangle, box));
-
- /*
- * Intersect on one polygon vertex
- * _____
- * | |
- * |_ /|_|
- * / |
- * /__|
- */
- box = Box(0, 3, 2);
- ASSERT_TRUE(edgesIntersectsWithBox(triangle.points(), box));
- ASSERT_TRUE(polygonIntersectsWithBox(triangle, box));
- ASSERT_FALSE(polygonContainsBox(triangle, box));
-
- /*
- * Box contains polygon
- * __________
- * | |
- * | /| |
- * | / | |
- * | /__| |
- * |__________|
- */
- box = Box(-1, -1, 6);
- ASSERT_FALSE(edgesIntersectsWithBox(triangle.points(), box));
- ASSERT_TRUE(polygonIntersectsWithBox(triangle, box));
- ASSERT_FALSE(polygonContainsBox(triangle, box));
-
- /*
- * Polygon contains box
- * /|
- * / |
- * / |
- * / []|
- * /____|
- */
- box = Box(0.1, 0.1, 0.2);
- ASSERT_FALSE(edgesIntersectsWithBox(triangle.points(), box));
- ASSERT_TRUE(polygonIntersectsWithBox(triangle, box));
- ASSERT_TRUE(polygonContainsBox(triangle, box));
-
- /*
- * Intersect, but no vertex is contained by the other shape.
- * ___ /|_
- * | / | |
- * | / | |
- * |_/___|_|
- * /____|
- */
- box = Box(0, 1, 2);
- ASSERT_TRUE(edgesIntersectsWithBox(triangle.points(), box));
- ASSERT_TRUE(polygonIntersectsWithBox(triangle, box));
- ASSERT_FALSE(polygonContainsBox(triangle, box));
-
- // Concave polygon
-
- /*
- * (0,4)
- * |\
- * | \(1,1)
- * | `.
- * |____`. (4,0)
- * (0,0)
- */
- vector<Point> concaveVetices;
- concaveVetices.push_back(Point(0, 0));
- concaveVetices.push_back(Point(4, 0));
- concaveVetices.push_back(Point(1, 1));
- concaveVetices.push_back(Point(0, 4));
- Polygon concave(concaveVetices);
-
- /*
- * Disjoint
- * |\
- * | \
- * | `.
- * |____`.
- * []
- */
- box = Box(1, -1, 0.9);
- ASSERT_FALSE(edgesIntersectsWithBox(concave.points(), box));
- ASSERT_FALSE(polygonIntersectsWithBox(concave, box));
- ASSERT_FALSE(polygonContainsBox(concave, box));
-
- /*
- * Disjoint, bounds intersect
- * |\
- * | \[]
- * | `.
- * |____`.
- */
- box = Box(1.1, 1.1, 0.2);
- ASSERT_FALSE(edgesIntersectsWithBox(concave.points(), box));
- ASSERT_FALSE(polygonIntersectsWithBox(concave, box));
- ASSERT_FALSE(polygonContainsBox(concave, box));
-
- /*
- * Intersect, one box vertex is contained by the polygon.
- * |\
- * |+\+ (1.5, 1.5)
- * |+-`.
- * |____`.
- */
- box = Box(0.5, 0.5, 1);
- ASSERT_TRUE(edgesIntersectsWithBox(concave.points(), box));
- ASSERT_TRUE(polygonIntersectsWithBox(concave, box));
- ASSERT_FALSE(polygonContainsBox(concave, box));
-
- /*
- * Intersect, no vertex is contained by the other shape.
- * |\
- * +| \--+
- * || `.|
- * ||____`.
- * +-----+
- */
- box = Box(-0.5, -0.5, 3);
- ASSERT_TRUE(edgesIntersectsWithBox(concave.points(), box));
- ASSERT_TRUE(polygonIntersectsWithBox(concave, box));
- ASSERT_FALSE(polygonContainsBox(concave, box));
- }
-
- TEST(ShapeIntersection, Annulus) {
- R2Annulus annulus(Point(0.0, 0.0), 1, 5);
- Box box;
-
- // Disjoint, out of outer circle
- box = Box(4, 4, 1);
- ASSERT_TRUE(annulus.fastDisjoint(box));
- ASSERT_FALSE(annulus.fastContains(box));
-
- // Box contains outer circle
- box = Box(-6, -5.5, 12);
- ASSERT_FALSE(annulus.fastDisjoint(box));
- ASSERT_FALSE(annulus.fastContains(box));
-
- // Box intersects with the outer circle, but not the inner circle
- box = Box(3, 3, 4);
- ASSERT_FALSE(annulus.fastDisjoint(box));
- ASSERT_FALSE(annulus.fastContains(box));
-
- // Box is contained by the annulus
- box = Box(2, 2, 1);
- ASSERT_FALSE(annulus.fastDisjoint(box));
- ASSERT_TRUE(annulus.fastContains(box));
-
- // Box is contained by the outer circle and intersects with the inner circle
- box = Box(0.4, 0.5, 3);
- ASSERT_FALSE(annulus.fastDisjoint(box));
- ASSERT_FALSE(annulus.fastContains(box));
-
- // Box intersects with both outer and inner circle
- box = Box(-4, -4, 4.5);
- ASSERT_FALSE(annulus.fastDisjoint(box));
- ASSERT_FALSE(annulus.fastContains(box));
-
- // Box is inside the inner circle
- box = Box(-0.1, -0.2, 0.5);
- ASSERT_TRUE(annulus.fastDisjoint(box));
- ASSERT_FALSE(annulus.fastContains(box));
-
- // Box contains the inner circle, but intersects with the outer circle
- box = Box(-2, -2, 7);
- ASSERT_FALSE(annulus.fastDisjoint(box));
- ASSERT_FALSE(annulus.fastContains(box));
-
- //
- // Annulus contains both inner and outer circles as boundaries.
- //
-
- // Box only touches the outer boundary
- box = Box(3, 4, 1); // Lower left touches boundary
- ASSERT_FALSE(annulus.fastDisjoint(box));
- ASSERT_FALSE(annulus.fastContains(box));
- box = Box(-4, -5, 1); // Upper right touches boundary
- ASSERT_FALSE(annulus.fastDisjoint(box));
- ASSERT_FALSE(annulus.fastContains(box));
-
- // Box is contained by the annulus touching the outer boundary
- box = Box(-4, -3, 0.1);
- ASSERT_FALSE(annulus.fastDisjoint(box));
- ASSERT_TRUE(annulus.fastContains(box));
-
- // Box is contained by the annulus touching the inner boundary
- box = Box(0, 1, 1);
- ASSERT_FALSE(annulus.fastDisjoint(box));
- ASSERT_TRUE(annulus.fastContains(box));
-
- // Box only touches the inner boundary at (-0.6, 0.8)
- box = Box(-0.6, 0.3, 0.5);
- ASSERT_FALSE(annulus.fastDisjoint(box));
- ASSERT_FALSE(annulus.fastContains(box));
- }
-} // namespace
+ // Box only touches the outer boundary
+ box = Box(3, 4, 1); // Lower left touches boundary
+ ASSERT_FALSE(annulus.fastDisjoint(box));
+ ASSERT_FALSE(annulus.fastContains(box));
+ box = Box(-4, -5, 1); // Upper right touches boundary
+ ASSERT_FALSE(annulus.fastDisjoint(box));
+ ASSERT_FALSE(annulus.fastContains(box));
+
+ // Box is contained by the annulus touching the outer boundary
+ box = Box(-4, -3, 0.1);
+ ASSERT_FALSE(annulus.fastDisjoint(box));
+ ASSERT_TRUE(annulus.fastContains(box));
+
+ // Box is contained by the annulus touching the inner boundary
+ box = Box(0, 1, 1);
+ ASSERT_FALSE(annulus.fastDisjoint(box));
+ ASSERT_TRUE(annulus.fastContains(box));
+
+ // Box only touches the inner boundary at (-0.6, 0.8)
+ box = Box(-0.6, 0.3, 0.5);
+ ASSERT_FALSE(annulus.fastDisjoint(box));
+ ASSERT_FALSE(annulus.fastContains(box));
+}
+
+} // namespace
diff --git a/src/mongo/db/geo/s2.h b/src/mongo/db/geo/s2.h
index 426e9f0e5b5..7a3a1c6a840 100644
--- a/src/mongo/db/geo/s2.h
+++ b/src/mongo/db/geo/s2.h
@@ -42,4 +42,3 @@
#ifdef __clang__
#pragma GCC diagnostic pop
#endif
-
diff --git a/src/mongo/db/geo/shapes.cpp b/src/mongo/db/geo/shapes.cpp
index d87cb9334ce..fa6018877bb 100644
--- a/src/mongo/db/geo/shapes.cpp
+++ b/src/mongo/db/geo/shapes.cpp
@@ -39,767 +39,753 @@ namespace mongo {
////////////// Point
- Point::Point() : x(0), y(0) { }
+Point::Point() : x(0), y(0) {}
- Point::Point(double x, double y) : x(x), y(y) { }
+Point::Point(double x, double y) : x(x), y(y) {}
- Point::Point(const BSONElement& e) {
- BSONObjIterator i(e.Obj());
- x = i.next().number();
- y = i.next().number();
- }
+Point::Point(const BSONElement& e) {
+ BSONObjIterator i(e.Obj());
+ x = i.next().number();
+ y = i.next().number();
+}
- Point::Point(const BSONObj& o) {
- BSONObjIterator i(o);
- x = i.next().number();
- y = i.next().number();
- }
+Point::Point(const BSONObj& o) {
+ BSONObjIterator i(o);
+ x = i.next().number();
+ y = i.next().number();
+}
- string Point::toString() const {
- StringBuilder buf;
- buf << "(" << x << "," << y << ")";
- return buf.str();
- }
+string Point::toString() const {
+ StringBuilder buf;
+ buf << "(" << x << "," << y << ")";
+ return buf.str();
+}
////////////// Circle
- Circle::Circle() {}
- Circle::Circle(double radius, Point center) : radius(radius), center(center) {}
+Circle::Circle() {}
+Circle::Circle(double radius, Point center) : radius(radius), center(center) {}
////////////// Box
- Box::Box() {}
+Box::Box() {}
- Box::Box(double x, double y, double size) :
- _min(x, y), _max(x + size, y + size) {
- }
+Box::Box(double x, double y, double size) : _min(x, y), _max(x + size, y + size) {}
- Box::Box(const Point& ptA, const Point& ptB) {
- init(ptA, ptB);
- }
+Box::Box(const Point& ptA, const Point& ptB) {
+ init(ptA, ptB);
+}
- void Box::init(const Point& ptA, const Point& ptB) {
- _min.x = min(ptA.x, ptB.x);
- _min.y = min(ptA.y, ptB.y);
- _max.x = max(ptA.x, ptB.x);
- _max.y = max(ptA.y, ptB.y);
- }
+void Box::init(const Point& ptA, const Point& ptB) {
+ _min.x = min(ptA.x, ptB.x);
+ _min.y = min(ptA.y, ptB.y);
+ _max.x = max(ptA.x, ptB.x);
+ _max.y = max(ptA.y, ptB.y);
+}
- void Box::init(const Box& other) {
- init(other._min, other._max);
- }
+void Box::init(const Box& other) {
+ init(other._min, other._max);
+}
- BSONArray Box::toBSON() const {
- return BSON_ARRAY(BSON_ARRAY(_min.x << _min.y) << BSON_ARRAY(_max.x << _max.y));
- }
+BSONArray Box::toBSON() const {
+ return BSON_ARRAY(BSON_ARRAY(_min.x << _min.y) << BSON_ARRAY(_max.x << _max.y));
+}
- string Box::toString() const {
- StringBuilder buf;
- buf << _min.toString() << " -->> " << _max.toString();
- return buf.str();
- }
+string Box::toString() const {
+ StringBuilder buf;
+ buf << _min.toString() << " -->> " << _max.toString();
+ return buf.str();
+}
- bool Box::between(double min, double max, double val, double fudge) const {
- return val + fudge >= min && val <= max + fudge;
- }
+bool Box::between(double min, double max, double val, double fudge) const {
+ return val + fudge >= min && val <= max + fudge;
+}
- bool Box::onBoundary(double bound, double val, double fudge) const {
- return (val >= bound - fudge && val <= bound + fudge);
- }
+bool Box::onBoundary(double bound, double val, double fudge) const {
+ return (val >= bound - fudge && val <= bound + fudge);
+}
- bool Box::mid(double amin, double amax,
- double bmin, double bmax, bool min, double* res) const {
- verify(amin <= amax);
- verify(bmin <= bmax);
+bool Box::mid(double amin, double amax, double bmin, double bmax, bool min, double* res) const {
+ verify(amin <= amax);
+ verify(bmin <= bmax);
- if (amin < bmin) {
- if (amax < bmin)
- return false;
- *res = min ? bmin : amax;
- return true;
- }
- if (amin > bmax)
+ if (amin < bmin) {
+ if (amax < bmin)
return false;
- *res = min ? amin : bmax;
+ *res = min ? bmin : amax;
return true;
}
-
- bool Box::intersects(const Box& other) const {
-
- bool intersectX = between(_min.x, _max.x, other._min.x) // contain part of other range
- || between(_min.x, _max.x, other._max.x) // contain part of other range
- || between(other._min.x, other._max.x, _min.x); // other range contains us
-
- bool intersectY = between(_min.y, _max.y, other._min.y)
- || between(_min.y, _max.y, other._max.y)
- || between(other._min.y, other._max.y, _min.y);
-
- return intersectX && intersectY;
- }
-
- double Box::legacyIntersectFraction(const Box& other) const {
-
- Point boundMin(0,0);
- Point boundMax(0,0);
-
- if (!mid(_min.x, _max.x, other._min.x, other._max.x, true, &boundMin.x) ||
- !mid(_min.x, _max.x, other._min.x, other._max.x, false, &boundMax.x) ||
- !mid(_min.y, _max.y, other._min.y, other._max.y, true, &boundMin.y) ||
- !mid(_min.y, _max.y, other._min.y, other._max.y, false, &boundMax.y))
- return 0;
-
- Box intersection(boundMin, boundMax);
- return intersection.area() / area();
- }
-
- double Box::area() const {
- return (_max.x - _min.x) * (_max.y - _min.y);
- }
-
- double Box::maxDim() const {
- return max(_max.x - _min.x, _max.y - _min.y);
- }
-
- Point Box::center() const {
- return Point((_min.x + _max.x) / 2,
- (_min.y + _max.y) / 2);
- }
-
- void Box::truncate(double min, double max) {
- if (_min.x < min) _min.x = min;
- if (_min.y < min) _min.y = min;
- if (_max.x > max) _max.x = max;
- if (_max.y > max) _max.y = max;
- }
-
- void Box::fudge(double error) {
- _min.x -= error;
- _min.y -= error;
- _max.x += error;
- _max.y += error;
- }
-
- void Box::expandToInclude(const Point& pt) {
- _min.x = min(_min.x, pt.x);
- _min.y = min(_min.y, pt.y);
- _max.x = max(_max.x, pt.x);
- _max.y = max(_max.y, pt.y);
- }
-
- bool Box::onBoundary(Point p, double fudge) const {
- return onBoundary(_min.x, p.x, fudge) ||
- onBoundary(_max.x, p.x, fudge) ||
- onBoundary(_min.y, p.y, fudge) ||
- onBoundary(_max.y, p.y, fudge);
- }
-
- bool Box::inside(Point p, double fudge) const {
- bool res = inside(p.x, p.y, fudge);
- return res;
- }
-
- bool Box::inside(double x, double y, double fudge) const {
- return between(_min.x, _max.x , x, fudge) &&
- between(_min.y, _max.y , y, fudge);
- }
-
- bool Box::contains(const Box& other, double fudge) const {
- return inside(other._min, fudge) && inside(other._max, fudge);
- }
+ if (amin > bmax)
+ return false;
+ *res = min ? amin : bmax;
+ return true;
+}
+
+bool Box::intersects(const Box& other) const {
+ bool intersectX = between(_min.x, _max.x, other._min.x) // contain part of other range
+ || between(_min.x, _max.x, other._max.x) // contain part of other range
+ || between(other._min.x, other._max.x, _min.x); // other range contains us
+
+ bool intersectY = between(_min.y, _max.y, other._min.y) ||
+ between(_min.y, _max.y, other._max.y) || between(other._min.y, other._max.y, _min.y);
+
+ return intersectX && intersectY;
+}
+
+double Box::legacyIntersectFraction(const Box& other) const {
+ Point boundMin(0, 0);
+ Point boundMax(0, 0);
+
+ if (!mid(_min.x, _max.x, other._min.x, other._max.x, true, &boundMin.x) ||
+ !mid(_min.x, _max.x, other._min.x, other._max.x, false, &boundMax.x) ||
+ !mid(_min.y, _max.y, other._min.y, other._max.y, true, &boundMin.y) ||
+ !mid(_min.y, _max.y, other._min.y, other._max.y, false, &boundMax.y))
+ return 0;
+
+ Box intersection(boundMin, boundMax);
+ return intersection.area() / area();
+}
+
+double Box::area() const {
+ return (_max.x - _min.x) * (_max.y - _min.y);
+}
+
+double Box::maxDim() const {
+ return max(_max.x - _min.x, _max.y - _min.y);
+}
+
+Point Box::center() const {
+ return Point((_min.x + _max.x) / 2, (_min.y + _max.y) / 2);
+}
+
+void Box::truncate(double min, double max) {
+ if (_min.x < min)
+ _min.x = min;
+ if (_min.y < min)
+ _min.y = min;
+ if (_max.x > max)
+ _max.x = max;
+ if (_max.y > max)
+ _max.y = max;
+}
+
+void Box::fudge(double error) {
+ _min.x -= error;
+ _min.y -= error;
+ _max.x += error;
+ _max.y += error;
+}
+
+void Box::expandToInclude(const Point& pt) {
+ _min.x = min(_min.x, pt.x);
+ _min.y = min(_min.y, pt.y);
+ _max.x = max(_max.x, pt.x);
+ _max.y = max(_max.y, pt.y);
+}
+
+bool Box::onBoundary(Point p, double fudge) const {
+ return onBoundary(_min.x, p.x, fudge) || onBoundary(_max.x, p.x, fudge) ||
+ onBoundary(_min.y, p.y, fudge) || onBoundary(_max.y, p.y, fudge);
+}
+
+bool Box::inside(Point p, double fudge) const {
+ bool res = inside(p.x, p.y, fudge);
+ return res;
+}
+
+bool Box::inside(double x, double y, double fudge) const {
+ return between(_min.x, _max.x, x, fudge) && between(_min.y, _max.y, y, fudge);
+}
+
+bool Box::contains(const Box& other, double fudge) const {
+ return inside(other._min, fudge) && inside(other._max, fudge);
+}
////////////// Polygon
- Polygon::Polygon() {
- }
-
- Polygon::Polygon(const vector<Point>& points) {
- init(points);
- }
-
- void Polygon::init(const vector<Point>& points) {
-
- _points.clear();
- _bounds.reset();
- _centroid.reset();
-
- _points.insert(_points.begin(), points.begin(), points.end());
- }
-
- void Polygon::init(const Polygon& other) {
- init(other._points);
- }
-
- int Polygon::size(void) const { return _points.size(); }
+Polygon::Polygon() {}
+
+Polygon::Polygon(const vector<Point>& points) {
+ init(points);
+}
+
+void Polygon::init(const vector<Point>& points) {
+ _points.clear();
+ _bounds.reset();
+ _centroid.reset();
+
+ _points.insert(_points.begin(), points.begin(), points.end());
+}
+
+void Polygon::init(const Polygon& other) {
+ init(other._points);
+}
+
+int Polygon::size(void) const {
+ return _points.size();
+}
+
+bool Polygon::contains(const Point& p) const {
+ return contains(p, 0) > 0;
+}
+
+/*
+ * Return values:
+ * -1 if no intersection
+ * 0 if maybe an intersection (using fudge)
+ * 1 if there is an intersection
+ *
+ * A ray casting intersection method is used.
+ */
+int Polygon::contains(const Point& p, double fudge) const {
+ Box fudgeBox(Point(p.x - fudge, p.y - fudge), Point(p.x + fudge, p.y + fudge));
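+    // fudgeBox is a box of half-width 'fudge' centered on p; when fudge > 0, an edge touching
+    // it makes the containment result indeterminate (return 0 below).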
+
+ int counter = 0;
+ Point p1 = _points[0];
+ for (int i = 1; i <= size(); i++) {
+ // XXX: why is there a mod here?
+ Point p2 = _points[i % size()];
+
+ // We need to check whether or not this segment intersects our error box
+ if (fudge > 0 &&
+ // Points not too far below box
+ fudgeBox._min.y <= std::max(p1.y, p2.y) &&
+ // Points not too far above box
+ fudgeBox._max.y >= std::min(p1.y, p2.y) &&
+ // Points not too far to left of box
+ fudgeBox._min.x <= std::max(p1.x, p2.x) &&
+ // Points not too far to right of box
+ fudgeBox._max.x >= std::min(p1.x, p2.x)) {
+ // If our box contains one or more of these points, we need to do an exact
+ // check.
+ if (fudgeBox.inside(p1)) {
+ return 0;
+ }
+ if (fudgeBox.inside(p2)) {
+ return 0;
+ }
- bool Polygon::contains(const Point& p) const { return contains(p, 0) > 0; }
+ // Do intersection check for vertical sides
+ if (p1.y != p2.y) {
+ double invSlope = (p2.x - p1.x) / (p2.y - p1.y);
- /*
- * Return values:
- * -1 if no intersection
- * 0 if maybe an intersection (using fudge)
- * 1 if there is an intersection
- *
- * A ray casting intersection method is used.
- */
- int Polygon::contains(const Point &p, double fudge) const {
- Box fudgeBox(Point(p.x - fudge, p.y - fudge), Point(p.x + fudge, p.y + fudge));
-
- int counter = 0;
- Point p1 = _points[0];
- for (int i = 1; i <= size(); i++) {
- // XXX: why is there a mod here?
- Point p2 = _points[i % size()];
-
- // We need to check whether or not this segment intersects our error box
- if (fudge > 0 &&
- // Points not too far below box
- fudgeBox._min.y <= std::max(p1.y, p2.y) &&
- // Points not too far above box
- fudgeBox._max.y >= std::min(p1.y, p2.y) &&
- // Points not too far to left of box
- fudgeBox._min.x <= std::max(p1.x, p2.x) &&
- // Points not too far to right of box
- fudgeBox._max.x >= std::min(p1.x, p2.x)) {
-
-
- // If our box contains one or more of these points, we need to do an exact
- // check.
- if (fudgeBox.inside(p1)) {
+ double xintersT = (fudgeBox._max.y - p1.y) * invSlope + p1.x;
+ if (fudgeBox._min.x <= xintersT && fudgeBox._max.x >= xintersT) {
return 0;
}
- if (fudgeBox.inside(p2)) {
+
+ double xintersB = (fudgeBox._min.y - p1.y) * invSlope + p1.x;
+ if (fudgeBox._min.x <= xintersB && fudgeBox._max.x >= xintersB) {
return 0;
}
+ }
- // Do intersection check for vertical sides
- if (p1.y != p2.y) {
- double invSlope = (p2.x - p1.x) / (p2.y - p1.y);
+ // Do intersection check for horizontal sides
+ if (p1.x != p2.x) {
+ double slope = (p2.y - p1.y) / (p2.x - p1.x);
- double xintersT = (fudgeBox._max.y - p1.y) * invSlope + p1.x;
- if (fudgeBox._min.x <= xintersT && fudgeBox._max.x >= xintersT) {
- return 0;
- }
-
- double xintersB = (fudgeBox._min.y - p1.y) * invSlope + p1.x;
- if (fudgeBox._min.x <= xintersB && fudgeBox._max.x >= xintersB) {
- return 0;
- }
+ double yintersR = (p1.x - fudgeBox._max.x) * slope + p1.y;
+ if (fudgeBox._min.y <= yintersR && fudgeBox._max.y >= yintersR) {
+ return 0;
}
- // Do intersection check for horizontal sides
- if (p1.x != p2.x) {
- double slope = (p2.y - p1.y) / (p2.x - p1.x);
-
- double yintersR = (p1.x - fudgeBox._max.x) * slope + p1.y;
- if (fudgeBox._min.y <= yintersR && fudgeBox._max.y >= yintersR) {
- return 0;
- }
-
- double yintersL = (p1.x - fudgeBox._min.x) * slope + p1.y;
- if (fudgeBox._min.y <= yintersL && fudgeBox._max.y >= yintersL) {
- return 0;
- }
- }
- } else if (fudge == 0){
- // If this is an exact vertex, we won't intersect, so check this
- if (p.y == p1.y && p.x == p1.x) return 1;
- else if (p.y == p2.y && p.x == p2.x) return 1;
-
- // If this is a horizontal line we won't intersect, so check this
- if (p1.y == p2.y && p.y == p1.y){
- // Check that the x-coord lies in the line
- if (p.x >= std::min(p1.x, p2.x) && p.x <= std::max(p1.x, p2.x))
- return 1;
+ double yintersL = (p1.x - fudgeBox._min.x) * slope + p1.y;
+ if (fudgeBox._min.y <= yintersL && fudgeBox._max.y >= yintersL) {
+ return 0;
}
}
+ } else if (fudge == 0) {
+ // If this is an exact vertex, we won't intersect, so check this
+ if (p.y == p1.y && p.x == p1.x)
+ return 1;
+ else if (p.y == p2.y && p.x == p2.x)
+ return 1;
+
+ // If this is a horizontal line we won't intersect, so check this
+ if (p1.y == p2.y && p.y == p1.y) {
+ // Check that the x-coord lies in the line
+ if (p.x >= std::min(p1.x, p2.x) && p.x <= std::max(p1.x, p2.x))
+ return 1;
+ }
+ }
- // Normal intersection test.
- // TODO: Invert these for clearer logic?
- if (p.y > std::min(p1.y, p2.y)) {
- if (p.y <= std::max(p1.y, p2.y)) {
- if (p.x <= std::max(p1.x, p2.x)) {
- if (p1.y != p2.y) {
- double xinters = (p.y-p1.y)*(p2.x-p1.x)/(p2.y-p1.y)+p1.x;
- // Special case of point on vertical line
- if (p1.x == p2.x && p.x == p1.x){
-
- // Need special case for the vertical edges, for example:
- // 1) \e pe/----->
- // vs.
- // 2) \ep---e/----->
- //
- // if we count exact as intersection, then 1 is in but 2 is out
- // if we count exact as no-int then 1 is out but 2 is in.
-
- return 1;
- } else if (p1.x == p2.x || p.x <= xinters) {
- counter++;
- }
+ // Normal intersection test.
+ // TODO: Invert these for clearer logic?
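+        // Count how many edges a ray cast from p in the +x direction crosses; the parity
+        // check after the loop decides inside (odd) vs. outside (even).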
+ if (p.y > std::min(p1.y, p2.y)) {
+ if (p.y <= std::max(p1.y, p2.y)) {
+ if (p.x <= std::max(p1.x, p2.x)) {
+ if (p1.y != p2.y) {
+ double xinters = (p.y - p1.y) * (p2.x - p1.x) / (p2.y - p1.y) + p1.x;
+ // Special case of point on vertical line
+ if (p1.x == p2.x && p.x == p1.x) {
+ // Need special case for the vertical edges, for example:
+ // 1) \e pe/----->
+ // vs.
+ // 2) \ep---e/----->
+ //
+ // if we count exact as intersection, then 1 is in but 2 is out
+ // if we count exact as no-int then 1 is out but 2 is in.
+
+ return 1;
+ } else if (p1.x == p2.x || p.x <= xinters) {
+ counter++;
}
}
}
}
-
- p1 = p2;
- }
-
- if (counter % 2 == 0) {
- return -1;
- } else {
- return 1;
- }
- }
-
- const Point& Polygon::centroid() const {
-
- if (_centroid) {
- return *_centroid;
- }
-
- _centroid.reset(new Point());
-
- double signedArea = 0.0;
- double area = 0.0; // Partial signed area
-
- /// For all vertices except last
- int i = 0;
- for (i = 0; i < size() - 1; ++i) {
- area = _points[i].x * _points[i+1].y - _points[i+1].x * _points[i].y ;
- signedArea += area;
- _centroid->x += (_points[i].x + _points[i+1].x) * area;
- _centroid->y += (_points[i].y + _points[i+1].y) * area;
}
- // Do last vertex
- area = _points[i].x * _points[0].y - _points[0].x * _points[i].y;
- _centroid->x += (_points[i].x + _points[0].x) * area;
- _centroid->y += (_points[i].y + _points[0].y) * area;
- signedArea += area;
- signedArea *= 0.5;
- _centroid->x /= (6 * signedArea);
- _centroid->y /= (6 * signedArea);
-
- return *_centroid;
+ p1 = p2;
}
- const Box& Polygon::bounds() const {
-
- if (_bounds) {
- return *_bounds;
- }
-
- _bounds.reset(new Box(_points[0], _points[0]));
-
- for (int i = 1; i < size(); i++) {
- _bounds->expandToInclude(_points[i]);
- }
-
- return *_bounds;
+ if (counter % 2 == 0) {
+ return -1;
+ } else {
+ return 1;
}
+}
- R2Annulus::R2Annulus() :
- _inner(0.0), _outer(0.0) {
+const Point& Polygon::centroid() const {
+ if (_centroid) {
+ return *_centroid;
}
- R2Annulus::R2Annulus(const Point& center, double inner, double outer) :
- _center(center), _inner(inner), _outer(outer) {
- }
+ _centroid.reset(new Point());
- const Point& R2Annulus::center() const {
- return _center;
- }
-
- double R2Annulus::getInner() const {
- return _inner;
- }
+ double signedArea = 0.0;
+ double area = 0.0; // Partial signed area
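+    // Shoelace-based centroid: accumulate cross terms x_i*y_{i+1} - x_{i+1}*y_i and the
+    // correspondingly weighted vertex sums, then divide by 6 * signed area below.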
- double R2Annulus::getOuter() const {
- return _outer;
+ /// For all vertices except last
+ int i = 0;
+ for (i = 0; i < size() - 1; ++i) {
+ area = _points[i].x * _points[i + 1].y - _points[i + 1].x * _points[i].y;
+ signedArea += area;
+ _centroid->x += (_points[i].x + _points[i + 1].x) * area;
+ _centroid->y += (_points[i].y + _points[i + 1].y) * area;
}
- bool R2Annulus::contains(const Point& point) const {
+ // Do last vertex
+ area = _points[i].x * _points[0].y - _points[0].x * _points[i].y;
+ _centroid->x += (_points[i].x + _points[0].x) * area;
+ _centroid->y += (_points[i].y + _points[0].y) * area;
+ signedArea += area;
+ signedArea *= 0.5;
+ _centroid->x /= (6 * signedArea);
+ _centroid->y /= (6 * signedArea);
- // See if we're inside the inner radius
- if (distanceCompare(point, _center, _inner) < 0) {
- return false;
- }
+ return *_centroid;
+}
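
Editorial aside, not part of the patch: centroid() above uses the standard signed-area (shoelace) centroid formula, cx = (1 / (6A)) * sum((x_i + x_{i+1}) * (x_i * y_{i+1} - x_{i+1} * y_i)), where A is the signed polygon area. A small standalone check against a unit square, whose centroid is (0.5, 0.5):

#include <cstdio>
#include <vector>

struct Pt {
    double x, y;
};

// Signed-area (shoelace) centroid of a simple polygon.
static Pt shoelaceCentroid(const std::vector<Pt>& pts) {
    double signedArea = 0.0, cx = 0.0, cy = 0.0;
    const size_t n = pts.size();
    for (size_t i = 0; i < n; ++i) {
        const Pt& a = pts[i];
        const Pt& b = pts[(i + 1) % n];
        const double cross = a.x * b.y - b.x * a.y;  // Per-edge signed area term.
        signedArea += cross;
        cx += (a.x + b.x) * cross;
        cy += (a.y + b.y) * cross;
    }
    signedArea *= 0.5;
    return {cx / (6 * signedArea), cy / (6 * signedArea)};
}

int main() {
    Pt c = shoelaceCentroid({{0, 0}, {1, 0}, {1, 1}, {0, 1}});
    std::printf("%.2f %.2f\n", c.x, c.y);  // Prints: 0.50 0.50
    return 0;
}
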
- // See if we're outside the outer radius
- if (distanceCompare(point, _center, _outer) > 0) {
- return false;
- }
-
- return true;
+const Box& Polygon::bounds() const {
+ if (_bounds) {
+ return *_bounds;
}
- Box R2Annulus::getR2Bounds() const {
- return Box(_center.x - _outer, _center.y - _outer, 2 * _outer); // Box(_min.x, _min.y, edgeLength)
- }
+ _bounds.reset(new Box(_points[0], _points[0]));
- bool R2Annulus::fastContains(const Box& other) const {
- return circleContainsBox(Circle(_outer, _center), other)
- && !circleInteriorIntersectsWithBox(Circle(_inner, _center), other);
+ for (int i = 1; i < size(); i++) {
+ _bounds->expandToInclude(_points[i]);
}
- bool R2Annulus::fastDisjoint(const Box& other) const {
- return !circleIntersectsWithBox(Circle(_outer, _center), other)
- || circleInteriorContainsBox(Circle(_inner, _center), other);
- }
+ return *_bounds;
+}
- string R2Annulus::toString() const {
- return str::stream() << "center: " << _center.toString() << " inner: " << _inner
- << " outer: " << _outer;
- }
+R2Annulus::R2Annulus() : _inner(0.0), _outer(0.0) {}
- /////// Other methods
+R2Annulus::R2Annulus(const Point& center, double inner, double outer)
+ : _center(center), _inner(inner), _outer(outer) {}
- double S2Distance::distanceRad(const S2Point& pointA, const S2Point& pointB) {
- S1Angle angle(pointA, pointB);
- return angle.radians();
- }
+const Point& R2Annulus::center() const {
+ return _center;
+}
- double S2Distance::minDistanceRad(const S2Point& point, const S2Polyline& line) {
- int tmp;
- S1Angle angle(point, line.Project(point, &tmp));
- return angle.radians();
- }
+double R2Annulus::getInner() const {
+ return _inner;
+}
- double S2Distance::minDistanceRad(const S2Point& point, const S2Polygon& polygon) {
- S1Angle angle(point, polygon.Project(point));
- return angle.radians();
- }
+double R2Annulus::getOuter() const {
+ return _outer;
+}
- double S2Distance::minDistanceRad(const S2Point& point, const S2Cap& cap) {
- S1Angle angleToCenter(point, cap.axis());
- return (angleToCenter - cap.angle()).radians();
+bool R2Annulus::contains(const Point& point) const {
+ // See if we're inside the inner radius
+ if (distanceCompare(point, _center, _inner) < 0) {
+ return false;
}
- /**
- * Distance method that compares x or y coords when other direction is zero,
- * avoids numerical error when distances are very close to radius but axis-aligned.
- *
- * An example of the problem is:
- * (52.0 - 51.9999) - 0.0001 = 3.31965e-15 and 52.0 - 51.9999 > 0.0001 in double arithmetic
- * but:
- * 51.9999 + 0.0001 <= 52.0
- *
- * This avoids some (but not all!) suprising results in $center queries where points are
- * (radius + center.x, center.y) or vice-versa.
- */
- bool distanceWithin(const Point &p1, const Point &p2, double radius) {
- return distanceCompare(p1, p2, radius) <= 0.0;
+ // See if we're outside the outer radius
+ if (distanceCompare(point, _center, _outer) > 0) {
+ return false;
}
- // Compare the distance between p1 and p2 with the radius.
- // Float-number comparison might be inaccurate.
- //
- // > 0: distance is greater than radius
- // = 0: distance equals radius
- // < 0: distance is less than radius
- double distanceCompare(const Point &p1, const Point &p2, double radius) {
- double a = p2.x - p1.x;
- double b = p2.y - p1.y;
-
- if (a == 0) {
- //
- // Note: For some, unknown reason, when a 32-bit g++ optimizes this call, the sum is
- // calculated imprecisely. We need to force the compiler to always evaluate it
- // correctly, hence the weirdness.
- //
- // On some 32-bit linux machines, removing the volatile keyword or calculating the sum
- // inline will make certain geo tests fail. Of course this check will force volatile
- // for all 32-bit systems, not just affected systems.
- if (sizeof(void*) <= 4){
- volatile double sum = p2.y > p1.y ? p1.y + radius : p2.y + radius;
- return p2.y > p1.y ? p2.y - sum : p1.y - sum;
- } else {
- // Original math, correct for most systems
- return p2.y > p1.y ? p2.y - (p1.y + radius) : p1.y - (p2.y + radius);
- }
- }
+ return true;
+}
- if (b == 0) {
- if (sizeof(void*) <= 4){
- volatile double sum = p2.x > p1.x ? p1.x + radius : p2.x + radius;
- return p2.x > p1.x ? p2.x - sum : p1.x - sum;
- } else {
- return p2.x > p1.x ? p2.x - (p1.x + radius) : p1.x - (p2.x + radius);
- }
- }
+Box R2Annulus::getR2Bounds() const {
+ return Box(
+ _center.x - _outer, _center.y - _outer, 2 * _outer); // Box(_min.x, _min.y, edgeLength)
+}
- return sqrt((a * a) + (b * b)) - radius;
- }
+bool R2Annulus::fastContains(const Box& other) const {
+ return circleContainsBox(Circle(_outer, _center), other) &&
+ !circleInteriorIntersectsWithBox(Circle(_inner, _center), other);
+}
- // note: multiply by earth radius for distance
- double spheredist_rad(const Point& p1, const Point& p2) {
- // this uses the n-vector formula: http://en.wikipedia.org/wiki/N-vector
- // If you try to match the code to the formula, note that I inline the cross-product.
-
- double sinx1(sin(p1.x)), cosx1(cos(p1.x));
- double siny1(sin(p1.y)), cosy1(cos(p1.y));
- double sinx2(sin(p2.x)), cosx2(cos(p2.x));
- double siny2(sin(p2.y)), cosy2(cos(p2.y));
-
- double cross_prod =
- (cosy1*cosx1 * cosy2*cosx2) +
- (cosy1*sinx1 * cosy2*sinx2) +
- (siny1 * siny2);
-
- if (cross_prod >= 1 || cross_prod <= -1) {
- // fun with floats
- verify(fabs(cross_prod)-1 < 1e-6);
- return cross_prod > 0 ? 0 : M_PI;
- }
+bool R2Annulus::fastDisjoint(const Box& other) const {
+ return !circleIntersectsWithBox(Circle(_outer, _center), other) ||
+ circleInteriorContainsBox(Circle(_inner, _center), other);
+}
- return acos(cross_prod);
- }
+string R2Annulus::toString() const {
+ return str::stream() << "center: " << _center.toString() << " inner: " << _inner
+ << " outer: " << _outer;
+}
- // @param p1 A point on the sphere where x and y are degrees.
- // @param p2 A point on the sphere where x and y are degrees.
- // @return The distance between the two points in RADIANS. Multiply by radius to get arc
- // length.
- double spheredist_deg(const Point& p1, const Point& p2) {
- return spheredist_rad(Point(deg2rad(p1.x), deg2rad(p1.y)),
- Point(deg2rad(p2.x), deg2rad(p2.y)));
- }
+/////// Other methods
- // Technically lat/long bounds, not really tied to earth radius.
- bool isValidLngLat(double lng, double lat) {
- return abs(lng) <= 180 && abs(lat) <= 90;
- }
+double S2Distance::distanceRad(const S2Point& pointA, const S2Point& pointB) {
+ S1Angle angle(pointA, pointB);
+ return angle.radians();
+}
- double distance(const Point& p1, const Point &p2) {
- double a = p1.x - p2.x;
- double b = p1.y - p2.y;
+double S2Distance::minDistanceRad(const S2Point& point, const S2Polyline& line) {
+ int tmp;
+ S1Angle angle(point, line.Project(point, &tmp));
+ return angle.radians();
+}
- // Avoid numerical error if possible...
- if (a == 0) return abs(b);
- if (b == 0) return abs(a);
+double S2Distance::minDistanceRad(const S2Point& point, const S2Polygon& polygon) {
+ S1Angle angle(point, polygon.Project(point));
+ return angle.radians();
+}
- return sqrt((a * a) + (b * b));
- }
+double S2Distance::minDistanceRad(const S2Point& point, const S2Cap& cap) {
+ S1Angle angleToCenter(point, cap.axis());
+ return (angleToCenter - cap.angle()).radians();
+}
- static inline Vector2_d toVector2(const Point& p) {
- return Vector2_d(p.x, p.y);
- }
-
- // Given a segment (A, B) and a segment (C, D), check whether they intersect.
- bool linesIntersect(const Point& pA, const Point& pB, const Point& pC, const Point& pD) {
- Vector2_d a = toVector2(pA);
- Vector2_d b = toVector2(pB);
- Vector2_d c = toVector2(pC);
- Vector2_d d = toVector2(pD);
-
- // The normal of line AB
- Vector2_d normalAB = (b - a).Ortho();
-
- // Dot products of AC and the normal of AB
- // = 0 : C is on the line AB
- // > 0 : C is on one side
- // < 0 : C is on the other side
- double dotProdNormalAB_AC = normalAB.DotProd(c - a);
- double dotProdNormalAB_AD = normalAB.DotProd(d - a);
-
- // C and D can not on the same side of line AB
- if (dotProdNormalAB_AC * dotProdNormalAB_AD > 0) return false;
-
- // AB and CD are on the same line
- if (dotProdNormalAB_AC == 0 && dotProdNormalAB_AD == 0) {
- // Test if C or D is on segment AB.
- return (c - a).DotProd(c - b) <= 0 || (d - a).DotProd(d - b) <= 0;
- }
-
- // Check if A and B are on different sides of line CD.
- Vector2_d normalCD = (d - c).Ortho();
- double dotProdNormalCD_CA = normalCD.DotProd(a - c);
- double dotProdNormalCD_CB = normalCD.DotProd(b - c);
- return dotProdNormalCD_CA * dotProdNormalCD_CB <= 0; // Perhaps A or B is on line CD
- }
-
- static bool circleContainsBoxInternal(const Circle& circle,
- const Box& box,
- bool includeCircleBoundary) {
-
- // NOTE: a circle of zero radius is a point, and there are NO points contained inside a
- // zero-radius circle, not even the point itself.
-
- const Point& a = box._min;
- const Point& b = box._max;
- double compareLL = distanceCompare( circle.center, a, circle.radius ); // Lower left
- double compareUR = distanceCompare( circle.center, b, circle.radius ); // Upper right
- // Upper Left
- double compareUL = distanceCompare( circle.center, Point( a.x, b.y ), circle.radius );
- // Lower right
- double compareLR = distanceCompare( circle.center, Point( b.x, a.y ), circle.radius );
- if ( includeCircleBoundary ) {
- return compareLL <= 0 && compareUR <= 0 && compareUL <= 0 && compareLR <= 0;
- }
- else {
- return compareLL < 0 && compareUR < 0 && compareUL < 0 && compareLR < 0;
+/**
+ * Distance method that compares x or y coords when other direction is zero,
+ * avoids numerical error when distances are very close to radius but axis-aligned.
+ *
+ * An example of the problem is:
+ * (52.0 - 51.9999) - 0.0001 = 3.31965e-15 and 52.0 - 51.9999 > 0.0001 in double arithmetic
+ * but:
+ * 51.9999 + 0.0001 <= 52.0
+ *
+ * This avoids some (but not all!) surprising results in $center queries where points are
+ * (radius + center.x, center.y) or vice versa.
+ */
+bool distanceWithin(const Point& p1, const Point& p2, double radius) {
+ return distanceCompare(p1, p2, radius) <= 0.0;
+}
+
+// Compare the distance between p1 and p2 with the radius.
+// Float-number comparison might be inaccurate.
+//
+// > 0: distance is greater than radius
+// = 0: distance equals radius
+// < 0: distance is less than radius
+double distanceCompare(const Point& p1, const Point& p2, double radius) {
+ double a = p2.x - p1.x;
+ double b = p2.y - p1.y;
+
+ if (a == 0) {
+ //
+        // Note: For some unknown reason, when a 32-bit g++ optimizes this call, the sum is
+        // calculated imprecisely. We need to force the compiler to always evaluate it
+        // correctly, hence the weirdness.
+        //
+        // On some 32-bit Linux machines, removing the volatile keyword or calculating the sum
+        // inline will make certain geo tests fail. Of course this check forces volatile
+        // for all 32-bit systems, not just the affected ones.
+ if (sizeof(void*) <= 4) {
+ volatile double sum = p2.y > p1.y ? p1.y + radius : p2.y + radius;
+ return p2.y > p1.y ? p2.y - sum : p1.y - sum;
+ } else {
+ // Original math, correct for most systems
+ return p2.y > p1.y ? p2.y - (p1.y + radius) : p1.y - (p2.y + radius);
}
}
- bool circleContainsBox(const Circle& circle, const Box& box) {
- return circleContainsBoxInternal(circle, box, true);
- }
-
- bool circleInteriorContainsBox(const Circle& circle, const Box& box) {
- return circleContainsBoxInternal(circle, box, false);
- }
-
- // Check the intersection by measuring the distance between circle center and box center.
- static bool circleIntersectsWithBoxInternal(const Circle& circle,
- const Box& box,
- bool includeCircleBoundary) {
-
- // NOTE: a circle of zero radius is a point, and there are NO points to intersect inside a
- // zero-radius circle, not even the point itself.
- if (circle.radius == 0.0 && !includeCircleBoundary)
- return false;
-
- /* Collapses the four quadrants down into one.
- * ________
- * r|___B___ \ <- a quarter round corner here. Let's name it "D".
- * | | |
- * h| | |
- * | A |C|
- * |_______|_|
- * w r
- */
-
- Point boxCenter = box.center();
- double dx = abs(circle.center.x - boxCenter.x);
- double dy = abs(circle.center.y - boxCenter.y);
- double w = (box._max.x - box._min.x) / 2;
- double h = (box._max.y - box._min.y) / 2;
- const double& r = circle.radius;
-
- // Check if circle.center is in A, B or C.
- // The circle center could be above the box (B) or right to the box (C), but close enough.
- if (includeCircleBoundary) {
- if ((dx <= w + r && dy <= h) || (dx <= w && dy <= h + r)) return true;
+ if (b == 0) {
+ if (sizeof(void*) <= 4) {
+ volatile double sum = p2.x > p1.x ? p1.x + radius : p2.x + radius;
+ return p2.x > p1.x ? p2.x - sum : p1.x - sum;
} else {
- if ((dx < w + r && dy < h) || (dx < w && dy < h + r)) return true;
+ return p2.x > p1.x ? p2.x - (p1.x + radius) : p1.x - (p2.x + radius);
}
-
- // Now check if circle.center is in the round corner "D".
- double compareResult = distanceCompare(Point(dx, dy), Point(w, h), r);
- return compareResult < 0 || (compareResult == 0 && includeCircleBoundary);
}
- bool circleIntersectsWithBox(const Circle& circle, const Box& box) {
- return circleIntersectsWithBoxInternal(circle, box, true);
- }
+ return sqrt((a * a) + (b * b)) - radius;
+}
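
Editorial aside, not part of the patch: a tiny standalone illustration of the rounding issue that the axis-aligned branches above (and the doc comment on distanceWithin) guard against — subtracting the coordinate and then the radius leaves a stray positive residue, while comparing against the sum does not:

#include <cstdio>

int main() {
    const double center = 52.0, point = 51.9999, radius = 0.0001;
    const double subtractTwice = (center - point) - radius;  // Tiny positive residue (~3.3e-15).
    const double viaSum = center - (point + radius);         // Not positive, per the comment above.
    std::printf("%.3g %.3g\n", subtractTwice, viaSum);
    return 0;
}
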
+
+// note: multiply by earth radius for distance
+double spheredist_rad(const Point& p1, const Point& p2) {
+ // this uses the n-vector formula: http://en.wikipedia.org/wiki/N-vector
+ // If you try to match the code to the formula, note that I inline the cross-product.
+
+ double sinx1(sin(p1.x)), cosx1(cos(p1.x));
+ double siny1(sin(p1.y)), cosy1(cos(p1.y));
+ double sinx2(sin(p2.x)), cosx2(cos(p2.x));
+ double siny2(sin(p2.y)), cosy2(cos(p2.y));
+
+ double cross_prod =
+ (cosy1 * cosx1 * cosy2 * cosx2) + (cosy1 * sinx1 * cosy2 * sinx2) + (siny1 * siny2);
+
+ if (cross_prod >= 1 || cross_prod <= -1) {
+        // Guard against floating-point error nudging the dot product just outside [-1, 1].
+ verify(fabs(cross_prod) - 1 < 1e-6);
+ return cross_prod > 0 ? 0 : M_PI;
+ }
+
+ return acos(cross_prod);
+}
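
Editorial aside, not part of the patch: a usage sketch for the helpers above, assuming the declarations from shapes.h are in scope. kEarthRadiusKm is an illustrative value chosen here, not a constant defined in this file.

const double kEarthRadiusKm = 6371.0;  // Assumed mean radius, for illustration only.

// (lng 0, lat 0) to (lng 90, lat 0) spans a quarter of a great circle.
double rad = spheredist_deg(Point(0, 0), Point(90, 0));  // ~1.5708 (pi / 2) radians
double arcKm = rad * kEarthRadiusKm;                     // ~10007 km of arc length
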
+
+// @param p1 A point on the sphere where x and y are degrees.
+// @param p2 A point on the sphere where x and y are degrees.
+// @return The distance between the two points in RADIANS. Multiply by radius to get arc
+// length.
+double spheredist_deg(const Point& p1, const Point& p2) {
+ return spheredist_rad(Point(deg2rad(p1.x), deg2rad(p1.y)), Point(deg2rad(p2.x), deg2rad(p2.y)));
+}
+
+// Technically lat/long bounds, not really tied to earth radius.
+bool isValidLngLat(double lng, double lat) {
+ return abs(lng) <= 180 && abs(lat) <= 90;
+}
+
+double distance(const Point& p1, const Point& p2) {
+ double a = p1.x - p2.x;
+ double b = p1.y - p2.y;
+
+ // Avoid numerical error if possible...
+ if (a == 0)
+ return abs(b);
+ if (b == 0)
+ return abs(a);
+
+ return sqrt((a * a) + (b * b));
+}
+
+static inline Vector2_d toVector2(const Point& p) {
+ return Vector2_d(p.x, p.y);
+}
+
+// Given a segment (A, B) and a segment (C, D), check whether they intersect.
+bool linesIntersect(const Point& pA, const Point& pB, const Point& pC, const Point& pD) {
+ Vector2_d a = toVector2(pA);
+ Vector2_d b = toVector2(pB);
+ Vector2_d c = toVector2(pC);
+ Vector2_d d = toVector2(pD);
+
+ // The normal of line AB
+ Vector2_d normalAB = (b - a).Ortho();
+
+ // Dot products of AC and the normal of AB
+ // = 0 : C is on the line AB
+ // > 0 : C is on one side
+ // < 0 : C is on the other side
+ double dotProdNormalAB_AC = normalAB.DotProd(c - a);
+ double dotProdNormalAB_AD = normalAB.DotProd(d - a);
+
+    // C and D cannot both be strictly on the same side of line AB.
+ if (dotProdNormalAB_AC * dotProdNormalAB_AD > 0)
+ return false;
+
+ // AB and CD are on the same line
+ if (dotProdNormalAB_AC == 0 && dotProdNormalAB_AD == 0) {
+ // Test if C or D is on segment AB.
+ return (c - a).DotProd(c - b) <= 0 || (d - a).DotProd(d - b) <= 0;
+ }
+
+ // Check if A and B are on different sides of line CD.
+ Vector2_d normalCD = (d - c).Ortho();
+ double dotProdNormalCD_CA = normalCD.DotProd(a - c);
+ double dotProdNormalCD_CB = normalCD.DotProd(b - c);
+ return dotProdNormalCD_CA * dotProdNormalCD_CB <= 0; // Perhaps A or B is on line CD
+}
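
Editorial aside, not part of the patch: a usage sketch of linesIntersect() above, using mongo::Point as declared in shapes.h. Crossing segments report true, disjoint parallel segments report false, and a shared endpoint counts as an intersection because the side tests use <=.

bool crossing = linesIntersect(Point(0, 0), Point(2, 2), Point(0, 2), Point(2, 0));  // true
bool parallel = linesIntersect(Point(0, 0), Point(2, 0), Point(0, 1), Point(2, 1));  // false
bool touching = linesIntersect(Point(0, 0), Point(1, 1), Point(1, 1), Point(2, 0));  // true
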
+
+static bool circleContainsBoxInternal(const Circle& circle,
+ const Box& box,
+ bool includeCircleBoundary) {
+ // NOTE: a circle of zero radius is a point, and there are NO points contained inside a
+ // zero-radius circle, not even the point itself.
+
+ const Point& a = box._min;
+ const Point& b = box._max;
+ double compareLL = distanceCompare(circle.center, a, circle.radius); // Lower left
+ double compareUR = distanceCompare(circle.center, b, circle.radius); // Upper right
+ // Upper Left
+ double compareUL = distanceCompare(circle.center, Point(a.x, b.y), circle.radius);
+ // Lower right
+ double compareLR = distanceCompare(circle.center, Point(b.x, a.y), circle.radius);
+ if (includeCircleBoundary) {
+ return compareLL <= 0 && compareUR <= 0 && compareUL <= 0 && compareLR <= 0;
+ } else {
+ return compareLL < 0 && compareUR < 0 && compareUL < 0 && compareLR < 0;
+ }
+}
+
+bool circleContainsBox(const Circle& circle, const Box& box) {
+ return circleContainsBoxInternal(circle, box, true);
+}
+
+bool circleInteriorContainsBox(const Circle& circle, const Box& box) {
+ return circleContainsBoxInternal(circle, box, false);
+}
+
+// Check the intersection by measuring the distance between circle center and box center.
+static bool circleIntersectsWithBoxInternal(const Circle& circle,
+ const Box& box,
+ bool includeCircleBoundary) {
+ // NOTE: a circle of zero radius is a point, and there are NO points to intersect inside a
+ // zero-radius circle, not even the point itself.
+ if (circle.radius == 0.0 && !includeCircleBoundary)
+ return false;
+
+ /* Collapses the four quadrants down into one.
+ * ________
+ * r|___B___ \ <- a quarter round corner here. Let's name it "D".
+ * | | |
+ * h| | |
+ * | A |C|
+ * |_______|_|
+ * w r
+ */
- bool circleInteriorIntersectsWithBox(const Circle& circle, const Box& box) {
- return circleIntersectsWithBoxInternal(circle, box, false);
+ Point boxCenter = box.center();
+ double dx = abs(circle.center.x - boxCenter.x);
+ double dy = abs(circle.center.y - boxCenter.y);
+ double w = (box._max.x - box._min.x) / 2;
+ double h = (box._max.y - box._min.y) / 2;
+ const double& r = circle.radius;
+
+ // Check if circle.center is in A, B or C.
+    // The circle center could be above the box (B) or to its right (C), but close enough.
+ if (includeCircleBoundary) {
+ if ((dx <= w + r && dy <= h) || (dx <= w && dy <= h + r))
+ return true;
+ } else {
+ if ((dx < w + r && dy < h) || (dx < w && dy < h + r))
+ return true;
}
- bool lineIntersectsWithBox(const Point& a, const Point& b, const Box& box) {
- Point upperLeft(box._min.x, box._max.y);
- Point lowerRight(box._max.x, box._min.y);
+ // Now check if circle.center is in the round corner "D".
+ double compareResult = distanceCompare(Point(dx, dy), Point(w, h), r);
+ return compareResult < 0 || (compareResult == 0 && includeCircleBoundary);
+}
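
Editorial aside, not part of the patch: a usage sketch of the public wrappers defined just below, using the Circle and Box constructors declared in shapes.h. A unit circle at the origin overlaps a box that straddles x = 1 but misses a box starting at x = 2.

Circle unit(1.0, Point(0, 0));
bool overlaps = circleIntersectsWithBox(unit, Box(Point(0.5, -0.5), Point(1.5, 0.5)));  // true
bool disjoint = circleIntersectsWithBox(unit, Box(Point(2, 0), Point(3, 1)));           // false
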
- return linesIntersect(a, b, upperLeft, box._min)
- || linesIntersect(a, b, box._min, lowerRight)
- || linesIntersect(a, b, lowerRight, box._max)
- || linesIntersect(a, b, box._max, upperLeft);
- }
+bool circleIntersectsWithBox(const Circle& circle, const Box& box) {
+ return circleIntersectsWithBoxInternal(circle, box, true);
+}
- // Doc: The last point specified is always implicitly connected to the first.
- // [[ 0 , 0 ], [ 3 , 6 ], [ 6 , 0 ]]
- bool edgesIntersectsWithBox(const vector<Point>& vertices, const Box& box) {
- for (size_t i = 0; i < vertices.size() - 1; i++) {
- if (lineIntersectsWithBox(vertices[i], vertices[i+1], box)) return true;
- }
- // The last point and first point.
- return lineIntersectsWithBox(vertices[vertices.size() - 1], vertices[0], box);
- }
+bool circleInteriorIntersectsWithBox(const Circle& circle, const Box& box) {
+ return circleIntersectsWithBoxInternal(circle, box, false);
+}
- bool polygonContainsBox(const Polygon& polygon, const Box& box) {
- // All vertices of box have to be inside the polygon.
- if (!polygon.contains(box._min)
- || !polygon.contains(box._max)
- || !polygon.contains(Point(box._min.x, box._max.y))
- || !polygon.contains(Point(box._max.x, box._min.y)))
- return false;
+bool lineIntersectsWithBox(const Point& a, const Point& b, const Box& box) {
+ Point upperLeft(box._min.x, box._max.y);
+ Point lowerRight(box._max.x, box._min.y);
- // No intersection between the polygon edges and the box.
- return !edgesIntersectsWithBox(polygon.points(), box);
- }
+ return linesIntersect(a, b, upperLeft, box._min) ||
+ linesIntersect(a, b, box._min, lowerRight) || linesIntersect(a, b, lowerRight, box._max) ||
+ linesIntersect(a, b, box._max, upperLeft);
+}
- bool polygonIntersectsWithBox(const Polygon& polygon, const Box& box) {
- // 1. Polygon contains the box.
- // Check the relaxed condition that whether the polygon include any vertex of the box.
- if (polygon.contains(box._min)
- || polygon.contains(box._max)
- || polygon.contains(Point(box._min.x, box._max.y))
- || polygon.contains(Point(box._max.x, box._min.y)))
+// Doc: The last point specified is always implicitly connected to the first.
+// [[ 0 , 0 ], [ 3 , 6 ], [ 6 , 0 ]]
+bool edgesIntersectsWithBox(const vector<Point>& vertices, const Box& box) {
+ for (size_t i = 0; i < vertices.size() - 1; i++) {
+ if (lineIntersectsWithBox(vertices[i], vertices[i + 1], box))
return true;
-
- // 2. Box contains polygon.
- // Check the relaxed condition that whether the box include any vertex of the polygon.
- for (vector<Point>::const_iterator it = polygon.points().begin();
- it != polygon.points().end(); it++) {
- if (box.inside(*it)) return true;
- }
-
- // 3. Otherwise they intersect on a portion of both shapes.
- // Edges intersects
- return edgesIntersectsWithBox(polygon.points(), box);
}
+ // The last point and first point.
+ return lineIntersectsWithBox(vertices[vertices.size() - 1], vertices[0], box);
+}
+
+bool polygonContainsBox(const Polygon& polygon, const Box& box) {
+ // All vertices of box have to be inside the polygon.
+ if (!polygon.contains(box._min) || !polygon.contains(box._max) ||
+ !polygon.contains(Point(box._min.x, box._max.y)) ||
+ !polygon.contains(Point(box._max.x, box._min.y)))
+ return false;
+
+ // No intersection between the polygon edges and the box.
+ return !edgesIntersectsWithBox(polygon.points(), box);
+}
+
+bool polygonIntersectsWithBox(const Polygon& polygon, const Box& box) {
+ // 1. Polygon contains the box.
+    // Check the relaxed condition of whether the polygon contains any vertex of the box.
+ if (polygon.contains(box._min) || polygon.contains(box._max) ||
+ polygon.contains(Point(box._min.x, box._max.y)) ||
+ polygon.contains(Point(box._max.x, box._min.y)))
+ return true;
- bool ShapeProjection::supportsProject(const PointWithCRS& point, const CRS crs) {
-
- // Can always trivially project or project from SPHERE->FLAT
- if (point.crs == crs || point.crs == SPHERE)
+ // 2. Box contains polygon.
+    // Check the relaxed condition of whether the box contains any vertex of the polygon.
+ for (vector<Point>::const_iterator it = polygon.points().begin(); it != polygon.points().end();
+ it++) {
+ if (box.inside(*it))
return true;
-
- invariant(point.crs == FLAT);
- // If crs is FLAT, we might be able to upgrade the point to SPHERE if it's a valid SPHERE
- // point (lng/lat in bounds). In this case, we can use FLAT data with SPHERE predicates.
- return isValidLngLat(point.oldPoint.x, point.oldPoint.y);
}
- bool ShapeProjection::supportsProject(const PolygonWithCRS& polygon, const CRS crs) {
- return polygon.crs == crs
- || (polygon.crs == STRICT_SPHERE && crs == SPHERE);
- }
+ // 3. Otherwise they intersect on a portion of both shapes.
+    // The edges intersect.
+ return edgesIntersectsWithBox(polygon.points(), box);
+}
- void ShapeProjection::projectInto(PointWithCRS* point, CRS crs) {
- dassert(supportsProject(*point, crs));
-
- if (point->crs == crs)
- return;
+bool ShapeProjection::supportsProject(const PointWithCRS& point, const CRS crs) {
+ // Can always trivially project or project from SPHERE->FLAT
+ if (point.crs == crs || point.crs == SPHERE)
+ return true;
- if (FLAT == point->crs) {
- // Prohibit projection to STRICT_SPHERE CRS
- invariant(SPHERE == crs);
+ invariant(point.crs == FLAT);
+ // If crs is FLAT, we might be able to upgrade the point to SPHERE if it's a valid SPHERE
+ // point (lng/lat in bounds). In this case, we can use FLAT data with SPHERE predicates.
+ return isValidLngLat(point.oldPoint.x, point.oldPoint.y);
+}
- // Note that it's (lat, lng) for S2 but (lng, lat) for MongoDB.
- S2LatLng latLng =
- S2LatLng::FromDegrees(point->oldPoint.y, point->oldPoint.x).Normalized();
- dassert(latLng.is_valid());
- point->point = latLng.ToPoint();
- point->cell = S2Cell(point->point);
- point->crs = SPHERE;
- return;
- }
+bool ShapeProjection::supportsProject(const PolygonWithCRS& polygon, const CRS crs) {
+ return polygon.crs == crs || (polygon.crs == STRICT_SPHERE && crs == SPHERE);
+}
- // Prohibit projection to STRICT_SPHERE CRS
- invariant(SPHERE == point->crs && FLAT == crs);
- // Just remove the additional spherical information
- point->point = S2Point();
- point->cell = S2Cell();
- point->crs = FLAT;
- }
+void ShapeProjection::projectInto(PointWithCRS* point, CRS crs) {
+ dassert(supportsProject(*point, crs));
- void ShapeProjection::projectInto(PolygonWithCRS* polygon, CRS crs) {
- if (polygon->crs == crs) return;
+ if (point->crs == crs)
+ return;
- // Only project from STRICT_SPHERE to SPHERE
- invariant(STRICT_SPHERE == polygon->crs && SPHERE == crs);
- polygon->crs = SPHERE;
- }
+ if (FLAT == point->crs) {
+ // Prohibit projection to STRICT_SPHERE CRS
+ invariant(SPHERE == crs);
+
+ // Note that it's (lat, lng) for S2 but (lng, lat) for MongoDB.
+ S2LatLng latLng = S2LatLng::FromDegrees(point->oldPoint.y, point->oldPoint.x).Normalized();
+ dassert(latLng.is_valid());
+ point->point = latLng.ToPoint();
+ point->cell = S2Cell(point->point);
+ point->crs = SPHERE;
+ return;
+ }
+
+ // Prohibit projection to STRICT_SPHERE CRS
+ invariant(SPHERE == point->crs && FLAT == crs);
+ // Just remove the additional spherical information
+ point->point = S2Point();
+ point->cell = S2Cell();
+ point->crs = FLAT;
+}
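
Editorial aside, not part of the patch: a usage sketch of ShapeProjection for points — a legacy (lng, lat) point in the FLAT CRS can be upgraded in place to SPHERE, which fills in the S2 representation; note the (lng, lat) to (lat, lng) argument swap that projectInto() performs for S2LatLng::FromDegrees.

PointWithCRS p;
p.oldPoint = Point(30, 45);  // (lng, lat) in degrees
p.crs = FLAT;
if (ShapeProjection::supportsProject(p, SPHERE)) {
    ShapeProjection::projectInto(&p, SPHERE);  // p.point and p.cell are now set; p.crs == SPHERE
}
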
+
+void ShapeProjection::projectInto(PolygonWithCRS* polygon, CRS crs) {
+ if (polygon->crs == crs)
+ return;
+
+ // Only project from STRICT_SPHERE to SPHERE
+ invariant(STRICT_SPHERE == polygon->crs && SPHERE == crs);
+ polygon->crs = SPHERE;
+}
} // namespace mongo
diff --git a/src/mongo/db/geo/shapes.h b/src/mongo/db/geo/shapes.h
index 5eb2f8bceaa..3d8863ff964 100644
--- a/src/mongo/db/geo/shapes.h
+++ b/src/mongo/db/geo/shapes.h
@@ -43,325 +43,312 @@
#include "third_party/s2/s2polyline.h"
#ifndef M_PI
-# define M_PI 3.14159265358979323846
+#define M_PI 3.14159265358979323846
#endif
namespace mongo {
- struct Point;
- struct Circle;
- class Box;
- class Polygon;
+struct Point;
+struct Circle;
+class Box;
+class Polygon;
+
+inline double deg2rad(const double deg) {
+ return deg * (M_PI / 180.0);
+}
+
+inline double rad2deg(const double rad) {
+ return rad * (180.0 / M_PI);
+}
+
+inline double computeXScanDistance(double y, double maxDistDegrees) {
+ // TODO: this overestimates for large maxDistDegrees far from the equator
+ return maxDistDegrees / std::min(cos(deg2rad(std::min(+89.0, y + maxDistDegrees))),
+ cos(deg2rad(std::max(-89.0, y - maxDistDegrees))));
+}
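
Editorial aside, not part of the patch: as a worked example of the formula above, with y = 59 and maxDistDegrees = 1 the denominator is min(cos(60 deg), cos(58 deg)) = cos(60 deg) = 0.5, so computeXScanDistance(59, 1) returns 2 — the x scan width is twice the y distance because meridians converge at that latitude.
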
+
+bool isValidLngLat(double lng, double lat);
+bool linesIntersect(const Point& pA, const Point& pB, const Point& pC, const Point& pD);
+bool circleContainsBox(const Circle& circle, const Box& box);
+bool circleInteriorContainsBox(const Circle& circle, const Box& box);
+bool circleIntersectsWithBox(const Circle& circle, const Box& box);
+bool circleInteriorIntersectsWithBox(const Circle& circle, const Box& box);
+bool edgesIntersectsWithBox(const std::vector<Point>& vertices, const Box& box);
+bool polygonContainsBox(const Polygon& polygon, const Box& box);
+bool polygonIntersectsWithBox(const Polygon& polygon, const Box& box);
- inline double deg2rad(const double deg) { return deg * (M_PI / 180.0); }
-
- inline double rad2deg(const double rad) { return rad * (180.0 / M_PI); }
-
- inline double computeXScanDistance(double y, double maxDistDegrees) {
- // TODO: this overestimates for large maxDistDegrees far from the equator
- return maxDistDegrees / std::min(cos(deg2rad(std::min(+89.0, y + maxDistDegrees))),
- cos(deg2rad(std::max(-89.0, y - maxDistDegrees))));
- }
+/**
+ * Distance utilities for R2 geometries
+ */
+double distance(const Point& p1, const Point& p2);
+bool distanceWithin(const Point& p1, const Point& p2, double radius);
+double distanceCompare(const Point& p1, const Point& p2, double radius);
+// Still needed for non-wrapping $nearSphere
+double spheredist_rad(const Point& p1, const Point& p2);
+double spheredist_deg(const Point& p1, const Point& p2);
- bool isValidLngLat(double lng, double lat);
- bool linesIntersect(const Point& pA, const Point& pB, const Point& pC, const Point& pD);
- bool circleContainsBox(const Circle& circle, const Box& box);
- bool circleInteriorContainsBox(const Circle& circle, const Box& box);
- bool circleIntersectsWithBox(const Circle& circle, const Box& box);
- bool circleInteriorIntersectsWithBox(const Circle& circle, const Box& box);
- bool edgesIntersectsWithBox(const std::vector<Point>& vertices, const Box& box);
- bool polygonContainsBox(const Polygon& polygon, const Box& box);
- bool polygonIntersectsWithBox(const Polygon& polygon, const Box& box);
- /**
- * Distance utilities for R2 geometries
+/**
+ * Distance utilities for S2 geometries
+ */
+struct S2Distance {
+ static double distanceRad(const S2Point& pointA, const S2Point& pointB);
+ static double minDistanceRad(const S2Point& point, const S2Polyline& line);
+ static double minDistanceRad(const S2Point& point, const S2Polygon& polygon);
+ static double minDistanceRad(const S2Point& point, const S2Cap& cap);
+};
+
+struct Point {
+ Point();
+ Point(double x, double y);
+ explicit Point(const BSONElement& e);
+ explicit Point(const BSONObj& o);
+ std::string toString() const;
+
+ double x;
+ double y;
+};
+
+struct Circle {
+ Circle();
+ Circle(double radius, Point center);
+
+ double radius;
+ Point center;
+};
+
+class Box {
+public:
+ Box();
+ Box(double x, double y, double size);
+ Box(const Point& ptA, const Point& ptB);
+
+ void init(const Point& ptA, const Point& ptB);
+ void init(const Box& other);
+
+ BSONArray toBSON() const;
+ std::string toString() const;
+
+ bool between(double min, double max, double val, double fudge = 0) const;
+ bool onBoundary(double bound, double val, double fudge = 0) const;
+ bool mid(double amin, double amax, double bmin, double bmax, bool min, double* res) const;
+
+ double area() const;
+ double maxDim() const;
+ Point center() const;
+
+ // NOTE: Box boundaries are *inclusive*
+ bool onBoundary(Point p, double fudge = 0) const;
+ bool inside(Point p, double fudge = 0) const;
+ bool inside(double x, double y, double fudge = 0) const;
+ bool contains(const Box& other, double fudge = 0) const;
+ bool intersects(const Box& other) const;
+
+ // Box modifications
+ void truncate(double min, double max);
+ void fudge(double error);
+ void expandToInclude(const Point& pt);
+
+ // TODO: Remove after 2D near dependency goes away
+ double legacyIntersectFraction(const Box& other) const;
+
+ Point _min;
+ Point _max;
+};
+
+class Polygon {
+public:
+ Polygon();
+ Polygon(const std::vector<Point>& points);
+
+ void init(const std::vector<Point>& points);
+ void init(const Polygon& other);
+
+ int size() const;
+
+ bool contains(const Point& p) const;
+
+ /*
+ * Return values:
+ * -1 if no intersection
+ * 0 if maybe an intersection (using fudge)
+ * 1 if there is an intersection
*/
- double distance(const Point& p1, const Point &p2);
- bool distanceWithin(const Point &p1, const Point &p2, double radius);
- double distanceCompare(const Point &p1, const Point &p2, double radius);
- // Still needed for non-wrapping $nearSphere
- double spheredist_rad(const Point& p1, const Point& p2);
- double spheredist_deg(const Point& p1, const Point& p2);
-
-
+ int contains(const Point& p, double fudge) const;
/**
- * Distance utilities for S2 geometries
+ * Get the centroid of the polygon object.
*/
- struct S2Distance {
-
- static double distanceRad(const S2Point& pointA, const S2Point& pointB);
- static double minDistanceRad(const S2Point& point, const S2Polyline& line);
- static double minDistanceRad(const S2Point& point, const S2Polygon& polygon);
- static double minDistanceRad(const S2Point& point, const S2Cap& cap);
-
- };
-
- struct Point {
- Point();
- Point(double x, double y);
- explicit Point(const BSONElement& e);
- explicit Point(const BSONObj& o);
- std::string toString() const;
-
- double x;
- double y;
- };
-
- struct Circle {
- Circle();
- Circle(double radius, Point center);
-
- double radius;
- Point center;
- };
-
- class Box {
- public:
-
- Box();
- Box(double x, double y, double size);
- Box(const Point& ptA, const Point& ptB);
-
- void init(const Point& ptA, const Point& ptB);
- void init(const Box& other);
-
- BSONArray toBSON() const;
- std::string toString() const;
-
- bool between(double min, double max, double val, double fudge = 0) const;
- bool onBoundary(double bound, double val, double fudge = 0) const;
- bool mid(double amin, double amax, double bmin, double bmax, bool min, double* res) const;
-
- double area() const;
- double maxDim() const;
- Point center() const;
-
- // NOTE: Box boundaries are *inclusive*
- bool onBoundary(Point p, double fudge = 0) const;
- bool inside(Point p, double fudge = 0) const;
- bool inside(double x, double y, double fudge = 0) const;
- bool contains(const Box& other, double fudge = 0) const;
- bool intersects(const Box& other) const;
-
- // Box modifications
- void truncate(double min, double max);
- void fudge(double error);
- void expandToInclude(const Point& pt);
-
- // TODO: Remove after 2D near dependency goes away
- double legacyIntersectFraction(const Box& other) const;
-
- Point _min;
- Point _max;
- };
-
- class Polygon {
- public:
-
- Polygon();
- Polygon(const std::vector<Point>& points);
-
- void init(const std::vector<Point>& points);
- void init(const Polygon& other);
-
- int size() const;
-
- bool contains(const Point& p) const;
-
- /*
- * Return values:
- * -1 if no intersection
- * 0 if maybe an intersection (using fudge)
- * 1 if there is an intersection
- */
- int contains(const Point &p, double fudge) const;
-
- /**
- * Get the centroid of the polygon object.
- */
- const Point& centroid() const;
- const Box& bounds() const;
- const std::vector<Point>& points() const { return _points; }
-
- private:
-
- // Only modified on creation and init()
- std::vector<Point> _points;
-
- // Cached attributes of the polygon
- mutable std::unique_ptr<Box> _bounds;
- mutable std::unique_ptr<Point> _centroid;
- };
-
- class R2Region {
- public:
-
- virtual ~R2Region() {
- }
-
- virtual Box getR2Bounds() const = 0;
-
- /**
- * Fast heuristic containment check
- *
- * Returns true if the region definitely contains the box.
- * Returns false if not or if too expensive to find out one way or another.
- */
- virtual bool fastContains(const Box& other) const = 0;
-
- /**
- * Fast heuristic disjoint check
- *
- * Returns true if the region definitely is disjoint from the box.
- * Returns false if not or if too expensive to find out one way or another.
- */
- virtual bool fastDisjoint(const Box& other) const = 0;
- };
-
- // Annulus is used by GeoNear. Both inner and outer circles are inlcuded.
- class R2Annulus : public R2Region {
- public:
-
- R2Annulus();
- R2Annulus(const Point& center, double inner, double outer);
-
- const Point& center() const;
-
- double getInner() const;
- double getOuter() const;
-
- bool contains(const Point& point) const;
-
- // R2Region interface
- Box getR2Bounds() const;
- bool fastContains(const Box& other) const;
- bool fastDisjoint(const Box& other) const;
-
- // For debugging
- std::string toString() const;
-
- private:
-
- Point _center;
- double _inner;
- double _outer;
- };
-
- // Clearly this isn't right but currently it's sufficient.
- enum CRS {
- UNSET,
- FLAT, // Equirectangular flat projection (i.e. trivial long/lat projection to flat map)
- SPHERE, // WGS84
- STRICT_SPHERE // WGS84 with strict winding order
- };
-
- // TODO: Make S2 less integral to these types - additional S2 shapes should be an optimization
- // when our CRS is not projected, i.e. SPHERE for now.
- // Generic shapes (Point, Line, Polygon) should hold the raw coordinate data - right now oldXXX
- // is a misnomer - this is the *original* data and the S2 transformation just an optimization.
-
- struct PointWithCRS {
-
- PointWithCRS() : crs(UNSET) {}
-
- S2Point point;
- S2Cell cell;
- Point oldPoint;
- CRS crs;
- };
-
- struct LineWithCRS {
-
- LineWithCRS() : crs(UNSET) {}
-
- S2Polyline line;
- CRS crs;
- };
-
- struct CapWithCRS {
-
- CapWithCRS() : crs(UNSET) {}
-
- S2Cap cap;
- Circle circle;
- CRS crs;
- };
-
- struct BoxWithCRS {
-
- BoxWithCRS() : crs(UNSET) {}
-
- Box box;
- CRS crs;
- };
-
- struct PolygonWithCRS {
-
- PolygonWithCRS() : crs(UNSET) {}
-
- std::unique_ptr<S2Polygon> s2Polygon;
- // Simple polygons with strict winding order may be bigger or smaller than a hemisphere.
- // Only used for query. We don't support storing/indexing big polygons.
- std::unique_ptr<BigSimplePolygon> bigPolygon;
-
- Polygon oldPolygon;
- CRS crs;
- };
-
- struct MultiPointWithCRS {
-
- MultiPointWithCRS() : crs(UNSET) {}
-
- std::vector<S2Point> points;
- std::vector<S2Cell> cells;
- CRS crs;
- };
+ const Point& centroid() const;
+ const Box& bounds() const;
+ const std::vector<Point>& points() const {
+ return _points;
+ }
- struct MultiLineWithCRS {
+private:
+ // Only modified on creation and init()
+ std::vector<Point> _points;
- MultiLineWithCRS() : crs(UNSET) {}
+ // Cached attributes of the polygon
+ mutable std::unique_ptr<Box> _bounds;
+ mutable std::unique_ptr<Point> _centroid;
+};
- OwnedPointerVector<S2Polyline> lines;
- CRS crs;
- };
+class R2Region {
+public:
+ virtual ~R2Region() {}
- struct MultiPolygonWithCRS {
+ virtual Box getR2Bounds() const = 0;
- MultiPolygonWithCRS() : crs(UNSET) {}
+ /**
+ * Fast heuristic containment check
+ *
+ * Returns true if the region definitely contains the box.
+ * Returns false if not or if too expensive to find out one way or another.
+ */
+ virtual bool fastContains(const Box& other) const = 0;
- OwnedPointerVector<S2Polygon> polygons;
- CRS crs;
- };
+ /**
+ * Fast heuristic disjoint check
+ *
+ * Returns true if the region definitely is disjoint from the box.
+ * Returns false if not or if too expensive to find out one way or another.
+ */
+ virtual bool fastDisjoint(const Box& other) const = 0;
+};
- struct GeometryCollection {
+// Annulus is used by GeoNear. Both inner and outer circles are included.
+class R2Annulus : public R2Region {
+public:
+ R2Annulus();
+ R2Annulus(const Point& center, double inner, double outer);
- std::vector<PointWithCRS> points;
+ const Point& center() const;
- // The amount of indirection here is painful but we can't operator= unique_ptr or
- // OwnedPointerVector.
- OwnedPointerVector<LineWithCRS> lines;
- OwnedPointerVector<PolygonWithCRS> polygons;
- OwnedPointerVector<MultiPointWithCRS> multiPoints;
- OwnedPointerVector<MultiLineWithCRS> multiLines;
- OwnedPointerVector<MultiPolygonWithCRS> multiPolygons;
+ double getInner() const;
+ double getOuter() const;
- bool supportsContains() {
- // Only polygons (and multiPolygons) support containment.
- return (polygons.vector().size() > 0 || multiPolygons.vector().size() > 0);
- }
- };
+ bool contains(const Point& point) const;
+
+ // R2Region interface
+ Box getR2Bounds() const;
+ bool fastContains(const Box& other) const;
+ bool fastDisjoint(const Box& other) const;
+
+ // For debugging
+ std::string toString() const;
+
+private:
+ Point _center;
+ double _inner;
+ double _outer;
+};
+
+// Clearly this isn't right but currently it's sufficient.
+enum CRS {
+ UNSET,
+ FLAT, // Equirectangular flat projection (i.e. trivial long/lat projection to flat map)
+ SPHERE, // WGS84
+ STRICT_SPHERE // WGS84 with strict winding order
+};
+
+// TODO: Make S2 less integral to these types - additional S2 shapes should be an optimization
+// when our CRS is not projected, i.e. SPHERE for now.
+// Generic shapes (Point, Line, Polygon) should hold the raw coordinate data - right now oldXXX
+// is a misnomer - this is the *original* data and the S2 transformation just an optimization.
+
+struct PointWithCRS {
+ PointWithCRS() : crs(UNSET) {}
+
+ S2Point point;
+ S2Cell cell;
+ Point oldPoint;
+ CRS crs;
+};
+
+struct LineWithCRS {
+ LineWithCRS() : crs(UNSET) {}
+
+ S2Polyline line;
+ CRS crs;
+};
+
+struct CapWithCRS {
+ CapWithCRS() : crs(UNSET) {}
+
+ S2Cap cap;
+ Circle circle;
+ CRS crs;
+};
+
+struct BoxWithCRS {
+ BoxWithCRS() : crs(UNSET) {}
+
+ Box box;
+ CRS crs;
+};
+
+struct PolygonWithCRS {
+ PolygonWithCRS() : crs(UNSET) {}
+
+ std::unique_ptr<S2Polygon> s2Polygon;
+ // Simple polygons with strict winding order may be bigger or smaller than a hemisphere.
+ // Only used for query. We don't support storing/indexing big polygons.
+ std::unique_ptr<BigSimplePolygon> bigPolygon;
+
+ Polygon oldPolygon;
+ CRS crs;
+};
+
+struct MultiPointWithCRS {
+ MultiPointWithCRS() : crs(UNSET) {}
+
+ std::vector<S2Point> points;
+ std::vector<S2Cell> cells;
+ CRS crs;
+};
+
+struct MultiLineWithCRS {
+ MultiLineWithCRS() : crs(UNSET) {}
+
+ OwnedPointerVector<S2Polyline> lines;
+ CRS crs;
+};
+
+struct MultiPolygonWithCRS {
+ MultiPolygonWithCRS() : crs(UNSET) {}
+
+ OwnedPointerVector<S2Polygon> polygons;
+ CRS crs;
+};
- //
- // Projection functions - we only project following types for now
- // - Point
- // - Polygon (from STRICT_SPHERE TO SPHERE)
- //
- struct ShapeProjection {
- static bool supportsProject(const PointWithCRS& point, const CRS crs);
- static bool supportsProject(const PolygonWithCRS& polygon, const CRS crs);
- static void projectInto(PointWithCRS* point, CRS crs);
- static void projectInto(PolygonWithCRS* point, CRS crs);
- };
+struct GeometryCollection {
+ std::vector<PointWithCRS> points;
+
+ // The amount of indirection here is painful but we can't operator= unique_ptr or
+ // OwnedPointerVector.
+ OwnedPointerVector<LineWithCRS> lines;
+ OwnedPointerVector<PolygonWithCRS> polygons;
+ OwnedPointerVector<MultiPointWithCRS> multiPoints;
+ OwnedPointerVector<MultiLineWithCRS> multiLines;
+ OwnedPointerVector<MultiPolygonWithCRS> multiPolygons;
+
+ bool supportsContains() {
+ // Only polygons (and multiPolygons) support containment.
+ return (polygons.vector().size() > 0 || multiPolygons.vector().size() > 0);
+ }
+};
+
+//
+// Projection functions - we only project following types for now
+// - Point
+// - Polygon (from STRICT_SPHERE TO SPHERE)
+//
+struct ShapeProjection {
+ static bool supportsProject(const PointWithCRS& point, const CRS crs);
+ static bool supportsProject(const PolygonWithCRS& polygon, const CRS crs);
+ static void projectInto(PointWithCRS* point, CRS crs);
+    static void projectInto(PolygonWithCRS* polygon, CRS crs);
+};
} // namespace mongo