author    Hari Khalsa <hkhalsa@10gen.com>    2013-10-01 22:43:48 -0400
committer Hari Khalsa <hkhalsa@10gen.com>    2013-10-04 23:27:44 -0400
commit    ab167c642f49206b4328882286cd5b83c19088bd (patch)
tree      1f7c57c90a81cb53b8d37e96fe37ed1621036853
parent    d09e608691aae000f3176b27cc67a7900229cd1e (diff)
download  mongo-ab167c642f49206b4328882286cd5b83c19088bd.tar.gz
SERVER-10471 add s2near stage, enable all 2dsphere queries, enable 2d queries (just slow).
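As the test changes below illustrate, $minDistance and $maxDistance for GeoJSON points now go inside the $near/$nearSphere argument next to $geometry rather than beside it. A minimal shell sketch of the resulting query shape (the "places" collection and the distances are placeholders):

    var km = 1000;
    db.places.ensureIndex({loc: "2dsphere"});
    db.places.find({loc: {$near: {$geometry: {type: "Point", coordinates: [0, 0]},
                                  $minDistance: 500 * km,    // meters for GeoJSON points
                                  $maxDistance: 1000 * km}}}).itcount();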
-rw-r--r-- jstests/geo_mindistance.js | 14
-rw-r--r-- jstests/geo_mindistance_boundaries.js | 41
-rw-r--r-- jstests/geo_query_input_validation.js | 107
-rwxr-xr-x jstests/geo_s2index.js | 14
-rw-r--r-- jstests/geo_s2largewithin.js | 3
-rw-r--r-- jstests/geo_s2meridian.js | 7
-rw-r--r-- jstests/geo_s2nearComplex.js | 15
-rw-r--r-- jstests/geo_s2oddshapes.js | 16
-rw-r--r-- jstests/geo_s2overlappingpolys.js | 40
-rwxr-xr-x jstests/geo_s2polywithholes.js | 4
-rw-r--r-- jstests/geo_s2within.js | 10
-rw-r--r-- jstests/geonear_cmd_input_validation.js | 7
-rw-r--r-- src/mongo/db/exec/SConscript | 1
-rw-r--r-- src/mongo/db/exec/index_scan.cpp | 7
-rw-r--r-- src/mongo/db/exec/projection_executor.cpp | 8
-rw-r--r-- src/mongo/db/exec/s2near.cpp | 292
-rw-r--r-- src/mongo/db/exec/s2near.h | 141
-rw-r--r-- src/mongo/db/geo/geonear.cpp | 8
-rw-r--r-- src/mongo/db/geo/geoquery.cpp | 159
-rw-r--r-- src/mongo/db/geo/geoquery.h | 51
-rw-r--r-- src/mongo/db/geo/s2common.cpp | 18
-rw-r--r-- src/mongo/db/geo/s2common.h | 4
-rw-r--r-- src/mongo/db/index/expression_index.h | 107
-rw-r--r-- src/mongo/db/index/s2_index_cursor.cpp | 11
-rw-r--r-- src/mongo/db/index/s2_index_cursor.h | 1
-rw-r--r-- src/mongo/db/index/s2_near_cursor.cpp | 3
-rw-r--r-- src/mongo/db/index/s2_near_cursor.h | 1
-rw-r--r-- src/mongo/db/index/s2_simple_cursor.cpp | 1
-rw-r--r-- src/mongo/db/index/s2_simple_cursor.h | 1
-rw-r--r-- src/mongo/db/matcher/expression.h | 4
-rw-r--r-- src/mongo/db/matcher/expression_geo.cpp | 20
-rw-r--r-- src/mongo/db/matcher/expression_geo.h | 10
-rw-r--r-- src/mongo/db/matcher/expression_geo_test.cpp | 2
-rw-r--r-- src/mongo/db/matcher/expression_parser.cpp | 32
-rw-r--r-- src/mongo/db/matcher/expression_parser_geo.cpp | 21
-rw-r--r-- src/mongo/db/query/canonical_query.cpp | 57
-rw-r--r-- src/mongo/db/query/explain_plan.cpp | 4
-rw-r--r-- src/mongo/db/query/index_bounds.cpp | 104
-rw-r--r-- src/mongo/db/query/index_bounds.h | 3
-rw-r--r-- src/mongo/db/query/index_bounds_builder.cpp | 43
-rw-r--r-- src/mongo/db/query/index_bounds_builder.h | 5
-rw-r--r-- src/mongo/db/query/index_tag.cpp | 10
-rw-r--r-- src/mongo/db/query/interval.h | 21
-rw-r--r-- src/mongo/db/query/new_find.cpp | 8
-rw-r--r-- src/mongo/db/query/parsed_projection.h | 9
-rw-r--r-- src/mongo/db/query/plan_enumerator.cpp | 27
-rw-r--r-- src/mongo/db/query/projection_parser.cpp | 2
-rw-r--r-- src/mongo/db/query/query_planner.cpp | 375
-rw-r--r-- src/mongo/db/query/query_planner.h | 26
-rw-r--r-- src/mongo/db/query/query_planner_test.cpp | 97
-rw-r--r-- src/mongo/db/query/query_solution.cpp | 82
-rw-r--r-- src/mongo/db/query/query_solution.h | 62
-rw-r--r-- src/mongo/db/query/stage_builder.cpp | 33
-rw-r--r-- src/mongo/db/query/stage_types.h | 8
54 files changed, 1650 insertions, 507 deletions
diff --git a/jstests/geo_mindistance.js b/jstests/geo_mindistance.js
index 16f76b9fa5a..5aa382ebb8d 100644
--- a/jstests/geo_mindistance.js
+++ b/jstests/geo_mindistance.js
@@ -47,7 +47,7 @@ for (var x = 0; x <= 10; x += 1) {
t.ensureIndex({loc: "2dsphere"});
var n_docs = t.count(),
- geoJSONPoint = {$geometry: {type: 'Point', coordinates: [0, 0]}},
+ geoJSONPoint = {type: 'Point', coordinates: [0, 0]},
legacyPoint = [0, 0];
//
@@ -56,8 +56,8 @@ var n_docs = t.count(),
//
var n_min1400_count = t.find({loc: {
- $near: geoJSONPoint, $minDistance: 1400 * km
-}}).count();
+ $near: {$geometry: geoJSONPoint, $minDistance: 1400 * km
+}}}).count();
assert.eq(
n_docs - n_docs_within(1400),
@@ -68,10 +68,10 @@ assert.eq(
);
var n_bw500_and_1000_count = t.find({loc: {
- $near: geoJSONPoint,
- $minDistance: 500 * km,
- $maxDistance: 1000 * km
-}}).count();
+ $near: {$geometry: geoJSONPoint,
+ $minDistance: 500 * km,
+ $maxDistance: 1000 * km
+}}}).count();
assert.eq(
n_docs_within(1000) - n_docs_within(500),
diff --git a/jstests/geo_mindistance_boundaries.js b/jstests/geo_mindistance_boundaries.js
index 2b6c26afb3f..80e933827b6 100644
--- a/jstests/geo_mindistance_boundaries.js
+++ b/jstests/geo_mindistance_boundaries.js
@@ -14,7 +14,7 @@ t.ensureIndex({loc: "2dsphere"});
var km = 1000,
earthRadiusMeters = 6378.1 * km,
- geoJSONPoint = {$geometry: {type: 'Point', coordinates: [0, 0]}},
+ geoJSONPoint = {type: 'Point', coordinates: [0, 0]},
// One degree of longitude at the equator, about 111 km.
degreeInMeters = 2 * Math.PI * earthRadiusMeters / 360,
metersEpsilon = Number.MIN_VALUE;
@@ -29,27 +29,32 @@ while (degreeInMeters + metersEpsilon == degreeInMeters) { metersEpsilon *= 2; }
// Test boundary conditions for $near and GeoJSON, in meters.
//
+
+// minDistance must be within the args to $near, not on the side.
+assert.throws(function() { t.find({loc:{$near:{$geometry: geoJSONPoint},
+ $minDistance:0.1}}).itcount();});
+
assert.eq(
1, t.find({loc: {
- $near: geoJSONPoint,
- $minDistance: degreeInMeters
- }}).count(),
+ $near: {$geometry: geoJSONPoint,
+ $minDistance: degreeInMeters
+ }}}).itcount(),
"Expected to find (0, 1) within $minDistance 1 degree from origin"
);
assert.eq(
1, t.find({loc: {
- $near: geoJSONPoint,
- $minDistance: degreeInMeters - metersEpsilon
- }}).count(),
+ $near: {$geometry: geoJSONPoint,
+ $minDistance: degreeInMeters - metersEpsilon
+ }}}).itcount(),
"Expected to find (0, 1) within $minDistance (1 degree - epsilon) from origin"
);
assert.eq(
0, t.find({loc: {
- $near: geoJSONPoint,
- $minDistance: degreeInMeters + metersEpsilon
- }}).count(),
+ $near: {$geometry: geoJSONPoint,
+ $minDistance: degreeInMeters + metersEpsilon
+ }}}).itcount(),
"Expected *not* to find (0, 1) within $minDistance (1 degree + epsilon) from origin"
);
@@ -59,9 +64,9 @@ assert.eq(
assert.eq(
1, t.find({loc: {
- $nearSphere: geoJSONPoint,
- $minDistance: degreeInMeters
- }}).count(),
+ $nearSphere: {$geometry: geoJSONPoint,
+ $minDistance: degreeInMeters
+ }}}).itcount(),
"Expected to find (0, 1) within $minDistance 1 degree from origin"
);
@@ -69,7 +74,7 @@ assert.eq(
1, t.find({loc: {
$nearSphere: geoJSONPoint,
$minDistance: degreeInMeters - metersEpsilon
- }}).count(),
+ }}).itcount(),
"Expected to find (0, 1) within $minDistance (1 degree - epsilon) from origin"
);
@@ -77,7 +82,7 @@ assert.eq(
0, t.find({loc: {
$nearSphere: geoJSONPoint,
$minDistance: degreeInMeters + metersEpsilon
- }}).count(),
+ }}).itcount(),
"Expected *not* to find (0, 1) within $minDistance (1 degree + epsilon) from origin"
);
@@ -98,7 +103,7 @@ assert.eq(
1, t.find({loc: {
$nearSphere: legacyPoint,
$minDistance: degreeInRadians
- }}).count(),
+ }}).itcount(),
"Expected to find (0, 1) within $minDistance 1 degree from origin"
);
@@ -106,7 +111,7 @@ assert.eq(
1, t.find({loc: {
$nearSphere: legacyPoint,
$minDistance: degreeInRadians - radiansEpsilon
- }}).count(),
+ }}).itcount(),
"Expected to find (0, 1) within $minDistance (1 degree - epsilon) from origin"
);
@@ -114,6 +119,6 @@ assert.eq(
0, t.find({loc: {
$nearSphere: legacyPoint,
$minDistance: degreeInRadians + radiansEpsilon
- }}).count(),
+ }}).itcount(),
"Expected *not* to find (0, 1) within $minDistance (1 degree + epsilon) from origin"
);
diff --git a/jstests/geo_query_input_validation.js b/jstests/geo_query_input_validation.js
deleted file mode 100644
index 4eebe093794..00000000000
--- a/jstests/geo_query_input_validation.js
+++ /dev/null
@@ -1,107 +0,0 @@
-//
-// Test input validation for $near and $nearSphere queries.
-//
-var t = db.geo_query_input_validation;
-
-// The test matrix. Some combinations are not supported:
-// 2d index and $minDistance.
-// 2dsphere index, $near, and legacy coordinates.
-var indexTypes = ['2d', '2dsphere'],
- queryTypes = ['$near', '$nearSphere'],
- pointTypes = [
- {$geometry: {type: 'Point', coordinates: [0, 0]}},
- [0, 0]],
- optionNames = ['$minDistance', '$maxDistance'],
- badDistances = [-1, undefined, 'foo'];
-
-indexTypes.forEach(function(indexType) {
- t.drop();
- t.createIndex({'loc': indexType});
-
- queryTypes.forEach(function(queryType) {
- pointTypes.forEach(function(pointType) {
- optionNames.forEach(function(optionName) {
- var isLegacy = Array.isArray(pointType),
- pointDescription = (isLegacy ? "legacy coordinates" : "GeoJSON point");
-
- // Like {loc: {$nearSphere: [0, 0], $maxDistance: 1}}.
- var query = {};
- query[queryType] = pointType;
- query[optionName] = 1;
-
- var locQuery = {loc: query};
-
- if (indexType == '2d' && !isLegacy) {
- // Currently doesn't throw errors but also doesn't work as
- // expected: SERVER-10636. Stop processing this combination.
- return;
- }
-
- // Unsupported combinations should return errors.
- if (
- (indexType == '2d' && optionName == '$minDistance') ||
- (indexType == '2dsphere' && queryType == '$near' && isLegacy)
- ) {
- assert.throws(
- function() {
- t.find(locQuery).itcount();
- },
- [],
- queryType + " query with " + indexType
- + " index and " + pointDescription
- + " should've failed."
- );
-
- // Stop processing this combination in the test matrix.
- return;
- }
-
- // This is a supported combination. No error.
- t.find(locQuery).itcount();
-
- function makeQuery(distance) {
- // Like {$nearSphere: geoJSONPoint, $minDistance: -1}.
- var query = {};
- query[queryType] = pointType;
- query[optionName] = distance;
- return query;
- }
-
- function doQuery(query) {
- t.find({loc: query}).itcount();
- }
-
- // No error with $min/$maxDistance 1.
- doQuery(makeQuery(1));
-
- var outOfRangeDistances = [];
- if (indexType == '2d' && queryType == '$near') {
- // $maxDistance unlimited; no error.
- doQuery(makeQuery(1e10));
- } else if (isLegacy) {
- // Radians can't be more than pi.
- outOfRangeDistances.push(Math.PI + 0.1);
- } else {
- // $min/$maxDistance is in meters, so distances greater
- // than pi are ok, but not more than half earth's
- // circumference in meters.
- doQuery(makeQuery(Math.PI + 0.1));
-
- var earthRadiusMeters = 6378.1 * 1000;
- outOfRangeDistances.push(Math.PI * earthRadiusMeters + 100);
- }
-
- // Try several bad values for $min/$maxDistance.
- badDistances.concat(outOfRangeDistances).forEach(function(badDistance) {
-
- var msg = (
- queryType + " with " + pointDescription
- + " and " + indexType + " index should've failed with "
- + optionName + " " + badDistance);
-
- assert.throws(doQuery, [makeQuery(badDistance)], msg);
- });
- });
- });
- });
-});
diff --git a/jstests/geo_s2index.js b/jstests/geo_s2index.js
index c42bd3f1aaf..cabcea72d19 100755
--- a/jstests/geo_s2index.js
+++ b/jstests/geo_s2index.js
@@ -38,29 +38,29 @@ t.ensureIndex( { geo : "2dsphere", nonGeo: 1 } )
assert(!db.getLastError())
res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointA} } });
-assert.eq(res.count(), 3);
+assert.eq(res.itcount(), 3);
res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointB} } });
-assert.eq(res.count(), 4);
+assert.eq(res.itcount(), 4);
res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointD} } });
-assert.eq(res.count(), 1);
+assert.eq(res.itcount(), 1);
res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : someline} } })
-assert.eq(res.count(), 5);
+assert.eq(res.itcount(), 5);
res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } })
-assert.eq(res.count(), 6);
+assert.eq(res.itcount(), 6);
res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
-assert.eq(res.count(), 6);
+assert.eq(res.itcount(), 6);
res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } }).limit(1)
assert.eq(res.itcount(), 1);
res = t.find({ "nonGeo": "pointA",
"geo" : { "$geoIntersects" : { "$geometry" : somepoly} } })
-assert.eq(res.count(), 1);
+assert.eq(res.itcount(), 1);
// Don't crash mongod if we give it bad input.
t.drop()
diff --git a/jstests/geo_s2largewithin.js b/jstests/geo_s2largewithin.js
index 321e66352f7..2327f1fb02d 100644
--- a/jstests/geo_s2largewithin.js
+++ b/jstests/geo_s2largewithin.js
@@ -40,5 +40,6 @@ longPoly = {type: "Polygon",
]};
result = t.find({geo: {$geoWithin: {$geometry: longPoly}}});
-assert.eq(result.count(), 1);
+assert.eq(result.itcount(), 1);
+result = t.find({geo: {$geoWithin: {$geometry: longPoly}}});
assert.eq("origin", result[0].name)
diff --git a/jstests/geo_s2meridian.js b/jstests/geo_s2meridian.js
index 78582a13149..0d5b4b20e6d 100644
--- a/jstests/geo_s2meridian.js
+++ b/jstests/geo_s2meridian.js
@@ -29,7 +29,7 @@ lineAlongMeridian = {
}
result = t.find({geo: {$geoIntersects: {$geometry: lineAlongMeridian}}});
-assert.eq(result.count(), 1);
+assert.eq(result.itcount(), 1);
t.drop();
t.ensureIndex({geo: "2dsphere"});
@@ -69,7 +69,7 @@ meridianCrossingPoly = {
};
result = t.find({geo: {$geoWithin: {$geometry: meridianCrossingPoly}}});
-assert.eq(result.count(), 3);
+assert.eq(result.itcount(), 3);
t.drop();
t.ensureIndex({geo: "2dsphere"});
@@ -103,6 +103,7 @@ pointOnPositiveSideOfMeridian = {
};
result = t.find({geo: {$geoNear: pointOnPositiveSideOfMeridian}});
-assert.eq(result.count(), 2);
+assert.eq(result.itcount(), 2);
+result = t.find({geo: {$geoNear: pointOnPositiveSideOfMeridian}});
assert.eq(result[0].name, "closer");
assert.eq(result[1].name, "farther");
diff --git a/jstests/geo_s2nearComplex.js b/jstests/geo_s2nearComplex.js
index 87dd034ca24..16a24d6db24 100644
--- a/jstests/geo_s2nearComplex.js
+++ b/jstests/geo_s2nearComplex.js
@@ -167,7 +167,7 @@ validateOrdering({geo: {$geoNear: {$geometry: originGeo}}})
print("Millis for uniform:")
print(t.find(query).explain().millis)
print("Total points:");
-print(t.find(query).count());
+print(t.find(query).itcount());
t.drop()
t.ensureIndex({geo: "2dsphere"})
@@ -179,7 +179,7 @@ validateOrdering({geo: {$geoNear: {$geometry: originGeo}}})
print("Millis for uniform with gaps:")
print(t.find(query).explain().millis)
print("Total points:");
-print(t.find(query).count());
+print(t.find(query).itcount());
t.drop()
t.ensureIndex({geo: "2dsphere"})
@@ -192,7 +192,7 @@ validateOrdering({geo: {$geoNear: {$geometry: originGeo}}})
print("Millis for uniform with clusters:");
print(t.find(query).explain().millis);
print("Total points:");
-print(t.find(query).count());
+print(t.find(query).itcount());
t.drop()
t.ensureIndex({geo: "2dsphere"})
@@ -211,7 +211,7 @@ validateOrdering({geo: {$geoNear: {$geometry: originGeo}}})
print("Millis for uniform near pole:")
print(t.find({geo: {$geoNear: {$geometry: originGeo}}}).explain().millis)
-assert.eq(t.find({geo: {$geoNear: {$geometry: originGeo}}}).count(), 50);
+assert.eq(t.find({geo: {$geoNear: {$geometry: originGeo}}}).itcount(), 50);
t.drop()
t.ensureIndex({geo: "2dsphere"})
@@ -228,7 +228,7 @@ validateOrdering({geo: {$geoNear: {$geometry: originGeo}}})
print("Millis for uniform on meridian:")
print(t.find({geo: {$near: {$geometry: originGeo}}}).explain().millis)
-assert.eq(t.find({geo: {$geoNear: {$geometry: originGeo}}}).count(), 50);
+assert.eq(t.find({geo: {$geoNear: {$geometry: originGeo}}}).itcount(), 50);
t.drop()
t.ensureIndex({geo: "2dsphere"})
@@ -245,7 +245,7 @@ validateOrdering({geo: {$near: {$geometry: originGeo}}})
print("Millis for uniform on negative meridian:");
print(t.find({geo: {$near: {$geometry: originGeo}}}).explain().millis);
-assert.eq(t.find({geo: {$near: {$geometry: originGeo}}}).count(), 50);
+assert.eq(t.find({geo: {$near: {$geometry: originGeo}}}).itcount(), 50);
// Near search with points that are really far away.
t.drop()
@@ -260,7 +260,8 @@ uniformPoints(origin, 10, 89, 90);
cur = t.find({geo: {$near: {$geometry: originGeo}}})
-assert.eq(cur.count(), 10);
+assert.eq(cur.itcount(), 10);
+cur = t.find({geo: {$near: {$geometry: originGeo}}})
print("Near search on very distant points:");
print(t.find({geo: {$near: {$geometry: originGeo}}}).explain().millis);
diff --git a/jstests/geo_s2oddshapes.js b/jstests/geo_s2oddshapes.js
index 0d825661106..99b5fe145af 100644
--- a/jstests/geo_s2oddshapes.js
+++ b/jstests/geo_s2oddshapes.js
@@ -41,13 +41,15 @@ var tallPoly = {type: "Polygon",
]};
//We expect that the testPoint (at the origin) will be within this poly.
var result = t.find({geo: {$within: {$geometry: tallPoly}}});
-assert.eq(result.count(), 1);
+assert.eq(result.itcount(), 1);
+var result = t.find({geo: {$within: {$geometry: tallPoly}}});
assert.eq(result[0].name, 'origin');
//We expect that the testPoint, and the testHorizLine should geoIntersect
//with this poly.
result = t.find({geo: {$geoIntersects: {$geometry: tallPoly}}});
-assert.eq(result.count(), 2);
+assert.eq(result.itcount(), 2);
+result = t.find({geo: {$geoIntersects: {$geometry: tallPoly}}});
assert.eq(result[0].name, 'horiz');
assert.eq(result[1].name, 'origin');
@@ -60,9 +62,9 @@ var longPoly = {type: "Polygon",
// Thanks to spherical geometry, this poly contains most of the hemisphere.
result = t.find({geo: {$within: {$geometry: longPoly}}});
-assert.eq(result.count(), 3);
+assert.eq(result.itcount(), 3);
result = t.find({geo: {$geoIntersects: {$geometry: longPoly}}});
-assert.eq(result.count(), 3);
+assert.eq(result.itcount(), 3);
//Test a poly that is the size of half the earth.
@@ -96,7 +98,8 @@ var largePoly = {type: "Polygon",
]};
result = t.find({geo: {$within: {$geometry: largePoly}}});
-assert.eq(result.count(), 1);
+assert.eq(result.itcount(), 1);
+result = t.find({geo: {$within: {$geometry: largePoly}}});
var point = result[0]
assert.eq(point.name, 'inside');
@@ -130,7 +133,8 @@ smallPoly = {type: "Polygon",
]};
result = t.find({geo: {$within: {$geometry: smallPoly}}});
-assert.eq(result.count(), 1);
+assert.eq(result.itcount(), 1);
+result = t.find({geo: {$within: {$geometry: smallPoly}}});
point = result[0]
assert.eq(point.name, 'inside');
diff --git a/jstests/geo_s2overlappingpolys.js b/jstests/geo_s2overlappingpolys.js
index 197792a1f03..0d96222206c 100644
--- a/jstests/geo_s2overlappingpolys.js
+++ b/jstests/geo_s2overlappingpolys.js
@@ -19,9 +19,9 @@ var outerPoly = {type: "Polygon",
[[-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0]]
]};
var result = t.find({geo: {$within: {$geometry: outerPoly}}});
-assert.eq(result.count(), 1);
+assert.eq(result.itcount(), 1);
result = t.find({geo: {$geoIntersects: {$geometry: outerPoly}}});
-assert.eq(result.count(), 1);
+assert.eq(result.itcount(), 1);
// Test 2: If a poly that covers half of the canonPoly, we expect that it should
@@ -34,11 +34,11 @@ var partialPoly = {type: "Polygon",
//Should not be within
result = t.find({geo: {$within: {$geometry: partialPoly}}});
-assert.eq(result.count(), 0);
+assert.eq(result.itcount(), 0);
//This should however count as a geoIntersect
result = t.find({geo: {$geoIntersects: {$geometry: partialPoly}}});
-assert.eq(result.count(), 1);
+assert.eq(result.itcount(), 1);
// Test 3: Polygons that intersect at a point or an edge have undefined
@@ -54,7 +54,7 @@ var sharedPointPoly = {type: "Polygon",
]};
result = t.find({geo: {$geoIntersects: {$geometry: sharedPointPoly}}});
-assert.eq(result.count(), 1);
+assert.eq(result.itcount(), 1);
// Case (b): Polygons that intersect at one point (a vertex).
// behaviour: not geoIntersect
@@ -65,7 +65,7 @@ var sharedVertexPoly = {type: "Polygon",
]};
result = t.find({geo: {$geoIntersects: {$geometry: sharedVertexPoly}}});
-assert.eq(result.count(), 0);
+assert.eq(result.itcount(), 0);
// Case (c): Polygons that intesersect at one point that is very close to a
// vertex should have the same behaviour as Case (b).
@@ -76,7 +76,7 @@ var almostSharedVertexPoly = {type: "Polygon",
]};
result = t.find({geo: {$geoIntersects: {$geometry: almostSharedVertexPoly}}});
-assert.eq(result.count(), 0);
+assert.eq(result.itcount(), 0);
// Case (d): Polygons that intesersect at one point that is not quite as close
@@ -89,7 +89,7 @@ var notCloseEnoughSharedVertexPoly = {type: "Polygon",
]};
result = t.find({geo: {$geoIntersects: {$geometry: notCloseEnoughSharedVertexPoly}}});
-assert.eq(result.count(), 1);
+assert.eq(result.itcount(), 1);
// Case (e): Polygons that come very close to having a point intersection
// on a non-vertex coordinate should intersect.
@@ -100,7 +100,7 @@ var almostSharedPointPoly = {type: "Polygon",
]};
result = t.find({geo: {$geoIntersects: {$geometry: almostSharedPointPoly}}});
-assert.eq(result.count(), 1);
+assert.eq(result.itcount(), 1);
// Case (f): If we increase the error a little, it should no longer act
@@ -114,7 +114,7 @@ var notCloseEnoughSharedPointPoly = {type: "Polygon",
]};
result = t.find({geo: {$geoIntersects: {$geometry: notCloseEnoughSharedPointPoly}}});
-assert.eq(result.count(), 0);
+assert.eq(result.itcount(), 0);
/* Test 3: Importantly, polygons with shared edges have undefined intersection
* under s2. Therefore these test serve more to make sure nothing changes than
@@ -130,7 +130,7 @@ var fullyCoveredEdgePoly = {type: "Polygon",
]};
result = t.find({geo: {$geoIntersects: {$geometry: fullyCoveredEdgePoly}}});
-assert.eq(result.count(), 0);
+assert.eq(result.itcount(), 0);
// Case 2: A polygon who shares an edge with another polygon, where the searching
// polygon's edge fully covers the canon polygon's edge.
@@ -141,7 +141,7 @@ var coveringEdgePoly = {type: "Polygon",
]};
result = t.find({geo: {$geoIntersects: {$geometry: coveringEdgePoly}}});
-assert.eq(result.count(), 1);
+assert.eq(result.itcount(), 1);
// Case 2a: same as Case 2, except pulled slightly away from the polygon.
// Result: Intersection.
@@ -152,7 +152,7 @@ var closebyCoveringEdgePoly = {type: "Polygon",
]};
result = t.find({geo: {$geoIntersects: {$geometry: closebyCoveringEdgePoly}}});
-assert.eq(result.count(), 1);
+assert.eq(result.itcount(), 1);
// Case 2b: same as Case 4, except pulled slightly away from the polygon, so that it's not intersecting.
// Result: No Intersection.
@@ -163,7 +163,7 @@ var notCloseEnoughCoveringEdgePoly = {type: "Polygon",
]};
result = t.find({geo: {$geoIntersects: {$geometry: notCloseEnoughCoveringEdgePoly}}});
-assert.eq(result.count(), 0);
+assert.eq(result.itcount(), 0);
// Case 3: A polygon who shares an edge with another polygon, where the searching
// polygon's edge partially covers by the canon polygon's edge.
@@ -174,7 +174,7 @@ var partiallyCoveringEdgePoly = {type: "Polygon",
]};
result = t.find({geo: {$geoIntersects: {$geometry: partiallyCoveringEdgePoly}}});
-assert.eq(result.count(), 0);
+assert.eq(result.itcount(), 0);
//Polygons that intersect at three non-co-linear points should geoIntersect
@@ -184,7 +184,7 @@ var sharedPointsPoly = {type: "Polygon",
]};
result = t.find({geo: {$geoIntersects: {$geometry: sharedPointsPoly}}});
-assert.eq(result.count(), 1);
+assert.eq(result.itcount(), 1);
//If a polygon contains a hole, and another polygon is within that hole, it should not be within or intersect.
@@ -194,9 +194,9 @@ var bigHolePoly = {type: "Polygon",
[[-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0]]
]};
result = t.find({geo: {$within: {$geometry: bigHolePoly}}});
-assert.eq(result.count(), 0);
+assert.eq(result.itcount(), 0);
result = t.find({geo: {$geoIntersects: {$geometry: bigHolePoly}}});
-assert.eq(result.count(), 0);
+assert.eq(result.itcount(), 0);
// If a polygon has a hole, and another polygon is contained partially by that hole, it should be an intersection
// but not a within.
@@ -208,6 +208,6 @@ var internalOverlapPoly = {type: "Polygon",
]};
result = t.find({geo: {$geoIntersects: {$geometry: internalOverlapPoly}}});
-assert.eq(result.count(), 1);
+assert.eq(result.itcount(), 1);
result = t.find({geo: {$within: {$geometry: internalOverlapPoly}}});
-assert.eq(result.count(), 0);
+assert.eq(result.itcount(), 0);
diff --git a/jstests/geo_s2polywithholes.js b/jstests/geo_s2polywithholes.js
index 2b1e6ab437b..beb9932739f 100755
--- a/jstests/geo_s2polywithholes.js
+++ b/jstests/geo_s2polywithholes.js
@@ -17,7 +17,7 @@ var polygonWithNoHole = {"type" : "Polygon", "coordinates": [
// Test 1: Sanity check. Expect all three points.
var sanityResult = t.find({geo: {$within: {$geometry: polygonWithNoHole}}});
-assert.eq(sanityResult.count(), 3);
+assert.eq(sanityResult.itcount(), 3);
// Test 2: Polygon with a hole that isn't contained byt the poly shell.
var polygonWithProtrudingHole = {"type" : "Polygon", "coordinates": [
@@ -32,7 +32,7 @@ assert(db.getLastError());
// Can't search with bogus poly.
assert.throws(function() {
- return t.find({geo: {$within: {$geometry: polygonWithProtrudingHole}}}).count()
+ return t.find({geo: {$within: {$geometry: polygonWithProtrudingHole}}}).itcount()
})
// Test 3: This test will confirm that a polygon with overlapping holes throws
diff --git a/jstests/geo_s2within.js b/jstests/geo_s2within.js
index 491a0d94a03..87fd32a7676 100644
--- a/jstests/geo_s2within.js
+++ b/jstests/geo_s2within.js
@@ -11,7 +11,7 @@ t.insert({geo: { "type" : "LineString", "coordinates": [ [ 40.1, 5.1], [40.2, 5.
t.insert({geo: { "type" : "LineString", "coordinates": [ [ 40.1, 5.1], [42, 7]]}})
res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
-assert.eq(res.count(), 1);
+assert.eq(res.itcount(), 1);
t.drop()
t.ensureIndex({geo: "2dsphere"})
@@ -21,16 +21,16 @@ somepoly = { "type" : "Polygon",
t.insert({geo:{ "type" : "Point", "coordinates": [ 40, 5 ] }})
res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
-assert.eq(res.count(), 1);
+assert.eq(res.itcount(), 1);
// In the hole. Shouldn't find it.
t.insert({geo:{ "type" : "Point", "coordinates": [ 41.1, 6.1 ] }})
res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
-assert.eq(res.count(), 1);
+assert.eq(res.itcount(), 1);
// Also in the hole.
t.insert({geo: { "type" : "LineString", "coordinates": [ [ 41.1, 6.1], [41.2, 6.2]]}})
res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
-assert.eq(res.count(), 1);
+assert.eq(res.itcount(), 1);
// Half-hole, half-not. Shouldn't be $within.
t.insert({geo: { "type" : "LineString", "coordinates": [ [ 41.5, 6.5], [42.5, 7.5]]}})
res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
-assert.eq(res.count(), 1);
+assert.eq(res.itcount(), 1);
diff --git a/jstests/geonear_cmd_input_validation.js b/jstests/geonear_cmd_input_validation.js
index 14e28432270..f9d392a6506 100644
--- a/jstests/geonear_cmd_input_validation.js
+++ b/jstests/geonear_cmd_input_validation.js
@@ -68,13 +68,6 @@ indexTypes.forEach(function(indexType) {
if (indexType == '2d') {
// maxDistance unlimited; no error.
db.runCommand(makeCommand(1e10));
- } else if (isLegacy) {
- // Radians can't be more than pi.
- outOfRangeDistances.push(Math.PI + 0.1);
- } else {
- // Meters can't be more than half circumference.
- var earthRadiusMeters = 6378.1 * 1000;
- outOfRangeDistances.push(Math.PI * earthRadiusMeters + 100);
}
// Try several bad values for min/maxDistance.
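The checks removed above match the dropped "maxDistance too large" uasserts in geoquery.cpp further down. For reference, the geoNear command shape these validation tests build looks roughly like the following (collection name and values are placeholders, assuming a 2dsphere index):

    db.runCommand({
        geoNear: "places",
        near: {type: "Point", coordinates: [0, 0]},
        spherical: true,
        minDistance: 500,       // meters with a 2dsphere index
        maxDistance: 1000
    });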
diff --git a/src/mongo/db/exec/SConscript b/src/mongo/db/exec/SConscript
index 96ab04eccea..423b4c9c655 100644
--- a/src/mongo/db/exec/SConscript
+++ b/src/mongo/db/exec/SConscript
@@ -45,6 +45,7 @@ env.StaticLibrary(
"or.cpp",
"projection.cpp",
"projection_executor.cpp",
+ "s2near.cpp",
"skip.cpp",
"sort.cpp",
"stagedebug_cmd.cpp",
diff --git a/src/mongo/db/exec/index_scan.cpp b/src/mongo/db/exec/index_scan.cpp
index 1305c861f46..e9b2e2eafbd 100644
--- a/src/mongo/db/exec/index_scan.cpp
+++ b/src/mongo/db/exec/index_scan.cpp
@@ -103,7 +103,12 @@ namespace mongo {
if (_params.bounds.isSimpleRange) {
// Start at one key, end at another.
- _indexCursor->seek(_params.bounds.startKey);
+ Status status = _indexCursor->seek(_params.bounds.startKey);
+ if (!status.isOK()) {
+ warning() << "Seek failed: " << status.toString();
+ _hitEnd = true;
+ return PlanStage::FAILURE;
+ }
}
else {
// XXX: must be actually a Btree
diff --git a/src/mongo/db/exec/projection_executor.cpp b/src/mongo/db/exec/projection_executor.cpp
index 9af491e6a58..13224d5f340 100644
--- a/src/mongo/db/exec/projection_executor.cpp
+++ b/src/mongo/db/exec/projection_executor.cpp
@@ -56,7 +56,11 @@ namespace mongo {
bob.append(elt);
}
- if (proj->_includedFields.size() > 0) {
+ // If _id is included it's inside _includedFields, and we want to ignore that when
+ // deciding whether to execute an inclusion or exclusion.
+ if (proj->_includedFields.size() > 0
+ || (proj->_includeID && proj->_includedFields.size() > 1)) {
+
// We only want stuff in _fields.
const vector<string>& fields = proj->_includedFields;
for (size_t i = 0; i < fields.size(); ++i) {
@@ -73,7 +77,7 @@ namespace mongo {
}
}
}
- else if (proj->_excludedFields.size() > 0) {
+ else {
// We want stuff NOT in _fields. This can't be covered, so we expect an obj.
if (!wsm->hasObj()) {
return Status(ErrorCodes::BadValue,
diff --git a/src/mongo/db/exec/s2near.cpp b/src/mongo/db/exec/s2near.cpp
new file mode 100644
index 00000000000..8f449f9e43a
--- /dev/null
+++ b/src/mongo/db/exec/s2near.cpp
@@ -0,0 +1,292 @@
+/**
+ * Copyright (C) 2013 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects for
+ * all of the code used other than as permitted herein. If you modify file(s)
+ * with this exception, you may extend this exception to your version of the
+ * file(s), but you are not obligated to do so. If you do not wish to do so,
+ * delete this exception statement from your version. If you delete this
+ * exception statement from all source files in the program, then also delete
+ * it in the license file.
+ */
+
+#include "mongo/db/exec/s2near.h"
+
+#include "mongo/db/exec/fetch.h"
+#include "mongo/db/exec/index_scan.h"
+#include "mongo/db/exec/working_set_common.h"
+#include "mongo/db/geo/geoconstants.h"
+#include "mongo/db/index/catalog_hack.h"
+#include "mongo/db/index/expression_index.h"
+#include "third_party/s2/s2regionintersection.h"
+
+namespace mongo {
+
+ S2NearStage::S2NearStage(const string& ns, const BSONObj& indexKeyPattern,
+ const NearQuery& nearQuery, const IndexBounds& baseBounds,
+ MatchExpression* filter, WorkingSet* ws) {
+ _ns = ns;
+ _ws = ws;
+ _indexKeyPattern = indexKeyPattern;
+ _nearQuery = nearQuery;
+ _baseBounds = baseBounds;
+ _filter = filter;
+ _worked = false;
+ _failed = false;
+
+ // The field we're near-ing from is the n-th field. Figure out what that 'n' is. We
+ // put the cover for the search annulus in this spot in the bounds.
+ _nearFieldIndex = 0;
+ BSONObjIterator specIt(_indexKeyPattern);
+ while (specIt.more()) {
+ if (specIt.next().fieldName() == _nearQuery.field) {
+ break;
+ }
+ ++_nearFieldIndex;
+ }
+
+ verify(_nearFieldIndex < _indexKeyPattern.nFields());
+
+ // FLAT implies the distances are in radians. Convert to meters.
+ if (FLAT == _nearQuery.centroid.crs) {
+ _nearQuery.minDistance *= kRadiusOfEarthInMeters;
+ _nearQuery.maxDistance *= kRadiusOfEarthInMeters;
+ }
+
+ // Make sure distances are sane. Possibly redundant given the checking during parsing.
+ _minDistance = max(0.0, _nearQuery.minDistance);
+ _maxDistance = min(M_PI * kRadiusOfEarthInMeters, _nearQuery.maxDistance);
+ _minDistance = min(_minDistance, _maxDistance);
+
+ // We grow _outerRadius in nextAnnulus() below.
+ _innerRadius = _outerRadius = _minDistance;
+
+ // XXX: where do we grab finestIndexedLevel from really? idx descriptor?
+ int finestIndexedLevel = S2::kAvgEdge.GetClosestLevel(500.0 / kRadiusOfEarthInMeters);
+ // Start with a conservative _radiusIncrement. When we're done searching a shell we
+ // increment the two radii by this.
+ _radiusIncrement = 5 * S2::kAvgEdge.GetValue(finestIndexedLevel) * kRadiusOfEarthInMeters;
+ }
+
+ S2NearStage::~S2NearStage() {
+ // _annulus temporarily takes ownership of some member variables.
+ // Release them to avoid double-deleting _innerCap and _outerCap.
+ _annulus.Release(NULL);
+ }
+
+ PlanStage::StageState S2NearStage::work(WorkingSetID* out) {
+ if (_failed) { return PlanStage::FAILURE; }
+ if (isEOF()) { return PlanStage::IS_EOF; }
+ ++_commonStats.works;
+
+ // If we haven't opened up our very first ixscan+fetch children, do it. This is kind of
+ // heavy so we don't want to do it in the ctor.
+ if (!_worked) {
+ nextAnnulus();
+ _worked = true;
+ }
+
+ // If we're still reading results from the child, do that.
+ if (NULL != _child.get()) {
+ return addResultToQueue(out);
+ }
+
+ // Not reading results. Perhaps we're returning buffered results.
+ if (!_results.empty()) {
+ Result result = _results.top();
+ _results.pop();
+ *out = result.id;
+
+ // Remove from invalidation map.
+ WorkingSetMember* member = _ws->get(*out);
+ if (member->hasLoc()) {
+ unordered_map<DiskLoc, WorkingSetID, DiskLoc::Hasher>::iterator it = _invalidationMap.find(member->loc);
+ verify(_invalidationMap.end() != it);
+ _invalidationMap.erase(it);
+ }
+
+ ++_commonStats.advanced;
+ return PlanStage::ADVANCED;
+ }
+
+ // Not EOF, not reading results, not returning any buffered results. Look in the next shell
+ // for results.
+ nextAnnulus();
+ return PlanStage::NEED_TIME;
+ }
+
+ void S2NearStage::nextAnnulus() {
+ // Step 1: Grow the annulus.
+ _innerRadius = _outerRadius;
+ _outerRadius += _radiusIncrement;
+ _outerRadius = min(_outerRadius, _maxDistance);
+ verify(_innerRadius <= _outerRadius);
+
+ // We might have just grown our radius beyond anything reasonable.
+ if (isEOF()) { return; }
+
+ // Step 2: Fill out bounds for the ixscan we use.
+ _innerCap = S2Cap::FromAxisAngle(_nearQuery.centroid.point,
+ S1Angle::Radians(_innerRadius / kRadiusOfEarthInMeters));
+ _outerCap = S2Cap::FromAxisAngle(_nearQuery.centroid.point,
+ S1Angle::Radians(_outerRadius / kRadiusOfEarthInMeters));
+ _innerCap = _innerCap.Complement();
+
+ vector<S2Region*> regions;
+ regions.push_back(&_innerCap);
+ regions.push_back(&_outerCap);
+
+ _annulus.Release(NULL);
+ _annulus.Init(&regions);
+
+ _baseBounds.fields[_nearFieldIndex].intervals.clear();
+ ExpressionMapping::cover2dsphere(_annulus, &_baseBounds.fields[_nearFieldIndex]);
+
+ // Step 3: Actually create the ixscan.
+ // TODO: Cache params.
+ IndexScanParams params;
+ NamespaceDetails* nsd = nsdetails(_ns.c_str());
+ if (NULL == nsd) {
+ _failed = true;
+ return;
+ }
+ int idxNo = nsd->findIndexByKeyPattern(_indexKeyPattern);
+ if (-1 == idxNo) {
+ _failed = true;
+ return;
+ }
+ params.descriptor = CatalogHack::getDescriptor(nsd, idxNo);
+ params.bounds = _baseBounds;
+ params.direction = 1;
+ IndexScan* scan = new IndexScan(params, _ws, NULL);
+
+ // Owns 'scan'.
+ _child.reset(new FetchStage(_ws, scan, _filter));
+ }
+
+ PlanStage::StageState S2NearStage::addResultToQueue(WorkingSetID* out) {
+ PlanStage::StageState state = _child->work(out);
+
+ // All done reading from _child.
+ if (PlanStage::IS_EOF == state) {
+ _child.reset();
+
+ // Adjust the annulus size depending on how many results we got.
+ if (_results.empty()) {
+ _radiusIncrement *= 2;
+ } else if (_results.size() < 300) {
+ _radiusIncrement *= 2;
+ } else if (_results.size() > 600) {
+ _radiusIncrement /= 2;
+ }
+
+ // Make a new ixscan next time.
+ return PlanStage::NEED_TIME;
+ }
+
+ // Nothing to do unless we advance.
+ if (PlanStage::ADVANCED != state) { return state; }
+
+ // TODO Speed improvements:
+ //
+ // 0. Modify fetch to preserve key data and test for intersection w/annulus.
+ //
+ // 1. keep track of what we've seen in this scan and possibly ignore it.
+ //
+ // 2. keep track of results we've returned before and ignore them.
+
+ WorkingSetMember* member = _ws->get(*out);
+ // Must have an object in order to get geometry out of it.
+ verify(member->hasObj());
+
+ // Get all the fields with that name from the document.
+ BSONElementSet geom;
+ member->obj.getFieldsDotted(_nearQuery.field, geom, false);
+ if (geom.empty()) {return PlanStage::NEED_TIME; }
+
+ // Some value that any distance we can calculate will be less than.
+ double minDistance = numeric_limits<double>::max();
+ for (BSONElementSet::iterator git = geom.begin(); git != geom.end(); ++git) {
+ if (!git->isABSONObj()) { return PlanStage::FAILURE; }
+ BSONObj obj = git->Obj();
+
+ double distToObj;
+ if (S2SearchUtil::distanceBetween(_nearQuery.centroid.point, obj, &distToObj)) {
+ minDistance = min(distToObj, minDistance);
+ }
+ else {
+ warning() << "unknown geometry: " << obj.toString();
+ }
+ }
+
+ // If the distance to the doc satisfies our distance criteria,
+ if (minDistance >= _innerRadius && minDistance < _outerRadius) {
+ _results.push(Result(*out, minDistance));
+ if (member->hasLoc()) {
+ _invalidationMap[member->loc] = *out;
+ }
+ }
+
+ return PlanStage::NEED_TIME;
+ }
+
+ void S2NearStage::prepareToYield() {
+ if (NULL != _child.get()) {
+ _child->prepareToYield();
+ }
+ }
+
+ void S2NearStage::recoverFromYield() {
+ if (NULL != _child.get()) {
+ _child->recoverFromYield();
+ }
+ }
+
+ void S2NearStage::invalidate(const DiskLoc& dl) {
+ if (NULL != _child.get()) {
+ _child->invalidate(dl);
+ }
+
+ unordered_map<DiskLoc, WorkingSetID, DiskLoc::Hasher>::iterator it
+ = _invalidationMap.find(dl);
+
+ if (it != _invalidationMap.end()) {
+ WorkingSetMember* member = _ws->get(it->second);
+ verify(member->hasLoc());
+ WorkingSetCommon::fetchAndInvalidateLoc(member);
+ verify(!member->hasLoc());
+ _invalidationMap.erase(it);
+ }
+ }
+
+ bool S2NearStage::isEOF() {
+ if (!_worked) { return false; }
+ if (_failed) { return true; }
+ // We're only done if we exhaust the search space.
+ return _innerRadius >= _maxDistance;
+ }
+
+ PlanStageStats* S2NearStage::getStats() {
+ // TODO: must agg stats across child ixscan/fetches.
+ // TODO: we can do better than this, need own common stats.
+ _commonStats.isEOF = isEOF();
+ return new PlanStageStats(_commonStats, STAGE_GEO_NEAR_2DSPHERE);
+ }
+
+} // namespace mongo
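The new stage searches concentric annuli around the query point: each shell is covered, scanned via an ixscan+fetch, and buffered in a distance-ordered priority queue, and the shell width doubles when a shell is sparse (fewer than 300 results) and halves when it is dense (more than 600). A purely illustrative JavaScript sketch of that control flow, where searchShell stands in for the covered index scan and is not MongoDB code:

    function annulusSearch(minDistance, maxDistance, radiusIncrement, searchShell) {
        var inner = minDistance, outer = minDistance, results = [];
        while (true) {
            // Grow the annulus, as S2NearStage::nextAnnulus() does above.
            inner = outer;
            outer = Math.min(outer + radiusIncrement, maxDistance);
            if (inner >= maxDistance) { break; }           // search space exhausted
            var shell = searchShell(inner, outer);         // docs whose distance falls in [inner, outer)
            results = results.concat(shell);
            // Adjust how quickly the annulus grows based on how dense this shell was.
            if (shell.length < 300) { radiusIncrement *= 2; }
            else if (shell.length > 600) { radiusIncrement /= 2; }
        }
        return results;
    }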
diff --git a/src/mongo/db/exec/s2near.h b/src/mongo/db/exec/s2near.h
new file mode 100644
index 00000000000..358e0eda09f
--- /dev/null
+++ b/src/mongo/db/exec/s2near.h
@@ -0,0 +1,141 @@
+/**
+ * Copyright (C) 2013 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects for
+ * all of the code used other than as permitted herein. If you modify file(s)
+ * with this exception, you may extend this exception to your version of the
+ * file(s), but you are not obligated to do so. If you do not wish to do so,
+ * delete this exception statement from your version. If you delete this
+ * exception statement from all source files in the program, then also delete
+ * it in the license file.
+ */
+
+#pragma once
+
+#include <queue>
+
+#include "mongo/db/exec/plan_stage.h"
+#include "mongo/db/geo/geoquery.h"
+#include "mongo/db/geo/s2common.h"
+#include "mongo/db/jsobj.h"
+#include "mongo/db/matcher/expression.h"
+#include "mongo/db/query/index_bounds.h"
+#include "mongo/platform/unordered_set.h"
+#include "third_party/s2/s2cap.h"
+#include "third_party/s2/s2regionintersection.h"
+
+namespace mongo {
+
+ /**
+ * Executes a geoNear search. Is a leaf node. Output type is LOC_AND_UNOWNED_OBJ.
+ */
+ class S2NearStage : public PlanStage {
+ public:
+ /**
+ * Takes: index to scan over, MatchExpression with near point, other MatchExpressions for
+ * covered data,
+ */
+ S2NearStage(const string& ns, const BSONObj& indexKeyPattern,
+ const NearQuery& nearQuery, const IndexBounds& baseBounds,
+ MatchExpression* filter, WorkingSet* ws);
+
+ virtual ~S2NearStage();
+
+ StageState work(WorkingSetID* out);
+ bool isEOF();
+
+ void prepareToYield();
+ void recoverFromYield();
+ void invalidate(const DiskLoc& dl);
+
+ PlanStageStats* getStats();
+
+ private:
+ StageState addResultToQueue(WorkingSetID* out);
+ void nextAnnulus();
+
+ bool _worked;
+
+ WorkingSet* _ws;
+
+ string _ns;
+
+ BSONObj _indexKeyPattern;
+
+ // This is the "array index" of the key field that is the near field. We use this to do
+ // cheap is-this-doc-in-the-annulus testing. We also need to know where to stuff the index
+ // bounds for the various annuluses/annuli.
+ int _nearFieldIndex;
+
+ NearQuery _nearQuery;
+
+ IndexBounds _baseBounds;
+
+ scoped_ptr<PlanStage> _child;
+
+ // We don't check this ourselves; we let the sub-fetch deal w/it.
+ MatchExpression* _filter;
+
+ // The S2 machinery that represents the search annulus. We keep this around after bounds
+ // generation to check for intersection.
+ S2Cap _innerCap;
+ S2Cap _outerCap;
+ S2RegionIntersection _annulus;
+
+ // We use this to hold on to the results in an annulus. Results are sorted to have
+ // decreasing distance.
+ struct Result {
+ Result(WorkingSetID wsid, double dist) : id(wsid), distance(dist) { }
+
+ bool operator<(const Result& other) const {
+ // We want increasing distance, not decreasing, so we reverse the <.
+ return distance > other.distance;
+ }
+
+ WorkingSetID id;
+ double distance;
+ };
+
+ // We compute an annulus of results and cache it here.
+ priority_queue<Result> _results;
+
+ // For fast invalidation. Perhaps not worth it.
+ unordered_map<DiskLoc, WorkingSetID, DiskLoc::Hasher> _invalidationMap;
+
+ // Geo-related variables.
+ // At what min distance (arc length) do we start looking for results?
+ double _minDistance;
+ // What's the max distance (arc length) we're willing to look for results?
+ double _maxDistance;
+
+ // These radii define the annulus we're currently looking at.
+ double _innerRadius;
+ double _outerRadius;
+
+ // When we search the next annulus, what to adjust our radius by? Grows when we search an
+ // annulus and find no results.
+ double _radiusIncrement;
+
+ // Did we encounter an unrecoverable error?
+ bool _failed;
+
+ CommonStats _commonStats;
+ };
+
+} // namespace mongo
diff --git a/src/mongo/db/geo/geonear.cpp b/src/mongo/db/geo/geonear.cpp
index 91818276d89..9fc4e3b5d6d 100644
--- a/src/mongo/db/geo/geonear.cpp
+++ b/src/mongo/db/geo/geonear.cpp
@@ -35,6 +35,7 @@
#include "mongo/db/auth/privilege.h"
#include "mongo/db/commands.h"
#include "mongo/db/curop.h"
+#include "mongo/db/geo/geoconstants.h"
#include "mongo/db/geo/s2common.h"
#include "mongo/db/index_names.h"
#include "mongo/db/index/2d_index_cursor.h"
@@ -197,6 +198,11 @@ namespace mongo {
// we've geo-indexed any of the fields in it.
vector<GeoQuery> regions;
+ if (FLAT == nearQuery.centroid.crs) {
+ nearQuery.maxDistance *= kRadiusOfEarthInMeters;
+ nearQuery.minDistance *= kRadiusOfEarthInMeters;
+ }
+
nic->seek(parsedArgs.query, nearQuery, regions);
// We do pass in the query above, but it's just so we can possibly use it in our index
@@ -218,7 +224,7 @@ namespace mongo {
double dist = nic->currentDistance();
// If we got the distance in radians, output it in radians too.
- if (nearQuery.fromRadians) { dist /= params.radius; }
+ if (FLAT == nearQuery.centroid.crs) { dist /= params.radius; }
dist *= parsedArgs.distanceMultiplier;
totalDistance += dist;
if (dist > farthestDist) { farthestDist = dist; }
diff --git a/src/mongo/db/geo/geoquery.cpp b/src/mongo/db/geo/geoquery.cpp
index abf6e8e3647..295e13b3636 100644
--- a/src/mongo/db/geo/geoquery.cpp
+++ b/src/mongo/db/geo/geoquery.cpp
@@ -1,44 +1,39 @@
/**
-* Copyright (C) 2013 10gen Inc.
-*
-* This program is free software: you can redistribute it and/or modify
-* it under the terms of the GNU Affero General Public License, version 3,
-* as published by the Free Software Foundation.
-*
-* This program is distributed in the hope that it will be useful,
-* but WITHOUT ANY WARRANTY; without even the implied warranty of
-* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-* GNU Affero General Public License for more details.
-*
-* You should have received a copy of the GNU Affero General Public License
-* along with this program. If not, see <http://www.gnu.org/licenses/>.
-*
-* As a special exception, the copyright holders give permission to link the
-* code of portions of this program with the OpenSSL library under certain
-* conditions as described in each individual source file and distribute
-* linked combinations including the program with the OpenSSL library. You
-* must comply with the GNU Affero General Public License in all respects for
-* all of the code used other than as permitted herein. If you modify file(s)
-* with this exception, you may extend this exception to your version of the
-* file(s), but you are not obligated to do so. If you do not wish to do so,
-* delete this exception statement from your version. If you delete this
-* exception statement from all source files in the program, then also delete
-* it in the license file.
-*/
+ * Copyright (C) 2013 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects for
+ * all of the code used other than as permitted herein. If you modify file(s)
+ * with this exception, you may extend this exception to your version of the
+ * file(s), but you are not obligated to do so. If you do not wish to do so,
+ * delete this exception statement from your version. If you delete this
+ * exception statement from all source files in the program, then also delete
+ * it in the license file.
+ */
#include "mongo/db/geo/geoquery.h"
-#ifdef _WIN32
-#include <float.h>
-#define nextafter _nextafter
-#else
-#include <cmath> // nextafter
-#endif
-
#include "mongo/db/geo/geoconstants.h"
namespace mongo {
+ using mongoutils::str::equals;
+
bool NearQuery::parseFromGeoNear(const BSONObj &obj, double radius) {
if (obj["near"].eoo()) { return false; }
BSONObj nearObj = obj["near"].embeddedObject();
@@ -47,102 +42,79 @@ namespace mongo {
return false;
}
- // The CRS for the legacy points dictates that distances are in radians.
- fromRadians = (FLAT == centroid.crs);
-
if (!obj["minDistance"].eoo()) {
uassert(17035, "minDistance must be a number", obj["minDistance"].isNumber());
double distArg = obj["minDistance"].number();
uassert(16901, "minDistance must be non-negative", distArg >= 0.0);
- if (fromRadians) {
- minDistance = distArg * radius;
- } else {
- minDistance = distArg;
- }
+ minDistance = distArg;
}
if (!obj["maxDistance"].eoo()) {
uassert(17036, "maxDistance must be a number", obj["maxDistance"].isNumber());
double distArg = obj["maxDistance"].number();
uassert(16902, "maxDistance must be non-negative", distArg >= 0.0);
- if (fromRadians) {
- maxDistance = distArg * radius;
- } else {
- maxDistance = distArg;
- }
-
- uassert(17037, "maxDistance too large",
- maxDistance <= nextafter(M_PI * radius, DBL_MAX));
+ maxDistance = distArg;
}
+
return true;
}
- bool NearQuery::parseFrom(const BSONObj &obj) {
+ bool NearQuery::parseLegacyQuery(const BSONObj &obj) {
bool hasGeometry = false;
- bool hasMaxDistance = false;
// First, try legacy near, e.g.:
// t.find({ loc : { $nearSphere: [0,0], $minDistance: 1, $maxDistance: 3 }})
// t.find({ loc : { $nearSphere: [0,0] }})
// t.find({ loc : { $near: { someGeoJSONPoint}})
+ // t.find({ loc : { $geoNear: { someGeoJSONPoint}})
BSONObjIterator it(obj);
while (it.more()) {
BSONElement e = it.next();
- bool isNearSphere = mongoutils::str::equals(e.fieldName(), "$nearSphere");
- bool isMinDistance = mongoutils::str::equals(e.fieldName(), "$minDistance");
- bool isMaxDistance = mongoutils::str::equals(e.fieldName(), "$maxDistance");
- bool isNear = mongoutils::str::equals(e.fieldName(), "$near")
- || mongoutils::str::equals(e.fieldName(), "$geoNear");
- if (isNearSphere || isNear) {
+ if (equals(e.fieldName(), "$near") || equals(e.fieldName(), "$geoNear")
+ || equals(e.fieldName(), "$nearSphere")) {
if (!e.isABSONObj()) { return false; }
BSONObj embeddedObj = e.embeddedObject();
if (!GeoParser::isPoint(embeddedObj)) { continue; }
if (!GeoParser::parsePoint(embeddedObj, &centroid)) { return false; }
- if (isNearSphere) {
- fromRadians = (centroid.crs == FLAT);
- hasGeometry = true;
- } else if (isNear && (centroid.crs == SPHERE)) {
- // We don't accept $near : [oldstylepoint].
- hasGeometry = true;
- }
- } else if (isMinDistance) {
+ isNearSphere = equals(e.fieldName(), "$nearSphere");
+ hasGeometry = true;
+ } else if (equals(e.fieldName(), "$minDistance")) {
uassert(16893, "$minDistance must be a number", e.isNumber());
minDistance = e.Number();
uassert(16894, "$minDistance must be non-negative", minDistance >= 0.0);
- } else if (isMaxDistance) {
+ } else if (equals(e.fieldName(), "$maxDistance")) {
uassert(16895, "$maxDistance must be a number", e.isNumber());
maxDistance = e.Number();
uassert(16896, "$maxDistance must be non-negative", maxDistance >= 0.0);
- hasMaxDistance = true;
}
}
- // Add fudge to maxValidDistance so we don't throw when the provided maxDistance
- // is on the edge.
- double maxValidDistance = nextafter(fromRadians ?
- M_PI :
- kRadiusOfEarthInMeters * M_PI, DBL_MAX);
+ return hasGeometry;
+ }
- uassert(17038, "$minDistance too large", minDistance < maxValidDistance);
- uassert(17039, "$maxDistance too large",
- !hasMaxDistance || maxDistance <= maxValidDistance);
+ bool NearQuery::parseNewQuery(const BSONObj &obj) {
+ bool hasGeometry = false;
- if (hasGeometry) { return true; }
+ BSONObjIterator objIt(obj);
+ if (!objIt.more()) { return false; }
+ BSONElement e = objIt.next();
+ // Just one arg. to $geoNear.
+ if (objIt.more()) { return false; }
- // Next, try "new" near:
+ // Parse "new" near:
// t.find({"geo" : {"$near" : {"$geometry": pointA, $minDistance: 1, $maxDistance: 3}}})
- BSONElement e = obj.firstElement();
+ // t.find({"geo" : {"$geoNear" : {"$geometry": pointA, $minDistance: 1, $maxDistance: 3}}})
if (!e.isABSONObj()) { return false; }
BSONObj::MatchType matchType = static_cast<BSONObj::MatchType>(e.getGtLtOp());
if (BSONObj::opNEAR != matchType) { return false; }
- // Restart it.
- it = BSONObjIterator(e.embeddedObject());
+ // Iterate over the argument.
+ BSONObjIterator it(e.embeddedObject());
while (it.more()) {
BSONElement e = it.next();
- if (mongoutils::str::equals(e.fieldName(), "$geometry")) {
+ if (equals(e.fieldName(), "$geometry")) {
if (e.isABSONObj()) {
BSONObj embeddedObj = e.embeddedObject();
uassert(16885, "$near requires a point, given " + embeddedObj.toString(),
@@ -152,21 +124,32 @@ namespace mongo {
(SPHERE == centroid.crs));
hasGeometry = true;
}
- } else if (mongoutils::str::equals(e.fieldName(), "$minDistance")) {
+ } else if (equals(e.fieldName(), "$minDistance")) {
uassert(16897, "$minDistance must be a number", e.isNumber());
minDistance = e.Number();
uassert(16898, "$minDistance must be non-negative", minDistance >= 0.0);
- uassert(17084, "$minDistance too large", minDistance < maxValidDistance);
- } else if (mongoutils::str::equals(e.fieldName(), "$maxDistance")) {
+ } else if (equals(e.fieldName(), "$maxDistance")) {
uassert(16899, "$maxDistance must be a number", e.isNumber());
maxDistance = e.Number();
uassert(16900, "$maxDistance must be non-negative", maxDistance >= 0.0);
- uassert(16992, "$maxDistance too large", maxDistance <= maxValidDistance);
}
}
+
return hasGeometry;
}
+
+ bool NearQuery::parseFrom(const BSONObj &obj) {
+ if (parseLegacyQuery(obj)) { return true; }
+ // Clear out any half-baked data.
+ minDistance = 0;
+ isNearSphere = false;
+ maxDistance = std::numeric_limits<double>::max();
+ centroid = PointWithCRS();
+ // And try parsing new format.
+ return parseNewQuery(obj);
+ }
+
bool GeoQuery::parseLegacyQuery(const BSONObj &obj) {
// Legacy within parsing #1: t.find({ loc : [0,0] }) This is should be
// point-only. We tag it as intersect and limit $within to
@@ -275,6 +258,12 @@ namespace mongo {
|| NULL != _geometryCollection;
}
+ bool GeometryContainer::hasFlatRegion() const {
+ return (NULL != _polygon && _polygon->crs == FLAT)
+ || NULL != _cap
+ || NULL != _box;
+ }
+
bool GeoQuery::satisfiesPredicate(const GeometryContainer &otherContainer) const {
verify(predicate == WITHIN || predicate == INTERSECT);
diff --git a/src/mongo/db/geo/geoquery.h b/src/mongo/db/geo/geoquery.h
index a991769637a..ca570c72e53 100644
--- a/src/mongo/db/geo/geoquery.h
+++ b/src/mongo/db/geo/geoquery.h
@@ -60,6 +60,7 @@ namespace mongo {
bool supportsContains() const;
bool hasS2Region() const;
+ bool hasFlatRegion() const;
// Used by s2cursor only to generate a covering of the query object.
// One region is not NULL and this returns it.
@@ -96,33 +97,49 @@ namespace mongo {
shared_ptr<S2RegionUnion> _region;
};
+ // TODO: Make a struct, turn parse stuff into something like
+ // static Status parseNearQuery(const BSONObj& obj, NearQuery** out);
class NearQuery {
public:
- NearQuery() : minDistance(0), maxDistance(std::numeric_limits<double>::max()),
- fromRadians(false) {}
- NearQuery(const string& f) : field(f), minDistance(0),
- maxDistance(std::numeric_limits<double>::max()),
- fromRadians(false) {}
+ NearQuery()
+ : minDistance(0),
+ maxDistance(std::numeric_limits<double>::max()),
+ isNearSphere(false) { }
+
+ NearQuery(const string& f)
+ : field(f),
+ minDistance(0),
+ maxDistance(std::numeric_limits<double>::max()),
+ isNearSphere(false) { }
- /**
- * If fromRadians is true after a parseFrom, minDistance and maxDistance are returned in
- * radians, not meters. The distances must be multiplied by the underlying index's radius
- * to convert them to meters.
- *
- * This is annoying but useful when we don't know what index we're using at parse time.
- */
bool parseFrom(const BSONObj &obj);
bool parseFromGeoNear(const BSONObj &obj, double radius);
+ // The name of the field that contains the geometry.
string field;
+
+ // The starting point of the near search.
PointWithCRS centroid;
- // Min and max distance IN METERS from centroid that we're willing to search.
+ // Min and max distance from centroid that we're willing to search.
+ // Distance is in whatever units the centroid's CRS implies.
+ // If centroid.crs == FLAT these are radians.
+ // If centroid.crs == SPHERE these are meters.
double minDistance;
double maxDistance;
- // Did we convert to this distance from radians? (If so, we output distances in radians.)
- bool fromRadians;
+ // It's either $near or $nearSphere.
+ bool isNearSphere;
+
+ string toString() const {
+ stringstream ss;
+ ss << " field=" << field;
+ return ss.str();
+ }
+
+ private:
+ bool parseLegacyQuery(const BSONObj &obj);
+ bool parseNewQuery(const BSONObj &obj);
};
// This represents either a $within or a $geoIntersects.
@@ -143,6 +160,10 @@ namespace mongo {
bool hasS2Region() const;
const S2Region& getRegion() const;
string getField() const { return field; }
+
+ Predicate getPred() const { return predicate; }
+ const GeometryContainer& getGeometry() const { return geoContainer; }
+
private:
// Try to parse the provided object into the right place.
bool parseLegacyQuery(const BSONObj &obj);
diff --git a/src/mongo/db/geo/s2common.cpp b/src/mongo/db/geo/s2common.cpp
index fcc51765106..2695c16f629 100644
--- a/src/mongo/db/geo/s2common.cpp
+++ b/src/mongo/db/geo/s2common.cpp
@@ -28,6 +28,7 @@
#include "mongo/db/geo/s2common.h"
+#include "mongo/db/geo/geoconstants.h"
#include "mongo/db/geo/geoparser.h"
#include "mongo/db/geo/geoquery.h"
#include "third_party/s2/s2.h"
@@ -105,8 +106,7 @@ namespace mongo {
return minDist;
}
- bool S2SearchUtil::distanceBetween(const S2Point& us, const BSONObj& them,
- const S2IndexingParams &params, double *out) {
+ bool S2SearchUtil::distanceBetween(const S2Point& us, const BSONObj& them, double *out) {
if (GeoParser::isGeometryCollection(them)) {
GeometryCollection c;
if (!GeoParser::parseGeometryCollection(them, &c)) { return false; }
@@ -150,37 +150,37 @@ namespace mongo {
}
}
- *out = params.radius * minDist;
+ *out = kRadiusOfEarthInMeters * minDist;
return true;
} else if (GeoParser::isMultiPoint(them)) {
MultiPointWithCRS multiPoint;
if (!GeoParser::parseMultiPoint(them, &multiPoint)) { return false; }
- *out = dist(us, multiPoint) * params.radius;
+ *out = dist(us, multiPoint) * kRadiusOfEarthInMeters;
return true;
} else if (GeoParser::isMultiLine(them)) {
MultiLineWithCRS multiLine;
if (!GeoParser::parseMultiLine(them, &multiLine)) { return false; }
- *out = dist(us, multiLine) * params.radius;
+ *out = dist(us, multiLine) * kRadiusOfEarthInMeters;
return true;
} else if (GeoParser::isMultiPolygon(them)) {
MultiPolygonWithCRS multiPolygon;
if (!GeoParser::parseMultiPolygon(them, &multiPolygon)) { return false; }
- *out = dist(us, multiPolygon) * params.radius;
+ *out = dist(us, multiPolygon) * kRadiusOfEarthInMeters;
return true;
} else if (GeoParser::isPolygon(them)) {
PolygonWithCRS poly;
if (!GeoParser::parsePolygon(them, &poly)) { return false; }
- *out = dist(us, poly.polygon) * params.radius;
+ *out = dist(us, poly.polygon) * kRadiusOfEarthInMeters;
return true;
} else if (GeoParser::isLine(them)) {
LineWithCRS line;
if (!GeoParser::parseLine(them, &line)) { return false; }
- *out = dist(us, line.line) * params.radius;
+ *out = dist(us, line.line) * kRadiusOfEarthInMeters;
return true;
} else if (GeoParser::isPoint(them)) {
PointWithCRS point;
if (!GeoParser::parsePoint(them, &point)) { return false; }
- *out = dist(us, point.point) * params.radius;
+ *out = dist(us, point.point) * kRadiusOfEarthInMeters;
return true;
} else {
return false;
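
To illustrate the constant swap above: the S2 primitives report distances as angles on the unit sphere, so a surface distance in meters is just that angle times the earth's radius. A minimal standalone sketch (not MongoDB code; the radius below is an approximation, the real value comes from geoconstants.h):

#include <cmath>
#include <iostream>

int main() {
    // Approximate earth radius, for illustration only; the server uses the
    // kRadiusOfEarthInMeters constant from mongo/db/geo/geoconstants.h.
    const double kApproxEarthRadiusMeters = 6378137.0;

    // S2 distance primitives return an angle in radians on the unit sphere;
    // multiplying by the radius converts it to meters on the earth's surface.
    const double oneDegreeInRadians = std::acos(-1.0) / 180.0;
    std::cout << oneDegreeInRadians * kApproxEarthRadiusMeters
              << " meters per degree of arc" << std::endl;
    return 0;
}
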
diff --git a/src/mongo/db/geo/s2common.h b/src/mongo/db/geo/s2common.h
index 8f505cf79ef..15440514978 100644
--- a/src/mongo/db/geo/s2common.h
+++ b/src/mongo/db/geo/s2common.h
@@ -26,7 +26,6 @@
* it in the license file.
*/
-#include "mongo/db/diskloc.h"
#include "mongo/db/geo/geoparser.h"
#include "mongo/db/geo/geoconstants.h"
#include "third_party/s2/s2.h"
@@ -82,8 +81,7 @@ namespace mongo {
static void setCoverLimitsBasedOnArea(double area, S2RegionCoverer *coverer, int coarsestIndexedLevel);
static bool getKeysForObject(const BSONObj& obj, const S2IndexingParams& params,
vector<string>* out);
- static bool distanceBetween(const S2Point& us, const BSONObj& them,
- const S2IndexingParams &params, double *out);
+ static bool distanceBetween(const S2Point& us, const BSONObj& them, double *out);
};
} // namespace mongo
diff --git a/src/mongo/db/index/expression_index.h b/src/mongo/db/index/expression_index.h
index 104c9580a44..d172af5836b 100644
--- a/src/mongo/db/index/expression_index.h
+++ b/src/mongo/db/index/expression_index.h
@@ -30,14 +30,14 @@
#include "mongo/db/jsobj.h"
#include "mongo/db/hasher.h"
+#include "mongo/db/query/index_bounds_builder.h"
namespace mongo {
/**
* Functions that compute expression index mappings.
*
- * TODO: In the general case we would have (key pattern field, query value) -> projected value.
- * Given that our only acceptable key pattern field is "hashed" we do something less general.
+ * TODO: I think we could structure this more generally with respect to planning.
*/
class ExpressionMapping {
public:
@@ -46,6 +46,109 @@ namespace mongo {
bob.append("", BSONElementHasher::hash64(value, BSONElementHasher::DEFAULT_HASH_SEED));
return bob.obj();
}
+
+ static void cover2dsphere(const S2Region& region, OrderedIntervalList* oilOut) {
+ // XXX: should grab coarsest level from the index since the user can possibly change it.
+ int coarsestIndexedLevel =
+ S2::kAvgEdge.GetClosestLevel(100 * 1000.0 / kRadiusOfEarthInMeters);
+
+
+            // The min level of our covering is the level whose cells are the closest match to the
+            // *area* of the region (or the coarsest indexed level, whichever is smaller). The max
+            // level of the covering is four levels finer than the min level.
+ double edgeLen = sqrt(region.GetRectBound().Area());
+ S2RegionCoverer coverer;
+ coverer.set_min_level(min(coarsestIndexedLevel,
+ 2 + S2::kAvgEdge.GetClosestLevel(edgeLen)));
+ coverer.set_max_level(4 + coverer.min_level());
+
+ vector<S2CellId> cover;
+ coverer.GetCovering(region, &cover);
+
+ // Look at the cells we cover and all cells that are within our covering and finer.
+ // Anything with our cover as a strict prefix is contained within the cover and should
+ // be intersection tested.
+ bool considerCoarser = false;
+ set<string> intervalSet;
+ for (size_t i = 0; i < cover.size(); ++i) {
+ intervalSet.insert(cover[i].toString());
+ // If any of our covers could be covered by something in the index, we have
+ // to look at things coarser.
+ if (cover[i].level() > coarsestIndexedLevel) {
+ considerCoarser = true;
+ }
+ }
+
+ set<string> exactSet;
+ if (considerCoarser) {
+ // Look at the cells that cover us. We want to look at every cell that contains the
+ // covering we would index on if we were to insert the query geometry. We generate
+ // the would-index-with-this-covering and find all the cells strictly containing the
+ // cells in that set, until we hit the coarsest indexed cell. We use equality, not
+ // a prefix match. Why not prefix? Because we've already looked at everything
+ // finer or as fine as our initial covering.
+ //
+ // Say we have a fine point with cell id 212121, we go up one, get 21212, we don't
+ // want to look at cells 21212[not-1] because we know they're not going to intersect
+ // with 212121, but entries inserted with cell value 21212 (no trailing digits) may.
+            // And we've already looked at points with the cell id 212121 from the prefix-range search
+ // created above, so we only want things where the value of the last digit is not
+ // stored (and therefore could be 1).
+ for (size_t i = 0; i < cover.size(); ++i) {
+ for (S2CellId id = cover[i].parent(); id.level() >= coarsestIndexedLevel;
+ id = id.parent()) {
+ exactSet.insert(id.toString());
+ }
+ }
+ }
+
+ // We turned the cell IDs into strings which define point intervals or prefixes of
+ // strings we want to look for.
+ set<string>::iterator exactIt = exactSet.begin();
+ set<string>::iterator intervalIt = intervalSet.begin();
+ while (exactSet.end() != exactIt && intervalSet.end() != intervalIt) {
+ const string& exact = *exactIt;
+ const string& ival = *intervalIt;
+ if (exact < ival) {
+ // add exact
+ oilOut->intervals.push_back(IndexBoundsBuilder::makePointInterval(exact));
+ exactIt++;
+ }
+ else {
+ string end = ival;
+ end[end.size() - 1]++;
+ oilOut->intervals.push_back(
+ IndexBoundsBuilder::makeRangeInterval(ival, end, true, false));
+ intervalIt++;
+ }
+ }
+
+ if (exactSet.end() != exactIt) {
+ verify(intervalSet.end() == intervalIt);
+ do {
+ oilOut->intervals.push_back(IndexBoundsBuilder::makePointInterval(*exactIt));
+ exactIt++;
+ } while (exactSet.end() != exactIt);
+ }
+ else if (intervalSet.end() != intervalIt) {
+ verify(exactSet.end() == exactIt);
+ do {
+ const string& ival = *intervalIt;
+ string end = ival;
+ end[end.size() - 1]++;
+ oilOut->intervals.push_back(
+ IndexBoundsBuilder::makeRangeInterval(ival, end, true, false));
+ intervalIt++;
+ } while (intervalSet.end() != intervalIt);
+ }
+
+ // Make sure that our intervals don't overlap each other and are ordered correctly.
+ // This perhaps should only be done in debug mode.
+ if (!oilOut->isValidFor(1)) {
+ cout << "check your assumptions! OIL = " << oilOut->toString() << endl;
+ verify(0);
+ }
+ }
};
} // namespace mongo
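
For reference, the range intervals built above rely on the fact that a cover cell's id string, used as a prefix, bounds every finer cell indexed beneath it. A standalone sketch of that construction, using plain std::string in place of the IndexBoundsBuilder helpers:

#include <iostream>
#include <string>
#include <utility>

// Given a cover cell id rendered as a string (e.g. "21212"), return the
// half-open range [cell, next) that contains the cell itself and every string
// with the cell as a prefix. Incrementing the last character is the same trick
// as end[end.size() - 1]++ in cover2dsphere.
std::pair<std::string, std::string> coverCellRange(const std::string& cell) {
    std::string end = cell;
    end[end.size() - 1]++;
    return std::make_pair(cell, end);
}

int main() {
    std::pair<std::string, std::string> r = coverCellRange("21212");
    // Prints [21212, 21213): finer cells such as "212121" sort inside this range.
    std::cout << "[" << r.first << ", " << r.second << ")" << std::endl;
    return 0;
}
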
diff --git a/src/mongo/db/index/s2_index_cursor.cpp b/src/mongo/db/index/s2_index_cursor.cpp
index 6b82e331a7b..ca6496ad163 100644
--- a/src/mongo/db/index/s2_index_cursor.cpp
+++ b/src/mongo/db/index/s2_index_cursor.cpp
@@ -1,3 +1,4 @@
+// DEPRECATED
/**
* Copyright (C) 2013 10gen Inc.
*
@@ -65,13 +66,17 @@ namespace mongo {
BSONObj obj = e.Obj();
if (nearQuery.parseFrom(obj)) {
+ if (nearQuery.centroid.crs == FLAT && !nearQuery.isNearSphere) {
+ return Status(ErrorCodes::BadValue, "flat near query on spherical index");
+ }
if (isNearQuery) {
return Status(ErrorCodes::BadValue, "Only one $near clause allowed: " +
position.toString(), 16685);
}
isNearQuery = true;
- if (nearQuery.fromRadians) {
+ // FLAT implies the distances are in radians. Convert to meters.
+ if (FLAT == nearQuery.centroid.crs) {
nearQuery.minDistance *= _params.radius;
nearQuery.maxDistance *= _params.radius;
}
@@ -92,6 +97,10 @@ namespace mongo {
regions.push_back(geoQueryField);
}
+ if (!isNearQuery && regions.size() == 0) {
+ return Status(ErrorCodes::BadValue, "None of index's fields present in seek " + position.toString());
+ }
+
// Remove all the indexed geo regions from the query. The s2*cursor will
// instead create a covering for that key to speed up the search.
//
diff --git a/src/mongo/db/index/s2_index_cursor.h b/src/mongo/db/index/s2_index_cursor.h
index 05af626bb75..2bcccdc3640 100644
--- a/src/mongo/db/index/s2_index_cursor.h
+++ b/src/mongo/db/index/s2_index_cursor.h
@@ -1,3 +1,4 @@
+// DEPRECATED
/**
* Copyright (C) 2013 10gen Inc.
*
diff --git a/src/mongo/db/index/s2_near_cursor.cpp b/src/mongo/db/index/s2_near_cursor.cpp
index 2a1ae8a9eb4..0ee944e86df 100644
--- a/src/mongo/db/index/s2_near_cursor.cpp
+++ b/src/mongo/db/index/s2_near_cursor.cpp
@@ -1,3 +1,4 @@
+// DEPRECATED
/**
* Copyright (C) 2013 10gen Inc.
*
@@ -313,7 +314,7 @@ namespace mongo {
BSONObj obj = oi->Obj();
double dist;
bool ret = S2SearchUtil::distanceBetween(_nearQuery.centroid.point,
- obj, _params, &dist);
+ obj, &dist);
if (!ret) {
warning() << "unknown geometry: " << obj.toString();
dist = numeric_limits<double>::max();
diff --git a/src/mongo/db/index/s2_near_cursor.h b/src/mongo/db/index/s2_near_cursor.h
index 8355557ee3d..e6534237c2f 100644
--- a/src/mongo/db/index/s2_near_cursor.h
+++ b/src/mongo/db/index/s2_near_cursor.h
@@ -1,3 +1,4 @@
+// DEPRECATED
/**
* Copyright (C) 2013 10gen Inc.
*
diff --git a/src/mongo/db/index/s2_simple_cursor.cpp b/src/mongo/db/index/s2_simple_cursor.cpp
index 5931de08105..fff4e216188 100644
--- a/src/mongo/db/index/s2_simple_cursor.cpp
+++ b/src/mongo/db/index/s2_simple_cursor.cpp
@@ -1,3 +1,4 @@
+// DEPRECATED
/**
* Copyright (C) 2013 10gen Inc.
*
diff --git a/src/mongo/db/index/s2_simple_cursor.h b/src/mongo/db/index/s2_simple_cursor.h
index f9c391c7996..cb7d570cd8f 100644
--- a/src/mongo/db/index/s2_simple_cursor.h
+++ b/src/mongo/db/index/s2_simple_cursor.h
@@ -1,3 +1,4 @@
+// DEPRECATED
/**
* Copyright (C) 2013 10gen Inc.
*
diff --git a/src/mongo/db/matcher/expression.h b/src/mongo/db/matcher/expression.h
index de36e5ac45b..67cf6aa1016 100644
--- a/src/mongo/db/matcher/expression.h
+++ b/src/mongo/db/matcher/expression.h
@@ -145,6 +145,9 @@ namespace mongo {
// XXX: document
virtual MatchExpression* shallowClone() const = 0;
+ // XXX document
+ virtual bool equivalent( const MatchExpression* other ) const = 0;
+
//
// Determine if a document satisfies the tree-predicate.
//
@@ -188,7 +191,6 @@ namespace mongo {
virtual string toString() const;
virtual void debugString( StringBuilder& debug, int level = 0 ) const = 0;
- virtual bool equivalent( const MatchExpression* other ) const = 0;
protected:
void _debugAddSpace( StringBuilder& debug, int level ) const;
diff --git a/src/mongo/db/matcher/expression_geo.cpp b/src/mongo/db/matcher/expression_geo.cpp
index 9c5b2b0406f..5c0f5676f41 100644
--- a/src/mongo/db/matcher/expression_geo.cpp
+++ b/src/mongo/db/matcher/expression_geo.cpp
@@ -37,8 +37,10 @@ namespace mongo {
// Geo queries we don't need an index to answer: geoWithin and geoIntersects
//
- Status GeoMatchExpression::init( const StringData& path, const GeoQuery& query ) {
+ Status GeoMatchExpression::init( const StringData& path, const GeoQuery& query,
+ const BSONObj& rawObj ) {
_query = query;
+ _rawObj = rawObj;
return initPath( path );
}
@@ -80,7 +82,10 @@ namespace mongo {
LeafMatchExpression* GeoMatchExpression::shallowClone() const {
GeoMatchExpression* next = new GeoMatchExpression();
- next->init( path(), _query );
+ next->init( path(), _query, _rawObj);
+ if (getTag()) {
+ next->setTag(getTag()->clone());
+ }
return next;
}
@@ -88,8 +93,10 @@ namespace mongo {
// Parse-only geo expressions: geoNear (formerly known as near).
//
- Status GeoNearMatchExpression::init( const StringData& path, const NearQuery& query ) {
+ Status GeoNearMatchExpression::init( const StringData& path, const NearQuery& query,
+ const BSONObj& rawObj ) {
_query = query;
+ _rawObj = rawObj;
return initPath( path );
}
@@ -101,7 +108,7 @@ namespace mongo {
void GeoNearMatchExpression::debugString( StringBuilder& debug, int level ) const {
_debugAddSpace( debug, level );
- debug << "GEONEAR";
+ debug << "GEONEAR raw = " << _rawObj.toString();
MatchExpression::TagData* td = getTag();
if (NULL != td) {
debug << " ";
@@ -126,7 +133,10 @@ namespace mongo {
LeafMatchExpression* GeoNearMatchExpression::shallowClone() const {
GeoNearMatchExpression* next = new GeoNearMatchExpression();
- next->init( path(), _query );
+ next->init( path(), _query, _rawObj );
+ if (getTag()) {
+ next->setTag(getTag()->clone());
+ }
return next;
}
diff --git a/src/mongo/db/matcher/expression_geo.h b/src/mongo/db/matcher/expression_geo.h
index 36568ba8927..c6044cf6f08 100644
--- a/src/mongo/db/matcher/expression_geo.h
+++ b/src/mongo/db/matcher/expression_geo.h
@@ -43,7 +43,7 @@ namespace mongo {
GeoMatchExpression() : LeafMatchExpression( GEO ){}
virtual ~GeoMatchExpression(){}
- Status init( const StringData& path, const GeoQuery& query );
+ Status init( const StringData& path, const GeoQuery& query, const BSONObj& rawObj );
virtual bool matchesSingleElement( const BSONElement& e ) const;
@@ -53,7 +53,11 @@ namespace mongo {
virtual LeafMatchExpression* shallowClone() const;
+ const GeoQuery& getGeoQuery() const { return _query; }
+ const BSONObj getRawObj() const { return _rawObj; }
+
private:
+ BSONObj _rawObj;
GeoQuery _query;
};
@@ -62,7 +66,7 @@ namespace mongo {
GeoNearMatchExpression() : LeafMatchExpression( GEO_NEAR ){}
virtual ~GeoNearMatchExpression(){}
- Status init( const StringData& path, const NearQuery& query );
+ Status init( const StringData& path, const NearQuery& query, const BSONObj& rawObj );
// This shouldn't be called and as such will crash. GeoNear always requires an index.
virtual bool matchesSingleElement( const BSONElement& e ) const;
@@ -74,8 +78,10 @@ namespace mongo {
virtual LeafMatchExpression* shallowClone() const;
const NearQuery& getData() const { return _query; }
+ const BSONObj getRawObj() const { return _rawObj; }
private:
NearQuery _query;
+ BSONObj _rawObj;
};
} // namespace mongo
diff --git a/src/mongo/db/matcher/expression_geo_test.cpp b/src/mongo/db/matcher/expression_geo_test.cpp
index 14cdcd95c8b..86cb7be06e1 100644
--- a/src/mongo/db/matcher/expression_geo_test.cpp
+++ b/src/mongo/db/matcher/expression_geo_test.cpp
@@ -67,7 +67,7 @@ namespace mongo {
ASSERT(gne.init("a", nq).isOK());
// We can't match the data but we can make sure it was parsed OK.
- ASSERT_EQUALS(gne.getData().fromRadians, false);
+ ASSERT_EQUALS(gne.getData().centroid.crs, FLAT);
ASSERT_EQUALS(gne.getData().minDistance, 0);
ASSERT_EQUALS(gne.getData().maxDistance, 100);
}
diff --git a/src/mongo/db/matcher/expression_parser.cpp b/src/mongo/db/matcher/expression_parser.cpp
index 7bafdcc4f98..97b8094c180 100644
--- a/src/mongo/db/matcher/expression_parser.cpp
+++ b/src/mongo/db/matcher/expression_parser.cpp
@@ -217,9 +217,7 @@ namespace mongo {
case BSONObj::opWITHIN:
case BSONObj::opGEO_INTERSECTS:
- case BSONObj::opNEAR:
return expressionParserGeoCallback( name, x, context );
-
}
return StatusWithMatchExpression( ErrorCodes::BadValue,
@@ -334,6 +332,36 @@ namespace mongo {
Status MatchExpressionParser::_parseSub( const char* name,
const BSONObj& sub,
AndMatchExpression* root ) {
+ // The one exception to {field : {fully contained argument} } is, of course, geo. Example:
+ // sub == { field : {$near[Sphere]: [0,0], $maxDistance: 1000, $minDistance: 10 } }
+ // We peek inside of 'sub' to see if it's possibly a $near. If so, we can't iterate over
+ // its subfields and parse them one at a time (there is no $maxDistance without $near), so
+ // we hand the entire object over to the geo parsing routines.
+
+ BSONObjIterator geoIt(sub);
+ if (geoIt.more()) {
+ BSONElement firstElt = geoIt.next();
+ if (firstElt.isABSONObj()) {
+ const char* fieldName = firstElt.fieldName();
+ // TODO: Having these $fields here isn't ideal but we don't want to pull in anything
+ // from db/geo at this point, since it may not actually be linked in...
+ if (mongoutils::str::equals(fieldName, "$near")
+ || mongoutils::str::equals(fieldName, "$nearSphere")
+ || mongoutils::str::equals(fieldName, "$geoNear")
+ || mongoutils::str::equals(fieldName, "$maxDistance")
+ || mongoutils::str::equals(fieldName, "$minDistance")) {
+
+ StatusWithMatchExpression s = expressionParserGeoCallback(name,
+ firstElt.getGtLtOp(),
+ sub);
+ if (s.isOK()) {
+ root->add(s.getValue());
+ return Status::OK();
+ }
+ }
+ }
+ }
+
BSONObjIterator j( sub );
while ( j.more() ) {
BSONElement deep = j.next();
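
To make the hand-off concrete: $maxDistance and $minDistance are modifiers of $near and cannot be parsed in isolation, so the parser peeks at the first field before the usual per-operator loop. A simplified sketch of that peek, with a std::map standing in for the BSON sub-document (illustrative types and names only):

#include <iostream>
#include <map>
#include <string>

// Return true if the first field marks the sub-document as a $near-style query
// that must be handed to the geo parser as a single unit.
bool looksLikeNearQuery(const std::map<std::string, std::string>& sub) {
    static const char* const kNearFields[] = {
        "$near", "$nearSphere", "$geoNear", "$maxDistance", "$minDistance"};
    const int kNumNearFields = 5;
    if (sub.empty()) {
        return false;
    }
    const std::string& first = sub.begin()->first;
    for (int i = 0; i < kNumNearFields; ++i) {
        if (first == kNearFields[i]) {
            return true;
        }
    }
    return false;
}

int main() {
    std::map<std::string, std::string> sub;
    sub["$near"] = "[0, 0]";
    sub["$maxDistance"] = "1000";
    std::cout << (looksLikeNearQuery(sub) ? "hand whole object to geo parser"
                                          : "parse operator by operator")
              << std::endl;
    return 0;
}
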
diff --git a/src/mongo/db/matcher/expression_parser_geo.cpp b/src/mongo/db/matcher/expression_parser_geo.cpp
index c81c014f394..c0051e869a3 100644
--- a/src/mongo/db/matcher/expression_parser_geo.cpp
+++ b/src/mongo/db/matcher/expression_parser_geo.cpp
@@ -42,23 +42,34 @@ namespace mongo {
int type,
const BSONObj& section ) {
if (BSONObj::opWITHIN == type || BSONObj::opGEO_INTERSECTS == type) {
- GeoQuery gq;
+ GeoQuery gq(name);
if ( !gq.parseFrom( section ) )
return StatusWithMatchExpression( ErrorCodes::BadValue, "bad geo query" );
auto_ptr<GeoMatchExpression> e( new GeoMatchExpression() );
- Status s = e->init( name, gq );
+
+ // Until the index layer accepts non-BSON predicates, or special indices are moved into
+ // stages, we have to clean up the raw object so it can be passed down to the index
+ // layer.
+ BSONObjBuilder bob;
+ bob.append(name, section);
+ Status s = e->init( name, gq, bob.obj() );
if ( !s.isOK() )
return StatusWithMatchExpression( s );
return StatusWithMatchExpression( e.release() );
}
else {
- NearQuery nq;
+ verify(BSONObj::opNEAR == type);
+ NearQuery nq(name);
if ( !nq.parseFrom( section ) )
return StatusWithMatchExpression( ErrorCodes::BadValue, "bad geo near query" );
-
auto_ptr<GeoNearMatchExpression> e( new GeoNearMatchExpression() );
- Status s = e->init( name, nq );
+ // Until the index layer accepts non-BSON predicates, or special indices are moved into
+ // stages, we have to clean up the raw object so it can be passed down to the index
+ // layer.
+ BSONObjBuilder bob;
+ bob.append(name, section);
+ Status s = e->init( name, nq, bob.obj() );
if ( !s.isOK() )
return StatusWithMatchExpression( s );
return StatusWithMatchExpression( e.release() );
diff --git a/src/mongo/db/query/canonical_query.cpp b/src/mongo/db/query/canonical_query.cpp
index caff54713b6..623e37de4b6 100644
--- a/src/mongo/db/query/canonical_query.cpp
+++ b/src/mongo/db/query/canonical_query.cpp
@@ -123,10 +123,7 @@ namespace mongo {
}
// TODO: Should this really live in the parsing? Or elsewhere?
- Status isValid(MatchExpression* root) {
- // XXX: There can only be one NEAR. If there is a NEAR, it must be either the root or the
- // root must be an AND and its child must be a NEAR.
-
+ Status argsValid(MatchExpression* root) {
MatchExpression::MatchType type = root->matchType();
if (MatchExpression::GT == type || MatchExpression::GTE == type
@@ -135,12 +132,13 @@ namespace mongo {
ComparisonMatchExpression* cme = static_cast<ComparisonMatchExpression*>(root);
BSONElement data = cme->getData();
if (RegEx == data.type()) {
- return Status(ErrorCodes::BadValue, "Can't have RegEx as arg to pred " + cme->toString());
+ return Status(ErrorCodes::BadValue,
+ "Can't have RegEx as arg to pred " + cme->toString());
}
}
for (size_t i = 0; i < root->numChildren(); ++i) {
- Status s = isValid(root->getChild(i));
+ Status s = argsValid(root->getChild(i));
if (!s.isOK()) {
return s;
}
@@ -149,6 +147,53 @@ namespace mongo {
return Status::OK();
}
+ size_t countNodes(MatchExpression* root, MatchExpression::MatchType type) {
+ size_t sum = 0;
+ if (type == root->matchType()) {
+ sum = 1;
+ }
+ for (size_t i = 0; i < root->numChildren(); ++i) {
+ sum += countNodes(root->getChild(i), type);
+ }
+ return sum;
+ }
+
+ // TODO: Move this to query_validator.cpp
+ Status isValid(MatchExpression* root) {
+ // TODO: This should really be done as part of type checking in the parser.
+ Status argStatus = argsValid(root);
+ if (!argStatus.isOK()) {
+ return argStatus;
+ }
+
+ // Analysis below should be done after squashing the tree to make it clearer.
+
+ // There can only be one NEAR. If there is a NEAR, it must be either the root or the root
+ // must be an AND and its child must be a NEAR.
+ size_t numGeoNear = countNodes(root, MatchExpression::GEO_NEAR);
+
+ if (0 == numGeoNear) {
+ return Status::OK();
+ }
+
+ if (numGeoNear > 1) {
+ return Status(ErrorCodes::BadValue, "Too many geoNear expressions");
+ }
+
+ if (MatchExpression::GEO_NEAR == root->matchType()) {
+ return Status::OK();
+ }
+ else if (MatchExpression::AND == root->matchType()) {
+ for (size_t i = 0; i < root->numChildren(); ++i) {
+ if (MatchExpression::GEO_NEAR == root->getChild(i)->matchType()) {
+ return Status::OK();
+ }
+ }
+ }
+
+ return Status(ErrorCodes::BadValue, "geoNear must be top-level expr");
+ }
+
Status CanonicalQuery::init(LiteParsedQuery* lpq) {
_pq.reset(lpq);
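
The placement rule enforced above is small enough to restate in isolation: zero GEO_NEAR nodes are fine, more than one is rejected, and a single GEO_NEAR must be the root or a direct child of a root-level AND. A toy sketch of that walk (the node types here are stand-ins, not the MatchExpression hierarchy):

#include <iostream>
#include <vector>

enum NodeType { AND_NODE, GEO_NEAR_NODE, OTHER_NODE };

struct Node {
    NodeType type;
    std::vector<Node*> children;
    explicit Node(NodeType t) : type(t) {}
};

int countNodes(const Node* root, NodeType type) {
    int sum = (root->type == type) ? 1 : 0;
    for (std::vector<Node*>::size_type i = 0; i < root->children.size(); ++i) {
        sum += countNodes(root->children[i], type);
    }
    return sum;
}

// Zero GEO_NEARs is fine, more than one is rejected, and a single GEO_NEAR must
// be the root itself or a direct child of a root-level AND.
bool geoNearPlacementOk(const Node* root) {
    int n = countNodes(root, GEO_NEAR_NODE);
    if (n == 0) { return true; }
    if (n > 1) { return false; }
    if (root->type == GEO_NEAR_NODE) { return true; }
    if (root->type == AND_NODE) {
        for (std::vector<Node*>::size_type i = 0; i < root->children.size(); ++i) {
            if (root->children[i]->type == GEO_NEAR_NODE) { return true; }
        }
    }
    return false;
}

int main() {
    // AND(geoNear, other) satisfies the rule; a GEO_NEAR nested any deeper does not.
    Node root(AND_NODE), nearNode(GEO_NEAR_NODE), other(OTHER_NODE);
    root.children.push_back(&nearNode);
    root.children.push_back(&other);
    std::cout << (geoNearPlacementOk(&root) ? "valid" : "invalid") << std::endl;
    return 0;
}
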
diff --git a/src/mongo/db/query/explain_plan.cpp b/src/mongo/db/query/explain_plan.cpp
index 4626801879e..42cc3dcecd6 100644
--- a/src/mongo/db/query/explain_plan.cpp
+++ b/src/mongo/db/query/explain_plan.cpp
@@ -81,7 +81,9 @@ namespace mongo {
// number of keys that cursor retrieved, and into the stage's stats 'advanced' for
// nscannedObjects', which would be the number of keys that survived the IXSCAN
// filter. Those keys would have been FETCH-ed, if a fetch is present.
- if (leaf->stageType == STAGE_COLLSCAN) {
+ //
+ // TODO: This is kind of a lie for STAGE_GEO_NEAR_2DSPHERE.
+ if (leaf->stageType == STAGE_COLLSCAN || leaf->stageType == STAGE_GEO_NEAR_2DSPHERE) {
res->setCursor("BasicCursor");
res->setNScanned(leaf->common.advanced);
res->setNScannedObjects(leaf->common.advanced);
diff --git a/src/mongo/db/query/index_bounds.cpp b/src/mongo/db/query/index_bounds.cpp
index 3e90098a2fd..6177e7d5b61 100644
--- a/src/mongo/db/query/index_bounds.cpp
+++ b/src/mongo/db/query/index_bounds.cpp
@@ -63,33 +63,41 @@ namespace mongo {
}
}
- string IndexBounds::toString() const {
+ string OrderedIntervalList::toString() const {
stringstream ss;
- for (size_t i = 0; i < fields.size(); ++i) {
- if (i > 0) {
+ ss << "['" << name << "']: ";
+ for (size_t j = 0; j < intervals.size(); ++j) {
+ ss << intervals[j].toString();
+ if (j < intervals.size() - 1) {
ss << ", ";
}
- const OrderedIntervalList& oil = fields[i];
- ss << "field #" << i << "['" << oil.name << "']: ";
- for (size_t j = 0; j < oil.intervals.size(); ++j) {
- const Interval& iv = oil.intervals[j];
- if (iv.startInclusive) {
- ss << "[";
- }
- else {
- ss << "(";
- }
- // false means omit the field name
- ss << iv.start.toString(false);
- ss << ", ";
- ss << iv.end.toString(false);
- if (iv.endInclusive) {
+ }
+ return ss.str();
+ }
+
+ string IndexBounds::toString() const {
+ stringstream ss;
+ if (isSimpleRange) {
+ ss << "[" << startKey.toString() << ", ";
+ if (endKey.isEmpty()) {
+ ss << "]";
+ }
+ else {
+ ss << endKey.toString();
+ if (endKeyInclusive) {
ss << "]";
}
else {
ss << ")";
}
}
+ return ss.str();
+ }
+ for (size_t i = 0; i < fields.size(); ++i) {
+ if (i > 0) {
+ ss << ", ";
+ }
+ ss << "field #" << i << fields[i].toString();
}
return ss.str();
@@ -122,6 +130,37 @@ namespace mongo {
// Validity checking for bounds
//
+ bool OrderedIntervalList::isValidFor(int expectedOrientation) const {
+ // Make sure each interval's start is oriented correctly with respect to its end.
+ for (size_t j = 0; j < intervals.size(); ++j) {
+ // false means don't consider field name.
+ int cmp = sgn(intervals[j].end.woCompare(intervals[j].start, false));
+
+ if (cmp == 0 && intervals[j].startInclusive
+ && intervals[j].endInclusive) { continue; }
+
+ if (cmp != expectedOrientation) {
+ log() << "interval " << intervals[j].toString() << " internally inconsistent";
+ return false;
+ }
+ }
+
+ // Make sure each interval is oriented correctly with respect to its neighbors.
+ for (size_t j = 1; j < intervals.size(); ++j) {
+ int cmp = sgn(intervals[j].start.woCompare(intervals[j - 1].end, false));
+
+ // TODO: We could care if the end of one interval is the start of another. The bounds
+ // are still valid but they're a bit sloppy; they could have been combined to form one
+ // interval if either of them is inclusive.
+ if (0 == cmp) { continue; }
+
+ if (cmp != expectedOrientation) {
+ return false;
+ }
+ }
+ return true;
+ }
+
bool IndexBounds::isValidFor(const BSONObj& keyPattern, int direction) {
BSONObjIterator it(keyPattern);
@@ -139,33 +178,8 @@ namespace mongo {
// not a number. Special indices are strings, not numbers.
int expectedOrientation = direction * ((elt.number() >= 0) ? 1 : -1);
- // Make sure each interval's start is oriented correctly with respect to its end.
- for (size_t j = 0; j < field.intervals.size(); ++j) {
- // false means don't consider field name.
- int cmp = sgn(field.intervals[j].end.woCompare(field.intervals[j].start, false));
-
- if (cmp == 0 && field.intervals[j].startInclusive
- && field.intervals[j].endInclusive) { continue; }
-
- if (cmp != expectedOrientation) { return false; }
- }
-
- // Make sure each interval is oriented correctly with respect to its neighbors.
- for (size_t j = 1; j < field.intervals.size(); ++j) {
- int cmp = sgn(field.intervals[j].start.woCompare(field.intervals[j - 1].end,
- false));
-
- if (cmp == 0) {
- // The end of one interval is the start of another. This is only valid if
- // they're both open intervals. Otherwise, it should have been combined to form
- // one interval.
- if (field.intervals[j].startInclusive || field.intervals[j - 1].endInclusive) {
- return false;
- }
- }
- else if (cmp != expectedOrientation) {
- return false;
- }
+ if (!field.isValidFor(expectedOrientation)) {
+ return false;
}
}
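
The validity check factored into OrderedIntervalList::isValidFor above boils down to two orientation tests against the scan direction. A reduced sketch over plain numeric intervals, where +1 means an ascending scan and -1 descending (simplified: the real code compares BSONElements rather than doubles):

#include <iostream>
#include <vector>

struct Iv {
    double start;
    double end;
    bool startInclusive;
    bool endInclusive;
};

int sgn(double x) { return (x > 0) - (x < 0); }

// expectedOrientation is +1 for an ascending index scan, -1 for descending.
bool isValidForSketch(const std::vector<Iv>& ivs, int expectedOrientation) {
    for (std::vector<Iv>::size_type j = 0; j < ivs.size(); ++j) {
        int cmp = sgn(ivs[j].end - ivs[j].start);
        // A point interval like [x, x] is fine in either direction.
        if (cmp == 0 && ivs[j].startInclusive && ivs[j].endInclusive) { continue; }
        if (cmp != expectedOrientation) { return false; }
    }
    for (std::vector<Iv>::size_type j = 1; j < ivs.size(); ++j) {
        int cmp = sgn(ivs[j].start - ivs[j - 1].end);
        // Touching neighbors are tolerated (a bit sloppy, as the TODO above notes).
        if (cmp == 0) { continue; }
        if (cmp != expectedOrientation) { return false; }
    }
    return true;
}

int main() {
    std::vector<Iv> ivs;
    Iv a = {1, 3, true, false};
    Iv b = {5, 9, true, true};
    ivs.push_back(a);
    ivs.push_back(b);
    std::cout << (isValidForSketch(ivs, 1) ? "valid for ascending scan" : "invalid")
              << std::endl;
    return 0;
}
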
diff --git a/src/mongo/db/query/index_bounds.h b/src/mongo/db/query/index_bounds.h
index 58ca8f0570c..81aa7b008d2 100644
--- a/src/mongo/db/query/index_bounds.h
+++ b/src/mongo/db/query/index_bounds.h
@@ -48,6 +48,9 @@ namespace mongo {
// TODO: We could drop this. Only used in IndexBounds::isValidFor.
string name;
+
+ bool isValidFor(int expectedOrientation) const;
+ std::string toString() const;
};
/**
diff --git a/src/mongo/db/query/index_bounds_builder.cpp b/src/mongo/db/query/index_bounds_builder.cpp
index 0cb6e6ef382..2e0ac5cd6eb 100644
--- a/src/mongo/db/query/index_bounds_builder.cpp
+++ b/src/mongo/db/query/index_bounds_builder.cpp
@@ -27,9 +27,16 @@
*/
#include "mongo/db/query/index_bounds_builder.h"
-#include "mongo/db/query/indexability.h"
+
+#include "mongo/db/geo/geoconstants.h"
+#include "mongo/db/geo/s2common.h"
#include "mongo/db/index/expression_index.h"
+#include "mongo/db/matcher/expression_geo.h"
+#include "mongo/db/query/indexability.h"
#include "mongo/util/mongoutils/str.h"
+#include "third_party/s2/s2.h"
+#include "third_party/s2/s2cell.h"
+#include "third_party/s2/s2regioncoverer.h"
namespace mongo {
@@ -198,8 +205,24 @@ namespace mongo {
interval = allValues();
exact = false;
}
+ else if (MatchExpression::GEO == expr->matchType()) {
+ const GeoMatchExpression* gme = static_cast<const GeoMatchExpression*>(expr);
+ // Can only do this for 2dsphere.
+ if (!mongoutils::str::equals("2dsphere", elt.valuestrsafe())) {
+ warning() << "Planner error trying to build geo bounds for " << elt.toString()
+ << " index element.";
+ verify(0);
+ }
+
+ const S2Region& region = gme->getGeoQuery().getRegion();
+ ExpressionMapping::cover2dsphere(region, oilOut);
+ *exactOut = false;
+ // XXX: restructure this method
+ return;
+ }
else {
- warning() << "Planner error, trying to build bounds for expr " << expr->toString() << endl;
+ warning() << "Planner error, trying to build bounds for expr "
+ << expr->toString() << endl;
verify(0);
}
@@ -227,6 +250,15 @@ namespace mongo {
}
// static
+ Interval IndexBoundsBuilder::makeRangeInterval(const string& start, const string& end,
+ bool startInclusive, bool endInclusive) {
+ BSONObjBuilder bob;
+ bob.append("", start);
+ bob.append("", end);
+ return makeRangeInterval(bob.obj(), startInclusive, endInclusive);
+ }
+
+ // static
Interval IndexBoundsBuilder::makePointInterval(const BSONObj& obj) {
Interval ret;
ret._intervalData = obj;
@@ -236,6 +268,13 @@ namespace mongo {
}
// static
+ Interval IndexBoundsBuilder::makePointInterval(const string& str) {
+ BSONObjBuilder bob;
+ bob.append("", str);
+ return makePointInterval(bob.obj());
+ }
+
+ // static
BSONObj IndexBoundsBuilder::objFromElement(const BSONElement& elt) {
BSONObjBuilder bob;
bob.append(elt);
diff --git a/src/mongo/db/query/index_bounds_builder.h b/src/mongo/db/query/index_bounds_builder.h
index 656559515d6..7c96cab676a 100644
--- a/src/mongo/db/query/index_bounds_builder.h
+++ b/src/mongo/db/query/index_bounds_builder.h
@@ -59,6 +59,8 @@ namespace mongo {
OrderedIntervalList* oilOut, bool* exactOut);
private:
+ friend class ExpressionMapping;
+
/**
* Make a range interval from the provided object.
* The object must have exactly two fields. The first field is the start, the second the
@@ -68,12 +70,15 @@ namespace mongo {
*/
static Interval makeRangeInterval(const BSONObj& obj, bool startInclusive,
bool endInclusive);
+ static Interval makeRangeInterval(const string& start, const string& end,
+ bool startInclusive, bool endInclusive);
/**
* Make a point interval from the provided object.
* The object must have exactly one field which is the value of the point interval.
*/
static Interval makePointInterval(const BSONObj& obj);
+ static Interval makePointInterval(const string& str);
/**
* Since we have no BSONValue we must make an object that's a copy of a piece of another
diff --git a/src/mongo/db/query/index_tag.cpp b/src/mongo/db/query/index_tag.cpp
index f51bf8d9015..5b7d89ebdfb 100644
--- a/src/mongo/db/query/index_tag.cpp
+++ b/src/mongo/db/query/index_tag.cpp
@@ -23,6 +23,8 @@
namespace mongo {
+ // TODO: Move out of the enumerator and into the planner.
+
const size_t IndexTag::kNoIndex = std::numeric_limits<size_t>::max();
void tagForSort(MatchExpression* tree) {
@@ -57,6 +59,14 @@ namespace mongo {
return lhsValue < rhsValue;
}
+ // Next, order so that if there's a GEO_NEAR it's first.
+ if (MatchExpression::GEO_NEAR == lhs->matchType()) {
+ return true;
+ }
+ else if (MatchExpression::GEO_NEAR == rhs->matchType()) {
+ return false;
+ }
+
// Next, order so that the first field of a compound index appears first.
if (lhsPos != rhsPos) {
return lhsPos < rhsPos;
diff --git a/src/mongo/db/query/interval.h b/src/mongo/db/query/interval.h
index f726748773b..26dff5b2bcd 100644
--- a/src/mongo/db/query/interval.h
+++ b/src/mongo/db/query/interval.h
@@ -39,6 +39,27 @@ namespace mongo {
/** Creates an empty interval */
Interval();
+ string toString() const {
+ stringstream ss;
+ if (startInclusive) {
+ ss << "[";
+ }
+ else {
+ ss << "(";
+ }
+ // false means omit the field name
+ ss << start.toString(false);
+ ss << ", ";
+ ss << end.toString(false);
+ if (endInclusive) {
+ ss << "]";
+ }
+ else {
+ ss << ")";
+ }
+ return ss.str();
+ }
+
/**
* Creates an interval that starts at the first field of 'base' and ends at the second
* field of 'base'. (In other words, 'base' is a bsonobj with at least two elements, of
diff --git a/src/mongo/db/query/new_find.cpp b/src/mongo/db/query/new_find.cpp
index a3d6fce6afe..72a49983997 100644
--- a/src/mongo/db/query/new_find.cpp
+++ b/src/mongo/db/query/new_find.cpp
@@ -552,9 +552,17 @@ namespace mongo {
if (isExplain) {
TypeExplain* bareExplain;
Status res = runner->getExplainPlan(&bareExplain);
+
if (!res.isOK()) {
error() << "could not produce explain of query '" << pq.getFilter()
<< "', error: " << res.reason();
+ // If numResults and the data in bb don't correspond, we'll crash later when rooting
+ // through the reply msg.
+ BSONObj emptyObj;
+ bb.appendBuf((void*)emptyObj.objdata(), emptyObj.objsize());
+ // The explain output is actually a result.
+ numResults = 1;
+ // TODO: we can fill out millis etc. here just fine even if the plan screwed up.
}
else {
boost::scoped_ptr<TypeExplain> explain(bareExplain);
diff --git a/src/mongo/db/query/parsed_projection.h b/src/mongo/db/query/parsed_projection.h
index 6f16cfe3697..883320de631 100644
--- a/src/mongo/db/query/parsed_projection.h
+++ b/src/mongo/db/query/parsed_projection.h
@@ -83,17 +83,10 @@ namespace mongo {
bool requiresDocument() const {
// If you're excluding fields, you must have something to exclude them from.
- if (_excludedFields.size() > 0) {
- verify(0 == _includedFields.size());
- return true;
- }
-
- // If you're including fields, they could come from an index.
- return false;
+ return _excludedFields.size() > 0;
}
virtual const vector<string>& requiredFields() const {
- verify(0 == _excludedFields.size());
return _includedFields;
}
diff --git a/src/mongo/db/query/plan_enumerator.cpp b/src/mongo/db/query/plan_enumerator.cpp
index 4a7d058b979..62626eec11b 100644
--- a/src/mongo/db/query/plan_enumerator.cpp
+++ b/src/mongo/db/query/plan_enumerator.cpp
@@ -310,6 +310,14 @@ namespace mongo {
// To be exhaustive, we would compute all solutions of size 1, 2, ...,
// node->numChildren(). Each of these solutions would get a place in the
// memo structure.
+
+            // If there is a geoNear, we put it at the start of our options to ensure that, even
+            // if we enumerate only one plan, that plan uses the geoNear's index.
+ //
+ // XXX: This is a crappy substitute for something smarter, namely always including
+ // geoNear in every possible selection inside of an AND. Revisit when we enum
+ // more than one plan.
+ size_t geoNearChild = IndexTag::kNoIndex;
// For efficiency concerns, we don't explore any more than the size-1 members
// of the power set. That is, we will only use one index at a time.
@@ -319,11 +327,28 @@ namespace mongo {
// indices.
if (prepMemo(node->getChild(i))) {
vector<size_t> option;
- option.push_back(_nodeToId[node->getChild(i)]);
+ size_t childID = _nodeToId[node->getChild(i)];
+ option.push_back(childID);
andSolution->subnodes.push_back(option);
+
+ // Fill out geoNearChild, possibly.
+ // TODO: Actually rank enumeration aside from "always pick GEO_NEAR".
+ if (NULL != _memo[childID]) {
+ NodeSolution* ns = _memo[childID];
+ if (NULL != ns->pred) {
+ verify(NULL != ns->pred->expr);
+ if (MatchExpression::GEO_NEAR == ns->pred->expr->matchType()) {
+ geoNearChild = andSolution->subnodes.size() - 1;
+ }
+ }
+ }
}
}
+ if (IndexTag::kNoIndex != geoNearChild && (0 != geoNearChild)) {
+ andSolution->subnodes[0].swap(andSolution->subnodes[geoNearChild]);
+ }
+
size_t myID = _inOrderCount++;
_nodeToId[node] = myID;
NodeSolution* soln = new NodeSolution();
diff --git a/src/mongo/db/query/projection_parser.cpp b/src/mongo/db/query/projection_parser.cpp
index 0388928cb12..4af01732e37 100644
--- a/src/mongo/db/query/projection_parser.cpp
+++ b/src/mongo/db/query/projection_parser.cpp
@@ -69,7 +69,7 @@ namespace mongo {
}
}
- if (qp->_includedFields.size() > 0 && qp->_includeID) {
+ if (qp->_includeID) {
qp->_includedFields.push_back(string("_id"));
}
diff --git a/src/mongo/db/query/query_planner.cpp b/src/mongo/db/query/query_planner.cpp
index e78477e4f94..7a4b5b50845 100644
--- a/src/mongo/db/query/query_planner.cpp
+++ b/src/mongo/db/query/query_planner.cpp
@@ -36,6 +36,7 @@
// For QueryOption_foobar
#include "mongo/client/dbclientinterface.h"
#include "mongo/db/matcher/expression_array.h"
+#include "mongo/db/matcher/expression_geo.h"
#include "mongo/db/matcher/expression_parser.h"
#include "mongo/db/query/canonical_query.h"
#include "mongo/db/query/index_bounds_builder.h"
@@ -89,7 +90,8 @@ namespace mongo {
}
// static
- bool QueryPlanner::compatible(const BSONElement& elt, const IndexEntry& index, MatchExpression* node) {
+ bool QueryPlanner::compatible(const BSONElement& elt, const IndexEntry& index,
+ MatchExpression* node) {
// XXX: CatalogHack::getAccessMethodName: do we have to worry about this? when?
string ixtype;
if (String != elt.type()) {
@@ -101,26 +103,52 @@ namespace mongo {
// we know elt.fieldname() == node->path()
+ // XXX: Our predicate may be normal and it may be over a geo index (compounding). Detect
+ // this at some point.
+
MatchExpression::MatchType exprtype = node->matchType();
// TODO: use indexnames
if ("" == ixtype) {
- // TODO: MatchExpression::TEXT, when it exists.
return exprtype != MatchExpression::GEO && exprtype != MatchExpression::GEO_NEAR;
}
else if ("hashed" == ixtype) {
- // TODO: Any others?
return exprtype == MatchExpression::MATCH_IN || exprtype == MatchExpression::EQ;
}
else if ("2dsphere" == ixtype) {
- // TODO Geo issues: Parsing is very well encapsulated in GeoQuery for 2dsphere right now
- // but for 2d it's not really parsed in the tree. Needs auditing.
+ if (exprtype == MatchExpression::GEO) {
+ // within or intersect.
+ GeoMatchExpression* gme = static_cast<GeoMatchExpression*>(node);
+ const GeoQuery& gq = gme->getGeoQuery();
+ const GeometryContainer& gc = gq.getGeometry();
+ return gc.hasS2Region();
+ }
+ else if (exprtype == MatchExpression::GEO_NEAR) {
+ GeoNearMatchExpression* gnme = static_cast<GeoNearMatchExpression*>(node);
+ // Make sure the near query is compatible with 2dsphere.
+ if (gnme->getData().centroid.crs == SPHERE || gnme->getData().isNearSphere) {
+ return true;
+ }
+ }
return false;
}
else if ("2d" == ixtype) {
- // TODO : Geo issues: see db/index_selection.cpp. I don't think 2d is parsed. Perhaps
- // we can parse it as a blob with the verification done ala index_selection.cpp?
- // Really, we should fully validate the syntax.
+ if (exprtype == MatchExpression::GEO) {
+ // 2d only supports within.
+ GeoMatchExpression* gme = static_cast<GeoMatchExpression*>(node);
+ const GeoQuery& gq = gme->getGeoQuery();
+ if (GeoQuery::WITHIN != gq.getPred()) {
+ return false;
+ }
+ const GeometryContainer& gc = gq.getGeometry();
+ return gc.hasFlatRegion();
+ }
+ else if (exprtype == MatchExpression::GEO_NEAR) {
+ GeoNearMatchExpression* gnme = static_cast<GeoNearMatchExpression*>(node);
+ if (gnme->getData().centroid.crs == FLAT) {
+ return true;
+ }
+ }
return false;
}
else if ("text" == ixtype || "_fts" == ixtype || "geoHaystack" == ixtype) {
@@ -203,70 +231,197 @@ namespace mongo {
}
// static
- IndexScanNode* QueryPlanner::makeIndexScan(const BSONObj& indexKeyPattern,
- MatchExpression* expr,
- bool* exact) {
- cout << "making ixscan for " << expr->toString() << endl;
-
- // Note that indexKeyPattern.firstElement().fieldName() may not equal expr->path() because
- // expr might be inside an array operator that provides a path prefix.
- IndexScanNode* isn = new IndexScanNode();
- isn->indexKeyPattern = indexKeyPattern;
- isn->bounds.fields.resize(indexKeyPattern.nFields());
- if (MatchExpression::ELEM_MATCH_VALUE == expr->matchType()) {
- // Root is tagged with an index. We have predicates over root's path. Pick one
- // to define the bounds.
-
- // TODO: We could/should merge the bounds (possibly subject to various multikey
- // etc. restrictions). For now we don't bother.
- IndexBoundsBuilder::translate(expr->getChild(0), indexKeyPattern.firstElement(),
- &isn->bounds.fields[0], exact);
- // TODO: I think this is valid but double check.
- *exact = false;
+ QuerySolutionNode* QueryPlanner::makeLeafNode(const BSONObj& indexKeyPattern,
+ MatchExpression* expr,
+ bool* exact) {
+ cout << "making leaf node for " << expr->toString() << endl;
+ // We're guaranteed that all GEO_NEARs are first. This slightly violates the "sort index
+ // predicates by their position in the compound index" rule but GEO_NEAR isn't an ixscan.
+ // This saves our bacon when we have {foo: 1, bar: "2dsphere"} and the predicate on bar is a
+ // $near. If we didn't get the GEO_NEAR first we'd create an IndexScanNode and later cast
+        // it to a GeoNear2DSphereNode.
+ //
+ // This should gracefully deal with the case where we have a pred over foo but no geo clause
+ // over bar. In that case there is no GEO_NEAR to appear first and it's treated like a
+ // straight ixscan.
+ BSONElement elt = indexKeyPattern.firstElement();
+ bool indexIs2D = (String == elt.type() && "2d" == elt.String());
+
+ if (MatchExpression::GEO_NEAR == expr->matchType()) {
+ // We must not keep the expression node around.
+ *exact = true;
+ GeoNearMatchExpression* near = static_cast<GeoNearMatchExpression*>(expr);
+ if (indexIs2D) {
+ GeoNear2DNode* ret = new GeoNear2DNode();
+ ret->indexKeyPattern = indexKeyPattern;
+ ret->seek = near->getRawObj();
+ return ret;
+ }
+ else {
+ // Note that even if we're starting a GeoNear node, we may never get a
+ // predicate for $near. So, in that case, the builder "collapses" it into
+ // an ixscan.
+ cout << "Making geonear 2dblahblah kp " << indexKeyPattern.toString() << endl;
+ GeoNear2DSphereNode* ret = new GeoNear2DSphereNode();
+ ret->indexKeyPattern = indexKeyPattern;
+ ret->nq = near->getData();
+ ret->baseBounds.fields.resize(indexKeyPattern.nFields());
+ stringstream ss;
+ ret->appendToString(&ss, 0);
+ cout << "geonear 2dsphere out " << ss.str() << endl;
+ return ret;
+ }
}
- else {
- IndexBoundsBuilder::translate(expr, indexKeyPattern.firstElement(),
- &isn->bounds.fields[0], exact);
+ else if (indexIs2D) {
+ // We must not keep the expression node around.
+ *exact = true;
+ verify(MatchExpression::GEO == expr->matchType());
+ GeoMatchExpression* near = static_cast<GeoMatchExpression*>(expr);
+ verify(indexIs2D);
+ Geo2DNode* ret = new Geo2DNode();
+ ret->indexKeyPattern = indexKeyPattern;
+ ret->seek = near->getRawObj();
+ return ret;
}
+ else {
+ cout << "making ixscan for " << expr->toString() << endl;
- cout << "bounds are " << isn->bounds.toString() << " exact " << *exact << endl;
+ // Note that indexKeyPattern.firstElement().fieldName() may not equal expr->path()
+ // because expr might be inside an array operator that provides a path prefix.
+ IndexScanNode* isn = new IndexScanNode();
+ isn->indexKeyPattern = indexKeyPattern;
+ isn->bounds.fields.resize(indexKeyPattern.nFields());
+
+ // XXX XXX: move this if inside of the bounds builder.
+ if (MatchExpression::ELEM_MATCH_VALUE == expr->matchType()) {
+ // Root is tagged with an index. We have predicates over root's path. Pick one
+ // to define the bounds.
+
+ // TODO: We could/should merge the bounds (possibly subject to various multikey
+ // etc. restrictions). For now we don't bother.
+ IndexBoundsBuilder::translate(expr->getChild(0), indexKeyPattern.firstElement(),
+ &isn->bounds.fields[0], exact);
+ // TODO: I think this is valid but double check.
+ *exact = false;
+ }
+ else {
+ IndexBoundsBuilder::translate(expr, indexKeyPattern.firstElement(),
+ &isn->bounds.fields[0], exact);
+ }
- return isn;
+ cout << "bounds are " << isn->bounds.toString() << " exact " << *exact << endl;
+ return isn;
+ }
}
- // static
- void QueryPlanner::finishIndexScan(IndexScanNode* scan, const BSONObj& indexKeyPattern) {
- // Find the first field in the scan's bounds that was not filled out.
- size_t firstEmptyField = 0;
- for (firstEmptyField = 0; firstEmptyField < scan->bounds.fields.size(); ++firstEmptyField) {
- if ("" == scan->bounds.fields[firstEmptyField].name) {
- verify(scan->bounds.fields[firstEmptyField].intervals.empty());
- break;
- }
+ void QueryPlanner::mergeWithLeafNode(MatchExpression* expr, const BSONObj& keyPattern,
+ size_t pos, bool* exactOut, QuerySolutionNode* node) {
+ const StageType type = node->getType();
+
+ if (STAGE_GEO_2D == type || STAGE_GEO_NEAR_2D == type) {
+ warning() << "About to screw up 2d geo compound";
+ // This keeps the pred as a matcher but the limit argument to 2d indices applies
+ // post-match so this is wrong.
+ *exactOut = false;
}
+ else {
+ IndexBounds* boundsToFillOut = NULL;
- // All fields are filled out with bounds, nothing to do.
- if (firstEmptyField == scan->bounds.fields.size()) { return; }
+ if (STAGE_GEO_NEAR_2DSPHERE == type) {
+ GeoNear2DSphereNode* gn = static_cast<GeoNear2DSphereNode*>(node);
+ boundsToFillOut = &gn->baseBounds;
+ cout << "YO it's a geo near node\n";
+ }
+ else {
+ verify(type == STAGE_IXSCAN);
+ IndexScanNode* scan = static_cast<IndexScanNode*>(node);
+ boundsToFillOut = &scan->bounds;
+ cout << "YO it's an ixscan node\n";
+ }
- // Skip ahead to the firstEmptyField-th element, where we begin filling in bounds.
- BSONObjIterator it(indexKeyPattern);
- for (size_t i = 0; i < firstEmptyField; ++i) {
- verify(it.more());
- it.next();
+ cout << "pos is " << pos << endl;
+
+ // Get the ixtag->pos-th element of the index key pattern.
+ // TODO: cache this instead/with ixtag->pos?
+ BSONObjIterator it(keyPattern);
+ BSONElement keyElt = it.next();
+ for (size_t i = 0; i < pos; ++i) {
+ verify(it.more());
+ keyElt = it.next();
+ }
+ verify(!keyElt.eoo());
+ *exactOut = false;
+
+ //cout << "current bounds are " << currentScan->bounds.toString() << endl;
+ //cout << "node merging in " << child->toString() << endl;
+ //cout << "merging with field " << keyElt.toString(true, true) << endl;
+ //cout << "taking advantage of compound index "
+ //<< indices[currentIndexNumber].keyPattern.toString() << endl;
+
+ verify(boundsToFillOut->fields.size() > pos);
+ verify(boundsToFillOut->fields[pos].name.empty());
+ IndexBoundsBuilder::translate(expr, keyElt, &boundsToFillOut->fields[pos], exactOut);
}
+ }
- // For each field in the key...
- while (it.more()) {
- // Be extra sure there's no data there.
- verify("" == scan->bounds.fields[firstEmptyField].name);
- verify(scan->bounds.fields[firstEmptyField].intervals.empty());
- // ...build the "all values" interval.
- IndexBoundsBuilder::allValuesForField(it.next(), &scan->bounds.fields[firstEmptyField]);
- ++firstEmptyField;
+ // static
+ void QueryPlanner::finishLeafNode(QuerySolutionNode* node, const BSONObj& indexKeyPattern) {
+ const StageType type = node->getType();
+
+ if (STAGE_GEO_2D == type || STAGE_GEO_NEAR_2D == type) {
+ // XXX: do we do anything here?
+ return;
}
+ else {
+ IndexBounds* bounds = NULL;
- // Make sure that the length of the key is the length of the bounds we started.
- verify(firstEmptyField == scan->bounds.fields.size());
+ if (STAGE_GEO_NEAR_2DSPHERE == type) {
+ GeoNear2DSphereNode* gnode = static_cast<GeoNear2DSphereNode*>(node);
+ bounds = &gnode->baseBounds;
+ }
+ else {
+ verify(type == STAGE_IXSCAN);
+ IndexScanNode* scan = static_cast<IndexScanNode*>(node);
+ bounds = &scan->bounds;
+ }
+
+ // XXX: this currently fills out minkey/maxkey bounds for near queries, fix that. just
+ // set the field name of the near query field when starting a near scan.
+
+ // Find the first field in the scan's bounds that was not filled out.
+ // TODO: could cache this.
+ size_t firstEmptyField = 0;
+ for (firstEmptyField = 0; firstEmptyField < bounds->fields.size(); ++firstEmptyField) {
+ if ("" == bounds->fields[firstEmptyField].name) {
+ verify(bounds->fields[firstEmptyField].intervals.empty());
+ break;
+ }
+ }
+
+ // All fields are filled out with bounds, nothing to do.
+ if (firstEmptyField == bounds->fields.size()) { return; }
+
+ // Skip ahead to the firstEmptyField-th element, where we begin filling in bounds.
+ BSONObjIterator it(indexKeyPattern);
+ for (size_t i = 0; i < firstEmptyField; ++i) {
+ verify(it.more());
+ it.next();
+ }
+
+ // For each field in the key...
+ while (it.more()) {
+ // Be extra sure there's no data there.
+ verify("" == bounds->fields[firstEmptyField].name);
+ verify(bounds->fields[firstEmptyField].intervals.empty());
+ // ...build the "all values" interval.
+ IndexBoundsBuilder::allValuesForField(it.next(),
+ &bounds->fields[firstEmptyField]);
+ ++firstEmptyField;
+ }
+
+ // Make sure that the length of the key is the length of the bounds we started.
+ verify(firstEmptyField == bounds->fields.size());
+ }
}
// static
@@ -280,9 +435,8 @@ namespace mongo {
verify(MatchExpression::OR == root->matchType());
}
- auto_ptr<IndexScanNode> currentScan;
+ auto_ptr<QuerySolutionNode> currentScan;
size_t currentIndexNumber = IndexTag::kNoIndex;
- size_t nextIndexPos = 0;
size_t curChild = 0;
// This 'while' processes all IXSCANs, possibly merging scans by combining the bounds. We
@@ -409,7 +563,7 @@ namespace mongo {
if (!canMergeBounds || !shouldMergeBounds) {
if (NULL != currentScan.get()) {
- finishIndexScan(currentScan.get(), indices[currentIndexNumber].keyPattern);
+ finishLeafNode(currentScan.get(), indices[currentIndexNumber].keyPattern);
out->push_back(currentScan.release());
}
else {
@@ -417,10 +571,9 @@ namespace mongo {
}
currentIndexNumber = ixtag->index;
- nextIndexPos = 1;
bool exact = false;
- currentScan.reset(makeIndexScan(indices[currentIndexNumber].keyPattern,
+ currentScan.reset(makeLeafNode(indices[currentIndexNumber].keyPattern,
child, &exact));
if (exact && !inArrayOperator) {
@@ -441,29 +594,10 @@ namespace mongo {
// The child uses the same index we're currently building a scan for. Merge
// the bounds and filters.
verify(currentIndexNumber == ixtag->index);
- verify(ixtag->pos == nextIndexPos);
-
- // Get the ixtag->pos-th element of indexKeyPatterns[currentIndexNumber].
- // TODO: cache this instead/with ixtag->pos?
- BSONObjIterator it(indices[currentIndexNumber].keyPattern);
- BSONElement keyElt = it.next();
- for (size_t i = 0; i < ixtag->pos; ++i) {
- verify(it.more());
- keyElt = it.next();
- }
- verify(!keyElt.eoo());
- bool exact = false;
-
- //cout << "current bounds are " << currentScan->bounds.toString() << endl;
- //cout << "node merging in " << child->toString() << endl;
- //cout << "merging with field " << keyElt.toString(true, true) << endl;
- //cout << "taking advantage of compound index "
- //<< indices[currentIndexNumber].keyPattern.toString() << endl;
- // We know at this point that the only case where we do this is compound indices so
- // just short-cut and dump the bounds there.
- verify(currentScan->bounds.fields[ixtag->pos].name.empty());
- IndexBoundsBuilder::translate(child, keyElt, &currentScan->bounds.fields[ixtag->pos], &exact);
+ bool exact = false;
+ mergeWithLeafNode(child, indices[currentIndexNumber].keyPattern, ixtag->pos, &exact,
+ currentScan.get());
if (exact) {
root->getChildVector()->erase(root->getChildVector()->begin()
@@ -474,14 +608,12 @@ namespace mongo {
// We keep curChild in the AND for affixing later.
++curChild;
}
-
- ++nextIndexPos;
}
}
// Output the scan we're done with, if it exists.
if (NULL != currentScan.get()) {
- finishIndexScan(currentScan.get(), indices[currentIndexNumber].keyPattern);
+ finishLeafNode(currentScan.get(), indices[currentIndexNumber].keyPattern);
out->push_back(currentScan.release());
}
@@ -580,7 +712,9 @@ namespace mongo {
// Unlike an AND, an OR cannot have filters hanging off of it. We stop processing
// when any of our children lack index tags. If a node lacks an index tag it cannot
// be answered via an index.
- if (ixscanNodes.size() != expectedNumberScans || (!inArrayOperator && 0 != root->numChildren())) {
+ if (ixscanNodes.size() != expectedNumberScans
+ || (!inArrayOperator && 0 != root->numChildren())) {
+
warning() << "planner OR error, non-indexed child of OR.";
// We won't enumerate an OR without indices for each child, so this isn't an issue, even
// if we have an AND with an OR child -- we won't get here unless the OR is fully
@@ -666,13 +800,16 @@ namespace mongo {
IndexTag* tag = static_cast<IndexTag*>(root->getTag());
bool exact = false;
- auto_ptr<IndexScanNode> isn;
- isn.reset(makeIndexScan(indices[tag->index].keyPattern, root, &exact));
-
- finishIndexScan(isn.get(), indices[tag->index].keyPattern);
+ QuerySolutionNode* soln = makeLeafNode(indices[tag->index].keyPattern, root,
+ &exact);
+ verify(NULL != soln);
+ stringstream ss;
+ soln->appendToString(&ss, 0);
+ cout << "about to finish leaf node, soln " << ss.str() << endl;
+ finishLeafNode(soln, indices[tag->index].keyPattern);
if (inArrayOperator) {
- return isn.release();
+ return soln;
}
// If the bounds are exact, the set of documents that satisfy the predicate is
@@ -681,15 +818,16 @@ namespace mongo {
// If the bounds are not exact, the set of documents returned from the scan is a
// superset of documents that satisfy the predicate, and we must check the
// predicate.
- if (!exact) {
- FetchNode* fetch = new FetchNode();
- verify(NULL != autoRoot.get());
- fetch->filter.reset(autoRoot.release());
- fetch->child.reset(isn.release());
- return fetch;
+
+ if (exact) {
+ return soln;
}
- return isn.release();
+ FetchNode* fetch = new FetchNode();
+ verify(NULL != autoRoot.get());
+ fetch->filter.reset(autoRoot.release());
+ fetch->child.reset(soln);
+ return fetch;
}
else if (Indexability::arrayUsesIndexOnChildren(root)) {
QuerySolutionNode* solution = NULL;
@@ -799,6 +937,7 @@ namespace mongo {
// Project the results.
if (NULL != query.getProj()) {
if (query.getProj()->requiresDocument()) {
+ cout << "projection claims to require doc adding fetch.\n";
// If the projection requires the entire document, somebody must fetch.
if (!solnRoot->fetched()) {
FetchNode* fetch = new FetchNode();
@@ -811,6 +950,7 @@ namespace mongo {
bool covered = true;
for (size_t i = 0; i < fields.size(); ++i) {
if (!solnRoot->hasField(fields[i])) {
+ cout << "not covered cuz doesn't have field " << fields[i] << endl;
covered = false;
break;
}
@@ -853,12 +993,19 @@ namespace mongo {
/**
* Does the tree rooted at 'root' have a node with matchType 'type'?
*/
- bool hasNode(MatchExpression* root, MatchExpression::MatchType type) {
+ bool hasNode(MatchExpression* root, MatchExpression::MatchType type,
+ MatchExpression** out = NULL) {
if (type == root->matchType()) {
+ if (NULL != out) {
+ *out = root;
+ }
return true;
}
for (size_t i = 0; i < root->numChildren(); ++i) {
if (hasNode(root->getChild(i), type)) {
+ if (NULL != out) {
+ *out = root->getChild(i);
+ }
return true;
}
}
@@ -963,13 +1110,15 @@ namespace mongo {
if (0 == indices[i].keyPattern.woCompare(hintIndex)) {
relevantIndices.clear();
relevantIndices.push_back(indices[i]);
- cout << "hint specified, restricting indices to " << hintIndex.toString() << endl;
+ cout << "hint specified, restricting indices to " << hintIndex.toString()
+ << endl;
hintValid = true;
break;
}
}
if (!hintValid) {
- warning() << "Hinted index " << hintIndex.toString() << " does not exist, ignoring.";
+ warning() << "Hinted index " << hintIndex.toString()
+ << " does not exist, ignoring.";
}
}
else {
@@ -980,6 +1129,15 @@ namespace mongo {
// query.root() is now annotated with RelevantTag(s).
rateIndices(query.root(), "", relevantIndices);
+ // If there is a GEO_NEAR it must have an index it can use directly.
+ MatchExpression* gnNode;
+ if (hasNode(query.root(), MatchExpression::GEO_NEAR, &gnNode)) {
+ RelevantTag* tag = static_cast<RelevantTag*>(gnNode->getTag());
+ if (0 == tag->first.size() && 0 == tag->notFirst.size()) {
+ return;
+ }
+ }
+
// If we have any relevant indices, we try to create indexed plans.
if (0 < relevantIndices.size()) {
/*
@@ -998,7 +1156,8 @@ namespace mongo {
<< endl;
// This can fail if enumeration makes a mistake.
- QuerySolutionNode* solnRoot = buildIndexedDataAccess(rawTree, false, relevantIndices);
+ QuerySolutionNode* solnRoot = buildIndexedDataAccess(rawTree, false,
+ relevantIndices);
if (NULL == solnRoot) { continue; }
// This shouldn't ever fail.
@@ -1045,7 +1204,9 @@ namespace mongo {
// index is not over any predicates in the query.
//
// XXX XXX: Can we do this even if the index is sparse? Might we miss things?
- if (!query.getParsed().getSort().isEmpty()) {
+ if (!query.getParsed().getSort().isEmpty()
+ && !hasNode(query.root(), MatchExpression::GEO_NEAR)) {
+
// See if we have a sort provided from an index already.
bool usingIndexToSort = false;
for (size_t i = 0; i < out->size(); ++i) {
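The hasNode() change above threads an optional out-parameter through a recursive walk of the match expression tree, and the planner then uses it to grab the GEO_NEAR node and inspect its RelevantTag. A minimal standalone sketch of the pattern, using an invented Node type rather than the real MatchExpression hierarchy:

    #include <cstddef>
    #include <iostream>
    #include <vector>

    // Hypothetical stand-in for a match-expression tree node.
    struct Node {
        enum Kind { AND, EQ, GEO_NEAR };
        Kind kind;
        std::vector<Node*> children;
    };

    // Returns true if the tree rooted at 'root' contains a node of kind 'kind'.
    // When 'out' is non-NULL it is set to the first matching node in pre-order.
    // 'out' is threaded through the recursion so the reported node is the match
    // itself, not just the child subtree that contains it.
    bool findNode(Node* root, Node::Kind kind, Node** out = NULL) {
        if (root->kind == kind) {
            if (NULL != out) { *out = root; }
            return true;
        }
        for (size_t i = 0; i < root->children.size(); ++i) {
            if (findNode(root->children[i], kind, out)) {
                return true;
            }
        }
        return false;
    }

    int main() {
        Node geo;  geo.kind  = Node::GEO_NEAR;
        Node eq;   eq.kind   = Node::EQ;
        Node root; root.kind = Node::AND;
        root.children.push_back(&eq);
        root.children.push_back(&geo);

        Node* hit = NULL;
        if (findNode(&root, Node::GEO_NEAR, &hit)) {
            std::cout << "found the GEO_NEAR node: " << (hit == &geo) << std::endl;  // prints 1
        }
        return 0;
    }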
diff --git a/src/mongo/db/query/query_planner.h b/src/mongo/db/query/query_planner.h
index 0e4d2ea13e6..573b5c64767 100644
--- a/src/mongo/db/query/query_planner.h
+++ b/src/mongo/db/query/query_planner.h
@@ -182,16 +182,30 @@ namespace mongo {
//
/**
- * Create a new IndexScanNode. The bounds for 'expr' are computed and placed into the
+ * Create a new data access node.
+ *
+ * If the node is an index scan, the bounds for 'expr' are computed and placed into the
* first field's OIL position. The rest of the OILs are allocated but uninitialized.
+ *
+ * If the node is a geo node, XXX.
*/
- static IndexScanNode* makeIndexScan(const BSONObj& indexKeyPattern, MatchExpression* expr,
- bool* exact);
+ static QuerySolutionNode* makeLeafNode(const BSONObj& indexKeyPattern,
+ MatchExpression* expr,
+ bool* exact);
+
/**
- * Fill in any bounds that are missing in 'scan' with the "all values for this field"
- * interval.
+ * Merge the predicate 'expr' with the leaf node 'node'.
+ */
+ static void mergeWithLeafNode(MatchExpression* expr, const BSONObj& keyPattern,
+ size_t pos, bool* exactOut, QuerySolutionNode* node);
+
+ /**
+ * If index scan, fill in any bounds that are missing in 'node' with the "all values for
+ * this field" interval.
+ *
+ * If geo, XXX.
*/
- static void finishIndexScan(IndexScanNode* scan, const BSONObj& indexKeyPattern);
+ static void finishLeafNode(QuerySolutionNode* node, const BSONObj& indexKeyPattern);
//
// Analysis of Data Access
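The header now splits leaf construction into three calls: makeLeafNode() builds the data access node and fills in bounds for the first predicate, mergeWithLeafNode() folds later predicates over the same index into it, and finishLeafNode() fills any untouched key-pattern fields with the "all values for this field" interval. A toy model of that call sequence, assuming string-valued intervals and a simplified Leaf type in place of the real MatchExpression/OIL machinery:

    #include <cstddef>
    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    // Bounds are modeled as one interval string per field instead of real OILs.
    typedef std::map<std::string, std::string> Bounds;

    struct Leaf {
        std::vector<std::string> keyPattern;  // index fields, in order
        Bounds bounds;                        // per-field intervals
    };

    // makeLeafNode: create the leaf and fill in bounds for the first predicate.
    Leaf* makeLeafNode(const std::vector<std::string>& keyPattern,
                       const std::string& field, const std::string& interval) {
        Leaf* leaf = new Leaf();
        leaf->keyPattern = keyPattern;
        leaf->bounds[field] = interval;
        return leaf;
    }

    // mergeWithLeafNode: add bounds for a later predicate over the same index.
    void mergeWithLeafNode(const std::string& field, const std::string& interval,
                           Leaf* leaf) {
        leaf->bounds[field] = interval;
    }

    // finishLeafNode: any key-pattern field still missing bounds gets the
    // "all values for this field" interval.
    void finishLeafNode(Leaf* leaf) {
        for (size_t i = 0; i < leaf->keyPattern.size(); ++i) {
            const std::string& f = leaf->keyPattern[i];
            if (leaf->bounds.find(f) == leaf->bounds.end()) {
                leaf->bounds[f] = "[MinKey, MaxKey]";
            }
        }
    }

    int main() {
        std::vector<std::string> keyPattern;
        keyPattern.push_back("a");
        keyPattern.push_back("b");
        keyPattern.push_back("c");

        Leaf* leaf = makeLeafNode(keyPattern, "a", "[1, 1]");
        mergeWithLeafNode("b", "[5, 10]", leaf);
        finishLeafNode(leaf);  // 'c' gets [MinKey, MaxKey]

        for (Bounds::const_iterator it = leaf->bounds.begin();
             it != leaf->bounds.end(); ++it) {
            std::cout << it->first << " -> " << it->second << std::endl;
        }
        delete leaf;
        return 0;
    }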
diff --git a/src/mongo/db/query/query_planner_test.cpp b/src/mongo/db/query/query_planner_test.cpp
index bfd0a132620..30e21c7dd92 100644
--- a/src/mongo/db/query/query_planner_test.cpp
+++ b/src/mongo/db/query/query_planner_test.cpp
@@ -69,13 +69,14 @@ namespace {
//
void runQuery(BSONObj query) {
+ solns.clear();
queryObj = query.getOwned();
ASSERT_OK(CanonicalQuery::canonicalize(ns, queryObj, &cq));
QueryPlanner::plan(*cq, keyPatterns, QueryPlanner::INCLUDE_COLLSCAN, &solns);
- ASSERT_GREATER_THAN(solns.size(), 0U);;
}
void runDetailedQuery(const BSONObj& query, const BSONObj& sort, const BSONObj& proj) {
+ solns.clear();
ASSERT_OK(CanonicalQuery::canonicalize(ns, query, sort, proj, &cq));
QueryPlanner::plan(*cq, keyPatterns, QueryPlanner::INCLUDE_COLLSCAN, &solns);
ASSERT_GREATER_THAN(solns.size(), 0U);
@@ -420,7 +421,7 @@ namespace {
ASSERT_EQUALS(solns.size(), 2U);
for (size_t i = 0; i < solns.size(); ++i) {
- // cout << solns[i]->toString();
+ cout << solns[i]->toString();
ProjectionNode* pn = static_cast<ProjectionNode*>(solns[i]->root.get());
ASSERT(STAGE_COLLSCAN == pn->child->getType() || STAGE_IXSCAN == pn->child->getType());
}
@@ -549,6 +550,98 @@ namespace {
ASSERT_EQUALS(getNumSolutions(), 2U);
}
+ //
+ // Geo
+ // http://docs.mongodb.org/manual/reference/operator/query-geospatial/#geospatial-query-compatibility-chart
+ //
+
+ TEST_F(SingleIndexTest, Basic2DNonNear) {
+ // 2d can answer: within poly, within center, within centersphere, within box.
+ // And it can use an index (or not) for each of them. As such, 2 solns expected.
+ setIndex(BSON("a" << "2d"));
+
+ // Polygon
+ runQuery(fromjson("{a : { $within: { $polygon : [[0,0], [2,0], [4,0]] } }}"));
+ dumpSolutions();
+ ASSERT_EQUALS(getNumSolutions(), 2U);
+
+ // Center
+ runQuery(fromjson("{a : { $within : { $center : [[ 5, 5 ], 7 ] } }}"));
+ dumpSolutions();
+ ASSERT_EQUALS(getNumSolutions(), 2U);
+
+ // Centersphere
+ runQuery(fromjson("{a : { $within : { $centerSphere : [[ 10, 20 ], 0.01 ] } }}"));
+ dumpSolutions();
+ ASSERT_EQUALS(getNumSolutions(), 2U);
+
+ // Within box.
+ runQuery(fromjson("{a : {$within: {$box : [[0,0],[9,9]]}}}"));
+ dumpSolutions();
+ ASSERT_EQUALS(getNumSolutions(), 2U);
+
+ // TODO: test that we *don't* annotate for things we shouldn't.
+ }
+
+ TEST_F(SingleIndexTest, Basic2DSphereNonNear) {
+ // 2dsphere can do: within+geometry, intersects+geometry
+ setIndex(BSON("a" << "2dsphere"));
+
+ runQuery(fromjson("{a: {$geoIntersects: {$geometry: {type: 'Point',"
+ "coordinates: [10.0, 10.0]}}}}"));
+ dumpSolutions();
+ ASSERT_EQUALS(getNumSolutions(), 2U);
+
+ runQuery(fromjson("{a : { $geoWithin : { $centerSphere : [[ 10, 20 ], 0.01 ] } }}"));
+ dumpSolutions();
+ ASSERT_EQUALS(getNumSolutions(), 2U);
+
+ // TODO: test that we *don't* annotate for things we shouldn't.
+ }
+
+ TEST_F(SingleIndexTest, Basic2DGeoNear) {
+ // Can only do near + old point.
+ setIndex(BSON("a" << "2d"));
+ runQuery(fromjson("{a: {$near: [0,0], $maxDistance:0.3 }}"));
+ dumpSolutions();
+ ASSERT_EQUALS(getNumSolutions(), 1U);
+ }
+
+ TEST_F(SingleIndexTest, Basic2DSphereGeoNear) {
+ // Can do nearSphere + old point, near + new point.
+ setIndex(BSON("a" << "2dsphere"));
+
+ runQuery(fromjson("{a: {$nearSphere: [0,0], $maxDistance: 0.31 }}"));
+ dumpSolutions();
+ ASSERT_EQUALS(getNumSolutions(), 1U);
+
+ runQuery(fromjson("{a: {$geoNear: {$geometry: {type: 'Point', coordinates: [0,0]},"
+ "$maxDistance:100}}}"));
+ dumpSolutions();
+ ASSERT_EQUALS(getNumSolutions(), 1U);
+ }
+
+ TEST_F(SingleIndexTest, Basic2DSphereGeoNearReverseCompound) {
+ setIndex(BSON("x" << 1 << "a" << "2dsphere"));
+ runQuery(fromjson("{x:1, a: {$nearSphere: [0,0], $maxDistance: 0.31 }}"));
+ dumpSolutions();
+ ASSERT_EQUALS(getNumSolutions(), 1U);
+ }
+
+ TEST_F(SingleIndexTest, NearNoIndex) {
+ setIndex(BSON("x" << 1));
+ runQuery(fromjson("{x:1, a: {$nearSphere: [0,0], $maxDistance: 0.31 }}"));
+ dumpSolutions();
+ ASSERT_EQUALS(getNumSolutions(), 0U);
+ }
+
+ TEST_F(SingleIndexTest, TwoDSphereNoGeoPred) {
+ setIndex(BSON("x" << 1 << "a" << "2dsphere"));
+ runQuery(fromjson("{x:1}"));
+ dumpSolutions();
+ ASSERT_EQUALS(getNumSolutions(), 2U);
+ }
+
// STOPPED HERE - need to hook up machinery for multiple indexed predicates
// second is not working (until the machinery is in place)
//
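The fixture changes above move solns.clear() to the top of runQuery() and runDetailedQuery(), so a single TEST_F body can plan several queries and assert on each result set independently. A stripped-down sketch of that pattern, with an invented PlannerHarness standing in for the real canonicalize-and-plan plumbing:

    #include <cassert>
    #include <cstddef>
    #include <string>
    #include <vector>

    struct PlannerHarness {
        std::vector<std::string> solns;

        void runQuery(const std::string& query) {
            solns.clear();  // drop solutions from the previous query
            // The real harness canonicalizes and plans here; fake two solutions.
            solns.push_back("COLLSCAN for " + query);
            solns.push_back("IXSCAN for " + query);
        }

        size_t getNumSolutions() const { return solns.size(); }
    };

    int main() {
        PlannerHarness h;
        h.runQuery("{a: {$within: {$box: [[0,0],[9,9]]}}}");
        assert(2U == h.getNumSolutions());
        h.runQuery("{a: {$within: {$center: [[5,5], 7]}}}");
        assert(2U == h.getNumSolutions());  // not 4: state was cleared between runs
        return 0;
    }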
diff --git a/src/mongo/db/query/query_solution.cpp b/src/mongo/db/query/query_solution.cpp
index 0d3845251ba..446f87a5a67 100644
--- a/src/mongo/db/query/query_solution.cpp
+++ b/src/mongo/db/query/query_solution.cpp
@@ -333,7 +333,6 @@ namespace mongo {
}
bool IndexScanNode::hasField(const string& field) const {
- // XXX XXX: multikey? do we store that the index is multikey in the scan?
BSONObjIterator it(indexKeyPattern);
while (it.more()) {
if (field == it.next().fieldName()) {
@@ -450,4 +449,85 @@ namespace mongo {
child->appendToString(ss, indent + 2);
}
+ //
+ // GeoNear2DNode
+ //
+
+ void GeoNear2DNode::appendToString(stringstream* ss, int indent) const {
+ addIndent(ss, indent);
+ *ss << "GEO_NEAR_2D\n";
+ addIndent(ss, indent + 1);
+ *ss << "numWanted = " << numWanted << endl;
+ addIndent(ss, indent + 1);
+ *ss << "keyPattern = " << indexKeyPattern.toString() << endl;
+ addIndent(ss, indent + 1);
+ *ss << "seek = " << seek.toString() << endl;
+ addIndent(ss, indent + 1);
+ *ss << "fetched = " << fetched() << endl;
+ addIndent(ss, indent + 1);
+ *ss << "sortedByDiskLoc = " << sortedByDiskLoc() << endl;
+ addIndent(ss, indent + 1);
+ *ss << "getSort = " << getSort().toString() << endl;
+ }
+
+ bool GeoNear2DNode::hasField(const string& field) const {
+ BSONObjIterator it(indexKeyPattern);
+ while (it.more()) {
+ if (field == it.next().fieldName()) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ //
+ // GeoNear2DSphereNode
+ //
+
+ void GeoNear2DSphereNode::appendToString(stringstream* ss, int indent) const {
+ addIndent(ss, indent);
+ *ss << "GEO_NEAR_2DSPHERE\n";
+ addIndent(ss, indent + 1);
+ *ss << "keyPattern = " << indexKeyPattern.toString() << endl;
+ addIndent(ss, indent + 1);
+ *ss << "fetched = " << fetched() << endl;
+ addIndent(ss, indent + 1);
+ *ss << "sortedByDiskLoc = " << sortedByDiskLoc() << endl;
+ addIndent(ss, indent + 1);
+ *ss << "getSort = " << getSort().toString() << endl;
+ addIndent(ss, indent + 1);
+ *ss << "baseBounds = " << baseBounds.toString() << endl;
+ addIndent(ss, indent + 1);
+ *ss << "nearQuery = " << nq.toString() << endl;
+ }
+
+ //
+ // Geo2DNode
+ //
+
+ void Geo2DNode::appendToString(stringstream* ss, int indent) const {
+ addIndent(ss, indent);
+ *ss << "GEO_2D\n";
+ addIndent(ss, indent + 1);
+ *ss << "keyPattern = " << indexKeyPattern.toString() << endl;
+ addIndent(ss, indent + 1);
+ *ss << "seek = " << seek.toString() << endl;
+ addIndent(ss, indent + 1);
+ *ss << "fetched = " << fetched() << endl;
+ addIndent(ss, indent + 1);
+ *ss << "sortedByDiskLoc = " << sortedByDiskLoc() << endl;
+ addIndent(ss, indent + 1);
+ *ss << "getSort = " << getSort().toString() << endl;
+ }
+
+ bool Geo2DNode::hasField(const string& field) const {
+ BSONObjIterator it(indexKeyPattern);
+ while (it.more()) {
+ if (field == it.next().fieldName()) {
+ return true;
+ }
+ }
+ return false;
+ }
+
} // namespace mongo
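Geo2DNode::hasField() and GeoNear2DNode::hasField() answer the coverage question the same way IndexScanNode does: a field can be supplied without a fetch only if it appears in the index key pattern. A self-contained sketch of that check, and of how a projection's covered-ness follows from it, modeling key patterns as plain field-name lists and ignoring the multikey and dotted-path complications the real code has to consider:

    #include <cstddef>
    #include <iostream>
    #include <string>
    #include <vector>

    // A node backed by an index can supply a field iff the field appears in the
    // index key pattern (modeled here as a list of names rather than a BSONObj).
    bool hasField(const std::vector<std::string>& keyPattern,
                  const std::string& field) {
        for (size_t i = 0; i < keyPattern.size(); ++i) {
            if (keyPattern[i] == field) {
                return true;
            }
        }
        return false;
    }

    // A projection is covered when every requested field is available from the
    // index, so no FetchNode has to be added above the scan.
    bool isCovered(const std::vector<std::string>& keyPattern,
                   const std::vector<std::string>& projectedFields) {
        for (size_t i = 0; i < projectedFields.size(); ++i) {
            if (!hasField(keyPattern, projectedFields[i])) {
                return false;
            }
        }
        return true;
    }

    int main() {
        std::vector<std::string> keyPattern;
        keyPattern.push_back("a");
        keyPattern.push_back("b");

        std::vector<std::string> proj;
        proj.push_back("a");
        std::cout << isCovered(keyPattern, proj) << std::endl;  // 1: covered

        proj.push_back("c");
        std::cout << isCovered(keyPattern, proj) << std::endl;  // 0: 'c' forces a fetch
        return 0;
    }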
diff --git a/src/mongo/db/query/query_solution.h b/src/mongo/db/query/query_solution.h
index 8a89f295d9b..948ac02e658 100644
--- a/src/mongo/db/query/query_solution.h
+++ b/src/mongo/db/query/query_solution.h
@@ -29,6 +29,7 @@
#pragma once
#include "mongo/db/matcher/expression.h"
+#include "mongo/db/geo/geoquery.h"
#include "mongo/db/query/index_bounds.h"
#include "mongo/db/query/projection_parser.h"
#include "mongo/db/query/stage_types.h"
@@ -392,7 +393,6 @@ namespace mongo {
virtual ~SkipNode() { }
virtual StageType getType() const { return STAGE_SKIP; }
-
virtual void appendToString(stringstream* ss, int indent) const;
bool fetched() const { return child->fetched(); }
@@ -404,4 +404,64 @@ namespace mongo {
scoped_ptr<QuerySolutionNode> child;
};
+ //
+ // Geo nodes. A thin wrapper above an IXSCAN until we can yank functionality out of
+ // the IXSCAN layer into the stage layer.
+ //
+
+ struct GeoNear2DNode : public QuerySolutionNode {
+ GeoNear2DNode() : numWanted(100) { }
+ virtual ~GeoNear2DNode() { }
+
+ virtual StageType getType() const { return STAGE_GEO_NEAR_2D; }
+ virtual void appendToString(stringstream* ss, int indent) const;
+
+ bool fetched() const { return false; }
+ bool hasField(const string& field) const;
+ bool sortedByDiskLoc() const { return false; }
+ BSONObj getSort() const { return BSONObj(); }
+
+ int numWanted;
+ BSONObj indexKeyPattern;
+ BSONObj seek;
+ };
+
+ // TODO: This is probably an expression index.
+ struct Geo2DNode : public QuerySolutionNode {
+ Geo2DNode() { }
+ virtual ~Geo2DNode() { }
+
+ virtual StageType getType() const { return STAGE_GEO_2D; }
+ virtual void appendToString(stringstream* ss, int indent) const;
+
+ bool fetched() const { return false; }
+ bool hasField(const string& field) const;
+ bool sortedByDiskLoc() const { return false; }
+ BSONObj getSort() const { return BSONObj(); }
+
+ BSONObj indexKeyPattern;
+ BSONObj seek;
+ };
+
+ // This is actually its own standalone stage.
+ struct GeoNear2DSphereNode : public QuerySolutionNode {
+ GeoNear2DSphereNode() { }
+ virtual ~GeoNear2DSphereNode() { }
+
+ virtual StageType getType() const { return STAGE_GEO_NEAR_2DSPHERE; }
+ virtual void appendToString(stringstream* ss, int indent) const;
+
+ bool fetched() const { return true; }
+ bool hasField(const string& field) const { return true; }
+ bool sortedByDiskLoc() const { return false; }
+ BSONObj getSort() const { return BSONObj(); }
+
+ NearQuery nq;
+ IndexBounds baseBounds;
+
+ BSONObj indexKeyPattern;
+ scoped_ptr<MatchExpression> filter;
+ };
+
+
} // namespace mongo
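The fetched() flags on these nodes drive the projection analysis shown earlier: Geo2DNode and GeoNear2DNode report false because they only produce index data, while GeoNear2DSphereNode reports true because the S2 near stage works with full documents. A hedged sketch of how that flag decides whether the root gets wrapped in a fetch; the Node, GeoIndexNode, and FetchNode types below are invented stand-ins for the QuerySolutionNode hierarchy:

    #include <iostream>

    struct Node {
        virtual ~Node() {}
        virtual bool fetched() const = 0;      // does this node produce full documents?
        virtual const char* name() const = 0;
    };

    struct GeoIndexNode : public Node {        // like Geo2DNode: index keys only
        virtual bool fetched() const { return false; }
        virtual const char* name() const { return "GEO_2D"; }
    };

    struct FetchNode : public Node {
        explicit FetchNode(Node* c) : child(c) {}
        virtual ~FetchNode() { delete child; }
        virtual bool fetched() const { return true; }
        virtual const char* name() const { return "FETCH"; }
        Node* child;
    };

    // Mirrors the "projection requires the entire document" branch: somebody
    // must fetch if the root does not already produce full documents.
    Node* ensureFetched(Node* root) {
        if (root->fetched()) {
            return root;
        }
        return new FetchNode(root);
    }

    int main() {
        Node* root = ensureFetched(new GeoIndexNode());
        std::cout << "root stage: " << root->name() << std::endl;  // FETCH
        delete root;
        return 0;
    }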
diff --git a/src/mongo/db/query/stage_builder.cpp b/src/mongo/db/query/stage_builder.cpp
index 8b699a55202..1859329f26b 100644
--- a/src/mongo/db/query/stage_builder.cpp
+++ b/src/mongo/db/query/stage_builder.cpp
@@ -37,6 +37,7 @@
#include "mongo/db/exec/merge_sort.h"
#include "mongo/db/exec/or.h"
#include "mongo/db/exec/projection.h"
+#include "mongo/db/exec/s2near.h"
#include "mongo/db/exec/sort.h"
#include "mongo/db/exec/skip.h"
#include "mongo/db/index/catalog_hack.h"
@@ -156,6 +157,38 @@ namespace mongo {
}
return ret.release();
}
+ else if (STAGE_GEO_2D == root->getType()) {
+ // XXX: placeholder for having a real stage
+ const Geo2DNode* node = static_cast<const Geo2DNode*>(root);
+ IndexScanParams params;
+ NamespaceDetails* nsd = nsdetails(ns.c_str());
+ if (NULL == nsd) { return NULL; }
+ int idxNo = nsd->findIndexByKeyPattern(node->indexKeyPattern);
+ if (-1 == idxNo) { return NULL; }
+ params.descriptor = CatalogHack::getDescriptor(nsd, idxNo);
+ params.bounds.isSimpleRange = true;
+ params.bounds.startKey = node->seek;
+ return new IndexScan(params, ws, NULL);
+ }
+ else if (STAGE_GEO_NEAR_2D == root->getType()) {
+ // XXX: placeholder for having a real stage
+ const GeoNear2DNode* node = static_cast<const GeoNear2DNode*>(root);
+ IndexScanParams params;
+ NamespaceDetails* nsd = nsdetails(ns.c_str());
+ if (NULL == nsd) { return NULL; }
+ int idxNo = nsd->findIndexByKeyPattern(node->indexKeyPattern);
+ if (-1 == idxNo) { return NULL; }
+ params.descriptor = CatalogHack::getDescriptor(nsd, idxNo);
+ params.bounds.isSimpleRange = true;
+ params.bounds.startKey = node->seek;
+ params.limit = node->numWanted;
+ return new IndexScan(params, ws, NULL);
+ }
+ else if (STAGE_GEO_NEAR_2DSPHERE == root->getType()) {
+ const GeoNear2DSphereNode* node = static_cast<const GeoNear2DSphereNode*>(root);
+ return new S2NearStage(ns, node->indexKeyPattern, node->nq, node->baseBounds,
+ node->filter.get(), ws);
+ }
else {
stringstream ss;
root->appendToString(&ss, 0);
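Here STAGE_GEO_2D and STAGE_GEO_NEAR_2D are still placeholders wired up as an IndexScan over a simple key range, while STAGE_GEO_NEAR_2DSPHERE constructs the new S2NearStage. A minimal, hypothetical model of that type-switch dispatch; the Stage classes and buildStage() below are invented and not the real PlanStage builder:

    #include <cstddef>
    #include <iostream>

    enum StageType { STAGE_COLLSCAN, STAGE_IXSCAN, STAGE_GEO_NEAR_2DSPHERE };

    struct Stage {
        virtual ~Stage() {}
        virtual const char* name() const = 0;
    };

    struct CollScanStage : public Stage {
        virtual const char* name() const { return "COLLSCAN"; }
    };

    struct IndexScanStage : public Stage {
        virtual const char* name() const { return "IXSCAN"; }
    };

    struct S2NearStageSketch : public Stage {
        virtual const char* name() const { return "S2NEAR"; }
    };

    // Returns NULL for an unrecognized type, mirroring the builder's failure path.
    Stage* buildStage(StageType type) {
        if (STAGE_COLLSCAN == type) {
            return new CollScanStage();
        }
        else if (STAGE_IXSCAN == type) {
            return new IndexScanStage();
        }
        else if (STAGE_GEO_NEAR_2DSPHERE == type) {
            return new S2NearStageSketch();
        }
        return NULL;
    }

    int main() {
        Stage* s = buildStage(STAGE_GEO_NEAR_2DSPHERE);
        if (NULL != s) {
            std::cout << "built stage: " << s->name() << std::endl;
            delete s;
        }
        return 0;
    }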
diff --git a/src/mongo/db/query/stage_types.h b/src/mongo/db/query/stage_types.h
index 98d4de94a22..5db655f8e81 100644
--- a/src/mongo/db/query/stage_types.h
+++ b/src/mongo/db/query/stage_types.h
@@ -38,6 +38,14 @@ namespace mongo {
STAGE_AND_SORTED,
STAGE_COLLSCAN,
STAGE_FETCH,
+
+ // TODO: This is probably an expression index, but would take even more time than
+ // STAGE_2DSPHERE to straighten out.
+ STAGE_GEO_2D,
+ // The two $geoNear impls imply a fetch+sort and as such are not IXSCANs.
+ STAGE_GEO_NEAR_2D,
+ STAGE_GEO_NEAR_2DSPHERE,
+
STAGE_IXSCAN,
STAGE_LIMIT,
STAGE_OR,