summaryrefslogtreecommitdiff
path: root/src/mongo/db/timeseries
diff options
context:
space:
mode:
authorDan Larkin-York <dan.larkin-york@mongodb.com>2021-01-26 18:48:04 +0000
committerEvergreen Agent <no-reply@evergreen.mongodb.com>2021-02-02 01:26:45 +0000
commit07d444d57def12b226632d51142c138cfd3f00da (patch)
treeba1e6120e8c6991a1456d643c708d135965b47ba /src/mongo/db/timeseries
parentae0a72e4798a73d55addf4259f4bf1a7edafa9ae (diff)
downloadmongo-07d444d57def12b226632d51142c138cfd3f00da.tar.gz
SERVER-53840 Make bucket limits configurable
Diffstat (limited to 'src/mongo/db/timeseries')
-rw-r--r-- src/mongo/db/timeseries/SConscript | 1
-rw-r--r-- src/mongo/db/timeseries/bucket_catalog.cpp | 10
-rw-r--r-- src/mongo/db/timeseries/bucket_catalog.h | 5
-rw-r--r-- src/mongo/db/timeseries/bucket_catalog_test.cpp | 6
-rw-r--r-- src/mongo/db/timeseries/timeseries.idl | 24
5 files changed, 34 insertions(+), 12 deletions(-)
diff --git a/src/mongo/db/timeseries/SConscript b/src/mongo/db/timeseries/SConscript
index 14d894c0b39..63f72f1b138 100644
--- a/src/mongo/db/timeseries/SConscript
+++ b/src/mongo/db/timeseries/SConscript
@@ -11,6 +11,7 @@ env.Library(
],
LIBDEPS=[
'$BUILD_DIR/mongo/idl/idl_parser',
+ '$BUILD_DIR/mongo/idl/server_parameter',
],
)
diff --git a/src/mongo/db/timeseries/bucket_catalog.cpp b/src/mongo/db/timeseries/bucket_catalog.cpp
index b00539f5dd8..5189ea23777 100644
--- a/src/mongo/db/timeseries/bucket_catalog.cpp
+++ b/src/mongo/db/timeseries/bucket_catalog.cpp
@@ -39,8 +39,6 @@
namespace mongo {
namespace {
const auto getBucketCatalog = ServiceContext::declareDecoration<BucketCatalog>();
-
-constexpr uint64_t kIdleBucketExpiryMemoryUsageThreshold = 1024 * 1024 * 100; // 100 MB
} // namespace
BSONObj BucketCatalog::CommitData::toBSON() const {
@@ -121,11 +119,11 @@ BucketCatalog::InsertResult BucketCatalog::insert(OperationContext* opCtx,
&sizeToBeAdded);
auto isBucketFull = [&]() {
- if (bucket->numMeasurements == kTimeseriesBucketMaxCount) {
+ if (bucket->numMeasurements == static_cast<std::uint64_t>(gTimeseriesBucketMaxCount)) {
stats.numBucketsClosedDueToCount++;
return true;
}
- if (bucket->size + sizeToBeAdded > kTimeseriesBucketMaxSizeBytes) {
+ if (bucket->size + sizeToBeAdded > static_cast<std::uint64_t>(gTimeseriesBucketMaxSize)) {
stats.numBucketsClosedDueToSize++;
return true;
}
@@ -362,7 +360,9 @@ void BucketCatalog::_removeBucket(const OID& bucketId,
}
void BucketCatalog::_expireIdleBuckets(ExecutionStats* stats) {
- while (!_idleBuckets.empty() && _memoryUsage > kIdleBucketExpiryMemoryUsageThreshold) {
+ while (!_idleBuckets.empty() &&
+ _memoryUsage >
+ static_cast<std::uint64_t>(gTimeseriesIdleBucketExpiryMemoryUsageThreshold)) {
_removeBucket(*_idleBuckets.begin(), boost::none, _idleBuckets.begin());
stats->numBucketsClosedDueToMemoryThreshold++;
}
diff --git a/src/mongo/db/timeseries/bucket_catalog.h b/src/mongo/db/timeseries/bucket_catalog.h
index 29b9a8d77ca..ef9feba4811 100644
--- a/src/mongo/db/timeseries/bucket_catalog.h
+++ b/src/mongo/db/timeseries/bucket_catalog.h
@@ -41,9 +41,8 @@
namespace mongo {
class BucketCatalog {
public:
- // This set of constants define limits on the measurements held in a bucket.
- static constexpr int kTimeseriesBucketMaxCount = 1000;
- static constexpr int kTimeseriesBucketMaxSizeBytes = 125 * 1024; // 125 KB
+ // This constant, together with parameters defined in timeseries.idl, defines limits on the
+ // measurements held in a bucket.
static constexpr auto kTimeseriesBucketMaxTimeRange = Hours(1);
struct CommitInfo {
diff --git a/src/mongo/db/timeseries/bucket_catalog_test.cpp b/src/mongo/db/timeseries/bucket_catalog_test.cpp
index 40a0ba0c266..c70b1e8651f 100644
--- a/src/mongo/db/timeseries/bucket_catalog_test.cpp
+++ b/src/mongo/db/timeseries/bucket_catalog_test.cpp
@@ -223,7 +223,7 @@ TEST_F(BucketCatalogWithoutMetadataTest, CommitReturnsNewFields) {
ASSERT(data.newFieldNamesToBeInserted.count("b")) << data.toBSON();
// Fill up the bucket.
- for (auto i = 3; i < BucketCatalog::kTimeseriesBucketMaxCount; ++i) {
+ for (auto i = 3; i < gTimeseriesBucketMaxCount; ++i) {
_bucketCatalog->insert(_opCtx, _ns1, BSON(_timeField << Date_t::now() << "a" << i));
data = _bucketCatalog->commit(bucketId, _commitInfo);
ASSERT_EQ(0U, data.newFieldNamesToBeInserted.size()) << i << ":" << data.toBSON();
@@ -232,9 +232,7 @@ TEST_F(BucketCatalogWithoutMetadataTest, CommitReturnsNewFields) {
// When a bucket overflows, committing to the new overflow bucket should return the fields of
// the first measurement as new fields.
auto [overflowBucketId, unusedCommitInfo] = _bucketCatalog->insert(
- _opCtx,
- _ns1,
- BSON(_timeField << Date_t::now() << "a" << BucketCatalog::kTimeseriesBucketMaxCount));
+ _opCtx, _ns1, BSON(_timeField << Date_t::now() << "a" << gTimeseriesBucketMaxCount));
ASSERT_NE(bucketId, overflowBucketId);
data = _bucketCatalog->commit(overflowBucketId);
ASSERT_EQ(2U, data.newFieldNamesToBeInserted.size()) << data.toBSON();
diff --git a/src/mongo/db/timeseries/timeseries.idl b/src/mongo/db/timeseries/timeseries.idl
index 814bb634b65..6065394097e 100644
--- a/src/mongo/db/timeseries/timeseries.idl
+++ b/src/mongo/db/timeseries/timeseries.idl
@@ -31,6 +31,30 @@ global:
imports:
- "mongo/idl/basic_types.idl"
+server_parameters:
+ "timeseriesBucketMaxCount":
+ description: "Maximum number of measurements to store in a single bucket"
+ set_at: [ startup ]
+ cpp_vartype: "std::int32_t"
+ cpp_varname: "gTimeseriesBucketMaxCount"
+ default: 1000
+ validator: { gte: 1 }
+ "timeseriesBucketMaxSize":
+ description: "Maximum size in bytes of measurements to store together in a single bucket"
+ set_at: [ startup ]
+ cpp_vartype: "std::int32_t"
+ cpp_varname: "gTimeseriesBucketMaxSize"
+ default: 128000 # 125KB
+ validator: { gte: 1 }
+ "timeseriesIdleBucketExpiryMemoryUsageThreshold":
+ description: "The threshold for bucket catalog memory usage above which idle buckets will be
+ expired"
+ set_at: [ startup ]
+ cpp_vartype: "std::int32_t"
+ cpp_varname: "gTimeseriesIdleBucketExpiryMemoryUsageThreshold"
+ default: 104857600 # 100MB
+ validator: { gte: 1 }
+
structs:
TimeseriesOptions:
description: "The options that define a time-series collection."