diff options
author | Henrik Edin <henrik.edin@mongodb.com> | 2021-09-17 13:44:52 +0000 |
---|---|---|
committer | Evergreen Agent <no-reply@evergreen.mongodb.com> | 2021-09-17 14:06:01 +0000 |
commit | 3c22df5bc4a1c3a32ab936ef17dbe72bfcd79d58 (patch) | |
tree | 6bec3c5bbf882605f23527b0001192b66977f803 /jstests/concurrency/fsm_workloads | |
parent | 048d2393811e801103601bf6b3cb733c6750d4fc (diff) | |
download | mongo-3c22df5bc4a1c3a32ab936ef17dbe72bfcd79d58.tar.gz |
SERVER-60032 Temporarily disable test failing after bucket compression
Diffstat (limited to 'jstests/concurrency/fsm_workloads')
-rw-r--r-- | jstests/concurrency/fsm_workloads/random_moveChunk_timeseries_inserts.js | 9 |
1 files changed, 5 insertions, 4 deletions
diff --git a/jstests/concurrency/fsm_workloads/random_moveChunk_timeseries_inserts.js b/jstests/concurrency/fsm_workloads/random_moveChunk_timeseries_inserts.js index fb1ca2aed35..230ff2885f0 100644 --- a/jstests/concurrency/fsm_workloads/random_moveChunk_timeseries_inserts.js +++ b/jstests/concurrency/fsm_workloads/random_moveChunk_timeseries_inserts.js @@ -122,10 +122,11 @@ var $config = extendWorkload($config, function($config, $super) { // buckets with all documents. const verifyBucketIndex = (bucketIndex) => { const bucketColl = db.getCollection(`system.buckets.${collName}`); - const buckets = bucketColl.aggregate([{$sort: bucketIndex}]).toArray(); - const numDocsInBuckets = - buckets.map(b => Object.keys(b.data._id).length).reduce((x, y) => x + y, 0); - assert.eq(numInitialDocs, numDocsInBuckets); + // TODO SERVER-60033: We need an implementation of this that handles compressed buckets + // const buckets = bucketColl.aggregate([{$sort: bucketIndex}]).toArray(); + // const numDocsInBuckets = + // buckets.map(b => Object.keys(b.data._id).length).reduce((x, y) => x + y, 0); + // assert.eq(numInitialDocs, numDocsInBuckets); const plan = bucketColl.explain().aggregate([{$sort: bucketIndex}]); const stages = getPlanStages(plan, 'IXSCAN'); assert(stages.length > 0); |