summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorvrachev <vlad.rachev@mongodb.com>2021-04-29 09:42:59 -0400
committerEvergreen Agent <no-reply@evergreen.mongodb.com>2021-04-29 14:15:04 +0000
commit2002e6c2cb0e90fa47b30bfea8add8fbc3a412b5 (patch)
tree108e29ce76594230996655612eb88d06b66a9bc5
parent0e6db36e92d82cc81cbd40ffd607eae88dc1f09d (diff)
downloadmongo-2002e6c2cb0e90fa47b30bfea8add8fbc3a412b5.tar.gz
Revert "SERVER-56164 system_perf.yml and perf.yml use finalized DSI api"
This reverts commit f4579a4bdd80f7884c3352089c034f97de80b464.
-rw-r--r--etc/perf.yml1206
-rw-r--r--etc/system_perf.yml2210
2 files changed, 1800 insertions, 1616 deletions
diff --git a/etc/perf.yml b/etc/perf.yml
index f553fd8192b..c29d553052f 100644
--- a/etc/perf.yml
+++ b/etc/perf.yml
@@ -1,233 +1,299 @@
command_type: system
stepback: false
-## Parameters for parameterized builds (see https://github.com/evergreen-ci/evergreen/wiki/Parameterized-Builds)
-parameters:
- - key: patch_compile_flags
- description: "Additional SCons flags to be applied during scons compile invocations in this patch"
+pre:
+ - command: manifest.load
+ - command: shell.exec
+ params:
+ silent: true
+ script: |
+ ${killall_mci|pkill -9 mongod; pkill -9 mongos; pkill -9 mongo; pkill -9 bsondump; pkill -9 mongoimport; pkill -9 mongoexport; pkill -9 mongodump; pkill -9 mongorestore; pkill -9 mongostat; pkill -9 mongofiles; pkill -9 mongotop; pkill -9 mongobridge; pkill -9 mongod-2.6; pkill -9 mongos-2.6; pkill -9 mongo-2.6; pkill -9 bsondump-2.6; pkill -9 mongoimport-2.6; pkill -9 mongoexport-2.6; pkill -9 mongodump-2.6; pkill -9 mongorestore-2.6; pkill -9 mongostat-2.6; pkill -9 mongofiles-2.6; pkill -9 mongotop-2.6; pkill -9 mongobridge-2.6; pkill -9 mongod-2.4; pkill -9 mongos-2.4; pkill -9 mongo-2.4; pkill -9 bsondump-2.4; pkill -9 mongoimport-2.4; pkill -9 mongoexport-2.4; pkill -9 mongodump-2.4; pkill -9 mongorestore-2.4; pkill -9 mongostat-2.4; pkill -9 mongofiles-2.4; pkill -9 mongotop-2.4; pkill -9 resmoke.py; pkill -9 python; pkill -9 python2; pkill -9 lldb; pkill -9 _test} >/dev/null 2>&1
+ exit 0
-variables:
- ###
- # Leave this section uncommented to enable compile.
- _real_remote_file: &_remote_file
- ${project_dir}/${version_id}/${revision}/${platform}/mongodb${compile-variant|}-${version_id}.tar.gz
- _real_compile: &_compile
- - variant: linux-wt-standalone
- name: compile
- _real_expansions: &_expansion_updates
- []
- ###
+post:
+ - command: attach.results
+ params:
+ file_location: src/report.json
+ - command: s3.put
+ params:
+ aws_key: ${aws_key}
+ aws_secret: ${aws_secret}
+ local_file: src/mongod.log
+ remote_file: ${project}/${build_variant}/${revision}/${task_id}/${version_id}/logs/mongod-${build_id}.log
+ bucket: mciuploads
+ permissions: public-read
+ content_type: ${content_type|text/plain}
+ display_name: mongod.log
+ - command: shell.exec
+ params:
+ silent: true
+ script: |
+ ${killall_mci|pkill -9 mongod; pkill -9 mongos; pkill -9 mongo; pkill -9 bsondump; pkill -9 mongoimport; pkill -9 mongoexport; pkill -9 mongodump; pkill -9 mongorestore; pkill -9 mongostat; pkill -9 mongofiles; pkill -9 mongotop; pkill -9 mongobridge; pkill -9 mongod-2.6; pkill -9 mongos-2.6; pkill -9 mongo-2.6; pkill -9 bsondump-2.6; pkill -9 mongoimport-2.6; pkill -9 mongoexport-2.6; pkill -9 mongodump-2.6; pkill -9 mongorestore-2.6; pkill -9 mongostat-2.6; pkill -9 mongofiles-2.6; pkill -9 mongotop-2.6; pkill -9 mongobridge-2.6; pkill -9 mongod-2.4; pkill -9 mongos-2.4; pkill -9 mongo-2.4; pkill -9 bsondump-2.4; pkill -9 mongoimport-2.4; pkill -9 mongoexport-2.4; pkill -9 mongodump-2.4; pkill -9 mongorestore-2.4; pkill -9 mongostat-2.4; pkill -9 mongofiles-2.4; pkill -9 mongotop-2.4; pkill -9 resmoke.py; pkill -9 python; pkill -9 python2; pkill -9 lldb; pkill -9 _test} >/dev/null 2>&1
+ exit 0
+ - command: shell.exec
+ params:
+ working_dir: src
+ script: |
+ # removes files from the (local) scons cache when it's over a
+ # threshold, to the $prune_ratio percentage. Ideally override
+ # these default values in the distro config in evergreen.
- ###
- # **Or**: Leave this section uncommented to bypass/skip compile.
- # This file ↓ came from a microbenchmarks waterfall run.
- # https://evergreen.mongodb.com/version/performance_996dcdc3d96346d71f012388eccc79c691619340
- # Artifacts eventually expire. If this fails, grab the compile artifacts url and update this.
-# _skip_remote_file: &_remote_file
-# perf/608778863e8e8676d756de72/abc2b21e0d47c527c00df30fcf322ff01699877d/linux/mongodb-608778863e8e8676d756de72.tar.gz
-# _skip_compile: &_compile
-# []
-# _skip_expansions: &_expansion_updates
-# - key: mdb_binary_for_server
-# value: https://mciuploads.s3.amazonaws.com/perf/608778863e8e8676d756de72/abc2b21e0d47c527c00df30fcf322ff01699877d/linux/mongodb-608778863e8e8676d756de72.tar.gz
-# - key: mdb_binary_for_client
-# value: https://mciuploads.s3.amazonaws.com/perf/608778863e8e8676d756de72/abc2b21e0d47c527c00df30fcf322ff01699877d/linux/mongodb-608778863e8e8676d756de72.tar.gz
- ###
+ if [ -d "${scons_cache_path}" ]; then
+ /opt/mongodbtoolchain/v3/bin/python3 buildscripts/scons_cache_prune.py --cache-dir ${scons_cache_path} --cache-size ${scons_cache_size|200} --prune-ratio ${scons_prune_ratio|0.8}
+ fi
+ - func: "upload pip requirements"
- _src_dir: &src_dir src/mongo
- _modules: &modules
- - enterprise
- - mongo-tools
- - dsi
- - genny
- - signal-processing
- - workloads
- - linkbench
- - linkbench2
- - mongo-perf
- - YCSB
- - benchmarks
- - py-tpcc
+# Be sure to add the module to git.get_project revisions parameter so the revision in the manifest
+# is the version of the repository that's cloned.
modules:
- ###
- # Same in every DSI project. Ensure that this block is synchronized with
- # evergreen-dsitest.yml, atlas/system_perf_atlas.yml, and src/dsi/onboarding.py
- # (search update-repos-here) in this repo, and etc/system_perf.yml and
- # etc/perf.yml in mongodb/mongo
- - name: dsi
- repo: git@github.com:10gen/dsi.git
- prefix: ../../src
- branch: master
- - name: genny
- repo: git@github.com:10gen/genny.git
- prefix: ../../src
- branch: master
- - name: signal-processing
- repo: git@github.com:10gen/signal-processing.git
- prefix: ../../src
- branch: master
- - name: workloads
- repo: git@github.com:10gen/workloads.git
- prefix: ../../src
- branch: master
- - name: linkbench
- repo: git@github.com:10gen/linkbench.git
- prefix: ../../src
- branch: master
- - name: linkbench2
- repo: git@github.com:10gen/linkbench2.git
- prefix: ../../src
- branch: master
- - name: mongo-perf
- repo: git@github.com:mongodb/mongo-perf.git
- prefix: ../../src
- branch: master
- - name: YCSB
- repo: git@github.com:mongodb-labs/YCSB.git
- prefix: ../../src
- branch: master
- ref: 4e7287880c04514cad2df5761b9511c940a33059
- - name: benchmarks
- repo: git@github.com:mongodb-labs/benchmarks.git
- prefix: ../../src
- branch: master
- - name: py-tpcc
- repo: git@github.com:mongodb-labs/py-tpcc.git
- prefix: ../../src
- branch: master
- ref: 2d19705337a40e24831a904266a648b85df5be84
+- name: enterprise
+ repo: git@github.com:10gen/mongo-enterprise-modules.git
+ prefix: src/mongo/db/modules
+ branch: v4.2
- ###
-# - name: mongo
-# repo: git@github.com:mongodb/mongo.git
-# prefix: ../../src
-# branch: master
- - name: enterprise
- repo: git@github.com:10gen/mongo-enterprise-modules.git
- prefix: src/mongo/db/modules
- branch: v4.2
+- name: genny
+ repo: git@github.com:mongodb/genny.git
+ branch: legacy
+- name: dsi
+ repo: git@github.com:10gen/dsi.git
+ branch: legacy
-###
-# Same in every DSI project
-pre:
- - func: f_other_pre_ops
- - func: f_dsi_pre_run
-post:
- - func: f_dsi_post_run
- - func: f_other_post_ops
-timeout:
- - func: f_dsi_timeout
- - func: f_other_timeout
-###
+- name: mongo-perf
+ repo: git@github.com:mongodb/mongo-perf.git
+ branch: master
+
+- name: signal-processing
+ repo: git@github.com:10gen/signal-processing.git
+ prefix: signal_processing
+ branch: master
functions:
- ###
- # Same in every DSI project
- f_dsi_pre_run:
- - command: manifest.load
- - command: expansions.update
- params:
- updates: *_expansion_updates
- f_run_dsi_workload:
- - command: git.get_project
+ "git get project": &git_get_project
+ command: git.get_project
+ params:
+ directory: src
+ revisions: # for each module include revision as <module_name> : ${<module_name>_rev}
+ enterprise: ${enterprise_rev}
+ genny: ${genny_rev}
+ dsi: ${dsi_rev}
+ mongo-perf: ${mongo-perf_rev}
+ signal-processing: ${signal-processing_rev}
+ "start server":
+ - command: s3.get
params:
- directory: *src_dir
- revisions:
- dsi: ${dsi_rev}
- genny: ${genny_rev}
- signal-processing: ${signal-processing_rev}
- linkbench: ${linkbench_rev}
- linkbench2: ${linkbench2_rev}
- workloads: ${workloads_rev}
- mongo-perf: ${mongo-perf_rev}
- YCSB: ${YCSB_rev}
- benchmarks: ${benchmarks_rev}
- py-tpcc: ${py-tpcc_rev}
- - command: expansions.write
+ aws_key: ${aws_key}
+ aws_secret: ${aws_secret}
+ remote_file: ${project}/${version_id}/${revision}/mongod-${version_id}
+ bucket: mciuploads
+ local_file: src/mongod
+ - command: s3.get
params:
- file: ./expansions.yml
+ aws_key: ${aws_key}
+ aws_secret: ${aws_secret}
+ remote_file: ${project}/${version_id}/${revision}/mongo-${version_id}
+ bucket: mciuploads
+ local_file: src/mongo
- command: shell.exec
params:
- script: ./src/dsi/run-dsi run_workload
+ working_dir: src
+ script: |
+ set -e
+ set -v
+ chmod +x mongod
+ chmod +x mongo
- command: shell.exec
- type: system
params:
- script: ./src/dsi/run-dsi determine_failure -m SYSTEM
+ background: true
+ working_dir: src
+ script: |
+ set -e
+ set -o verbose
+ mkdir -p ./dbdata
+ ${mongod_exec_wrapper} ./mongod --dbpath ./dbdata ${mongod_flags}
- command: shell.exec
- type: setup
params:
- script: ./src/dsi/run-dsi determine_failure -m SETUP
+ working_dir: src
+ script: |
+ set -e
+ set -o verbose
+ sleep 5
+
+ # if we started a replset, initiate it and wait for it to become primary
+ #
+ # Note: This process is always currently started with --nojournal (not a recommended production configuration, see
+ # https://docs.mongodb.com/manual/tutorial/manage-journaling/#disable-journaling).
+ # As a result, writeConcernMajorityJournalDefault can be set to false. If this becomes configurable later
+ # then the correct value should be passed to rs.initiate or getCmdLineOpts needs to interrogated (but
+ # only after db.createUser).
+ ./mongo --eval "if(db.isMaster().isreplicaset){\
+ rs.initiate({_id: 'test', version: 1, members: [ { _id: 0, host : 'localhost:27017' }], writeConcernMajorityJournalDefault:false});\
+ assert.soon(function(){return db.isMaster().ismaster}, 'no primary');\
+ }"
+
+ # benchRun() authenticates against the admin db, with a user that must has admin access.
+ # Note: This is possibly a legacy requirement from times when it would call serverStatus.
+ # Btw, when mongod is started without --auth, these should be harmless no-ops
+ ./mongo --eval "db.createUser({user: 'admin', pwd: 'password', roles:\
+ [ { role: 'root', db: 'admin' } ] })"\
+ admin
+
+ # print the replset config unless this is a standalone
+ ./mongo --eval "if( db.isMaster().hosts ) { printjson(rs.config()); }" --username admin --password password admin
+ echo "MONGOD STARTED."
+ "analyze":
- command: shell.exec
- type: test
params:
- script: ./src/dsi/run-dsi determine_failure -m TEST
- f_dsi_post_run:
- - command: shell.exec
+ working_dir: src
+ silent: true
+ script: |
+ is_patch=${is_patch}
+ task_id=${task_id}
+ perf_jira_user=${perf_jira_user}
+ perf_jira_pw=${perf_jira_pw}
+ dsi_analysis_atlas_user=${dsi_analysis_atlas_user}
+ dsi_analysis_atlas_pw=${dsi_analysis_atlas_pw}
+ evergreen_api_key=${evergreen_api_key}
+ evergreen_api_user=${evergreen_api_user}
+ source ./buildscripts/signal_processing_setup.sh
+ detect-changes --config .signal-processing.yml
+ - command: json.get_history
params:
- script: ./src/dsi/run-dsi post_run
- - command: attach.results
+ task: ${task_name}
+ file: "src/history.json"
+ name: "perf"
+ - command: json.get_history
params:
- file_location: ./build/EvergreenResultsJson/results.json
- - command: json.send
+ tags: true
+ task: ${task_name}
+ file: "src/tags.json"
+ name: "perf"
+ - command: shell.exec
params:
- name: perf
- file: ./build/LegacyPerfJson/perf.json
- - command: s3.put
+ working_dir: src
+ script: |
+ set -o errexit
+ set -o verbose
+ cat > overrides.yml <<EOF
+ bootstrap:
+ production: true
+ test_control:
+ reports_dir_basename: ..
+ runtime:
+ # evergreen default expansions
+ is_patch: ${is_patch}
+ task_id: ${task_id}
+ EOF
+
+ cp ./dsi/configurations/analysis/analysis.microbenchmarks.yml analysis.yml
+ - command: shell.exec
params:
- aws_key: ${aws_key}
- aws_secret: ${aws_secret}
- local_file: ./build/Artifacts/DSIArtifacts.tgz
- remote_file: ${project_dir}/${build_variant}/${revision}/${task_id}/${version_id}/logs/dsi-artifacts-${task_name}-${build_id}-${execution}.tgz
- bucket: mciuploads
- permissions: public-read
- content_type: application/x-gzip
- display_name: DSI Artifacts - Execution ${execution}
- - command: s3.put
+ working_dir: src
+ silent: true
+ script: |
+ cat > runtime_secret.yml <<EOF
+ dsi_analysis_atlas_user: "${dsi_analysis_atlas_user}"
+ dsi_analysis_atlas_pw: "${dsi_analysis_atlas_pw}"
+ EOF
+ chmod 400 runtime_secret.yml
+ - command: shell.exec
+ type: test
params:
- aws_key: ${aws_key}
- aws_secret: ${aws_secret}
- local_file: ./build/Documentation/index.html
- remote_file: ${project_dir}/${build_variant}/${revision}/${task_id}/${version_id}/logs/${task_name}-${build_id}-index.html
- bucket: mciuploads
- permissions: public-read
- content_type: text/html
- display_name: Documentation
- f_dsi_timeout:
+ working_dir: src
+ script: |
+ set -o errexit
+ set -o verbose
+ ./dsi/run-dsi setup
+ source ./dsi/dsi_venv/bin/activate
+ ./dsi/bin/analysis.py
+
+ # Params:
+ # workload: the path relative to genny/src/workloads to run e.g.
+ # scale/InsertRemove.yml
+ "run genny workload":
+ # Calling the git.get_project command here will clone the mongodb/mongo repository, as well as
+ # the repositories defined in the build variant's "modules" section. Build variants running a
+ # task which calls this function must include "genny" as a module.
+ - *git_get_project
- command: shell.exec
params:
- script: ./src/dsi/run-dsi on_timeout
- ###
+ working_dir: src/genny
+ script: |
+ set -eo pipefail
- f_other_post_ops:
- - command: shell.exec
- params:
- working_dir: src
- script: |
- # removes files from the (local) scons cache when it's over a
- # threshold, to the $prune_ratio percentage. Ideally override
- # these default values in the distro config in evergreen.
- if [ -d "${scons_cache_path}" ]; then
- /opt/mongodbtoolchain/v3/bin/python3 buildscripts/scons_cache_prune.py --cache-dir ${scons_cache_path} --cache-size ${scons_cache_size|200} --prune-ratio ${scons_prune_ratio|0.8}
- fi
- f_other_pre_ops:
- - &f_other_pre_ops
- command: shell.exec
+ export PATH="/opt/mongodbtoolchain/v3/bin:$PATH"
+ python3 -m virtualenv ./venv
+ source ./venv/bin/activate
+ python -m pip install ./src/python
+
+ ./scripts/lamp --linux-distro rhel62
+ ./scripts/genny run -w "./dist/etc/genny/workloads/${workload}" -u 'mongodb://admin:password@localhost:27017'
+ genny-metrics-legacy-report --report-file "${workdir}/src/perf.json" build/genny-metrics.csv
+ - command: "json.send"
params:
- silent: true
+ name: "perf"
+ file: "src/perf.json"
+
+ "run perf tests":
+ - command: shell.exec
+ params:
+ working_dir: src
script: |
- for PS in mongo{,d,s,import,export,dump,restore,stat,files,top,bridge} resmoke.py python{,2} lldb _test; do
- pkill -9 "$PS"
- done
- f_other_timeout:
- # Can't be empty so just `echo`.
+ set -e
+ set -v
+ virtualenv ./venv
+ source ./venv/bin/activate
+ pip install argparse
- command: shell.exec
- params: {script: "echo"}
+ type: test
+ params:
+ working_dir: src
+ script: |
+ set -e
+ set -v
+ source ./venv/bin/activate
+ cd mongo-perf
+ mkdir -p ../perf
+ # give mongod a few seconds to start up so that we can connect.
+ sleep 5
+ ${perf_exec_wrapper} python benchrun.py --shell ../mongo -t ${threads} --trialCount 5 -f testcases/*.js --readCmd ${readCmd} --includeFilter ${includeFilter1} --includeFilter ${includeFilter2} --excludeFilter ${excludeFilter} --out ../perf/perf.json --exclude-testbed --username admin --password password
+ echo "Oplog size at end of tests..."
+ ../mongo --username admin --password password --eval "db.getSiblingDB('local').oplog.rs.totalSize()/1024/1024" admin
+ - command: "json.send"
+ params:
+ name: "perf"
+ file: "src/perf/perf.json"
- ###
- # Compile
- compile mongodb:
+ "upload pip requirements":
+ command: s3.put
+ params:
+ aws_key: ${aws_key}
+ aws_secret: ${aws_secret}
+ local_file: pip-requirements.txt
+ remote_file: ${project}/${build_variant}/${revision}/pip-requirements-${task_id}-${execution}.txt
+ bucket: mciuploads
+ permissions: public-read
+ content_type: atext-plain
+ display_name: Pip Requirements
+
+tasks:
+
+- name: genny_scale_InsertRemove
+ depends_on:
+ - variant: linux-wt-standalone
+ name: compile
+ commands:
+ - func: "start server"
+ - func: "run genny workload"
+ vars:
+ workload: scale/InsertRemove.yml
+
+- name: compile
+ commands:
+ # Calling the git.get_project command here will clone the mongodb/mongo repository, as well as
+ # the repositories defined in the build variant's "modules" section.
+ - func: "git get project"
# We create a virtual environment with the Python dependencies for compiling the server
# installed.
- command: shell.exec
@@ -236,9 +302,11 @@ functions:
script: |
set -o errexit
set -o verbose
+
/opt/mongodbtoolchain/v3/bin/virtualenv --python /opt/mongodbtoolchain/v3/bin/python3 "${workdir}/compile_venv"
/opt/mongodbtoolchain/v3/bin/virtualenv --python /opt/mongodbtoolchain/v3/bin/python2 "${workdir}/venv"
source "${workdir}/compile_venv/bin/activate"
+
python -m pip install -r etc/pip/compile-requirements.txt
- command: shell.exec
params:
@@ -246,461 +314,377 @@ functions:
script: |
set -o errexit
set -o verbose
+
# We get the raw version string (r1.2.3-45-gabcdef) from git
MONGO_VERSION=$(git describe --abbrev=7)
# If this is a patch build, we add the patch version id to the version string so we know
# this build was a patch, and which evergreen task it came from
- if [ "${is_patch|false}" = "true" ]; then
+ if [ "${is_patch|}" = "true" ]; then
MONGO_VERSION="$MONGO_VERSION-patch-${version_id}"
fi
+
# This script converts the generated version string into a sanitized version string for
# use by scons and uploading artifacts as well as information about for the scons cache.
source "${workdir}/compile_venv/bin/activate"
MONGO_VERSION=$MONGO_VERSION USE_SCONS_CACHE=${use_scons_cache|false} python buildscripts/generate_compile_expansions.py --out compile_expansions.yml
+ # Then we load the generated version data into the agent so we can use it in task definitions
- command: expansions.update
params:
file: src/compile_expansions.yml
- command: shell.exec
params:
- working_dir: src/src/mongo/gotools/src/github.com/mongodb/mongo-tools
- script: |
- set -o verbose
- set -o errexit
- # make sure newlines in the scripts are handled correctly by windows
- if [ "Windows_NT" = "$OS" ]; then
- set -o igncr
- fi;
- # set_goenv provides set_goenv(), print_ldflags() and print_tags() used below
- . ./set_goenv.sh
- GOROOT="" set_goenv || exit
- go version
- build_tools="bsondump mongostat mongofiles mongoexport mongoimport mongorestore mongodump mongotop"
- if [ "${build_mongoreplay}" = "true" ]; then
- build_tools="$build_tools mongoreplay"
- fi
- for i in $build_tools; do
- go build -ldflags "$(print_ldflags)" ${args} -tags "$(print_tags ${tooltags})" -o "../../../../../../mongo-tools/$i${exe|}" $i/main/$i.go
- "../../../../../../mongo-tools/$i${exe|}" --version
- done
- - command: shell.exec
- params:
working_dir: src
script: |
set -o errexit
set -o verbose
+
source "${workdir}/compile_venv/bin/activate"
- python ./buildscripts/scons.py ${compile_flags|} ${scons_cache_args|} mongo${extension} --use-new-tools mongod${extension} mongos${extension} MONGO_VERSION=${version}
- mkdir -p mongodb/bin
- mkdir -p mongodb/jstests/hooks
- mv mongo${extension|} mongodb/bin
- mv mongod${extension|} mongodb/bin
- mv mongos${extension|} mongodb/bin
- mv src/mongo-tools/* mongodb/bin
- if [ -d jstests/hooks ]
- then
- echo "Fetching JS test DB correctness checks from directory jstests"
- cp -a jstests/* mongodb/jstests
- echo "Now adding our own special run_validate_collections.js wrapper"
- mv mongodb/jstests/hooks/run_validate_collections.js mongodb/jstests/hooks/run_validate_collections.actual.js
- cat << EOF > mongodb/jstests/hooks/run_validate_collections.js
- print("NOTE: run_validate_collections.js will skip the oplog!");
- TestData = { skipValidationNamespaces: ['local.oplog.rs'] };
- load('jstests/hooks/run_validate_collections.actual.js');
- EOF
- fi
- tar czf mongodb${compile-variant|}.tar.gz mongodb
+ python ./buildscripts/scons.py ${compile_flags|} ${scons_cache_args|} mongo mongod
- command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
- local_file: src/mongodb${compile-variant|}.tar.gz
- remote_file: ${project_dir}/${version_id}/${revision}/${platform}/mongodb${compile-variant|}-${version_id}.tar.gz
+ local_file: src/mongod
+ remote_file: ${project}/${version_id}/${revision}/mongod-${version_id}
bucket: mciuploads
permissions: public-read
- content_type: ${content_type|application/x-gzip}
- display_name: mongodb${compile-variant|}.tar.gz
- ###
-
- ## Schedule Tasks ##
- f_schedule_tasks:
- - command: git.get_project
- params:
- directory: *src_dir
- revisions:
- dsi: ${dsi_rev}
- genny: ${genny_rev}
- signal-processing: ${signal-processing_rev}
- linkbench: ${linkbench_rev}
- linkbench2: ${linkbench2_rev}
- workloads: ${workloads_rev}
- mongo-perf: ${mongo-perf_rev}
- YCSB: ${YCSB_rev}
- benchmarks: ${benchmarks_rev}
- py-tpcc: ${py-tpcc_rev}
- - command: expansions.write
- params:
- file: ./expansions.yml
- - command: shell.exec
- params:
- script: ./src/dsi/run-dsi schedule_tasks --tasks=${tasks}
- - command: generate.tasks
+ content_type: ${content_type|application/octet-stream}
+ display_name: mongod
+ - command: s3.put
params:
- files:
- - build/TaskJSON/Tasks.json
-
-tasks:
- ###
- # Same in every DSI project
- - name: schedule_global_auto_tasks
- priority: 5
- commands:
- - func: f_schedule_tasks
- vars:
- tasks: all_tasks
- - name: schedule_variant_auto_tasks
- priority: 5
- commands:
- - func: f_schedule_tasks
- vars:
- tasks: variant_tasks
- - name: schedule_patch_auto_tasks
- priority: 5
- commands:
- - func: f_schedule_tasks
- vars:
- tasks: patch_tasks
- - name: smoke_test
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: short
- - name: smoke_test_ssl
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: short
- mongodb_setup: replica-ssl
- infrastructure_provisioning: replica
- - name: smoke_test_standalone_auth
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: short
- mongodb_setup: standalone-auth
- infrastructure_provisioning: single
- - name: smoke_test_replset_auth
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: short
- mongodb_setup: replica-auth
- infrastructure_provisioning: replica
- - name: smoke_test_shard_lite_auth
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: short
- mongodb_setup: shard-lite-auth
- infrastructure_provisioning: shard-lite
- - name: dsi_integ_test_run_command_simple
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: run_command_simple
- ###
+ aws_key: ${aws_key}
+ aws_secret: ${aws_secret}
+ local_file: src/build/cached/mongo/mongo
+ remote_file: ${project}/${version_id}/${revision}/mongo-${version_id}
+ bucket: mciuploads
+ permissions: public-read
+ content_type: ${content_type|application/octet-stream}
+ display_name: mongo
+- name: query
+ depends_on:
+ - variant: linux-wt-standalone
+ name: compile
+ commands:
+ - func: "git get project"
+ - func: "start server"
+ - func: "run perf tests"
+ vars:
+ includeFilter1: "query"
+ includeFilter2: "core regression"
+ excludeFilter: "single_threaded"
+ threads: "1 2 4 8"
+ readCmd: false
+ - func: "analyze"
+ vars:
+ reports_analysis: true
+- name: views-query
+ depends_on:
+ - variant: linux-wt-standalone
+ name: compile
+ commands:
+ - func: "git get project"
+ - func: "start server"
+ - func: "run perf tests"
+ vars:
+ includeFilter1: "query_identityview"
+ includeFilter2: "core regression"
+ excludeFilter: "single_threaded"
+ threads: "1 2 4 8"
+ readCmd: true
+ - func: "analyze"
+ vars:
+ reports_analysis: true
+- name: views-aggregation
+ depends_on:
+ - variant: linux-wt-standalone
+ name: compile
+ commands:
+ - func: "git get project"
+ - func: "start server"
+ - func: "run perf tests"
+ vars:
+ includeFilter1: "aggregation_identityview"
+ includeFilter2: "regression"
+ excludeFilter: "none"
+ threads: "1"
+ readCmd: true
+ - func: "analyze"
+ vars:
+ report_analysis: true
+- name: where
+ depends_on:
+ - variant: linux-wt-standalone
+ name: compile
+ commands:
+ - func: "git get project"
+ - func: "start server"
+ - func: "run perf tests"
+ vars:
+ includeFilter1: "where"
+ includeFilter2: "core regression"
+ excludeFilter: "single_threaded"
+ threads: "1 2 4 8"
+ readCmd: false
+ - func: "analyze"
+ vars:
+ reports_analysis: true
+- name: update
+ depends_on:
+ - variant: linux-wt-standalone
+ name: compile
+ commands:
+ - func: "git get project"
+ - func: "start server"
+ - func: "run perf tests"
+ vars:
+ includeFilter1: "update"
+ includeFilter2: "core regression"
+ excludeFilter: "single_threaded"
+ threads: "1 2 4 8"
+ readCmd: false
+ - func: "analyze"
+ vars:
+ reports_analysis: true
+- name: insert
+ depends_on:
+ - variant: linux-wt-standalone
+ name: compile
+ commands:
+ - func: "git get project"
+ - func: "start server"
+ - func: "run perf tests"
+ vars:
+ includeFilter1: "insert"
+ includeFilter2: "core regression"
+ excludeFilter: "single_threaded"
+ threads: "1 2 4 8"
+ readCmd: false
+ - func: "analyze"
+ vars:
+ reports_analysis: true
+- name: wildcard-index-read
+ depends_on:
+ - variant: linux-wt-standalone
+ name: compile
+ commands:
+ - func: "git get project"
+ - func: "start server"
+ - func: "run perf tests"
+ vars:
+ includeFilter1: "wildcard_read"
+ includeFilter2: "core regression"
+ excludeFilter: "single_threaded"
+ threads: "1 2 4 8"
+ readCmd: false
+ - func: "analyze"
+ vars:
+ reports_analysis: true
+- name: wildcard-index-write
+ depends_on:
+ - variant: linux-wt-standalone
+ name: compile
+ commands:
+ - func: "git get project"
+ - func: "start server"
+ - func: "run perf tests"
+ vars:
+ includeFilter1: "wildcard_write"
+ includeFilter2: "core regression"
+ excludeFilter: "single_threaded"
+ threads: "1 2 4 8"
+ readCmd: false
+ - func: "analyze"
+ vars:
+ reports_analysis: true
+- name: geo
+ depends_on:
+ - variant: linux-wt-standalone
+ name: compile
+ commands:
+ - func: "git get project"
+ - func: "start server"
+ - func: "run perf tests"
+ vars:
+ includeFilter1: "geo"
+ includeFilter2: "core regression"
+ excludeFilter: "single_threaded"
+ threads: "1 2 4 8"
+ readCmd: false
+ - func: "analyze"
+ vars:
+ reports_analysis: true
+- name: misc
+ depends_on:
+ - variant: linux-wt-standalone
+ name: compile
+ commands:
+ - func: "git get project"
+ - func: "start server"
+ - func: "run perf tests"
+ vars:
+ includeFilter1: "command multi remove mixed"
+ includeFilter2: "core regression"
+ excludeFilter: "single_threaded"
+ threads: "1 2 4 8"
+ readCmd: false
+ - func: "analyze"
+ vars:
+ reports_analysis: true
+- name: singleThreaded
+ depends_on:
+ - variant: linux-wt-standalone
+ name: compile
+ commands:
+ - func: "git get project"
+ - func: "start server"
+ - func: "run perf tests"
+ vars:
+ includeFilter1: "single_threaded"
+ includeFilter2: "core regression"
+ excludeFilter: "none"
+ threads: "1"
+ readCmd: false
+ - func: "analyze"
+ vars:
+ reports_analysis: true
+- name: aggregation
+ depends_on:
+ - variant: linux-wt-standalone
+ name: compile
+ commands:
+ - func: "git get project"
+ - func: "start server"
+ - func: "run perf tests"
+ vars:
+ includeFilter1: "aggregation"
+ includeFilter2: "regression"
+ excludeFilter: "none"
+ threads: "1"
+ readCmd: false
+ - func: "analyze"
+ vars:
+ report_analysis: true
+- name: agg-query-comparison
+ depends_on:
+ - variant: linux-wt-standalone
+ name: compile
+ commands:
+ - func: "git get project"
+ - func: "start server"
+ - func: "run perf tests"
+ vars:
+ includeFilter1: "agg_query_comparison"
+ includeFilter2: "core regression"
+ excludeFilter: "single_threaded"
+ threads: "1 2 4 8"
+ readCmd: false
+ - func: "analyze"
+ vars:
+ reports_analysis: true
+- name: pipeline-updates
+ depends_on:
+ - variant: linux-wt-standalone
+ name: compile
+ commands:
+ - func: "git get project"
+ - func: "start server"
+ - func: "run perf tests"
+ vars:
+ includeFilter1: "pipeline-updates"
+ includeFilter2: "regression"
+ excludeFilter: "none"
+ threads: "1 2 4 8"
+ readCmd: true
+ - func: "analyze"
+ vars:
+ reports_analysis: true
+buildvariants:
+- name: linux-wt-standalone
+ display_name: Standalone Linux inMemory
+ batchtime: 90 # 1.5 hours
+ modules:
+ - enterprise
+ - genny
+ - dsi
+ - mongo-perf
+ - signal-processing
+ expansions:
+ # We are explicitly tracking the rhel62 variant compile options from evergreen.yml for
+ # microbenchmarks, since they run on the centos6 boxes. If we can get proper artifacts directly
+ # from that project, we should do that and remove the compile tasks.
+ compile_flags: --ssl MONGO_DISTMOD=rhel62 -j$(grep -c ^processor /proc/cpuinfo) --release --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
+ mongod_exec_wrapper: &exec_wrapper "numactl --physcpubind=4,5,6,7 -i 1"
+ perf_exec_wrapper: &perf_wrapper "numactl --physcpubind=1,2,3 -i 0"
+ mongod_flags: >-
+ --auth
+ --fork
+ --inMemoryEngineConfigString 'eviction=(threads_min=1),'
+ --inMemorySizeGB 60
+ --logpath ./mongod.log
+ --networkMessageCompressors noop
+ --setParameter diagnosticDataCollectionEnabled=false
+ --setParameter enableTestCommands=1
+ --setParameter ttlMonitorEnabled=false
+ --storageEngine inMemory
+ --syncdelay 0
+ use_scons_cache: true
+ project: &project perf
+ run_on:
+ - "centos6-perf"
+ tasks:
- name: compile
- commands:
- - command: manifest.load
- - command: git.get_project
- params:
- directory: src
- revisions:
- enterprise: ${enterprise_rev}
- mongo-tools: ${mongo-tools_rev}
- - func: "compile mongodb"
-
+ distros:
+ - rhel62-large
- name: genny_scale_InsertRemove
- depends_on: *_compile
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: auto_genny_workload
- auto_workload_path: scale/InsertRemove.yml
-
+ - name: aggregation
+ - name: agg-query-comparison
- name: query
- depends_on: *_compile
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: microbenchmarks
- test_control_params: |
- {include_filter_1: query,
- include_filter_2: core regression,
- exclude_filter: single_threaded,
- threads: "1 2 4 8",
- read_cmd: 'false'}
- - name: views-query
- depends_on: *_compile
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: microbenchmarks
- test_control_params: |
- {include_filter_1: query_identityview,
- include_filter_2: core regression,
- exclude_filter: single_threaded,
- threads: "1 2 4 8",
- read_cmd: 'true'}
- name: views-aggregation
- depends_on: *_compile
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: microbenchmarks
- test_control_params: |
- {include_filter_1: aggregation_identityview,
- include_filter_2: regression,
- exclude_filter: none,
- threads: "1",
- read_cmd: 'true'}
+ - name: views-query
- name: where
- depends_on: *_compile
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: microbenchmarks
- test_control_params: |
- {include_filter_1: where,
- include_filter_2: core regression,
- exclude_filter: single_threaded,
- threads: "1 2 4 8",
- read_cmd: 'false'}
- name: update
- depends_on: *_compile
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: microbenchmarks
- test_control_params: |
- {include_filter_1: update,
- include_filter_2: core regression,
- exclude_filter: single_threaded,
- threads: "1 2 4 8",
- read_cmd: 'false'}
- name: insert
- depends_on: *_compile
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: microbenchmarks
- test_control_params: |
- {include_filter_1: insert,
- include_filter_2: core regression,
- exclude_filter: single_threaded,
- threads: "1 2 4 8",
- read_cmd: 'false'}
- name: wildcard-index-read
- depends_on: *_compile
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: microbenchmarks
- test_control_params: |
- {include_filter_1: wildcard_read,
- include_filter_2: core regression,
- exclude_filter: single_threaded,
- threads: "1 2 4 8",
- read_cmd: 'false'}
- name: wildcard-index-write
- depends_on: *_compile
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: microbenchmarks
- test_control_params: |
- {include_filter_1: wildcard_write,
- include_filter_2: core regression,
- exclude_filter: single_threaded,
- threads: "1 2 4 8",
- read_cmd: 'false'}
- name: geo
- depends_on: *_compile
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: microbenchmarks
- test_control_params: |
- {include_filter_1: geo,
- include_filter_2: core regression,
- exclude_filter: single_threaded,
- threads: "1 2 4 8",
- read_cmd: 'false'}
- name: misc
- depends_on: *_compile
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: microbenchmarks
- test_control_params: |
- {include_filter_1: command multi remove mixed,
- include_filter_2: core regression,
- exclude_filter: single_threaded,
- threads: "1 2 4 8",
- read_cmd: 'false'}
- name: singleThreaded
- depends_on: *_compile
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: microbenchmarks
- test_control_params: |
- {include_filter_1: single_threaded,
- include_filter_2: core regression,
- exclude_filter: none,
- threads: "1",
- read_cmd: 'false'}
- - name: aggregation
- depends_on: *_compile
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: microbenchmarks
- test_control_params: |
- {include_filter_1: aggregation,
- include_filter_2: regression,
- exclude_filter: js,
- threads: "1",
- read_cmd: 'false'}
- - name: agg-query-comparison
- depends_on: *_compile
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: microbenchmarks
- test_control_params: |
- {include_filter_1: agg_query_comparison,
- include_filter_2: core regression,
- exclude_filter: single_threaded,
- threads: "1 2 4 8",
- read_cmd: 'false'}
- name: pipeline-updates
- depends_on: *_compile
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: microbenchmarks
- test_control_params: |
- {include_filter_1: pipeline-updates,
- include_filter_2: regression,
- exclude_filter: none,
- threads: "1 2 4 8",
- read_cmd: 'true'}
-buildvariants:
- - name: linux-wt-standalone
- display_name: Standalone Linux inMemory
- batchtime: 90 # 1.5 hours
- modules:
- - enterprise
- - genny
- - dsi
- - mongo-perf
- - signal-processing
- expansions:
- # We are explicitly tracking the rhel62 variant compile options from evergreen.yml for
- # microbenchmarks, since they run on the centos6 boxes. If we can get proper artifacts directly
- # from that project, we should do that and remove the compile tasks.
- compile_flags: --ssl MONGO_DISTMOD=rhel62 -j$(grep -c ^processor /proc/cpuinfo) --release --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
- mongod_exec_wrapper: &exec_wrapper "numactl --physcpubind=4,5,6,7 -i 1"
- perf_exec_wrapper: &perf_wrapper "numactl --physcpubind=1,2,3 -i 0"
- mongod_flags: >-
- --auth
- --fork
- --inMemoryEngineConfigString 'eviction=(threads_min=1),'
- --inMemorySizeGB 60
- --logpath ./mongod.log
- --networkMessageCompressors noop
- --setParameter diagnosticDataCollectionEnabled=false
- --setParameter enableTestCommands=1
- --setParameter ttlMonitorEnabled=false
- --storageEngine inMemory
- --syncdelay 0
- use_scons_cache: true
- project: &project perf
- platform: linux
- infrastructure_provisioning: microbenchmarks
- mongodb_setup: microbenchmarks_replica
- canaries: none
- storageEngine: inMemory
- project_dir: &project_dir perf
- run_on:
- - "centos6-perf"
- tasks:
- - name: compile
- distros:
- - rhel62-large
- - name: genny_scale_InsertRemove
- - name: aggregation
- - name: agg-query-comparison
- - name: query
- - name: views-aggregation
- - name: views-query
- - name: where
- - name: update
- - name: insert
- - name: wildcard-index-read
- - name: wildcard-index-write
- - name: geo
- - name: misc
- - name: singleThreaded
- - name: pipeline-updates
-
- - name: linux-wt-repl
- display_name: 1-Node ReplSet Linux inMemory
- batchtime: 90 # 1.5 hours
- modules:
- - genny
- - dsi
- - mongo-perf
- - signal-processing
- expansions:
- mongod_exec_wrapper: *exec_wrapper
- perf_exec_wrapper: *perf_wrapper
- mongod_flags: >-
- --auth
- --fork
- --inMemoryEngineConfigString 'eviction=(threads_min=1),'
- --inMemorySizeGB 60
- --logpath ./mongod.log
- --networkMessageCompressors noop
- --oplogSize 30000
- --replSet test
- --setParameter diagnosticDataCollectionEnabled=false
- --setParameter enableTestCommands=1
- --setParameter ttlMonitorEnabled=false
- --storageEngine inMemory
- --syncdelay 0
- project: *project
- platform: linux
- infrastructure_provisioning: microbenchmarks
- mongodb_setup: microbenchmarks_replica
- canaries: none
- storageEngine: inMemory
- project_dir: *project_dir
- run_on:
- - "centos6-perf"
- tasks:
- - name: genny_scale_InsertRemove
- - name: update
- - name: insert
- - name: misc
- - name: singleThreaded
- - name: wildcard-index-write
- - name: pipeline-updates
+- name: linux-wt-repl
+ display_name: 1-Node ReplSet Linux inMemory
+ batchtime: 90 # 1.5 hours
+ modules:
+ - genny
+ - dsi
+ - mongo-perf
+ - signal-processing
+ expansions:
+ mongod_exec_wrapper: *exec_wrapper
+ perf_exec_wrapper: *perf_wrapper
+ mongod_flags: >-
+ --auth
+ --fork
+ --inMemoryEngineConfigString 'eviction=(threads_min=1),'
+ --inMemorySizeGB 60
+ --logpath ./mongod.log
+ --networkMessageCompressors noop
+ --oplogSize 30000
+ --replSet test
+ --setParameter diagnosticDataCollectionEnabled=false
+ --setParameter enableTestCommands=1
+ --setParameter ttlMonitorEnabled=false
+ --storageEngine inMemory
+ --syncdelay 0
+ project: *project
+ run_on:
+ - "centos6-perf"
+ tasks:
+ - name: genny_scale_InsertRemove
+ - name: update
+ - name: insert
+ - name: misc
+ - name: singleThreaded
+ - name: wildcard-index-write
+ - name: pipeline-updates
diff --git a/etc/system_perf.yml b/etc/system_perf.yml
index 68c3bfb52ad..141a588960a 100644
--- a/etc/system_perf.yml
+++ b/etc/system_perf.yml
@@ -1,252 +1,88 @@
-command_type: system
stepback: false
+command_type: system
-
-## Parameters for parameterized builds (see https://github.com/evergreen-ci/evergreen/wiki/Parameterized-Builds)
-parameters:
- - key: patch_compile_flags
- description: "Additional SCons flags to be applied during scons compile invocations in this patch"
-
-
-variables:
- ###
- # Leave this section uncommented to enable compile.
- _real_compile_amazon2: &_compile_amazon2
- - name: compile
- variant: compile-amazon2
- - name: schedule_global_auto_tasks
- variant: task_generation
- _real_compile_amazon-64: &_compile_amazon-64
- - name: compile
- variant: compile-linux-64-amzn
- - name: schedule_global_auto_tasks
- variant: task_generation
- _real_compile_rhel70: &_compile_rhel70
- - name: compile
- variant: compile-rhel70
- - name: schedule_global_auto_tasks
- variant: task_generation
- _real_expansions: &_expansion_updates
- []
- ###
-
-###
-# **Or**: Leave this section uncommented to bypass/skip compile.
-# _skip_compile_amazon2: &_compile_amazon2
-# - name: schedule_global_auto_tasks
-# variant: task_generation
-# _skip_compile_rhel70: &_compile_rhel70
-# - name: schedule_global_auto_tasks
-# variant: task_generation
-# _skip_compile_amazon-64: &_compile_amazon-64
-# - name: compile
-# variant: compile-linux-64-amzn
-# - name: schedule_global_auto_tasks
-# variant: task_generation
-# _skip_expansions: &_expansion_updates
-# # This is the normal (amazon2) "compile" artifact from https://evergreen.mongodb.com/task/sys_perf_4.2_compile_amazon2_compile_54e29d8dcca0b3d73898a384b57c516728edbcd2_21_04_02_18_14_59
-# - key: mdb_binary_for_client
-# value: https://mciuploads.s3.amazonaws.com/dsi/sys_perf_4.2_54e29d8dcca0b3d73898a384b57c516728edbcd2/54e29d8dcca0b3d73898a384b57c516728edbcd2/linux/mongodb-enterprise-sys_perf_4.2_54e29d8dcca0b3d73898a384b57c516728edbcd2.tar.gz
-# - key: mdb_binary_for_server
-# value: https://mciuploads.s3.amazonaws.com/dsi/sys_perf_4.2_54e29d8dcca0b3d73898a384b57c516728edbcd2/54e29d8dcca0b3d73898a384b57c516728edbcd2/linux/mongodb-enterprise-sys_perf_4.2_54e29d8dcca0b3d73898a384b57c516728edbcd2.tar.gz
-###
-
- _src_dir: &src_dir src/mongo
- _modules: &modules
- - enterprise
- # - mongo-tools
- # - mongo
- - dsi
- - genny
- - signal-processing
- - workloads
- - linkbench
- - linkbench2
- - mongo-perf
- - YCSB
- - benchmarks
- - py-tpcc
-
-
-modules:
- ###
- # Same in every DSI project. Ensure that this block is synchronized with
- # evergreen-dsitest.yml, atlas/system_perf_atlas.yml, and src/dsi/onboarding.py
- # (search update-repos-here) in this repo, and etc/system_perf.yml and
- # etc/perf.yml in mongodb/mongo
- - name: dsi
- repo: git@github.com:10gen/dsi.git
- prefix: ../../src
- branch: master
- - name: genny
- repo: git@github.com:10gen/genny.git
- prefix: ../../src
- branch: master
- - name: signal-processing
- repo: git@github.com:10gen/signal-processing.git
- prefix: ../../src
- branch: master
- - name: workloads
- repo: git@github.com:10gen/workloads.git
- prefix: ../../src
- branch: master
- - name: linkbench
- repo: git@github.com:10gen/linkbench.git
- prefix: ../../src
- branch: master
- - name: linkbench2
- repo: git@github.com:10gen/linkbench2.git
- prefix: ../../src
- branch: master
- - name: mongo-perf
- repo: git@github.com:mongodb/mongo-perf.git
- prefix: ../../src
- branch: master
- - name: YCSB
- repo: git@github.com:mongodb-labs/YCSB.git
- prefix: ../../src
- branch: master
- ref: 4e7287880c04514cad2df5761b9511c940a33059
- - name: benchmarks
- repo: git@github.com:mongodb-labs/benchmarks.git
- prefix: ../../src
- branch: master
- - name: py-tpcc
- repo: git@github.com:mongodb-labs/py-tpcc.git
- prefix: ../../src
- branch: master
- ref: 2d19705337a40e24831a904266a648b85df5be84
-# - name: mongo
-# repo: git@github.com:mongodb/mongo.git
-# prefix: ../../src
-# branch: master
- ###
- - name: enterprise
- repo: git@github.com:10gen/mongo-enterprise-modules.git
- prefix: src/mongo/db/modules
- branch: v4.2
-# - name: mongo-tools
-# repo: git@github.com:mongodb/mongo-tools.git
-# prefix: mongo-tools/src/github.com/mongodb
-# branch: master
-
-
-###
-# Same in every DSI project
pre:
- - func: f_other_pre_ops
- - func: f_dsi_pre_run
-post:
- - func: f_dsi_post_run
- - func: f_other_post_ops
-timeout:
- - func: f_dsi_timeout
- - func: f_other_timeout
-###
-
-functions:
- ###
- # Same in every DSI project
- f_dsi_pre_run:
- - command: manifest.load
- - command: expansions.update
- params:
- updates: *_expansion_updates
- f_run_dsi_workload:
- - command: git.get_project
- params:
- directory: *src_dir
- revisions:
- dsi: ${dsi_rev}
- genny: ${genny_rev}
- signal-processing: ${signal-processing_rev}
- linkbench: ${linkbench_rev}
- linkbench2: ${linkbench2_rev}
- workloads: ${workloads_rev}
- mongo-perf: ${mongo-perf_rev}
- YCSB: ${YCSB_rev}
- benchmarks: ${benchmarks_rev}
- py-tpcc: ${py-tpcc_rev}
- # mongo: ${mongo_rev}
- - command: expansions.write
- params:
- file: ./expansions.yml
- - command: shell.exec
- params:
- script: ./src/dsi/run-dsi run_workload
- - command: shell.exec
- type: system
- params:
- script: ./src/dsi/run-dsi determine_failure -m SYSTEM
+post:
+ # Do cluster teardown first to ensure runtime is below Evergreen's post timeout. Other post tasks
+ # will not have been run if the timeout is exceeded.
- command: shell.exec
- type: setup
params:
- script: ./src/dsi/run-dsi determine_failure -m SETUP
+ working_dir: work
+ script: |
+ source ./dsienv.sh
+ if [ -e /data/infrastructure_provisioning/terraform/provisioned.${cluster} ]; then
+ mark_idle.sh
+ fi
- command: shell.exec
- type: test
params:
- script: ./src/dsi/run-dsi determine_failure -m TEST
- f_dsi_post_run:
+ working_dir: work
+ script: |
+ set -e
+ set -v
+ source ./dsienv.sh
+ ../src/dsi/dsi/run-dsi infrastructure_teardown.py
- command: shell.exec
params:
- script: ./src/dsi/run-dsi post_run
- - command: attach.results
- params:
- file_location: ./build/EvergreenResultsJson/results.json
- - command: json.send
- params:
- name: perf
- file: ./build/LegacyPerfJson/perf.json
+ working_dir: work
+ script: |
+ source ./dsienv.sh
+ make_artifact.sh
- command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
- local_file: ./build/Artifacts/DSIArtifacts.tgz
- remote_file: ${project_dir}/${build_variant}/${revision}/${task_id}/${version_id}/logs/dsi-artifacts-${task_name}-${build_id}-${execution}.tgz
+ local_file: work/dsi-artifacts.tgz
+ remote_file: ${project_dir}/${build_variant}/${revision}/${task_id}/${version_id}/logs/dsi-artifacts-${task_name}-${build_id}-${execution}.${ext|tgz}
bucket: mciuploads
permissions: public-read
- content_type: application/x-gzip
- display_name: DSI Artifacts - Execution ${execution}
+ content_type: ${content_type|application/x-gzip}
+ display_name: Dsi Artifacts - Execution ${execution}
- command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
- local_file: ./build/Documentation/index.html
- remote_file: ${project_dir}/${build_variant}/${revision}/${task_id}/${version_id}/logs/${task_name}-${build_id}-index.html
+ local_file: src/workloads/workloads/jsdoc/jsdocs-redirect.html
+ remote_file: ${project_dir}/${build_variant}/${revision}/${task_id}/${version_id}/logs/workloads-${task_name}-${build_id}.html
bucket: mciuploads
permissions: public-read
content_type: text/html
- display_name: Documentation
- f_dsi_timeout:
- - command: shell.exec
+ display_name: workloads documentation
+ - command: attach.results
params:
- script: ./src/dsi/run-dsi on_timeout
- ###
-
- f_other_post_ops:
- - command: shell.exec
- params:
- working_dir: src
- script: |
- # removes files from the (local) scons cache when it's over a
- # threshold, to the $prune_ratio percentage. Ideally override
- # these default values in the distro config in evergreen.
- if [ -d "${scons_cache_path}" ]; then
- /opt/mongodbtoolchain/v3/bin/python3 buildscripts/scons_cache_prune.py --cache-dir ${scons_cache_path} --cache-size ${scons_cache_size|200} --prune-ratio ${scons_prune_ratio|0.8}
- fi
- f_other_pre_ops:
- # Can't be empty so just `echo`.
- - command: shell.exec
- params: {script: "echo"}
- f_other_timeout:
- # Can't be empty so just `echo`.
+ file_location: work/report.json
+ - command: "json.send"
+ params:
+ name: "perf"
+ file: "work/perf.json"
+ - func: "upload pip requirements"
- command: shell.exec
- params: {script: "echo"}
+ params:
+ working_dir: src
+ script: |
+ # removes files from the (local) scons cache when it's over a
+ # threshold, to the $prune_ratio percentage. Ideally override
+ # these default values in the distro config in evergreen.
+
+ if [ -d "${scons_cache_path}" ]; then
+ /opt/mongodbtoolchain/v3/bin/python3 buildscripts/scons_cache_prune.py --cache-dir ${scons_cache_path} --cache-size ${scons_cache_size|200} --prune-ratio ${scons_prune_ratio|0.8}
+ fi
- ###
- # Compile
- compile mongodb:
+functions:
+ "git get project": &git_get_project
+ command: git.get_project
+ params:
+ directory: src
+ revisions: # for each module include revision as <module_name> : ${<module_name>_rev}
+ dsi: ${dsi_rev}
+ enterprise: ${enterprise_rev}
+ linkbench: ${linkbench_rev}
+ linkbench2: ${linkbench2_rev}
+ genny: ${genny_rev}
+ workloads: ${workloads_rev}
+ signal-processing: ${signal-processing_rev}
+
+ "compile mongodb":
# We create a virtual environment with the Python dependencies for compiling the server
# installed.
- command: shell.exec
@@ -255,27 +91,40 @@ functions:
script: |
set -o errexit
set -o verbose
+
/opt/mongodbtoolchain/v3/bin/virtualenv --python /opt/mongodbtoolchain/v3/bin/python3 "${workdir}/compile_venv"
/opt/mongodbtoolchain/v3/bin/virtualenv --python /opt/mongodbtoolchain/v3/bin/python2 "${workdir}/venv"
source "${workdir}/compile_venv/bin/activate"
+
python -m pip install -r etc/pip/compile-requirements.txt
+
- command: shell.exec
params:
working_dir: src
script: |
set -o errexit
set -o verbose
+
# We get the raw version string (r1.2.3-45-gabcdef) from git
MONGO_VERSION=$(git describe --abbrev=7)
+
+ # If we're going to compile the upstream wtdevelop repository for wiredtiger, add
+ # that githash to version string.
+ if [ "${compile-variant|}" = "-wtdevelop" ]; then
+ WT_VERSION=$(cd src/third_party/wtdevelop; git describe --abbrev=7 | cut -c 9-)
+ MONGO_VERSION="$MONGO_VERSION-wtdevelop-$WT_VERSION"
+ fi
# If this is a patch build, we add the patch version id to the version string so we know
# this build was a patch, and which evergreen task it came from
if [ "${is_patch|false}" = "true" ]; then
MONGO_VERSION="$MONGO_VERSION-patch-${version_id}"
fi
+
# This script converts the generated version string into a sanitized version string for
# use by scons and uploading artifacts as well as information about for the scons cache.
source "${workdir}/compile_venv/bin/activate"
MONGO_VERSION=$MONGO_VERSION USE_SCONS_CACHE=${use_scons_cache|false} python buildscripts/generate_compile_expansions.py --out compile_expansions.yml
+ # Then we load the generated version data into the agent so we can use it in task definitions
- command: expansions.update
params:
file: src/compile_expansions.yml
@@ -319,8 +168,10 @@ functions:
then
echo "Fetching JS test DB correctness checks from directory jstests"
cp -a jstests/* mongodb/jstests
+
echo "Now adding our own special run_validate_collections.js wrapper"
mv mongodb/jstests/hooks/run_validate_collections.js mongodb/jstests/hooks/run_validate_collections.actual.js
+
cat << EOF > mongodb/jstests/hooks/run_validate_collections.js
print("NOTE: run_validate_collections.js will skip the oplog!");
TestData = { skipValidationNamespaces: ['local.oplog.rs'] };
@@ -338,800 +189,1149 @@ functions:
permissions: public-read
content_type: ${content_type|application/x-gzip}
display_name: mongodb${compile-variant|}.tar.gz
- ###
+ "use WiredTiger develop":
+ command: shell.exec
+ params:
+ working_dir: src
+ script: |
+ set -o errexit
+ set -o verbose
+ if [ "${compile-variant|}" = "-wtdevelop" ]; then
+ cd src/third_party
+ for wtdir in dist examples ext lang src test tools ; do
+ rm -rf wiredtiger/$wtdir
+ mv wtdevelop/$wtdir wiredtiger/
+ done
+ fi
+
+ "prepare environment":
+ - command: shell.exec
+ params:
+ script: |
+ rm -rf ./*
+ mkdir src
+ mkdir work
+ - command: manifest.load
+ # Calling the git.get_project command here will clone the mongodb/mongo repository, as well as
+ # the repositories defined in the build variant's "modules" section.
+ - *git_get_project
+ - command: shell.exec
+ params:
+ working_dir: work
+ script: |
+ cat > bootstrap.yml <<EOF
+ infrastructure_provisioning: ${cluster}
+ platform: ${platform}
+ mongodb_setup: ${setup}
+ storageEngine: ${storageEngine}
+ test_control: ${test}
+ production: true
+ authentication: ${authentication}
+ overrides:
+ infrastructure_provisioning:
+ tfvars:
+ # This is currently only used by initialsync-logkeeper. It is empty and not used for other tests.
+ mongod_seeded_ebs_snapshot_id: ${snapshotId}
+ # Initially used by sb_large_scale to override expire-on-delta to allow longer runtime on otherwise standard variants
+ ${additional_tfvars|}
+ workload_setup:
+ local_repos:
+ workloads: ../src/workloads/workloads
+ ycsb: ../src/YCSB/YCSB
+ linkbench: ../src/linkbench/linkbench
+ linkbench2: ../src/linkbench2/linkbench2
+ tpcc: ../src/tpcc/tpcc
+ genny: ../src/genny/genny
+ mongodb_setup:
+ # This is currently only used by initialsync-logkeeper-short. It is empty and not used for other tests.
+ mongodb_dataset: ${dataset}
+ mongodb_binary_archive: "https://s3.amazonaws.com/mciuploads/${project_dir}/${version_id}/${revision}/${platform}/mongodb${compile-variant|}-${version_id}.tar.gz"
+ EOF
+
+ cat > runtime.yml <<EOF
+ # evergreen default expansions
+ branch_name: ${branch_name}
+ build_id: ${build_id}
+ build_variant: ${build_variant}
+ execution: ${execution}
+ is_patch: ${is_patch|false}
+ order: ${revision_order_id}
+ project: ${project}
+ project_dir: ${project_dir}
+ revision: ${revision}
+ task_id: ${task_id}
+ task_name: ${task_name}
+ version_id: ${version_id}
+ workdir: ${workdir}
+
+ # sys-perf expansions
+ dsi_rev: ${dsi_rev}
+ enterprise_rev: ${enterprise_rev}
+ ext: ${ext}
+ script_flags : ${script_flags}
+ workloads_rev: ${workloads_rev}
+ EOF
- ## Schedule Tasks ##
- f_schedule_tasks:
- - command: git.get_project
+ - command: shell.exec
params:
- directory: *src_dir
- revisions:
- dsi: ${dsi_rev}
- genny: ${genny_rev}
- signal-processing: ${signal-processing_rev}
- linkbench: ${linkbench_rev}
- linkbench2: ${linkbench2_rev}
- workloads: ${workloads_rev}
- mongo-perf: ${mongo-perf_rev}
- YCSB: ${YCSB_rev}
- benchmarks: ${benchmarks_rev}
- py-tpcc: ${py-tpcc_rev}
+ silent: true
+ working_dir: work
+ script: |
+ # AWS ssh secret key
+ echo "${ec2_pem}" > aws_ssh_key.pem
+ chmod 400 aws_ssh_key.pem
+
+ cat > runtime_secret.yml <<EOF
+ # Note that inside system_perf.yml we have ${aws_key} & ${aws_secret}, which are used for
+ # Evergreen resources. The below are used for dsi resources, and are NOT the same!
+ aws_access_key: "${terraform_key}"
+ aws_secret_key: "${terraform_secret}"
+ perf_jira_user: "${perf_jira_user}"
+ perf_jira_pw: "${perf_jira_pw}"
+ cedar_api_user: ${cedar_api_user}
+ cedar_api_key: ${cedar_api_key}
+ dsi_analysis_atlas_user: "${dsi_analysis_atlas_user}"
+ dsi_analysis_atlas_pw: "${dsi_analysis_atlas_pw}"
+ EOF
+ chmod 400 runtime_secret.yml
- command: expansions.write
params:
- file: ./expansions.yml
+ file: work/expansions.yml
- command: shell.exec
params:
- script: ./src/dsi/run-dsi schedule_tasks --tasks=${tasks}
- - command: generate.tasks
+ working_dir: work
+ # setup execution environment
+ # configure environment, has private information, no logging
+ script: |
+ set -e
+ ../src/dsi/dsi/run-dsi python ../src/dsi/dsi/bin/bootstrap.py
+ - command: shell.exec
params:
- files:
- - build/TaskJSON/Tasks.json
-
+ script: |
+ set -v
+ set -e
+ source work/dsienv.sh
+ setup-dsi-env.sh
+ ls -a work
-tasks:
- ###
- # Same in every DSI project
- - name: schedule_global_auto_tasks
- priority: 5
- commands:
- - func: f_schedule_tasks
- vars:
- tasks: all_tasks
- - name: schedule_variant_auto_tasks
- priority: 5
- commands:
- - func: f_schedule_tasks
- vars:
- tasks: variant_tasks
- - name: schedule_patch_auto_tasks
- priority: 5
- commands:
- - func: f_schedule_tasks
- vars:
- tasks: patch_tasks
- - name: smoke_test
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: short
- - name: smoke_test_ssl
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: short
- mongodb_setup: replica-ssl
- infrastructure_provisioning: replica
- - name: smoke_test_standalone_auth
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: short
- mongodb_setup: standalone-auth
- infrastructure_provisioning: single
- - name: smoke_test_replset_auth
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: short
- mongodb_setup: replica-auth
- infrastructure_provisioning: replica
- - name: smoke_test_shard_lite_auth
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: short
- mongodb_setup: shard-lite-auth
- infrastructure_provisioning: shard-lite
- - name: dsi_integ_test_run_command_simple
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: run_command_simple
- ###
-
- - name: compile
- commands:
- - command: manifest.load
- - command: git.get_project
- params:
- directory: src
- revisions:
- enterprise: ${enterprise_rev}
- # mongo-tools: ${mongo-tools_rev}
- - func: "compile mongodb"
-
- - name: linkbench
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "linkbench"
-
- - name: linkbench2
- priority: 5
- exec_timeout_secs: 43200 # 12 hours
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "linkbench2"
- additional_tfvars: "tags: {expire-on-delta: 12}"
-
- - name: tpcc
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "tpcc"
-
- - name: industry_benchmarks
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "ycsb"
-
- - name: ycsb_60GB
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "ycsb-60GB"
-
- - name: industry_benchmarks_secondary_reads
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "ycsb-secondary-reads"
-
- - name: industry_benchmarks_wmajority
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "ycsb-wmajority"
-
- - name: crud_workloads
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "crud_workloads"
-
- - name: cursor_manager
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "cursor_manager"
-
- - name: mixed_workloads
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "mixed_workloads"
-
- - name: misc_workloads
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "misc_workloads"
-
- - name: map_reduce_workloads
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "map_reduce_workloads"
-
- - name: genny_canaries
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "genny_canaries"
-
-
- - name: retryable_writes_workloads
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "retryable_writes"
-
- - name: snapshot_reads
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "snapshot_reads"
-
- - name: secondary_reads
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "secondary_reads"
-
- - name: bestbuy_agg
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "bestbuy_agg"
-
- - name: bestbuy_agg_merge_same_db
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "bestbuy_agg_merge_same_db"
-
- - name: bestbuy_agg_merge_different_db
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "bestbuy_agg_merge_different_db"
-
- - name: bestbuy_agg_merge_target_hashed
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "bestbuy_agg_merge_target_hashed"
-
- - name: bestbuy_agg_merge_wordcount
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "bestbuy_agg_merge_wordcount"
-
- - name: bestbuy_query
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "bestbuy_query"
-
- - name: non_sharded_workloads
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "non_sharded"
-
- - name: mongos_workloads
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "mongos"
-
- - name: mongos_large_catalog_workloads
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "mongos_large_catalog"
-
- - name: move_chunk_workloads
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "move_chunk"
-
- - name: move_chunk_waiting_workloads
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "move_chunk_waiting"
-
- - name: secondary_performance
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- # Unfortunately the dash/underscore style is different for mongodb_setup and test_control
- test_control: "secondary_performance"
- mongodb_setup: "secondary-performance"
-
- - name: initialsync
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "initialsync"
-
- - name: initialsync-logkeeper-short
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "initialsync-logkeeper"
- mongodb_setup: "initialsync-logkeeper-short"
- # Logkeeper dataset with FCV set to 4.4
- mongodb_dataset: "https://s3-us-west-2.amazonaws.com/dsi-donot-remove/InitialSyncLogKeeper/logkeeper-slice-data-mongodb-4.4.tgz"
-
- - name: initialsync-logkeeper
- priority: 5
- exec_timeout_secs: 216000 # 2.5 days
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "initialsync-logkeeper"
-
- - name: change_streams_throughput
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "change_streams_throughput"
-
- - name: change_streams_latency
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "change_streams_latency"
-
- - name: change_streams_multi_mongos
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "change_streams_multi_mongos"
-
- - name: sb_large_scale
- priority: 5
- exec_timeout_secs: 43200 # 12 hours
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "sb_large_scale"
- additional_tfvars: "tags: {expire-on-delta: 12}"
-
- - name: sb_timeseries
- priority: 5
- commands:
- - func: f_run_dsi_workload
- vars:
- test_control: "sb_timeseries"
+ "deploy cluster":
+ - command: shell.exec
+ params:
+ working_dir: work
+ script: |
+ set -e
+ set -v
+ source ./dsienv.sh
+ ../src/dsi/dsi/run-dsi infrastructure_provisioning.py
+ ../src/dsi/dsi/run-dsi workload_setup.py
+ ../src/dsi/dsi/run-dsi mongodb_setup.py
+
+ "run test":
+ - command: shell.exec
+ type: test
+ params:
+ working_dir: work
+ script: |
+ set -e
+ set -v
+ source ./dsienv.sh
+ ../src/dsi/dsi/run-dsi test_control.py
+ - command: "json.send"
+ params:
+ name: "perf"
+ file: "work/perf.json"
+ "analyze":
+ - command: shell.exec
+ type: test
+ params:
+ working_dir: work
+ script: |
+ set -o verbose
+ source ./dsienv.sh
+ ../src/dsi/dsi/run-dsi analysis.py
+ # detect outliers needs to run, so defer the post_run_check exit status to later
+ echo $? > post_run_check.status
+ - command: shell.exec
+ params:
+ working_dir: work
+ script: |
+ is_patch=${is_patch}
+ task_id=${task_id}
+ perf_jira_user=${perf_jira_user}
+ perf_jira_pw=${perf_jira_pw}
+ dsi_analysis_atlas_user=${dsi_analysis_atlas_user}
+ dsi_analysis_atlas_pw=${dsi_analysis_atlas_pw}
+ evergreen_api_key=${evergreen_api_key}
+ evergreen_api_user=${evergreen_api_user}
+ source ../src/buildscripts/signal_processing_setup.sh
+ detect-changes --config .signal-processing.yml
+ - command: shell.exec
+ type: test
+ params:
+ working_dir: work
+ script: |
+ set -o verbose
+ exit $(cat post_run_check.status)
+
+ "upload pip requirements":
+ command: s3.put
+ params:
+ aws_key: ${aws_key}
+ aws_secret: ${aws_secret}
+ local_file: pip-requirements.txt
+ remote_file: ${project}/${build_variant}/${revision}/pip-requirements-${task_id}-${execution}.txt
+ bucket: mciuploads
+ permissions: public-read
+ content_type: atext-plain
+ display_name: Pip Requirements
+
+#######################################
+# Tasks #
+#######################################
+tasks:
+- name: compile
+ commands:
+ - command: manifest.load
+ - func: "git get project"
+ - func: "use WiredTiger develop" # noop if ${compile-variant|} is not "-wtdevelop"
+ - func: "compile mongodb"
+
+- name: linkbench
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "linkbench"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: linkbench2
+ priority: 5
+ exec_timeout_secs: 43200 # 12 hours
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "linkbench2"
+ additional_tfvars: "tags: {expire-on-delta: 12}"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: tpcc
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "tpcc"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: insert_remove
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "insert_remove"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: validate_cmd
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "validate_cmd"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: service_architecture_workloads
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "service_architecture_workloads"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: big_update
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "big_update"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: industry_benchmarks
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "ycsb"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+ vars:
+ script_flags: --ycsb-throughput-analysis reports
+
+- name: ycsb_60GB
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "ycsb-60GB"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+ vars:
+ script_flags: --ycsb-throughput-analysis reports
+- name: industry_benchmarks_secondary_reads
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "ycsb-secondary-reads"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+ vars:
+ script_flags: --ycsb-throughput-analysis reports
+
+- name: industry_benchmarks_wmajority
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "ycsb-wmajority"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+ vars:
+ script_flags: --ycsb-throughput-analysis reports
+
+- name: crud_workloads
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "crud_workloads"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: cursor_manager
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "cursor_manager"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: mixed_workloads
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "mixed_workloads"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: misc_workloads
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "misc_workloads"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: map_reduce_workloads
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "map_reduce_workloads"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: smoke_test
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "short"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: genny_canaries
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "genny_canaries"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: genny_overhead
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "genny_overhead"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: retryable_writes_workloads
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "retryable_writes"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: snapshot_reads
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "snapshot_reads"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: secondary_reads
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "secondary_reads"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: bestbuy_agg
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "bestbuy_agg"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: bestbuy_agg_merge_same_db
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "bestbuy_agg_merge_same_db"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: bestbuy_agg_merge_different_db
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "bestbuy_agg_merge_different_db"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: bestbuy_agg_merge_target_hashed
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "bestbuy_agg_merge_target_hashed"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: bestbuy_agg_merge_wordcount
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "bestbuy_agg_merge_wordcount"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: bestbuy_query
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "bestbuy_query"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: non_sharded_workloads
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "non_sharded"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: mongos_workloads
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "mongos"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: mongos_large_catalog_workloads
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "mongos_large_catalog"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: move_chunk_workloads
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "move_chunk"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: move_chunk_waiting_workloads
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "move_chunk_waiting"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: secondary_performance
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ # Unfortunately the dash/underscore style is different for mongodb_setup and test_control
+ test: "secondary_performance"
+ setup: "secondary-performance"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: initialsync
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "initialsync"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: initialsync-logkeeper-short
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "initialsync-logkeeper"
+ setup: "initialsync-logkeeper-short"
+ # Logkeeper dataset with FCV set to 4.0
+ dataset: "https://s3-us-west-2.amazonaws.com/dsi-donot-remove/InitialSyncLogKeeper/logkeeper-slice-data.tgz"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: initialsync-logkeeper
+ priority: 5
+ exec_timeout_secs: 216000 # 2.5 days
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "initialsync-logkeeper"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: change_streams_throughput
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "change_streams_throughput"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: change_streams_latency
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "change_streams_latency"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: change_streams_multi_mongos
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "change_streams_multi_mongos"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: sb_large_scale
+ priority: 5
+ exec_timeout_secs: 43200 # 12 hours
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "sb_large_scale"
+ additional_tfvars: "tags: {expire-on-delta: 12}"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+- name: sb_timeseries
+ priority: 5
+ commands:
+ - func: "prepare environment"
+ vars:
+ test: "sb_timeseries"
+ - func: "deploy cluster"
+ - func: "run test"
+ - func: "analyze"
+
+#######################################
+# Modules #
+#######################################
+# If a module is added here and needs to appear in the manifest,
+# be sure to also add it to the git.get_project revisions parameter
+modules:
+- name: dsi
+ repo: git@github.com:10gen/dsi.git
+ prefix: dsi
+ branch: legacy
+
+- name: genny
+ repo: git@github.com:10gen/genny.git
+ prefix: genny
+ branch: legacy
+
+- name: workloads
+ repo: git@github.com:10gen/workloads.git
+ prefix: workloads
+ branch: master
+
+- name: linkbench
+ repo: git@github.com:10gen/linkbench.git
+ prefix: linkbench
+ branch: master
+
+- name: linkbench2
+ repo: git@github.com:mdcallag/linkbench.git
+ prefix: linkbench2
+ branch: master
+ ref: 63207190657737b32eb0e81c5b81ad1b8bad0e5a
+
+- name: enterprise
+ repo: git@github.com:10gen/mongo-enterprise-modules.git
+ prefix: src/mongo/db/modules
+ branch: v4.2
+
+- name: signal-processing
+ repo: git@github.com:10gen/signal-processing.git
+ prefix: signal_processing
+ branch: master
+
+#######################################
+# Buildvariants #
+#######################################
buildvariants:
- - name: task_generation
- display_name: Task Generation
- modules: *modules
- expansions:
- platform: linux
- project_dir: dsi
- run_on:
- - amazon2-build
- tasks:
- - name: schedule_global_auto_tasks
-
- # We are explicitly tracking the Linux 64 Amazon variant compile options from evergreen.yml. If we can get
- # proper artifacts directly from that project, we should do that and remove these tasks.
- - &compile-linux-64-amzn
- name: compile-linux-64-amzn
- display_name: Compile on Linux64 Amazon
- expansions: &compile-linux-64-amzn-expansions
- compile_flags: >-
- --ssl
- MONGO_DISTMOD=linux-64-amzn-build
- -j$(grep -c ^processor /proc/cpuinfo)
- --release
- --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
- platform: linux
- project_dir: &project_dir dsi
- tooltags: ""
- use_scons_cache: true
- run_on:
- - "amazon1-2018-build"
- tasks:
- - name: compile
- - name: compile-amazon2
- display_name: Compile on Amazon Linux 2
- modules:
- - enterprise
- expansions:
- <<: *compile-linux-64-amzn-expansions
- compile_flags: >-
- --ssl
- MONGO_DISTMOD=amazon2
- -j$(grep -c ^processor /proc/cpuinfo)
- --release
- --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
- compile-variant: -enterprise
- run_on:
- - "amazon2-build"
- tasks:
+# We are explicitly tracking the Linux 64 Amazon variant compile options from evergreen.yml. If we can get
+# proper artifacts directly from that project, we should do that and remove these tasks.
+- &compile-linux-64-amzn
+ name: compile-linux-64-amzn
+ display_name: Compile on Linux64 Amazon
+ expansions: &compile-linux-64-amzn-expansions
+ compile_flags: >-
+ --ssl
+ MONGO_DISTMOD=linux-64-amzn-build
+ -j$(grep -c ^processor /proc/cpuinfo)
+ --release
+ --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
+ platform: linux
+ project_dir: &project_dir dsi
+ tooltags: ""
+ use_scons_cache: true
+ run_on:
+ - "amazon1-2018-build"
+ tasks:
+ - name: compile
+
+- name: compile-amazon2
+ display_name: Compile on Amazon Linux 2
+ modules:
+ - enterprise
+ expansions:
+ <<: *compile-linux-64-amzn-expansions
+ compile_flags: >-
+ --ssl
+ MONGO_DISTMOD=amazon2
+ -j$(grep -c ^processor /proc/cpuinfo)
+ --release
+ --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
+ compile-variant: -enterprise
+ run_on:
+ - "amazon2-build"
+ tasks:
+ - name: compile
+
+- name: compile-rhel70
+ display_name: Compile on Rhel70
+ modules:
+ - enterprise
+ expansions:
+ <<: *compile-linux-64-amzn-expansions
+ compile_flags: >-
+ --ssl
+ MONGO_DISTMOD=rhel70
+ -j$(grep -c ^processor /proc/cpuinfo)
+ --release
+ --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
+ compile-variant: -rhel70
+ run_on:
+ - rhel70-small
+ tasks:
+ - name: compile
+
+#######################################
+# Linux Buildvariants #
+#######################################
+- name: linux-1-node-replSet
+ display_name: Linux 1-Node ReplSet
+ modules: &modules
+ - dsi
+ - genny
+ - workloads
+ - linkbench
+ - linkbench2
+ - signal-processing
+ expansions:
+ setup: single-replica
+ cluster: single
+ platform: linux
+ project_dir: *project_dir
+ authentication: enabled
+ storageEngine: wiredTiger
+ run_on:
+ - "rhel70-perf-single"
+ depends_on:
- name: compile
-
- - name: compile-rhel70
- display_name: Compile for Atlas-like
- modules: *modules
- batchtime: 2880 # 48 hours
- expansions:
- <<: *compile-linux-64-amzn-expansions
- compile_flags: >-
- --ssl
- MONGO_DISTMOD=rhel70
- -j$(grep -c ^processor /proc/cpuinfo)
- --release
- --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
- compile-variant: -rhel70
- run_on:
- - rhel70-small
- tasks:
+ variant: compile-linux-64-amzn
+ tasks: &1nodetasks
+ - name: industry_benchmarks
+ - name: ycsb_60GB
+ - name: crud_workloads
+ - name: mixed_workloads
+ - name: misc_workloads
+ - name: map_reduce_workloads
+ - name: smoke_test
+ - name: retryable_writes_workloads
+ - name: non_sharded_workloads
+ - name: bestbuy_agg
+ - name: bestbuy_agg_merge_different_db
+ - name: bestbuy_agg_merge_same_db
+ - name: bestbuy_agg_merge_wordcount
+ - name: bestbuy_query
+ - name: change_streams_throughput
+ - name: change_streams_latency
+ - name: snapshot_reads
+ - name: linkbench
+ - name: linkbench2
+ - name: tpcc
+ - name: insert_remove
+ - name: big_update
+ - name: sb_large_scale
+ - name: sb_timeseries
+
+- name: linux-standalone
+ display_name: Linux Standalone
+ modules: *modules
+ expansions:
+ setup: standalone
+ cluster: single
+ platform: linux
+ project_dir: *project_dir
+ authentication: enabled
+ storageEngine: wiredTiger
+ run_on:
+ - "rhel70-perf-single"
+ depends_on:
- name: compile
-
- - name: linux-1-node-replSet
- display_name: Linux 1-Node ReplSet
- batchtime: 10080 # 7 days
- modules: *modules
- expansions:
- mongodb_setup: single-replica
- infrastructure_provisioning: single
- platform: linux
- project_dir: *project_dir
- authentication: enabled
- storageEngine: wiredTiger
- run_on:
- - "rhel70-perf-single"
- depends_on: *_compile_amazon-64
- tasks: &1nodetasks
- - name: industry_benchmarks
- - name: ycsb_60GB
- - name: crud_workloads
- - name: mixed_workloads
- - name: misc_workloads
- - name: map_reduce_workloads
- - name: smoke_test
- - name: retryable_writes_workloads
- - name: non_sharded_workloads
- - name: bestbuy_agg
- - name: bestbuy_agg_merge_different_db
- - name: bestbuy_agg_merge_same_db
- - name: bestbuy_agg_merge_wordcount
- - name: bestbuy_query
- - name: change_streams_throughput
- - name: change_streams_latency
- - name: snapshot_reads
- - name: linkbench
- - name: linkbench2
- - name: tpcc
- - name: sb_large_scale
- - name: sb_timeseries
-
- - name: linux-standalone
- display_name: Linux Standalone
- batchtime: 2880 # 48 hours
- modules: *modules
- expansions:
- mongodb_setup: standalone
- infrastructure_provisioning: single
- platform: linux
- project_dir: *project_dir
- authentication: enabled
- storageEngine: wiredTiger
- run_on:
- - "rhel70-perf-single"
- depends_on: *_compile_amazon-64
- tasks: &standalonetasks
- - name: schedule_patch_auto_tasks
- - name: schedule_variant_auto_tasks
- - name: industry_benchmarks
- - name: ycsb_60GB
- - name: crud_workloads
- - name: genny_canaries
- - name: cursor_manager
- - name: mixed_workloads
- - name: misc_workloads
- - name: map_reduce_workloads
- - name: smoke_test
- - name: non_sharded_workloads
- - name: bestbuy_agg
- - name: bestbuy_agg_merge_different_db
- - name: bestbuy_agg_merge_same_db
- - name: bestbuy_agg_merge_wordcount
- - name: bestbuy_query
-
- - name: linux-standalone-audit
- display_name: Linux Standalone Audit
- batchtime: 10080 # 7 days
- modules: *modules
- expansions:
- mongodb_setup: standalone-audit
- infrastructure_provisioning: single
- platform: linux
- project_dir: *project_dir
- authentication: enabled
- storageEngine: wiredTiger
- compile-variant: -enterprise
- run_on:
- - "rhel70-perf-single"
- depends_on: *_compile_amazon2
- tasks:
- - name: industry_benchmarks
- - name: crud_workloads
- - name: smoke_test
-
- - name: linux-1-node-replSet-ese-cbc
- display_name: Linux 1-Node ReplSet ESE CBC
- modules: *modules
- expansions:
- setup: single-replica-ese-cbc
- infrastructure_provisioning: single
- cluster: single
- platform: linux
- project_dir: *project_dir
- authentication: enabled
- storageEngine: wiredTiger
- compile-variant: -enterprise
- run_on:
- - "rhel70-perf-single"
- depends_on: *_compile_amazon2
- tasks:
- - name: industry_benchmarks
- - name: smoke_test
-
- - name: linux-1-node-replSet-ese-gcm
- display_name: Linux 1-Node ReplSet ESE GCM
- batchtime: 5760 # 4 days
- modules: *modules
- expansions:
- mongodb_setup: single-replica-ese-gcm
- infrastructure_provisioning: single
- platform: linux
- project_dir: *project_dir
- authentication: enabled
- storageEngine: wiredTiger
- compile-variant: -enterprise
- run_on:
- - "rhel70-perf-single"
- depends_on: *_compile_amazon2
- tasks:
- - name: industry_benchmarks
- - name: smoke_test
-
-
- - name: linux-3-shard
- display_name: Linux 3-Shard Cluster
- batchtime: 10080 # 7 days
- modules: *modules
- expansions:
- mongodb_setup: shard
- infrastructure_provisioning: shard
- platform: linux
- project_dir: *project_dir
- authentication: enabled
- storageEngine: wiredTiger
- run_on:
- - "rhel70-perf-shard"
- depends_on: *_compile_amazon-64
- tasks:
- - name: industry_benchmarks
- - name: crud_workloads
- - name: mixed_workloads
- - name: misc_workloads
- - name: map_reduce_workloads
- - name: smoke_test
- - name: industry_benchmarks_wmajority
- - name: mongos_workloads
- - name: mongos_large_catalog_workloads
- - name: change_streams_throughput
- - name: change_streams_latency
- - name: change_streams_multi_mongos
-
- - name: linux-shard-lite
- display_name: Linux Shard Lite Cluster
- batchtime: 5760 # 4 days
- modules: *modules
- expansions:
- mongodb_setup: shard-lite
- infrastructure_provisioning: shard-lite
- platform: linux
- project_dir: *project_dir
- authentication: enabled
- storageEngine: wiredTiger
- run_on:
- - "rhel70-perf-shard-lite"
- depends_on: *_compile_amazon-64
- tasks: &shardlitetasks
- - name: bestbuy_agg
- - name: bestbuy_agg_merge_different_db
- - name: bestbuy_agg_merge_same_db
- - name: bestbuy_agg_merge_target_hashed
- - name: bestbuy_agg_merge_wordcount
- - name: bestbuy_query
- - name: change_streams_latency
- - name: change_streams_throughput
- - name: industry_benchmarks
- - name: industry_benchmarks_wmajority
- - name: linkbench
- - name: mixed_workloads
- - name: mongos_workloads
- - name: mongos_large_catalog_workloads
- - name: move_chunk_workloads
- - name: move_chunk_waiting_workloads
- - name: retryable_writes_workloads
- - name: smoke_test
-
-
- - name: linux-3-node-replSet
- display_name: Linux 3-Node ReplSet
- batchtime: 2880 # 48 hours
- modules: *modules
- expansions:
- mongodb_setup: replica
- infrastructure_provisioning: replica
- platform: linux
- project_dir: *project_dir
- authentication: enabled
- storageEngine: wiredTiger
- run_on:
- - "rhel70-perf-replset"
- depends_on: *_compile_amazon-64
- tasks: &3nodetasks
- - name: industry_benchmarks
- - name: ycsb_60GB
- - name: industry_benchmarks_secondary_reads
- - name: crud_workloads
- - name: mixed_workloads
- - name: misc_workloads
- - name: map_reduce_workloads
- - name: smoke_test
- - name: retryable_writes_workloads
- - name: industry_benchmarks_wmajority
- - name: secondary_performance # Uses a special 2 node mongodb setup
- - name: non_sharded_workloads
- - name: bestbuy_agg
- - name: bestbuy_agg_merge_different_db
- - name: bestbuy_agg_merge_same_db
- - name: bestbuy_agg_merge_wordcount
- - name: bestbuy_query
- - name: change_streams_throughput
- - name: change_streams_latency
- - name: snapshot_reads
- - name: secondary_reads
- - name: tpcc
- - name: linkbench
- - name: linkbench2
- - name: sb_large_scale
- - name: sb_timeseries
-
- - name: linux-3-node-replSet-noflowcontrol
- display_name: Linux 3-Node ReplSet (Flow Control off)
- batchtime: 10080 # 7 days
- modules: *modules
- expansions:
- mongodb_setup: replica-noflowcontrol
- infrastructure_provisioning: replica
- platform: linux
- project_dir: *project_dir
- authentication: enabled
- storageEngine: wiredTiger
- run_on:
- - "rhel70-perf-replset"
- depends_on: *_compile_amazon-64
- tasks:
- - name: industry_benchmarks
- - name: industry_benchmarks_secondary_reads
- - name: crud_workloads
- - name: mixed_workloads
- - name: smoke_test
- - name: industry_benchmarks_wmajority
- - name: change_streams_throughput
- - name: change_streams_latency
- - name: tpcc
- - name: linkbench
- - name: linkbench2
-
- - name: linux-3-node-replSet-ssl
- display_name: Linux 3-Node ReplSet (SSL)
- batchtime: 10080 # 7 days
- modules: *modules
- expansions:
- mongodb_setup: replica-ssl
- infrastructure_provisioning: replica
- platform: linux
- project_dir: *project_dir
- authentication: enabled
- storageEngine: wiredTiger
- run_on:
- - "rhel70-perf-replset"
- depends_on: *_compile_amazon-64
- tasks:
- - name: industry_benchmarks
- - name: mixed_workloads
-
- - name: linux-3-node-replSet-initialsync
- display_name: Linux 3-Node ReplSet Initial Sync
- batchtime: 2880 # 48 hours
- modules: *modules
- expansions:
- mongodb_setup: replica-2node
- infrastructure_provisioning: replica
- platform: linux
- authentication: disabled
- storageEngine: wiredTiger
- project_dir: *project_dir
- run_on:
- - "rhel70-perf-replset"
- depends_on: *_compile_amazon-64
- tasks:
- - name: initialsync
- - name: initialsync-logkeeper-short
-
- - name: linux-replSet-initialsync-logkeeper
- display_name: Linux ReplSet Initial Sync LogKeeper
- batchtime: 10080 # 7 days
- modules: *modules
- expansions:
- mongodb_setup: initialsync-logkeeper
- infrastructure_provisioning: initialsync-logkeeper
- # EBS logkeeper snapshot with FCV set to 4.4
- snapshotId: snap-0dba360e743e27ea0
- platform: linux
- authentication: disabled
- storageEngine: wiredTiger
- project_dir: *project_dir
- run_on:
- - "rhel70-perf-initialsync-logkeeper"
- depends_on: *_compile_amazon-64
- tasks:
- - name: initialsync-logkeeper
-
- - name: atlas-like-M60
- display_name: M60-Like 3-Node ReplSet
- batchtime: 5760 # 4 days
- modules: *modules
- expansions:
- mongodb_setup: atlas-like-replica
- infrastructure_provisioning: M60-like-replica
- platform: linux
- project_dir: *project_dir
- authentication: enabled
- storageEngine: wiredTiger
- compile-variant: -rhel70
- run_on:
- - "rhel70-perf-M60-like"
- depends_on: *_compile_rhel70
- tasks: # Cannot use *3nodetasks because secondary_performance uses a special mongodb setup.
- - name: industry_benchmarks
- - name: ycsb_60GB
- - name: industry_benchmarks_secondary_reads
- - name: crud_workloads
- - name: mixed_workloads
- - name: misc_workloads
- - name: map_reduce_workloads
- - name: smoke_test
- - name: retryable_writes_workloads
- - name: industry_benchmarks_wmajority
- - name: non_sharded_workloads
- - name: bestbuy_agg
- - name: bestbuy_agg_merge_different_db
- - name: bestbuy_agg_merge_same_db
- - name: bestbuy_agg_merge_wordcount
- - name: bestbuy_query
- - name: change_streams_throughput
- - name: change_streams_latency
- - name: snapshot_reads
- - name: secondary_reads
- # - name: tpcc # TPCC with SSL currently broken https://jira.mongodb.org/browse/TIG-1681
- - name: linkbench
+ variant: compile-linux-64-amzn
+ tasks: &standalonetasks
+ - name: industry_benchmarks
+ - name: ycsb_60GB
+ - name: crud_workloads
+ - name: genny_canaries
+ - name: genny_overhead
+ - name: cursor_manager
+ - name: insert_remove
+ - name: mixed_workloads
+ - name: misc_workloads
+ - name: map_reduce_workloads
+ - name: smoke_test
+ - name: non_sharded_workloads
+ - name: bestbuy_agg
+ - name: bestbuy_agg_merge_different_db
+ - name: bestbuy_agg_merge_same_db
+ - name: bestbuy_agg_merge_wordcount
+ - name: bestbuy_query
+ - name: big_update
+ - name: validate_cmd
+
+- name: linux-standalone-audit
+ display_name: Linux Standalone Audit
+ modules: *modules
+ expansions:
+ setup: standalone-audit
+ cluster: single
+ platform: linux
+ project_dir: *project_dir
+ authentication: enabled
+ storageEngine: wiredTiger
+ compile-variant: -enterprise
+ run_on:
+ - "rhel70-perf-single"
+ depends_on:
+ - name: compile
+ variant: compile-amazon2
+ tasks:
+ - name: industry_benchmarks
+ - name: crud_workloads
+ - name: smoke_test
+
+- name: linux-1-node-replSet-ese-cbc
+ display_name: Linux 1-Node ReplSet ESE CBC
+ modules: *modules
+ expansions:
+ setup: single-replica-ese-cbc
+ cluster: single
+ platform: linux
+ project_dir: *project_dir
+ authentication: enabled
+ storageEngine: wiredTiger
+ compile-variant: -enterprise
+ run_on:
+ - "rhel70-perf-single"
+ depends_on:
+ - name: compile
+ variant: compile-amazon2
+ tasks:
+ - name: industry_benchmarks
+ - name: smoke_test
+
+- name: linux-1-node-replSet-ese-gcm
+ display_name: Linux 1-Node ReplSet ESE GCM
+ modules: *modules
+ expansions:
+ setup: single-replica-ese-gcm
+ cluster: single
+ platform: linux
+ project_dir: *project_dir
+ authentication: enabled
+ storageEngine: wiredTiger
+ compile-variant: -enterprise
+ run_on:
+ - "rhel70-perf-single"
+ depends_on:
+ - name: compile
+ variant: compile-amazon2
+ tasks:
+ - name: industry_benchmarks
+ - name: smoke_test
+
+
+- name: linux-3-shard
+ display_name: Linux 3-Shard Cluster
+ modules: *modules
+ expansions:
+ setup: shard
+ cluster: shard
+ platform: linux
+ project_dir: *project_dir
+ authentication: enabled
+ storageEngine: wiredTiger
+ run_on:
+ - "rhel70-perf-shard"
+ depends_on:
+ - name: compile
+ variant: compile-linux-64-amzn
+ tasks:
+ - name: industry_benchmarks
+ - name: crud_workloads
+ - name: insert_remove
+ - name: mixed_workloads
+ - name: misc_workloads
+ - name: map_reduce_workloads
+ - name: smoke_test
+ - name: industry_benchmarks_wmajority
+ - name: mongos_workloads
+ - name: mongos_large_catalog_workloads
+ - name: change_streams_throughput
+ - name: change_streams_latency
+ - name: change_streams_multi_mongos
+
+- name: linux-shard-lite
+ display_name: Linux Shard Lite Cluster
+ modules: *modules
+ expansions:
+ setup: shard-lite
+ cluster: shard-lite
+ platform: linux
+ project_dir: *project_dir
+ authentication: enabled
+ storageEngine: wiredTiger
+ run_on:
+ - "rhel70-perf-shard-lite"
+ depends_on:
+ - name: compile
+ variant: compile-linux-64-amzn
+ tasks: &shardlitetasks
+ - name: bestbuy_agg
+ - name: bestbuy_agg_merge_different_db
+ - name: bestbuy_agg_merge_same_db
+ - name: bestbuy_agg_merge_target_hashed
+ - name: bestbuy_agg_merge_wordcount
+ - name: bestbuy_query
+ - name: change_streams_latency
+ - name: change_streams_throughput
+ - name: industry_benchmarks
+ - name: industry_benchmarks_wmajority
+ - name: linkbench
+ - name: mixed_workloads
+ - name: mongos_workloads
+ - name: mongos_large_catalog_workloads
+ - name: move_chunk_workloads
+ - name: move_chunk_waiting_workloads
+ - name: retryable_writes_workloads
+ - name: smoke_test
+
+
+- name: linux-3-node-replSet
+ display_name: Linux 3-Node ReplSet
+ modules: *modules
+ expansions:
+ setup: replica
+ cluster: replica
+ platform: linux
+ project_dir: *project_dir
+ authentication: enabled
+ storageEngine: wiredTiger
+ run_on:
+ - "rhel70-perf-replset"
+ depends_on:
+ - name: compile
+ variant: compile-linux-64-amzn
+ tasks: &3nodetasks
+ - name: industry_benchmarks
+ - name: ycsb_60GB
+ - name: industry_benchmarks_secondary_reads
+ - name: crud_workloads
+ - name: insert_remove
+ - name: service_architecture_workloads
+ - name: mixed_workloads
+ - name: misc_workloads
+ - name: map_reduce_workloads
+ - name: smoke_test
+ - name: retryable_writes_workloads
+ - name: industry_benchmarks_wmajority
+ - name: secondary_performance # Uses a special 2 node mongodb setup
+ - name: non_sharded_workloads
+ - name: bestbuy_agg
+ - name: bestbuy_agg_merge_different_db
+ - name: bestbuy_agg_merge_same_db
+ - name: bestbuy_agg_merge_wordcount
+ - name: bestbuy_query
+ - name: change_streams_throughput
+ - name: change_streams_latency
+ - name: snapshot_reads
+ - name: secondary_reads
+ - name: tpcc
+ - name: linkbench
+ - name: linkbench2
+ - name: big_update
+ - name: sb_large_scale
+ - name: sb_timeseries
+
+- name: linux-3-node-replSet-noflowcontrol
+ display_name: Linux 3-Node ReplSet (Flow Control off)
+ modules: *modules
+ expansions:
+ setup: replica-noflowcontrol
+ cluster: replica
+ platform: linux
+ project_dir: *project_dir
+ authentication: enabled
+ storageEngine: wiredTiger
+ run_on:
+ - "rhel70-perf-replset"
+ depends_on:
+ - name: compile
+ variant: compile-linux-64-amzn
+ tasks:
+ - name: industry_benchmarks
+ - name: industry_benchmarks_secondary_reads
+ - name: crud_workloads
+ - name: service_architecture_workloads
+ - name: mixed_workloads
+ - name: smoke_test
+ - name: industry_benchmarks_wmajority
+ - name: change_streams_throughput
+ - name: change_streams_latency
+ - name: tpcc
+ - name: linkbench
+ - name: linkbench2
+ - name: big_update
+
+- name: linux-3-node-replSet-ssl
+ display_name: Linux 3-Node ReplSet (SSL)
+ modules: *modules
+ expansions:
+ setup: replica-ssl
+ cluster: replica
+ platform: linux
+ project_dir: *project_dir
+ authentication: enabled
+ storageEngine: wiredTiger
+ run_on:
+ - "rhel70-perf-replset"
+ depends_on:
+ - name: compile
+ variant: compile-linux-64-amzn
+ tasks:
+ - name: industry_benchmarks
+ - name: mixed_workloads
+
+- name: linux-3-node-replSet-initialsync
+ display_name: Linux 3-Node ReplSet Initial Sync
+ modules: *modules
+ expansions:
+ setup: replica-2node
+ cluster: replica
+ platform: linux
+    # TODO(TIG-1506): enable authentication on initialsync variants
+ authentication: disabled
+ storageEngine: wiredTiger
+ project_dir: *project_dir
+ depends_on:
+ - name: compile
+ variant: compile-linux-64-amzn
+ run_on:
+ - "rhel70-perf-replset"
+ tasks:
+ - name: initialsync
+ - name: initialsync-logkeeper-short
+
+- name: linux-replSet-initialsync-logkeeper
+ display_name: Linux ReplSet Initial Sync LogKeeper
+ modules: *modules
+ expansions:
+ setup: initialsync-logkeeper
+ cluster: initialsync-logkeeper
+ # EBS logkeeper snapshot with FCV set to 4.0
+ snapshotId: snap-041c3c57a1a4f5bba
+ platform: linux
+    # TODO(TIG-1506): enable authentication on initialsync variants
+ authentication: disabled
+ storageEngine: wiredTiger
+ project_dir: *project_dir
+ run_on:
+ - "rhel70-perf-initialsync-logkeeper"
+ depends_on:
+ - name: compile
+ variant: compile-linux-64-amzn
+ tasks:
+ - name: initialsync-logkeeper
+
+#######################################
+# Atlas Like Buildvariants #
+#######################################
+- name: atlas-like-M60
+ display_name: M60-Like 3-Node ReplSet
+ modules: *modules
+ expansions:
+ setup: atlas-like-replica
+ cluster: M60-like-replica
+ platform: linux
+ project_dir: *project_dir
+ authentication: enabled
+ storageEngine: wiredTiger
+ compile-variant: -rhel70
+ run_on:
+ - "rhel70-perf-M60-like"
+ depends_on:
+ - name: compile
+ variant: compile-rhel70
+ tasks: # Cannot use *3nodetasks because secondary_performance uses a special mongodb setup.
+ - name: industry_benchmarks
+ - name: ycsb_60GB
+ - name: industry_benchmarks_secondary_reads
+ - name: crud_workloads
+ # - name: insert_remove
+ # - name: service_architecture_workloads
+ - name: mixed_workloads
+ - name: misc_workloads
+ - name: map_reduce_workloads
+ - name: smoke_test
+ - name: retryable_writes_workloads
+ - name: industry_benchmarks_wmajority
+ - name: non_sharded_workloads
+ - name: bestbuy_agg
+ - name: bestbuy_agg_merge_different_db
+ - name: bestbuy_agg_merge_same_db
+ - name: bestbuy_agg_merge_wordcount
+ - name: bestbuy_query
+ - name: change_streams_throughput
+ - name: change_streams_latency
+ - name: snapshot_reads
+ - name: secondary_reads
+ # - name: tpcc # TPCC with SSL currently broken https://jira.mongodb.org/browse/TIG-1681
+ - name: linkbench
+ # - name: big_update