command_type: system
stepback: false

## Parameters for parameterized builds (see https://github.com/evergreen-ci/evergreen/wiki/Parameterized-Builds)
parameters:
  - key: patch_compile_flags
    description: "Additional SCons flags to be applied during scons compile invocations in this patch"

variables:
  ###
  # Leave this section uncommented to enable compile.
  _real_compile_amazon2: &_compile_amazon2
    - name: compile
      variant: compile-amazon2
  _real_compile_amazon-64: &_compile_amazon-64
    - name: compile
      variant: compile-linux-64-amzn
  _real_compile_rhel70: &_compile_rhel70
    - name: compile
      variant: compile-rhel70
  _real_expansions: &_expansion_updates []
  ###

  ###
  # **Or**: Leave this section uncommented to bypass/skip compile.
  # _skip_compile_amazon2: &_compile_amazon2
  # _skip_compile_rhel70: &_compile_rhel70
  # _skip_compile_amazon-64: &_compile_amazon-64
  #   - name: compile
  #     variant: compile-linux-64-amzn
  # _skip_expansions: &_expansion_updates
  #   # This is the normal (amazon2) "compile" artifact from https://evergreen.mongodb.com/task/sys_perf_4.2_compile_amazon2_compile_54e29d8dcca0b3d73898a384b57c516728edbcd2_21_04_02_18_14_59
  #   - key: mdb_binary_for_client
  #     value: https://mciuploads.s3.amazonaws.com/dsi/sys_perf_4.2_54e29d8dcca0b3d73898a384b57c516728edbcd2/54e29d8dcca0b3d73898a384b57c516728edbcd2/linux/mongodb-enterprise-sys_perf_4.2_54e29d8dcca0b3d73898a384b57c516728edbcd2.tar.gz
  #   - key: mdb_binary_for_server
  #     value: https://mciuploads.s3.amazonaws.com/dsi/sys_perf_4.2_54e29d8dcca0b3d73898a384b57c516728edbcd2/54e29d8dcca0b3d73898a384b57c516728edbcd2/linux/mongodb-enterprise-sys_perf_4.2_54e29d8dcca0b3d73898a384b57c516728edbcd2.tar.gz
  ###

  _src_dir: &src_dir src/mongo
  _modules: &modules
    - enterprise
    # - mongo-tools
    # - mongo
    - dsi
    - genny
    - signal-processing
    - workloads
    - linkbench
    - linkbench2
    - mongo-perf
    - YCSB
    - benchmarks
    - py-tpcc

modules:
  ###
  # Same in every DSI project.
Ensure that this block is synchronized with # evergreen-dsitest.yml, atlas/system_perf_atlas.yml, and src/dsi/onboarding.py # (search update-repos-here) in this repo, and etc/system_perf.yml and # etc/perf.yml in mongodb/mongo - name: dsi repo: git@github.com:10gen/dsi.git prefix: ../../src branch: master - name: genny repo: git@github.com:10gen/genny.git prefix: ../../src branch: master - name: signal-processing repo: git@github.com:10gen/signal-processing.git prefix: ../../src branch: master - name: workloads repo: git@github.com:10gen/workloads.git prefix: ../../src branch: master - name: linkbench repo: git@github.com:10gen/linkbench.git prefix: ../../src branch: master - name: linkbench2 repo: git@github.com:10gen/linkbench2.git prefix: ../../src branch: master - name: mongo-perf repo: git@github.com:mongodb/mongo-perf.git prefix: ../../src branch: master - name: YCSB repo: git@github.com:mongodb-labs/YCSB.git prefix: ../../src branch: master ref: 4e7287880c04514cad2df5761b9511c940a33059 - name: benchmarks repo: git@github.com:mongodb-labs/benchmarks.git prefix: ../../src branch: master - name: py-tpcc repo: git@github.com:mongodb-labs/py-tpcc.git prefix: ../../src branch: master ref: 2d19705337a40e24831a904266a648b85df5be84 # - name: mongo # repo: git@github.com:mongodb/mongo.git # prefix: ../../src # branch: master ### - name: enterprise repo: git@github.com:10gen/mongo-enterprise-modules.git prefix: src/mongo/db/modules branch: v4.2 # - name: mongo-tools # repo: git@github.com:mongodb/mongo-tools.git # prefix: mongo-tools/src/github.com/mongodb # branch: master ### # Same in every DSI project pre: - func: f_other_pre_ops - func: f_dsi_pre_run post: - func: f_dsi_post_run - func: f_other_post_ops timeout: - func: f_dsi_timeout - func: f_other_timeout ### functions: ### # Same in every DSI project f_dsi_pre_run: - command: manifest.load - command: expansions.update params: updates: *_expansion_updates f_run_dsi_workload: - command: git.get_project params: 
directory: *src_dir revisions: dsi: ${dsi_rev} genny: ${genny_rev} signal-processing: ${signal-processing_rev} linkbench: ${linkbench_rev} linkbench2: ${linkbench2_rev} workloads: ${workloads_rev} mongo-perf: ${mongo-perf_rev} YCSB: ${YCSB_rev} benchmarks: ${benchmarks_rev} py-tpcc: ${py-tpcc_rev} # mongo: ${mongo_rev} - command: expansions.write params: file: ./expansions.yml - command: shell.exec params: script: ./src/dsi/run-dsi run_workload - command: shell.exec type: system params: script: ./src/dsi/run-dsi determine_failure -m SYSTEM - command: shell.exec type: setup params: script: ./src/dsi/run-dsi determine_failure -m SETUP - command: shell.exec type: test params: script: ./src/dsi/run-dsi determine_failure -m TEST f_dsi_post_run: - command: shell.exec params: script: ./src/dsi/run-dsi post_run - command: attach.results params: file_location: ./build/EvergreenResultsJson/results.json - command: json.send params: name: perf file: ./build/LegacyPerfJson/perf.json - command: s3.put params: aws_key: ${aws_key} aws_secret: ${aws_secret} local_file: ./build/Artifacts/DSIArtifacts.tgz remote_file: ${project_dir}/${build_variant}/${revision}/${task_id}/${version_id}/logs/dsi-artifacts-${task_name}-${build_id}-${execution}.tgz bucket: mciuploads permissions: public-read content_type: application/x-gzip display_name: DSI Artifacts - Execution ${execution} - command: s3.put params: aws_key: ${aws_key} aws_secret: ${aws_secret} local_file: ./build/Documentation/index.html remote_file: ${project_dir}/${build_variant}/${revision}/${task_id}/${version_id}/logs/${task_name}-${build_id}-index.html bucket: mciuploads permissions: public-read content_type: text/html display_name: Documentation f_dsi_timeout: - command: shell.exec params: script: ./src/dsi/run-dsi on_timeout ### f_other_post_ops: - command: shell.exec params: working_dir: src script: | # removes files from the (local) scons cache when it's over a # threshold, to the $prune_ratio percentage. 
Ideally override # these default values in the distro config in evergreen. if [ -d "${scons_cache_path}" ]; then /opt/mongodbtoolchain/v3/bin/python3 buildscripts/scons_cache_prune.py --cache-dir ${scons_cache_path} --cache-size ${scons_cache_size|200} --prune-ratio ${scons_prune_ratio|0.8} fi f_other_pre_ops: # Can't be empty so just `echo`. - command: shell.exec params: {script: "echo"} f_other_timeout: # Can't be empty so just `echo`. - command: shell.exec params: {script: "echo"} ### # Compile compile mongodb: # We create a virtual environment with the Python dependencies for compiling the server # installed. - command: shell.exec params: working_dir: src script: | set -o errexit set -o verbose /opt/mongodbtoolchain/v3/bin/virtualenv --python /opt/mongodbtoolchain/v3/bin/python3 "${workdir}/compile_venv" /opt/mongodbtoolchain/v3/bin/virtualenv --python /opt/mongodbtoolchain/v3/bin/python2 "${workdir}/venv" source "${workdir}/compile_venv/bin/activate" python -m pip install -r etc/pip/compile-requirements.txt - command: shell.exec params: working_dir: src script: | set -o errexit set -o verbose # We get the raw version string (r1.2.3-45-gabcdef) from git MONGO_VERSION=$(git describe --abbrev=7) # If this is a patch build, we add the patch version id to the version string so we know # this build was a patch, and which evergreen task it came from if [ "${is_patch|false}" = "true" ]; then MONGO_VERSION="$MONGO_VERSION-patch-${version_id}" fi # This script converts the generated version string into a sanitized version string for # use by scons and uploading artifacts as well as information about for the scons cache. 
source "${workdir}/compile_venv/bin/activate" MONGO_VERSION=$MONGO_VERSION USE_SCONS_CACHE=${use_scons_cache|false} python buildscripts/generate_compile_expansions.py --out compile_expansions.yml - command: expansions.update params: file: src/compile_expansions.yml - command: shell.exec params: working_dir: src/src/mongo/gotools/src/github.com/mongodb/mongo-tools script: | set -o verbose set -o errexit # make sure newlines in the scripts are handled correctly by windows if [ "Windows_NT" = "$OS" ]; then set -o igncr fi; # set_goenv provides set_goenv(), print_ldflags() and print_tags() used below . ./set_goenv.sh GOROOT="" set_goenv || exit go version build_tools="bsondump mongostat mongofiles mongoexport mongoimport mongorestore mongodump mongotop" if [ "${build_mongoreplay}" = "true" ]; then build_tools="$build_tools mongoreplay" fi for i in $build_tools; do go build -ldflags "$(print_ldflags)" ${args} -tags "$(print_tags ${tooltags})" -o "../../../../../../mongo-tools/$i${exe|}" $i/main/$i.go "../../../../../../mongo-tools/$i${exe|}" --version done - command: shell.exec params: working_dir: src script: | set -o errexit set -o verbose source "${workdir}/compile_venv/bin/activate" python ./buildscripts/scons.py ${compile_flags|} ${scons_cache_args|} mongo${extension} --use-new-tools mongod${extension} mongos${extension} MONGO_VERSION=${version} mkdir -p mongodb/bin mkdir -p mongodb/jstests/hooks mv mongo${extension|} mongodb/bin mv mongod${extension|} mongodb/bin mv mongos${extension|} mongodb/bin mv src/mongo-tools/* mongodb/bin if [ -d jstests/hooks ] then echo "Fetching JS test DB correctness checks from directory jstests" cp -a jstests/* mongodb/jstests echo "Now adding our own special run_validate_collections.js wrapper" mv mongodb/jstests/hooks/run_validate_collections.js mongodb/jstests/hooks/run_validate_collections.actual.js cat << EOF > mongodb/jstests/hooks/run_validate_collections.js print("NOTE: run_validate_collections.js will skip the oplog!"); 
TestData = { skipValidationNamespaces: ['local.oplog.rs'] }; load('jstests/hooks/run_validate_collections.actual.js'); EOF fi tar czf mongodb${compile-variant|}.tar.gz mongodb - command: s3.put params: aws_key: ${aws_key} aws_secret: ${aws_secret} local_file: src/mongodb${compile-variant|}.tar.gz remote_file: ${project_dir}/${version_id}/${revision}/${platform}/mongodb${compile-variant|}-${version_id}.tar.gz bucket: mciuploads permissions: public-read content_type: ${content_type|application/x-gzip} display_name: mongodb${compile-variant|}.tar.gz ### ## Schedule Tasks ## f_schedule_tasks: - command: git.get_project params: directory: *src_dir revisions: dsi: ${dsi_rev} genny: ${genny_rev} signal-processing: ${signal-processing_rev} linkbench: ${linkbench_rev} linkbench2: ${linkbench2_rev} workloads: ${workloads_rev} mongo-perf: ${mongo-perf_rev} YCSB: ${YCSB_rev} benchmarks: ${benchmarks_rev} py-tpcc: ${py-tpcc_rev} - command: expansions.write params: file: ./expansions.yml - command: shell.exec params: script: ./src/dsi/run-dsi schedule_tasks --tasks=${tasks} - command: generate.tasks params: files: - build/TaskJSON/Tasks.json tasks: ### # Same in every DSI project - name: schedule_global_auto_tasks priority: 5 commands: - func: f_schedule_tasks vars: tasks: all_tasks - name: schedule_variant_auto_tasks priority: 5 commands: - func: f_schedule_tasks vars: tasks: variant_tasks - name: schedule_patch_auto_tasks priority: 5 commands: - func: f_schedule_tasks vars: tasks: patch_tasks - name: smoke_test priority: 5 commands: - func: f_run_dsi_workload vars: test_control: short - name: smoke_test_ssl priority: 5 commands: - func: f_run_dsi_workload vars: test_control: short mongodb_setup: replica-ssl infrastructure_provisioning: replica - name: smoke_test_standalone_auth priority: 5 commands: - func: f_run_dsi_workload vars: test_control: short mongodb_setup: standalone-auth infrastructure_provisioning: single - name: smoke_test_replset_auth priority: 5 commands: - 
func: f_run_dsi_workload vars: test_control: short mongodb_setup: replica-auth infrastructure_provisioning: replica - name: smoke_test_shard_lite_auth priority: 5 commands: - func: f_run_dsi_workload vars: test_control: short mongodb_setup: shard-lite-auth infrastructure_provisioning: shard-lite - name: dsi_integ_test_run_command_simple priority: 5 commands: - func: f_run_dsi_workload vars: test_control: run_command_simple ### - name: compile commands: - command: manifest.load - command: git.get_project params: directory: src revisions: enterprise: ${enterprise_rev} # mongo-tools: ${mongo-tools_rev} - func: "compile mongodb" - name: linkbench priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "linkbench" - name: linkbench2 priority: 5 exec_timeout_secs: 43200 # 12 hours commands: - func: f_run_dsi_workload vars: test_control: "linkbench2" additional_tfvars: "tags: {expire-on-delta: 12}" - name: tpcc priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "tpcc" - name: insert_remove priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "insert_remove" - name: validate_cmd priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "validate_cmd" - name: service_architecture_workloads priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "service_architecture_workloads" - name: big_update priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "big_update" - name: industry_benchmarks priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "ycsb" - name: ycsb_60GB priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "ycsb-60GB" - name: industry_benchmarks_secondary_reads priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "ycsb-secondary-reads" - name: industry_benchmarks_wmajority priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "ycsb-wmajority" - name: crud_workloads priority: 5 commands: - func: f_run_dsi_workload 
vars: test_control: "crud_workloads" - name: cursor_manager priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "cursor_manager" - name: mixed_workloads priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "mixed_workloads" - name: misc_workloads priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "misc_workloads" - name: map_reduce_workloads priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "map_reduce_workloads" - name: genny_canaries priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "genny_canaries" - name: genny_overhead priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "genny_overhead" - name: retryable_writes_workloads priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "retryable_writes" - name: snapshot_reads priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "snapshot_reads" - name: secondary_reads priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "secondary_reads" - name: bestbuy_agg priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "bestbuy_agg" - name: bestbuy_agg_merge_same_db priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "bestbuy_agg_merge_same_db" - name: bestbuy_agg_merge_different_db priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "bestbuy_agg_merge_different_db" - name: bestbuy_agg_merge_target_hashed priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "bestbuy_agg_merge_target_hashed" - name: bestbuy_agg_merge_wordcount priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "bestbuy_agg_merge_wordcount" - name: bestbuy_query priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "bestbuy_query" - name: non_sharded_workloads priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "non_sharded" - name: mongos_workloads priority: 5 commands: - func: f_run_dsi_workload 
vars: test_control: "mongos" - name: mongos_large_catalog_workloads priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "mongos_large_catalog" - name: move_chunk_workloads priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "move_chunk" - name: move_chunk_waiting_workloads priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "move_chunk_waiting" - name: secondary_performance priority: 5 commands: - func: f_run_dsi_workload vars: # Unfortunately the dash/underscore style is different for mongodb_setup and test_control test_control: "secondary_performance" mongodb_setup: "secondary-performance" - name: initialsync priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "initialsync" - name: initialsync-logkeeper-short priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "initialsync-logkeeper" mongodb_setup: "initialsync-logkeeper-short" # Logkeeper dataset with FCV set to 4.0 mongodb_dataset: "https://s3-us-west-2.amazonaws.com/dsi-donot-remove/InitialSyncLogKeeper/logkeeper-slice-data.tgz" - name: initialsync-logkeeper priority: 5 exec_timeout_secs: 216000 # 2.5 days commands: - func: f_run_dsi_workload vars: test_control: "initialsync-logkeeper" - name: change_streams_throughput priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "change_streams_throughput" - name: change_streams_latency priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "change_streams_latency" - name: change_streams_multi_mongos priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "change_streams_multi_mongos" - name: sb_large_scale priority: 5 exec_timeout_secs: 43200 # 12 hours commands: - func: f_run_dsi_workload vars: test_control: "sb_large_scale" additional_tfvars: "tags: {expire-on-delta: 12}" - name: sb_timeseries priority: 5 commands: - func: f_run_dsi_workload vars: test_control: "sb_timeseries" buildvariants: # - name: task_generation # display_name: Task 
Generation # modules: *modules # expansions: # platform: linux # project_dir: dsi # run_on: # - amazon2-build # tasks: # - name: schedule_global_auto_tasks # We are explicitly tracking the Linux 64 Amazon variant compile options from evergreen.yml. If we can get # proper artifacts directly from that project, we should do that and remove these tasks. - &compile-linux-64-amzn name: compile-linux-64-amzn display_name: Compile on Linux64 Amazon expansions: &compile-linux-64-amzn-expansions compile_flags: >- --ssl MONGO_DISTMOD=linux-64-amzn-build -j$(grep -c ^processor /proc/cpuinfo) --release --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars platform: linux project_dir: &project_dir dsi tooltags: "" use_scons_cache: true run_on: - "amazon1-2018-build" tasks: - name: compile - name: compile-amazon2 display_name: Compile on Amazon Linux 2 modules: - enterprise expansions: <<: *compile-linux-64-amzn-expansions compile_flags: >- --ssl MONGO_DISTMOD=amazon2 -j$(grep -c ^processor /proc/cpuinfo) --release --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars compile-variant: -enterprise run_on: - "amazon2-build" tasks: - name: compile - name: compile-rhel70 display_name: Compile for Atlas-like modules: *modules batchtime: 2880 # 48 hours expansions: <<: *compile-linux-64-amzn-expansions compile_flags: >- --ssl MONGO_DISTMOD=rhel70 -j$(grep -c ^processor /proc/cpuinfo) --release --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars compile-variant: -rhel70 run_on: - rhel70-small tasks: - name: compile - name: linux-1-node-replSet display_name: Linux 1-Node ReplSet batchtime: 10080 # 7 days modules: *modules expansions: mongodb_setup: single-replica infrastructure_provisioning: single platform: linux project_dir: *project_dir authentication: enabled storageEngine: wiredTiger run_on: - "rhel70-perf-single" depends_on: *_compile_amazon-64 tasks: &1nodetasks - name: industry_benchmarks - name: ycsb_60GB - name: crud_workloads - name: mixed_workloads - name: 
misc_workloads - name: map_reduce_workloads - name: smoke_test - name: retryable_writes_workloads - name: non_sharded_workloads - name: bestbuy_agg - name: bestbuy_agg_merge_different_db - name: bestbuy_agg_merge_same_db - name: bestbuy_agg_merge_wordcount - name: bestbuy_query - name: change_streams_throughput - name: change_streams_latency - name: snapshot_reads - name: linkbench - name: linkbench2 - name: tpcc - name: insert_remove - name: big_update - name: sb_large_scale - name: sb_timeseries - name: linux-standalone display_name: Linux Standalone batchtime: 2880 # 48 hours modules: *modules expansions: mongodb_setup: standalone infrastructure_provisioning: single platform: linux project_dir: *project_dir authentication: enabled storageEngine: wiredTiger run_on: - "rhel70-perf-single" depends_on: *_compile_amazon-64 tasks: &standalonetasks # - name: schedule_patch_auto_tasks # - name: schedule_variant_auto_tasks - name: industry_benchmarks - name: ycsb_60GB - name: crud_workloads - name: genny_canaries - name: genny_overhead - name: cursor_manager - name: insert_remove - name: mixed_workloads - name: misc_workloads - name: map_reduce_workloads - name: smoke_test - name: non_sharded_workloads - name: bestbuy_agg - name: bestbuy_agg_merge_different_db - name: bestbuy_agg_merge_same_db - name: bestbuy_agg_merge_wordcount - name: bestbuy_query - name: big_update - name: validate_cmd - name: linux-standalone-audit display_name: Linux Standalone Audit batchtime: 10080 # 7 days modules: *modules expansions: mongodb_setup: standalone-audit infrastructure_provisioning: single platform: linux project_dir: *project_dir authentication: enabled storageEngine: wiredTiger compile-variant: -enterprise run_on: - "rhel70-perf-single" depends_on: *_compile_amazon2 tasks: - name: industry_benchmarks - name: crud_workloads - name: smoke_test - name: linux-1-node-replSet-ese-cbc display_name: Linux 1-Node ReplSet ESE CBC modules: *modules expansions: mongodb_setup: 
single-replica-ese-cbc infrastructure_provisioning: single cluster: single platform: linux project_dir: *project_dir authentication: enabled storageEngine: wiredTiger compile-variant: -enterprise run_on: - "rhel70-perf-single" depends_on: *_compile_amazon2 tasks: - name: industry_benchmarks - name: smoke_test - name: linux-1-node-replSet-ese-gcm display_name: Linux 1-Node ReplSet ESE GCM batchtime: 5760 # 4 days modules: *modules expansions: mongodb_setup: single-replica-ese-gcm infrastructure_provisioning: single platform: linux project_dir: *project_dir authentication: enabled storageEngine: wiredTiger compile-variant: -enterprise run_on: - "rhel70-perf-single" depends_on: *_compile_amazon2 tasks: - name: industry_benchmarks - name: smoke_test - name: linux-3-shard display_name: Linux 3-Shard Cluster batchtime: 10080 # 7 days modules: *modules expansions: mongodb_setup: shard infrastructure_provisioning: shard platform: linux project_dir: *project_dir authentication: enabled storageEngine: wiredTiger run_on: - "rhel70-perf-shard" depends_on: *_compile_amazon-64 tasks: - name: industry_benchmarks - name: crud_workloads - name: insert_remove - name: mixed_workloads - name: misc_workloads - name: map_reduce_workloads - name: smoke_test - name: industry_benchmarks_wmajority - name: mongos_workloads - name: mongos_large_catalog_workloads - name: change_streams_throughput - name: change_streams_latency - name: change_streams_multi_mongos - name: linux-shard-lite display_name: Linux Shard Lite Cluster batchtime: 5760 # 4 days modules: *modules expansions: mongodb_setup: shard-lite infrastructure_provisioning: shard-lite platform: linux project_dir: *project_dir authentication: enabled storageEngine: wiredTiger run_on: - "rhel70-perf-shard-lite" depends_on: *_compile_amazon-64 tasks: &shardlitetasks - name: bestbuy_agg - name: bestbuy_agg_merge_different_db - name: bestbuy_agg_merge_same_db - name: bestbuy_agg_merge_target_hashed - name: bestbuy_agg_merge_wordcount - 
name: bestbuy_query - name: change_streams_latency - name: change_streams_throughput - name: industry_benchmarks - name: industry_benchmarks_wmajority - name: linkbench - name: mixed_workloads - name: mongos_workloads - name: mongos_large_catalog_workloads - name: move_chunk_workloads - name: move_chunk_waiting_workloads - name: retryable_writes_workloads - name: smoke_test - name: linux-3-node-replSet display_name: Linux 3-Node ReplSet batchtime: 2880 # 48 hours modules: *modules expansions: mongodb_setup: replica infrastructure_provisioning: replica platform: linux project_dir: *project_dir authentication: enabled storageEngine: wiredTiger run_on: - "rhel70-perf-replset" depends_on: *_compile_amazon-64 tasks: &3nodetasks - name: industry_benchmarks - name: ycsb_60GB - name: industry_benchmarks_secondary_reads - name: crud_workloads - name: insert_remove - name: service_architecture_workloads - name: mixed_workloads - name: misc_workloads - name: map_reduce_workloads - name: smoke_test - name: retryable_writes_workloads - name: industry_benchmarks_wmajority - name: secondary_performance # Uses a special 2 node mongodb setup - name: non_sharded_workloads - name: bestbuy_agg - name: bestbuy_agg_merge_different_db - name: bestbuy_agg_merge_same_db - name: bestbuy_agg_merge_wordcount - name: bestbuy_query - name: change_streams_throughput - name: change_streams_latency - name: snapshot_reads - name: secondary_reads - name: tpcc - name: linkbench - name: linkbench2 - name: big_update - name: sb_large_scale - name: sb_timeseries - name: linux-3-node-replSet-noflowcontrol display_name: Linux 3-Node ReplSet (Flow Control off) batchtime: 10080 # 7 days modules: *modules expansions: mongodb_setup: replica-noflowcontrol infrastructure_provisioning: replica platform: linux project_dir: *project_dir authentication: enabled storageEngine: wiredTiger run_on: - "rhel70-perf-replset" depends_on: *_compile_amazon-64 tasks: - name: industry_benchmarks - name: 
industry_benchmarks_secondary_reads - name: crud_workloads - name: service_architecture_workloads - name: mixed_workloads - name: smoke_test - name: industry_benchmarks_wmajority - name: change_streams_throughput - name: change_streams_latency - name: tpcc - name: linkbench - name: linkbench2 - name: big_update - name: linux-3-node-replSet-ssl display_name: Linux 3-Node ReplSet (SSL) batchtime: 10080 # 7 days modules: *modules expansions: mongodb_setup: replica-ssl infrastructure_provisioning: replica platform: linux project_dir: *project_dir authentication: enabled storageEngine: wiredTiger run_on: - "rhel70-perf-replset" depends_on: *_compile_amazon-64 tasks: - name: industry_benchmarks - name: mixed_workloads - name: linux-3-node-replSet-initialsync display_name: Linux 3-Node ReplSet Initial Sync batchtime: 2880 # 48 hours modules: *modules expansions: mongodb_setup: replica-2node infrastructure_provisioning: replica platform: linux authentication: disabled storageEngine: wiredTiger project_dir: *project_dir run_on: - "rhel70-perf-replset" depends_on: *_compile_amazon-64 tasks: - name: initialsync - name: initialsync-logkeeper-short - name: linux-replSet-initialsync-logkeeper display_name: Linux ReplSet Initial Sync LogKeeper batchtime: 10080 # 7 days modules: *modules expansions: mongodb_setup: initialsync-logkeeper infrastructure_provisioning: initialsync-logkeeper # EBS logkeeper snapshot with FCV set to 4.0 snapshotId: snap-041c3c57a1a4f5bba platform: linux authentication: disabled storageEngine: wiredTiger project_dir: *project_dir run_on: - "rhel70-perf-initialsync-logkeeper" depends_on: *_compile_amazon-64 tasks: - name: initialsync-logkeeper - name: atlas-like-M60 display_name: M60-Like 3-Node ReplSet batchtime: 5760 # 4 days modules: *modules expansions: mongodb_setup: atlas-like-replica infrastructure_provisioning: M60-like-replica platform: linux project_dir: *project_dir authentication: enabled storageEngine: wiredTiger compile-variant: -rhel70 
run_on: - "rhel70-perf-M60-like" depends_on: *_compile_rhel70 tasks: # Cannot use *3nodetasks because secondary_performance uses a special mongodb setup. - name: industry_benchmarks - name: ycsb_60GB - name: industry_benchmarks_secondary_reads - name: crud_workloads # - name: insert_remove # - name: service_architecture_workloads - name: mixed_workloads - name: misc_workloads - name: map_reduce_workloads - name: smoke_test - name: retryable_writes_workloads - name: industry_benchmarks_wmajority - name: non_sharded_workloads - name: bestbuy_agg - name: bestbuy_agg_merge_different_db - name: bestbuy_agg_merge_same_db - name: bestbuy_agg_merge_wordcount - name: bestbuy_query - name: change_streams_throughput - name: change_streams_latency - name: snapshot_reads - name: secondary_reads # - name: tpcc # TPCC with SSL currently broken https://jira.mongodb.org/browse/TIG-1681 - name: linkbench # - name: big_update