command_type: system
stepback: false

## Parameters for parameterized builds (see https://github.com/evergreen-ci/evergreen/wiki/Parameterized-Builds)
parameters:
  - key: patch_compile_flags
    description: "Additional SCons flags to be applied during scons compile invocations in this patch"

variables:
  ###
  # Leave this section uncommented to enable compile.
  _real_remote_file: &_remote_file ${project_dir}/${version_id}/${revision}/${platform}/mongodb${compile-variant|}-${version_id}.tar.gz
  _real_compile: &_compile
    - variant: linux-wt-standalone
      name: compile
  _real_expansions: &_expansion_updates []
  ###

  ###
  # **Or**: Leave this section uncommented to bypass/skip compile.
  # This file ↓ came from a microbenchmarks waterfall run.
  # https://evergreen.mongodb.com/version/performance_996dcdc3d96346d71f012388eccc79c691619340
  # Artifacts eventually expire. If this fails, grab the compile artifacts url and update this.
  # _skip_remote_file: &_remote_file
  #   perf/5f6ca2392fbabe40badf39c4/c39af144b2370be0537410d9bc79be66a1a5f3c7/linux/mongodb-5f6ca2392fbabe40badf39c4.tar.gz
  # _skip_compile: &_compile
  #   []
  # _skip_expansions: &_expansion_updates
  #   - key: mdb_binary_for_server
  #     value: https://mciuploads.s3.amazonaws.com/perf/5f6ca2392fbabe40badf39c4/c39af144b2370be0537410d9bc79be66a1a5f3c7/linux/mongodb-5f6ca2392fbabe40badf39c4.tar.gz
  #   - key: mdb_binary_for_client
  #     value: https://mciuploads.s3.amazonaws.com/perf/5f6ca2392fbabe40badf39c4/c39af144b2370be0537410d9bc79be66a1a5f3c7/linux/mongodb-5f6ca2392fbabe40badf39c4.tar.gz
  ###

  _src_dir: &src_dir src/mongo
  _modules: &modules
    - enterprise
    - mongo-tools
    - dsi
    - genny
    - signal-processing
    - workloads
    - linkbench
    - linkbench2
    - mongo-perf
    - YCSB
    - benchmarks
    - py-tpcc

modules:
  ###
  # Same in every DSI project. Ensure that this block is synchronized with
  # evergreen-dsitest.yml, atlas/system_perf_atlas.yml, and src/dsi/onboarding.py
  # (search update-repos-here) in this repo, and etc/system_perf.yml and
  # etc/perf.yml in mongodb/mongo
  - name: dsi
    repo: git@github.com:10gen/dsi.git
    prefix: ../../src
    branch: master
  - name: genny
    repo: git@github.com:10gen/genny.git
    prefix: ../../src
    branch: master
  - name: signal-processing
    repo: git@github.com:10gen/signal-processing.git
    prefix: ../../src
    branch: master
  - name: workloads
    repo: git@github.com:10gen/workloads.git
    prefix: ../../src
    branch: master
  - name: linkbench
    repo: git@github.com:10gen/linkbench.git
    prefix: ../../src
    branch: master
  - name: linkbench2
    repo: git@github.com:10gen/linkbench2.git
    prefix: ../../src
    branch: master
  - name: mongo-perf
    repo: git@github.com:mongodb/mongo-perf.git
    prefix: ../../src
    branch: master
  - name: YCSB
    repo: git@github.com:mongodb-labs/YCSB.git
    prefix: ../../src
    branch: production
  - name: benchmarks
    repo: git@github.com:mongodb-labs/benchmarks.git
    prefix: ../../src
    branch: master
  - name: py-tpcc
    repo: git@github.com:mongodb-labs/py-tpcc.git
    prefix: ../../src
    branch: master
    ref: 46f8f42a71626fc2057d2604ab9359ccf0395ba0
  ###
  # - name: mongo
  #   repo: git@github.com:mongodb/mongo.git
  #   prefix: ../../src
  #   branch: master
  - name: enterprise
    repo: git@github.com:10gen/mongo-enterprise-modules.git
    prefix: src/mongo/db/modules
    branch: v4.0

###
# Same in every DSI project
pre:
  - func: f_other_pre_ops
  - func: f_dsi_pre_run
post:
  - func: f_dsi_post_run
  - func: f_other_post_ops
timeout:
  - func: f_dsi_timeout
  - func: f_other_timeout
###

functions:
  ###
  # Same in every DSI project
  f_dsi_pre_run:
    - command: manifest.load
    - command: expansions.update
      params:
        updates: *_expansion_updates

  f_run_dsi_workload:
    - command: git.get_project
      params:
        directory: *src_dir
        revisions:
          dsi: ${dsi_rev}
          genny: ${genny_rev}
          signal-processing: ${signal-processing_rev}
          linkbench: ${linkbench_rev}
          linkbench2: ${linkbench2_rev}
          workloads: ${workloads_rev}
          mongo-perf: ${mongo-perf_rev}
          YCSB: ${YCSB_rev}
          benchmarks: ${benchmarks_rev}
          py-tpcc: ${py-tpcc_rev}
    - command: expansions.write
      params:
        file: ./expansions.yml
    - command: shell.exec
      params:
        script: ./src/dsi/run-dsi run_workload
    - command: shell.exec
      type: system
      params:
        script: ./src/dsi/run-dsi determine_failure -m SYSTEM
    - command: shell.exec
      type: setup
      params:
        script: ./src/dsi/run-dsi determine_failure -m SETUP
    - command: shell.exec
      type: test
      params:
        script: ./src/dsi/run-dsi determine_failure -m TEST

  f_dsi_post_run:
    - command: json.send
      params:
        name: perf
        file: ./build/LegacyPerfJson/perf.json
    - command: shell.exec
      params:
        script: ./src/dsi/run-dsi post_run
    - command: perf.send
      params:
        file: ./build/CedarReports/cedar_report.json
        aws_key: ${terraform_key}
        aws_secret: ${terraform_secret}
        bucket: genny-metrics
        region: us-east-1
        prefix: ${task_id}_${execution}
    - command: attach.results
      params:
        file_location: ./build/EvergreenResultsJson/results.json
    - command: s3.put
      params:
        aws_key: ${aws_key}
        aws_secret: ${aws_secret}
        local_file: ./build/Artifacts/DSIArtifacts.tgz
        remote_file: ${project_dir}/${build_variant}/${revision}/${task_id}/${version_id}/logs/dsi-artifacts-${task_name}-${build_id}-${execution}.tgz
        bucket: mciuploads
        permissions: public-read
        content_type: application/x-gzip
        display_name: DSI Artifacts - Execution ${execution}
    - command: s3.put
      params:
        aws_key: ${aws_key}
        aws_secret: ${aws_secret}
        local_file: ./build/Documentation/index.html
        remote_file: ${project_dir}/${build_variant}/${revision}/${task_id}/${version_id}/logs/${task_name}-${build_id}-index.html
        bucket: mciuploads
        permissions: public-read
        content_type: text/html
        display_name: Documentation

  f_dsi_timeout:
    - command: shell.exec
      params:
        script: ./src/dsi/run-dsi on_timeout
  ###

  f_other_post_ops:
    - command: shell.exec
      params:
        working_dir: src
        script: |
          # removes files from the (local) scons cache when it's over a
          # threshold, to the $prune_ratio percentage. Ideally override
          # these default values in the distro config in evergreen.
          if [ -d "${scons_cache_path}" ]; then
            /opt/mongodbtoolchain/v3/bin/python3 buildscripts/scons_cache_prune.py --cache-dir ${scons_cache_path} --cache-size ${scons_cache_size|200} --prune-ratio ${scons_prune_ratio|0.8}
          fi

  f_other_pre_ops:
    - &f_other_pre_ops
      command: shell.exec
      params:
        silent: true
        script: |
          for PS in mongo{,d,s,import,export,dump,restore,stat,files,top,bridge} resmoke.py python{,2} lldb _test; do
            pkill -9 "$PS"
          done

  f_other_timeout:
    # Can't be empty so just `echo`.
    - command: shell.exec
      params: {script: "echo"}

  ###
  # Compile
  compile mongodb:
    - command: shell.exec
      params:
        working_dir: src
        script: |
          set -o errexit
          set -o verbose
          # We get the raw version string (r1.2.3-45-gabcdef) from git
          MONGO_VERSION=$(git describe --abbrev=7)
          # If this is a patch build, we add the patch version id to the version string so we know
          # this build was a patch, and which evergreen task it came from
          if [ "${is_patch|false}" = "true" ]; then
            MONGO_VERSION="$MONGO_VERSION-patch-${version_id}"
          fi
          # This script converts the generated version string into a sanitized version string for
          # use by scons and uploading artifacts as well as information about for the scons cache.
          MONGO_VERSION=$MONGO_VERSION USE_SCONS_CACHE=${use_scons_cache|false} ${python|/opt/mongodbtoolchain/v2/bin/python2} buildscripts/generate_compile_expansions.py --out compile_expansions.yml
    # Then we load the generated version data into the agent so we can use it in task definitions
    - command: expansions.update
      params:
        file: src/compile_expansions.yml
    - command: shell.exec
      params:
        working_dir: src/src/mongo/gotools/src/github.com/mongodb/mongo-tools
        script: |
          set -o verbose
          set -o errexit
          # make sure newlines in the scripts are handled correctly by windows
          if [ "Windows_NT" = "$OS" ]; then
            set -o igncr
          fi;
          # set_goenv provides set_goenv(), print_ldflags() and print_tags() used below
          . ./set_goenv.sh
          GOROOT="" set_goenv || exit
          go version
          build_tools="bsondump mongostat mongofiles mongoexport mongoimport mongorestore mongodump mongotop"
          if [ "${build_mongoreplay}" = "true" ]; then
            build_tools="$build_tools mongoreplay"
          fi
          for i in $build_tools; do
            go build -ldflags "$(print_ldflags)" ${args} -tags "$(print_tags ${tooltags})" -o "../../../../../../mongo-tools/$i${exe|}" $i/main/$i.go
            "../../../../../../mongo-tools/$i${exe|}" --version
          done
    - command: shell.exec
      params:
        working_dir: src
        script: |
          set -o errexit
          set -o verbose
          ${python|/opt/mongodbtoolchain/v2/bin/python2} ./buildscripts/scons.py ${compile_flags|} ${scons_cache_args|} mongo${extension} --use-new-tools mongod${extension} mongos${extension} MONGO_VERSION=${version}
          mkdir -p mongodb/bin
          mkdir -p mongodb/jstests/hooks
          mv mongo${extension|} mongodb/bin
          mv mongod${extension|} mongodb/bin
          mv mongos${extension|} mongodb/bin
          mv src/mongo-tools/* mongodb/bin
          if [ -d jstests/hooks ]
          then
            echo "Fetching JS test DB correctness checks from directory jstests"
            cp -a jstests/* mongodb/jstests
            echo "Now adding our own special run_validate_collections.js wrapper"
            mv mongodb/jstests/hooks/run_validate_collections.js mongodb/jstests/hooks/run_validate_collections.actual.js
          # heredoc delimiter must be unindented once the block-scalar indent is stripped
          cat << EOF > mongodb/jstests/hooks/run_validate_collections.js
          print("NOTE: run_validate_collections.js will skip the oplog!");
          TestData = { skipValidationNamespaces: ['local.oplog.rs'] };
          load('jstests/hooks/run_validate_collections.actual.js');
          EOF
          fi
          tar czf mongodb${compile-variant|}.tar.gz mongodb
    - command: s3.put
      params:
        aws_key: ${aws_key}
        aws_secret: ${aws_secret}
        local_file: src/mongodb${compile-variant|}.tar.gz
        remote_file: ${project_dir}/${version_id}/${revision}/${platform}/mongodb${compile-variant|}-${version_id}.tar.gz
        bucket: mciuploads
        permissions: public-read
        content_type: ${content_type|application/x-gzip}
        display_name: mongodb${compile-variant|}.tar.gz

  ###
  ## Schedule Tasks ##
  f_schedule_tasks:
    - command: git.get_project
      params:
        directory: *src_dir
        revisions:
          dsi: ${dsi_rev}
          genny: ${genny_rev}
          signal-processing: ${signal-processing_rev}
          linkbench: ${linkbench_rev}
          linkbench2: ${linkbench2_rev}
          workloads: ${workloads_rev}
          mongo-perf: ${mongo-perf_rev}
          YCSB: ${YCSB_rev}
          benchmarks: ${benchmarks_rev}
          py-tpcc: ${py-tpcc_rev}
    - command: expansions.write
      params:
        file: ./expansions.yml
    - command: shell.exec
      params:
        script: ./src/dsi/run-dsi schedule_tasks --tasks=${tasks}
    - command: generate.tasks
      params:
        files:
          - build/TaskJSON/Tasks.json

tasks:
  ###
  # Same in every DSI project
  - name: schedule_global_auto_tasks
    priority: 5
    commands:
      - func: f_schedule_tasks
        vars:
          tasks: all_tasks
  - name: schedule_variant_auto_tasks
    priority: 5
    commands:
      - func: f_schedule_tasks
        vars:
          tasks: variant_tasks
  - name: schedule_patch_auto_tasks
    priority: 5
    commands:
      - func: f_schedule_tasks
        vars:
          tasks: patch_tasks
  - name: smoke_test
    priority: 5
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: short
  - name: smoke_test_ssl
    priority: 5
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: short
          mongodb_setup: replica-ssl
          infrastructure_provisioning: replica
  - name: smoke_test_standalone_auth
    priority: 5
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: short
          mongodb_setup: standalone-auth
          infrastructure_provisioning: single
  - name: smoke_test_replset_auth
    priority: 5
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: short
          mongodb_setup: replica-auth
          infrastructure_provisioning: replica
  - name: smoke_test_shard_lite_auth
    priority: 5
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: short
          mongodb_setup: shard-lite-auth
          infrastructure_provisioning: shard-lite
  - name: dsi_integ_test_run_command_simple
    priority: 5
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: run_command_simple
  ###

  - name: compile
    commands:
      - command: manifest.load
      - command: git.get_project
        params:
          directory: src
          revisions:
            enterprise: ${enterprise_rev}
            mongo-tools: ${mongo-tools_rev}
      - func: "compile mongodb"

  - name: query
    depends_on: *_compile
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: microbenchmarks
          test_control_params: |
            {include_filter_1: query,
             include_filter_2: core regression,
             exclude_filter: single_threaded,
             threads: "1 2 4 8",
             read_cmd: 'false'}

  - name: views-query
    depends_on: *_compile
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: microbenchmarks
          test_control_params: |
            {include_filter_1: query_identityview,
             include_filter_2: core regression,
             exclude_filter: single_threaded,
             threads: "1 2 4 8",
             read_cmd: 'true'}

  - name: views-aggregation
    depends_on: *_compile
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: microbenchmarks
          test_control_params: |
            {include_filter_1: aggregation_identityview,
             include_filter_2: regression,
             exclude_filter: none,
             threads: "1",
             read_cmd: 'true'}

  - name: where
    depends_on: *_compile
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: microbenchmarks
          test_control_params: |
            {include_filter_1: where,
             include_filter_2: core regression,
             exclude_filter: single_threaded,
             threads: "1 2 4 8",
             read_cmd: 'false'}

  - name: update
    depends_on: *_compile
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: microbenchmarks
          test_control_params: |
            {include_filter_1: update,
             include_filter_2: core regression,
             exclude_filter: single_threaded,
             threads: "1 2 4 8",
             read_cmd: 'false'}

  - name: insert
    depends_on: *_compile
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: microbenchmarks
          test_control_params: |
            {include_filter_1: insert,
             include_filter_2: core regression,
             exclude_filter: single_threaded,
             threads: "1 2 4 8",
             read_cmd: 'false'}

  - name: geo
    depends_on: *_compile
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: microbenchmarks
          test_control_params: |
            {include_filter_1: geo,
             include_filter_2: core regression,
             exclude_filter: single_threaded,
             threads: "1 2 4 8",
             read_cmd: 'false'}

  - name: misc
    depends_on: *_compile
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: microbenchmarks
          test_control_params: |
            {include_filter_1: command multi remove mixed,
             include_filter_2: core regression,
             exclude_filter: single_threaded,
             threads: "1 2 4 8",
             read_cmd: 'false'}

  - name: singleThreaded
    depends_on: *_compile
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: microbenchmarks
          test_control_params: |
            {include_filter_1: single_threaded,
             include_filter_2: core regression,
             exclude_filter: none,
             threads: "1",
             read_cmd: 'false'}

  - name: aggregation
    depends_on: *_compile
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: microbenchmarks
          test_control_params: |
            {include_filter_1: aggregation,
             include_filter_2: regression,
             exclude_filter: js,
             threads: "1",
             read_cmd: 'false'}

  - name: agg-query-comparison
    depends_on: *_compile
    commands:
      - func: f_run_dsi_workload
        vars:
          test_control: microbenchmarks
          test_control_params: |
            {include_filter_1: agg_query_comparison,
             include_filter_2: core regression,
             exclude_filter: single_threaded,
             threads: "1 2 4 8",
             read_cmd: 'false'}

buildvariants:
  - name: linux-wt-standalone
    display_name: Standalone Linux inMemory
    batchtime: 90 # 1.5 hours
    modules:
      - enterprise
      - dsi
      - mongo-perf
      - signal-processing
    expansions:
      # We are explicitly tracking the rhel62 variant compile options from evergreen.yml for
      # microbenchmarks, since they run on the centos6 boxes. If we can get proper artifacts directly
      # from that project, we should do that and remove the compile tasks.
      compile_flags: --ssl MONGO_DISTMOD=rhel62 -j$(grep -c ^processor /proc/cpuinfo) --release --variables-files=etc/scons/mongodbtoolchain_gcc.vars
      mongod_exec_wrapper: &exec_wrapper "numactl --physcpubind=4,5,6,7 -i 1"
      perf_exec_wrapper: &perf_wrapper "numactl --physcpubind=1,2,3 -i 0"
      use_scons_cache: true
      project: &project perf
      platform: linux
      infrastructure_provisioning: microbenchmarks
      mongodb_setup: microbenchmarks_standalone
      canaries: none
      storageEngine: inMemory
      project_dir: &project_dir perf
    run_on:
      - "centos6-perf"
    tasks:
      - name: compile
        distros:
          - rhel62-large
      - name: aggregation
      - name: agg-query-comparison
      - name: query
      - name: views-aggregation
      - name: views-query
      - name: where
      - name: update
      - name: insert
      - name: geo
      - name: misc
      - name: singleThreaded

  - name: linux-wt-repl
    display_name: 1-Node ReplSet Linux inMemory
    batchtime: 90 # 1.5 hours
    modules:
      - dsi
      - mongo-perf
      - signal-processing
    expansions:
      mongod_exec_wrapper: *exec_wrapper
      perf_exec_wrapper: *perf_wrapper
      project: *project
      platform: linux
      infrastructure_provisioning: microbenchmarks
      # NOTE(review): this variant's display_name says "1-Node ReplSet" but
      # mongodb_setup is the standalone topology — confirm whether a
      # microbenchmarks_replica setup was intended here.
      mongodb_setup: microbenchmarks_standalone
      canaries: none
      storageEngine: inMemory
      project_dir: *project_dir
    run_on:
      - "centos6-perf"
    tasks:
      - name: update
      - name: insert
      - name: misc
      - name: singleThreaded