#!/bin/bash

cd $HOME # workaround EVG-12829

unameOut=$(uname -s)
case "${unameOut}" in
    Linux*)     machine=Linux;;
    Darwin*)    machine=Mac;;
    CYGWIN*)    machine=Cygwin;;
    *)          machine="UNKNOWN:${unameOut}"
esac

TOOLCHAIN_ROOT=/opt/mongodbtoolchain/v4

if [[ "${machine}" = "Cygwin" ]]; then
    out_dir="/cygdrive/c/setup_script_output.txt"
    desktop_dir="/cygdrive/c/Users/Administrator/Desktop"
    {
        date
        env
        echo "----------------------"

        echo -e "\n=> Setting _NT_SOURCE_PATH environment variable for debuggers to pick up source files."
        SRC_DIR_HASH=$(readlink -f /cygdrive/z/data/mci/source-*)
        SRC_DIR="${SRC_DIR_HASH}/src"
        echo "Source Path: [${SRC_DIR}]"
        set -x
        setx _NT_SOURCE_PATH "${SRC_DIR}"
        { set +x; } 2>/dev/null

        echo -e "\n=> Setting _NT_SYMBOL_PATH environment variable for debuggers to pick up the symbols."
        DBG_ARCHIVE_PARENT=$(readlink -f /cygdrive/z/data/mci/artifacts-*dist_test_debug)
        DBG_ARCHIVE=$(readlink -f ${DBG_ARCHIVE_PARENT}/debugsymbols-*.zip)
        DBG_ARCHIVE_TARGET_PARENT="${DBG_ARCHIVE_PARENT}/extracted_symbols"
        DBG_ARCHIVE_TARGET="${DBG_ARCHIVE_TARGET_PARENT}/dist-test/bin"
        echo "Symbols Dir: [${DBG_ARCHIVE_TARGET}]"

        echo -e "\n=> Extracting Symbol files."
        set -x
        mkdir -p ${DBG_ARCHIVE_TARGET_PARENT}
        unzip -n ${DBG_ARCHIVE} -d ${DBG_ARCHIVE_TARGET_PARENT}
        setx _NT_SYMBOL_PATH "${DBG_ARCHIVE_TARGET};srv*;"
        { set +x; } 2>/dev/null

        echo -e "\n=> Extracting Core Dump to Desktop."
        COREDUMP_ARCHIVE=$(readlink -f /cygdrive/z/data/mci/artifacts-*/mongo-coredumps*.tgz 2>/dev/null)
        COREDUMP_ARCHIVE_PARENT=$(dirname ${COREDUMP_ARCHIVE})
        COREDUMP_ARCHIVE_TARGET="${COREDUMP_ARCHIVE_PARENT}/extracted_dump"
        set -x
        mkdir -p ${COREDUMP_ARCHIVE_TARGET}
        tar -xzvf ${COREDUMP_ARCHIVE} -C ${COREDUMP_ARCHIVE_TARGET}
        cp -r ${COREDUMP_ARCHIVE_TARGET}/* ${desktop_dir}
        { set +x; } 2>/dev/null
        echo "Copied to Desktop."
    } &> ${out_dir}

    cp ${out_dir} ${desktop_dir}
else
    # Communicate to users who logged in before the script started that nothing is ready yet.
    wall "The setup_spawnhost_coredump script has just started setting up the debugging environment."

    # Write this file, which gets cat'ed on login, to tell users logging in that this setup
    # script is still running.
    echo '+-----------------------------------------------------------------------------------+' > ~/.setup_spawnhost_coredump_progress
    echo "| The setup script is still setting up data files for inspection on a [${machine}] host. |" >> ~/.setup_spawnhost_coredump_progress
    echo '+-----------------------------------------------------------------------------------+' >> ~/.setup_spawnhost_coredump_progress

    cat >> ~/.profile <<EOF
cd debug
echo "Debuggable binaries:"
ls -l mongo* 2>/dev/null | grep -v debug$
ls -l bin/ 2>/dev/null
( ls mongo* 2>/dev/null | grep -v debug$ || ls bin/ 2>/dev/null ) | grep -q . || echo " [none]"

for item in "mongo" "mongod" "mongos"; do
    echo "\${item} core dumps:"
    ls -l dump_\${item}.* 2>/dev/null || echo " [none]"
done

echo "Core dumps from unknown processes (crashed processes typically found here):"
ls -l dump_* 2>/dev/null | grep -v mongo || echo " [none]"

echo
echo "To examine a core dump, type 'gdb ./<binary> ./<core file>'"

cat ~/.setup_spawnhost_coredump_progress
EOF

    echo 'if [ -f ~/.profile ]; then
    . ~/.profile
fi' >> .bash_profile

    # Make a directory on the larger EBS volume and soft-link it under the home directory. The
    # smaller home volume can run out of space, particularly with coredumps from sharded timeouts.
    mkdir -p /data/debug
    ln -s /data/debug .
    cd debug
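    # Every login shell now lands in ~/debug and prints the progress banner. To check from an
    # already-open session whether this script is still running (a quick sketch using only files
    # and names this script itself creates):
    #   cat ~/.setup_spawnhost_coredump_progress
    #   pgrep -f setup_spawnhost_coredump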
    # As the name suggests, pretty printers. Primarily for boost::optional.
    git clone git@github.com:mongodb-forks/Boost-Pretty-Printer.git --branch mongodb-stable &

    archive_fail() {
        echo "Error: archive [${1}] not found." >&2
    }

    # Discover and unarchive the necessary files and source code. This will put mongo binaries and
    # their partner .debug files in the same `debug/bin` directory. The `bin` directory will later
    # be symbolically linked into the top-level (`debug`) directory. Shared library files and
    # their debug symbols will be dumped into a `debug/lib` directory for tidiness. The mongo
    # `src/` directory is soft-linked as `debug/src`. The .gdbinit file assumes gdb is being run
    # from the `debug` directory.
    BIN_ARCHIVE=$(ls /data/mci/artifacts-*archive_dist_test*/mongo-*.tgz 2>/dev/null)
    if [[ -n $BIN_ARCHIVE ]]; then
        # Have the shell expand braces before passing the wildcards to tar.
        bin_files_pattern=(\*/bin/mongo{d,s,,bridge})
        tar --wildcards --strip-components=1 -xzf $BIN_ARCHIVE "${bin_files_pattern[@]}"
        tar --wildcards --strip-components=1 -xzf $BIN_ARCHIVE '*/lib/*' &
    else
        archive_fail "bin"
    fi

    DBG_ARCHIVE=$(ls /data/mci/artifacts-*archive_dist_test_debug/debugsymbols-*.tgz 2>/dev/null)
    if [[ -n $DBG_ARCHIVE ]]; then
        # Support discovering split-dwarf files. Specifically, capture both .debug and .dwp files.
        # Have the shell expand braces before passing the wildcards to tar.
        dbg_files_pattern=(\*/bin/mongo{d,s,,bridge}.{debug,dwp})
        tar --wildcards --strip-components=1 -xzf $DBG_ARCHIVE "${dbg_files_pattern[@]}" &
        tar --wildcards --strip-components=1 -xzf $DBG_ARCHIVE '*/lib/*' &
    else
        archive_fail "debug"
    fi

    UNITTEST_ARCHIVE=$(ls /data/mci/artifacts-*run_unittests/mongo-unittests-*.tgz 2>/dev/null)
    if [[ -n $UNITTEST_ARCHIVE ]]; then
        tar --wildcards --strip-components=0 -xzf $UNITTEST_ARCHIVE 'bin/*' &
        tar --wildcards -xzf $UNITTEST_ARCHIVE 'lib/*' &
    else
        archive_fail "unittest"
    fi

    BENCHMARK_ARCHIVE=$(ls /data/mci/artifacts-*compile_upload_benchmarks/mongodb_mongo_*.tgz 2>/dev/null)
    if [[ -n $BENCHMARK_ARCHIVE ]]; then
        tar --wildcards --strip-components=2 -xzf $BENCHMARK_ARCHIVE '*/bin/*' &
    else
        archive_fail "benchmark"
    fi

    SRC_DIR=$(ls -d /data/mci/source-* 2>/dev/null)
    if [[ -n $SRC_DIR ]]; then
        ln -s ${SRC_DIR}/.gdbinit .
        ln -s ${SRC_DIR}/src src
        ln -s ${SRC_DIR}/buildscripts buildscripts
        # Install pymongo to get the bson library for the pretty-printers.
        ${TOOLCHAIN_ROOT}/bin/pip3 install -r ${SRC_DIR}/etc/pip/dev-requirements.txt &
    else
        archive_fail "src"
    fi

    COREDUMP_ARCHIVE=$(ls /data/mci/artifacts-*/mongo-coredumps-*.tgz 2>/dev/null)
    if [[ -n $COREDUMP_ARCHIVE ]]; then
        tar -xzf $COREDUMP_ARCHIVE &
    else
        archive_fail "coredump"
    fi

    echo "Waiting for background processes to complete."
    wait

    # Symbolically linking all of the executable files is sufficient for
    # `gdb ./mongod ./dump_mongod.core` to succeed. This inadvertently also links in the ".debug"
    # files, which is unnecessary but harmless. gdb expects the .debug files to live adjacent to
    # the physical binary.
    find bin -type f -perm -o=x -exec ln -s {} . \;

    # This helper script checks a binary for its DWARF version and then generates a gdb index if
    # the binary does not already have one. eu-readelf is used in place of readelf as it is much
    # faster.
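    # For reference, the compilation-unit header the helper below parses for the DWARF version
    # looks roughly like this (illustrative eu-readelf output; exact fields vary by build):
    #   $ $TOOLCHAIN_ROOT/bin/eu-readelf --debug-dump=info bin/mongod.debug | grep -m 1 Version
    #    Version: 4, Abbreviation section offset: 0, Address size: 8, Offset size: 4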
    cat > add_index.sh <<EOF
#!/bin/bash

set -o errexit

target_dir=\$(dirname \$1)
# Pull the DWARF version out of the first compilation-unit header.
dwarf_version=\$($TOOLCHAIN_ROOT/bin/eu-readelf --debug-dump=info \$1 | grep -m 1 -o 'Version: [0-9]*' | grep -o '[0-9]*')
if [[ \$dwarf_version == 5 ]]
then
    # Only generate a .debug_names index if the binary does not already have one. The
    # dump/cat/update dance below follows the recipe from the gdb manual for merging the
    # index's string table into the existing .debug_str section.
    if ! $TOOLCHAIN_ROOT/bin/eu-readelf -S \$1 | grep -q debug_names
    then
        $TOOLCHAIN_ROOT/bin/gdb --batch-silent --quiet --nx --eval-command "save gdb-index -dwarf-5 \$target_dir" \$1
        $TOOLCHAIN_ROOT/bin/objcopy --dump-section .debug_str=\${1}.debug_str.new \$1
        cat \${1}.debug_str >>\${1}.debug_str.new
        $TOOLCHAIN_ROOT/bin/objcopy --add-section .debug_names=\${1}.debug_names --set-section-flags .debug_names=readonly --update-section .debug_str=\${1}.debug_str.new \${1} \${1}
        rm -f \${1}.debug_names \${1}.debug_str.new \${1}.debug_str
    fi
elif [[ \$dwarf_version == 4 ]]
then
    $TOOLCHAIN_ROOT/bin/gdb --batch-silent --quiet --nx --eval-command "save gdb-index \$target_dir" \$1
    if [ -f \${1}.gdb-index ]
    then
        $TOOLCHAIN_ROOT/bin/objcopy --add-section .gdb_index=\${1}.gdb-index --set-section-flags .gdb_index=readonly \${1} \${1}
        rm -f \${1}.gdb-index
    fi
else
    echo "Can't determine debug info for \$1"
fi
EOF
    chmod +x add_index.sh

    # After creating the index file in a separate debug file, the debuglink CRC is no longer
    # valid. This will simply recreate the debuglink and therefore update the CRC to match.
    cat > recalc_debuglink.sh <<EOF
#!/bin/bash

set -o errexit

# The .debug file sits next to the binary it describes; strip the extension to find the binary.
bin_file=\${1%.debug}
if [ -f "\$bin_file" ]
then
    $TOOLCHAIN_ROOT/bin/objcopy --remove-section=.gnu_debuglink "\$bin_file"
    $TOOLCHAIN_ROOT/bin/objcopy --add-gnu-debuglink="\$1" "\$bin_file"
fi
EOF
    chmod +x recalc_debuglink.sh

    # Run the helpers over every .debug file, parallelized across the host's cores.
    echo "Adding a gdb index to each debug symbol file. This may take several minutes."
    find . -name '*.debug' -type f | xargs -n 1 -P $(nproc) ./add_index.sh
    find . -name '*.debug' -type f | xargs -n 1 -P $(nproc) ./recalc_debuglink.sh

    # gdb can also locate debug files by build ID under a `.build-id/xx/yyyy.debug` directory
    # layout. This helper links each debug file under the first two hex digits of its build ID.
    cat > create_build_id_links.sh <<EOF
#!/bin/bash

set -o errexit

build_id=\$($TOOLCHAIN_ROOT/bin/eu-readelf -n \$1 | sed -n 's/.*Build ID: //p')
if [ -n "\$build_id" ]
then
    mkdir -p .build-id/\${build_id:0:2}
    ln -sf "\$(realpath \$1)" ".build-id/\${build_id:0:2}/\${build_id:2}.debug"
fi
EOF
    chmod +x create_build_id_links.sh
    find . -name '*.debug' -type f | xargs -n 1 -P $(nproc) ./create_build_id_links.sh

    # Point gdb at the extracted libraries, the build-id links created above, and the Boost
    # pretty-printers cloned earlier.
    cat >> ~/.gdbinit <<EOF
set auto-load safe-path /
set solib-search-path $HOME/debug/lib
set debug-file-directory $HOME/debug
python
import sys
sys.path.insert(0, '$HOME/Boost-Pretty-Printer')
import boost
boost.register_printers()
end
EOF

    echo "Setup script completed." > ~/.setup_spawnhost_coredump_progress

    # Alert currently logged in users that this setup script has completed. Logging back in will
    # ensure any paths/environment variables will be set as intended.
    wall "The setup_spawnhost_coredump script has completed, please relogin to ensure the right environment variables are set."
fi

# Send a Slack notification as the very last thing the setup_spawnhost_coredump script does.
# This way a Server engineer can temporarily forget about the Evergreen host they spawned until
# the paths and environment variables are configured as intended for when they first connect.
if [[ "${machine}" = "Cygwin" ]]; then
    # The setup_spawnhost_coredump script runs as the mci-exec user on Windows hosts. However,
    # Server engineers log in as the Administrator user.
    ssh_user="Administrator"

    # The Evergreen binary only expects a Windows path. The rest of Cygwin is flexible about it
    # being a Cygwin path or a Windows path, so we do the conversion here.
    evg_credentials_pathname=$(cygpath -w ~Administrator/.evergreen.yml)
    evg_binary_pathname=~Administrator/cli_bin/evergreen
else
    ssh_user=$(whoami)
    evg_credentials_pathname=~/.evergreen.yml
    evg_binary_pathname=evergreen
fi

slack_user=$(awk '{if ($1 == "user:") print $2}' "$evg_credentials_pathname")

# Refer to https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instancedata-data-retrieval.html
# for more information on the AWS instance metadata endpoints.
aws_metadata_svc="http://169.254.169.254"
aws_token=$(curl -s -X PUT "$aws_metadata_svc/latest/api/token" -H 'X-aws-ec2-metadata-token-ttl-seconds: 60')
ssh_host=$(curl -s -H "X-aws-ec2-metadata-token: $aws_token" "$aws_metadata_svc/latest/meta-data/public-hostname")

if [[ "${machine}" = "Cygwin" ]]; then
    slack_message=$(printf "The setup_spawnhost_coredump script has finished setting things up. \
Please use Windows Remote Desktop with\n\
1. PC name: $ssh_host\n\
2. User account: $ssh_user\n\
3. The RDP password configured under the edit dialog at https://spruce.mongodb.com/spawn/host\n\
to log in.")
else
    slack_message="The setup_spawnhost_coredump script has finished setting things up. Please run "'```'"ssh $ssh_user@$ssh_host"'```'" to log in."
fi

# The Evergreen spawn host is expected to be provisioned with the user's .evergreen.yml
# credentials. But in case something unexpected happens, we don't want the
# setup_spawnhost_coredump script itself to error.
if [[ -n "${slack_user}" ]]; then
    "$evg_binary_pathname" --config "$evg_credentials_pathname" notify slack -t "@$slack_user" -m "$slack_message"
fi
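# A typical first session on a Linux spawn host once the notification arrives (illustrative;
# the actual binary and core-dump names depend on which artifact archives were present):
#   ssh <ssh_user>@<ssh_host>
#   cd ~/debug
#   /opt/mongodbtoolchain/v4/bin/gdb ./mongod ./dump_mongod.12345.core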