#!/bin/bash

cd $HOME # workaround EVG-12829

unameOut=$(uname -s)
case "${unameOut}" in
    Linux*)     machine=Linux;;
    Darwin*)    machine=Mac;;
    CYGWIN*)    machine=Cygwin;;
    *)          machine="UNKNOWN:${unameOut}"
esac
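
# `machine` decides which branch below runs: the Cygwin branch prepares a Windows host for the native
# debuggers, while the else branch prepares a gdb debugging environment on Linux spawn hosts.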

if [[ "${machine}" = "Cygwin" ]]; then
    out_dir="/cygdrive/c/setup_script_output.txt"
    desktop_dir="/cygdrive/c/Users/Administrator/Desktop"

    {
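        # Everything in this block is captured to ${out_dir} and copied to the Desktop at the end so
        # the user can review what the setup script did.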
        date
        env

        echo "----------------------"
        echo -e "\n=> Setting _NT_SOURCE_PATH environment variable for debuggers to pick up source files."
        src_dir_hash=$(readlink -f /cygdrive/z/data/mci/source-*)
        full_src_dir="${src_dir_hash}/src"
        echo "Source Path: [${full_src_dir}]"
        set -x;
        setx _NT_SOURCE_PATH "${full_src_dir}"
        { set +x; } 2>/dev/null
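        # setx persists the variable in the Windows user environment so debuggers launched from a
        # later session pick it up; it does not change the current shell. The `{ set +x; } 2>/dev/null`
        # pattern turns tracing back off without echoing the `set +x` line into the captured output.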

        echo -e "\n=> Setting _NT_SYMBOL_PATH environment variable for debuggers to pick up the symbols."
        sym_parent_dir=$(readlink -f /cygdrive/z/data/mci/artifacts-*dist_test_debug)
        sym_dir=$(readlink -f ${sym_parent_dir}/debugsymbols-mongodb*zip)
        sym_extracted_dir="${sym_parent_dir}/extracted_symbols"
        full_sym_dir="${sym_extracted_dir}/dist-test/bin"
        echo "Symbols Dir: [${full_sym_dir}]"

        echo -e "\n=> Extracting Symbol files."
        set -x;
        mkdir ${sym_extracted_dir}
        unzip -n ${sym_dir} -d ${sym_extracted_dir}
        setx _NT_SYMBOL_PATH "${full_sym_dir};srv*;"
        { set +x; } 2>/dev/null
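        # The trailing `srv*` element should let the debugger also pull symbols for system modules
        # from the default public Microsoft symbol server, in addition to the extracted mongodb symbols.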

        echo -e "\n=> Extracting Core Dump to Desktop."
        full_dump_dir=$(readlink -f /cygdrive/z/data/mci/artifacts-* | grep -v dist_test)
        full_dump_parent_dir=$(readlink -f ${full_dump_dir}/mongo-coredumps*tgz)
        extracted_dump_dir="${full_dump_dir}/extracted_dump"
        set -x;
        mkdir ${extracted_dump_dir}
        tar -xzvf ${full_dump_parent_dir} -C ${extracted_dump_dir}
        cp ${extracted_dump_dir}/* ${desktop_dir}
        { set +x; } 2>/dev/null
        echo "Copied to Desktop."

    } &> ${out_dir}

    cp ${out_dir} ${desktop_dir}
else
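    # Non-Cygwin (Linux) spawn hosts: stage binaries, debug symbols, source, and core dumps under a
    # `debug` directory in the home directory for use with gdb.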
    # Communicate to users who logged in before the script started that nothing is ready.
    wall "The setup_spawnhost_coredump script has just started setting up the debugging environment."

    # Write this file, which gets cat'ed on login, to tell users logging in that this setup script is still running.
    echo '+-----------------------------------------------------------------------------------+' > ~/.setup_spawnhost_coredump_progress
    echo "| The setup script is still setting up data files for inspection on a [${machine}] host. |" >> ~/.setup_spawnhost_coredump_progress
    echo '+-----------------------------------------------------------------------------------+' >> ~/.setup_spawnhost_coredump_progress
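
    # Everything between here and the EOF marker below is appended verbatim to ~/.profile and runs on
    # every login; \${...} escapes are expanded at login time rather than when the heredoc is written.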
    cat >> ~/.profile <<EOF
cat ~/.setup_spawnhost_coredump_progress
# Coredumps generated by a toolchain-built mongodb can be problematic when examined with the system
# gdb.
alias gdb='/opt/mongodbtoolchain/v3/bin/gdb'
# Put the user into the debug directory up front. This is where gdb is expected to be
# invoked from.
cd debug
echo "Debuggable binaries:"
ls -l mongo* | grep -v debug$
for item in "mongo" "mongod" "mongos"; do
    echo "\${item} core dumps:"
    ls -l dump_\${item}.*
done

echo "Core dumps from unknown processes (crashed processes typically found here):"
ls -l dump_* | grep -v mongo

echo
echo "To examine a core dump, type 'gdb ./<binary> ./<core file>'"
cat ~/.setup_spawnhost_coredump_progress
EOF
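
    # bash login shells read ~/.bash_profile in preference to ~/.profile when it exists, so make sure
    # ~/.profile is sourced from there.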
    echo 'if [ -f ~/.profile ]; then
. ~/.profile
fi' >> .bash_profile

    # Make a directory on the larger EBS volume and soft-link it under the home directory. The smaller
    # home volume can run out of space, particularly with coredumps from sharded timeouts.
    mkdir /data/debug
    ln -s /data/debug .
    cd debug

    # As the name suggests, GDB pretty printers. Primarily for boost::optional<T>.
    git clone git@github.com:ruediger/Boost-Pretty-Printer.git &
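    # The clone above and the archive extractions below are backgrounded (&) and joined by the `wait`
    # further down so the slower download/extract steps can overlap.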

    # Discover and unarchive necessary files and source code. This will put mongo binaries and their
    # partner .debug files in the same `debug/bin` directory. The `bin` directory will later be
    # symbolically linked into the top-level (`debug`) directory. Shared library files and their debug
    # symbols will be dumped into a `debug/lib` directory for tidiness. The mongo `<reporoot>/src/`
    # directory is soft linked as `debug/src`. The .gdbinit file assumes gdb is being run from the
    # `debug` directory.
    BIN_ARCHIVE=`ls /data/mci/artifacts-*archive_dist_test*/mongo-*.tgz`
    tar --wildcards --strip-components=1 -xzf $BIN_ARCHIVE '*/bin/mongod' '*/bin/mongos' '*/bin/mongo' '*/bin/mongobridge' &
    tar --wildcards --strip-components=1 -xzf $BIN_ARCHIVE '*/lib/*' &
    DBG_ARCHIVE=`ls /data/mci/artifacts-*archive_dist_test_debug/debugsymbols-*.tgz`
    tar --wildcards --strip-components=1 -xzf $DBG_ARCHIVE '*/bin/mongod.debug' '*/bin/mongos.debug' '*/bin/mongo.debug' '*/bin/mongobridge.debug' &
    tar --wildcards --strip-components=1 -xzf $DBG_ARCHIVE '*/lib/*' &
    UNITTEST_ARCHIVE=`ls /data/mci/artifacts-*run_unittests/mongo-unittests-*.tgz`
    tar --wildcards --strip-components=0 -xzf $UNITTEST_ARCHIVE 'bin/*' &
    tar --wildcards -xzf $UNITTEST_ARCHIVE 'lib/*' &
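    # Note: --strip-components=1 drops the single top-level directory the mongo and debugsymbols
    # archives appear to wrap their contents in, so bin/ and lib/ land directly in this directory; the
    # unittest archive seems to ship bin/ and lib/ at its top level already, hence --strip-components=0.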

    SRC_DIR=`find /data/mci/ -maxdepth 1 | grep source`
    ln -s $SRC_DIR/.gdbinit .
    ln -s $SRC_DIR/src src
    ln -s $SRC_DIR/buildscripts buildscripts

    # Install pymongo to get the bson library for pretty-printers.
    /opt/mongodbtoolchain/v3/bin/pip3 install -r $SRC_DIR/etc/pip/dev-requirements.txt &

    COREDUMP_ARCHIVE=`ls /data/mci/artifacts-*/mongo-coredumps-*.tgz`
    tar -xzf $COREDUMP_ARCHIVE &
    echo "Waiting for background processes to complete."
    wait

    # Symbolically linking all of the executable files is sufficient for `gdb ./mongod ./dump_mongod.core`
    # to succeed. This inadvertently also links in the ".debug" files, which is unnecessary but
    # harmless. gdb expects the .debug files to live adjacent to the physical binary.
    ln -s bin/* ./
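
    # Notes on the gdb settings below: `set auto-load safe-path /` allows gdb to auto-load the repo's
    # .gdbinit (symlinked above) from any directory, and `set solib-search-path ./lib/` points gdb at
    # the extracted shared libraries when it loads a core dump.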
    cat >> ~/.gdbinit <<EOF
set auto-load safe-path /
set solib-search-path ./lib/
set pagination off
set print object on
set print static-members off
set print pretty on

python
import sys
sys.path.insert(0, './Boost-Pretty-Printer')
import boost
boost.register_printers()
end
EOF

    echo "dir $HOME/debug" >> ~/.gdbinit

    # Empty out the progress file that warns users about the setup script still running when they log in.
    echo "" > ~/.setup_spawnhost_coredump_progress
    # Alert currently logged in users that this setup script has completed. Logging back in will ensure
    # any paths/environment variables are set as intended.
    wall "The setup_spawnhost_coredump script has completed, please relogin to ensure the right environment variables are set."
fi

# Send a Slack notification as the very last thing the setup_spawnhost_coredump script does.
# This way a Server engineer can temporarily forget about the Evergreen host they spawned until the
# paths and environment variables are configured as intended for when they first connect.
if [[ "${machine}" = "Cygwin" ]]; then
    # The setup_spawnhost_coredump script runs as the mci-exec user on Windows hosts. However,
    # Server engineers log in as the Administrator user.
    ssh_user="Administrator"
    # The Evergreen binary expects only a Windows path. The rest of Cygwin is flexible about it
    # being a Cygwin path or a Windows path so we do the conversion here.
    evg_credentials_pathname=$(cygpath -w ~Administrator/.evergreen.yml)
    evg_binary_pathname=~Administrator/cli_bin/evergreen
else
    ssh_user=$(whoami)
    evg_credentials_pathname=~/.evergreen.yml
    evg_binary_pathname=evergreen
fi
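
# Pull the Evergreen username (the `user:` field) out of the credentials file; it is assumed to match
# the Slack handle that the notify command at the bottom @-mentions.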
slack_user=$(awk '{if ($1 == "user:") print $2}' "$evg_credentials_pathname")
# Refer to the https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instancedata-data-retrieval.html
# documentation for more information on the AWS instance metadata endpoints.
aws_metadata_svc="http://169.254.169.254"
aws_token=$(curl -s -X PUT "$aws_metadata_svc/latest/api/token" -H 'X-aws-ec2-metadata-token-ttl-seconds: 60')
ssh_host=$(curl -s -H "X-aws-ec2-metadata-token: $aws_token" "$aws_metadata_svc/latest/meta-data/public-hostname")
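# The PUT followed by the GET is the IMDSv2 flow: request a short-lived session token, then present it
# via the X-aws-ec2-metadata-token header when reading the host's public DNS name.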
if [[ "${machine}" = "Cygwin" ]]; then
    slack_message=$(printf "The setup_spawnhost_coredump script has finished setting things up. \
Please use Windows Remote Desktop with\n\
1. PC name: $ssh_host\n\
2. User account: $ssh_user\n\
3. The RDP password configured under the edit dialog at https://spruce.mongodb.com/spawn/host\n\
to log in.")
else
    slack_message="The setup_spawnhost_coredump script has finished setting things up. Please run "'```'"ssh $ssh_user@$ssh_host"'```'" to log in."
fi

# The Evergreen spawn host is expected to be provisioned with the user's .evergreen.yml credentials.
# But in case something unexpected happens, we don't want the setup_spawnhost_coredump script itself
# to error.
if [[ -n "${slack_user}" ]]; then
    "$evg_binary_pathname" --config "$evg_credentials_pathname" notify slack -t "@$slack_user" -m "$slack_message"
fi