#!/bin/bash -l
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
# Note the -l above to get .bashrc and /etc/profile

# Mirror all output into a log file for later debugging
exec &> >(tee -a "/tmp/BOOTSTRAP.log")

shopt -q login_shell && echo 'Login shell' || echo 'Not login shell'

# Force login shell
. /etc/profile

# Guard against running the bootstrap a second time
if [ -f /tmp/BOOTSTRAP.WHOAMI ]; then
    echo "bootstrap has already run"
    exit 0
fi

whoami > /tmp/BOOTSTRAP.WHOAMI
env >> /tmp/BOOTSTRAP.WHOAMI

#export VPC_ID=${1:-default}
#export MASTER_SUBNET_ID=${2:-default}
#export COMPUTE_SUBNET_ID=${3:-default}
#export SSH_KEY_ID=${1:-default}
#export PRIVATE_KEY_ARN=${2:-default}

# Default to 2.10.1 if no value is provided
pcluster_version=${pcluster_version:-2.10.1}
notearshpc_version=${notearshpc_version:-0.2.0}

# Install the AWS ParallelCluster CLI
sudo pip-3.6 --disable-pip-version-check install aws-parallelcluster==${pcluster_version}

# Derive the region from the instance's availability zone (drop the trailing AZ letter)
export AWS_DEFAULT_REGION=$(curl -s http://169.254.169.254/latest/meta-data/placement/availability-zone | rev | cut -c 2- | rev)

# Load the SSH key generated by CloudFormation
mkdir -p ~/.ssh && chmod 700 ~/.ssh
aws secretsmanager get-secret-value --secret-id ${private_key_arn} --query SecretString --output text > ~/.ssh/${ssh_key_id}
chmod 600 ~/.ssh/${ssh_key_id}

# Automatically start ssh-agent and add the key, so ssh stops prompting for it
echo 'eval $(ssh-agent) &>/dev/null' >> ~/.bashrc
echo "ssh-add ~/.ssh/${ssh_key_id} &>/dev/null" >> ~/.bashrc

# pcluster shortcuts
cat <<\EOF >> ~/.bashrc
alias pl="pcluster list --color"
alias p="pcluster"
alias pssh="pcluster ssh"
EOF

# Welcome message
cat <<\WELCOME > ~/environment/Welcome.txt
 _   _          _______                     _____ _           _
| \ | |        |__   __|                   / ____| |         | |
|  \| | ___       | | ___  __ _ _ __ ___  | |    | |_   _ ___| |_ ___ _ __
| . ` |/ _ \      | |/ _ \/ _` | '__/ __| | |    | | | | / __| __/ _ \ '__|
| |\  | (_) |     | |  __/ (_| | |  \__ \ | |____| | |_| \__ \ ||  __/ |
|_| \_|\___/      |_|\___|\__,_|_|  |___/  \_____|_|\__,_|___/\__\___|_|

To get started, run:

$ pcluster list --color

To ssh into the cluster, once it's gone into CREATE_COMPLETE, run:

$ pcluster ssh hpc-cluster

To connect to the desktop GUI via DCV, run:

$ pcluster dcv connect hpc-cluster
WELCOME
sudo cp ~/environment/Welcome.txt /etc/motd
echo 'cat /etc/motd' >> ~/.bash_profile

# Fetch the config file (S3 or HTTP(S) URI) and substitute variables
mkdir -p ~/.parallelcluster
echo "Pulling Config: ${config}"
case "${config}" in
    s3://*)             aws s3 cp ${config} /tmp/config.ini --quiet;;
    http://*|https://*) wget ${config} -O /tmp/config.ini -o /tmp/BOOTSTRAP.wget;;
    *)                  echo "Unknown/unsupported config URI: ${config}";;
esac
# Expand ${...} references in the template against the current environment
envsubst < /tmp/config.ini > ~/environment/config.ini

# Change default config file
cat <<\EOF >> ~/.bashrc
# Set default config file to the one in the Cloud9 environment
export AWS_PCLUSTER_CONFIG_FILE=~/environment/config.ini
EOF

. ~/.bashrc
. /etc/profile

which pcluster >> /tmp/BOOTSTRAP.WHOAMI

aws configure set default.region ${AWS_DEFAULT_REGION}
aws configure set default.output json

env >> /tmp/BOOTSTRAP.PCLUSTER
pcluster list

# Start the pcluster provisioning, but don't wait for it to complete.
pcluster create -t hpc hpc-cluster -c ~/environment/config.ini --nowait -nr -g "{\"NoTearsHPC\": \"${notearshpc_version}\"}"
if [ $? -ne 0 ]; then
    # Retry with a unique cluster name in case "hpc-cluster" is already taken
    cloud9_environment=${cloud9_environment:-$(uuidgen)}
    pcluster create hpc-cluster-${cloud9_environment} -c ~/environment/config.ini --nowait -nr -g "{\"NoTearsHPC\": \"${notearshpc_version}\"}"
fi

echo "Finished" >> /tmp/BOOTSTRAP.WHOAMI
echo "Finished"