#!/usr/bin/env bash
# Prepare per-run result directories and launch the llama13b DeepSpeed run.
#
# Required env:
#   NUM_NODES           - number of nodes in the job (script aborts if unset)
# Optional env (defaults shown below):
#   HOME_DIR, WORKER_DIR, SYNAPSE_VERSION, HOSTFILE, RESULT_DESCRIPTION, DEBUG
#
# NOTE(review): MODEL_PATH, MPI_ROOT, HL_HOSTSFILE and DEBUG_CMD are assigned
# but not used in this file and are not exported — presumably this script is
# sourced by (or its variables are read by) run_llama13b.sh; confirm before
# changing them to exports.
set -euo pipefail

HOME_DIR="${HOME_DIR:-/launch}"
WORKER_DIR="${WORKER_DIR:-${HOME_DIR}/experiments/hccl_test}"
MODEL_PATH="/hccl_demo"
MPI_ROOT="/opt/amazon/openmpi/"
SYNAPSE_VERSION="${SYNAPSE_VERSION:-1.9.0-738}"
HL_HOSTSFILE="${HOSTFILE:-/data/mint/docker/llama13b/Model-References/PyTorch/nlp/DeepSpeedExamples/Megatron-DeepSpeed/scripts/hostsfile}"

# Fail fast with a clear message instead of silently computing N_CARDS from
# an empty value (the original `NUM_NODES=$NUM_NODES` was a no-op).
NUM_NODES="${NUM_NODES:?NUM_NODES must be set}"
NGPU_PER_NODE=8
N_CARDS=$((NUM_NODES * NGPU_PER_NODE))

# `date -d "today"` is GNU-only and redundant; plain `date` formats "now".
TIMESTAMP=$(date +"%Y-%m-%d_%H-%M")
RESULT_DESCRIPTION="${RESULT_DESCRIPTION:-setup}"
RESULTS_DIR="${WORKER_DIR}/results/llama_runs/${SYNAPSE_VERSION}/${RESULT_DESCRIPTION}/${N_CARDS}/${TIMESTAMP}"
MPILOG_DIR="${RESULTS_DIR}/mpi_log"

# Extra mpirun "-x" flags when debugging is requested. The original nested
# double quotes ("...="3"...") broke the string into pieces; the intended
# value is a single flat string.
DEBUG="${DEBUG:-False}"
DEBUG_CMD=""
if [ "$DEBUG" = "True" ]; then
  DEBUG_CMD="-x LOG_LEVEL_ALL=3 -x ENABLE_CONSOLE=true"
fi

# MPILOG_DIR lives inside RESULTS_DIR, so one mkdir/chmod pass covers both.
mkdir -p "$MPILOG_DIR"
chmod -R a+rx "$RESULTS_DIR"

/Model-References/PyTorch/nlp/DeepSpeedExamples/Megatron-DeepSpeed/scripts/run_llama13b.sh