#!/bin/bash
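# Launch script for Stable Diffusion training on Habana Gaudi (HPU) devices.
# Assumes MODEL_GARDEN_ROOT points at a checkout containing the
# PyTorch/generative_models/stable-diffusion-training example.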
cd "${MODEL_GARDEN_ROOT}/PyTorch/generative_models/stable-diffusion-training/" || exit 1
git config --global --add safe.directory "$(pwd)/src/taming-transformers"
git config --global --add safe.directory "$(pwd)/src/clip"
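# Install Python dependencies and put the bundled taming-transformers
# sources on PYTHONPATH so the training code can import them.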
pip install -r requirements.txt
export PYTHONPATH="$(pwd)/src/taming-transformers:${PYTHONPATH}"
export CKPT_PATH=/software/lfs/data/pytorch/stable-diffusion/model.ckpt
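# Under mpirun, Open MPI exports OMPI_COMM_WORLD_SIZE/OMPI_COMM_WORLD_RANK;
# prefer those, otherwise fall back to the environment (default: 1 node, rank 0).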
if [ -z "${OMPI_COMM_WORLD_SIZE}" ]; then WORLD_SIZE=${WORLD_SIZE:-1}; else WORLD_SIZE=${OMPI_COMM_WORLD_SIZE}; fi
if [ -z "${OMPI_COMM_WORLD_RANK}" ]; then NODE_RANK=${NODE_RANK:-0}; else NODE_RANK=${OMPI_COMM_WORLD_RANK}; fi
hostname
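# Raise the soft open-file limit to the hard limit; the web-dataset loader
# may keep many shard files open at once (assumed rationale).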
ulimit -n "$(ulimit -Hn)"
echo ===SOFT LIMIT===
ulimit -a
echo ===HARD LIMIT===
ulimit -aH
echo ==========
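# Training knobs, all overridable from the caller's environment.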
export NODE_RANK
BATCH_SIZE=${BATCH_SIZE:-8}
HPU_GRAPH=${HPU_GRAPH:-True}
TRAIN_EPOCHS=${TRAIN_EPOCHS:-10}
if [ -n "${TRAIN_BATCHES}" ]; then
LIMIT_TRAIN_BATCHES="--limit_train_batches ${TRAIN_BATCHES}"
fi
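# Unset every inherited OMPI_* variable, presumably so the launched training
# process does not pick up this shell's MPI context as its own.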
for v in $(printenv | grep '^OMPI_' | cut -d '=' -f 1); do
unset "$v"
echo "unset $v"
done
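# Assemble the training command: lazy-mode HPU execution, Habana mixed
# precision (--hmp), validation and test passes disabled, starting from
# the checkpoint at CKPT_PATH.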
CMD="python main.py \
--base hpu_config_web_dataset.yaml \
--train \
--scale_lr False \
--seed 0 \
--hpus 8 \
--batch_size ${BATCH_SIZE} \
--use_lazy_mode True \
--hmp \
--no-test True \
--max_epochs ${TRAIN_EPOCHS} \
${LIMIT_TRAIN_BATCHES} \
--limit_val_batches 0 \
--hpu_graph ${HPU_GRAPH} \
--ckpt_path=${CKPT_PATH} \
--num_nodes ${WORLD_SIZE}"
echo "${CMD}"
eval "${CMD}"
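# Example invocation (script name hypothetical), short single-node smoke run:
#   TRAIN_BATCHES=10 TRAIN_EPOCHS=1 BATCH_SIZE=4 bash train_sd.sh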