#!/bin/bash
#
# Launch-time setup for Stable Diffusion training on HPU: enter the model
# directory, whitelist the vendored submodules for git, install Python
# requirements, and expose PYTHONPATH / CKPT_PATH for the trainer.
#
# Required env: MODEL_GARDEN_ROOT - root of the Model-Garden checkout.

# Fail fast if the training directory is missing instead of running the
# rest of the script from the wrong working directory.
cd "${MODEL_GARDEN_ROOT}/PyTorch/generative_models/stable-diffusion-training/" || exit 1

# Newer git refuses to operate on repositories owned by another user
# unless they are explicitly marked safe (CVE-2022-24765 mitigation).
git config --global --add safe.directory "$(pwd)/src/taming-transformers"
git config --global --add safe.directory "$(pwd)/src/clip"

pip install -r requirements.txt

export PYTHONPATH="$(pwd)/src/taming-transformers:${PYTHONPATH}"
export CKPT_PATH=/software/lfs/data/pytorch/stable-diffusion/model.ckpt
|
|
|
# Derive the distributed-run geometry. When launched under Open MPI
# (mpirun) the OMPI_* variables take precedence; otherwise fall back to
# any pre-set WORLD_SIZE / NODE_RANK, defaulting to a single-node,
# rank-0 run. The nested ${VAR:-default} chains replace the original
# unquoted `[ -z ${VAR} ]` tests, which break if a value contains spaces.
WORLD_SIZE=${OMPI_COMM_WORLD_SIZE:-${WORLD_SIZE:-1}}
NODE_RANK=${OMPI_COMM_WORLD_RANK:-${NODE_RANK:-0}}
|
|
|
# Log which host we landed on (useful when several nodes share one log).
hostname

# Raise the soft open-file limit to the hard limit: data loaders with
# many workers/shards can exhaust the default soft limit. `ulimit -Hn`
# reports the hard nofile limit directly — no need to scrape the
# `ulimit -aH | grep | tr | cut` pipeline, which depends on the exact
# wording and spacing of the ulimit report.
ulimit -n "$(ulimit -Hn)"

echo ===SOFT LIMIT===
ulimit -a
echo ===HARD LIMIT===
ulimit -aH
echo ==========
|
# Make the computed rank visible to the training process.
export NODE_RANK=${NODE_RANK}

# Tunables, all overridable from the caller's environment.
BATCH_SIZE=${BATCH_SIZE:-8}       # per-device batch size
HPU_GRAPH=${HPU_GRAPH:-True}      # enable HPU graph capture
TRAIN_EPOCHS=${TRAIN_EPOCHS:-10}

# Optional cap on train batches per epoch; omit the flag entirely when
# TRAIN_BATCHES is unset so the trainer falls back to its own default.
if [ -n "${TRAIN_BATCHES:-}" ]; then
    LIMIT_TRAIN_BATCHES="--limit_train_batches ${TRAIN_BATCHES}"
fi

# Drop all Open MPI bookkeeping variables before re-launching python so
# the child process does not believe it is already inside an mpirun job.
# "${!OMPI_@}" expands to the *names* of variables prefixed OMPI_, which
# is safer than scraping `printenv` output (a value containing "OMPI" or
# a newline would corrupt that parse).
for v in "${!OMPI_@}"; do
    unset "$v"
    echo "unset $v"
done
|
|
|
# Assemble the training command as an array instead of a string + eval:
# each element is one argument, so values survive intact without another
# round of shell parsing. Word-splitting of ${LIMIT_TRAIN_BATCHES} into
# flag + value is intentional (it is either empty or
# "--limit_train_batches N"), hence it stays unquoted.
# shellcheck disable=SC2086
CMD=(python main.py
    --base hpu_config_web_dataset.yaml
    --train
    --scale_lr False
    --seed 0
    --hpus 8
    --batch_size "${BATCH_SIZE}"
    --use_lazy_mode True
    --hmp
    --no-test True
    --max_epochs "${TRAIN_EPOCHS}"
    ${LIMIT_TRAIN_BATCHES:-}
    --limit_val_batches 0
    --hpu_graph "${HPU_GRAPH}"
    --ckpt_path="${CKPT_PATH}"
    --num_nodes "${WORLD_SIZE}")

# Log the exact command line before running it.
echo "${CMD[@]}"
"${CMD[@]}"
|
|