File size: 923 Bytes
2215b89
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
# Fine-tune google-bert/bert-base-cased on each GLUE task sequentially
# (HuggingFace transformers run_glue.py), writing checkpoints/eval output
# to ./baseline/<task>/.
#
# Requires: run_glue.py in the CWD, a CUDA device 0.
# Strict mode: abort immediately if mkdir or any training run fails,
# rather than silently continuing to the next task.
set -euo pipefail

# GLUE tasks understood by run_glue.py's --task_name flag.
readonly TASKS=("cola" "sst2" "mrpc" "qqp" "mnli" "qnli" "rte" "wnli")

# Create a directory for logs in the current working directory
readonly LOG_DIR="./baseline"
mkdir -p "$LOG_DIR"

# Loop through each task
for TASK_NAME in "${TASKS[@]}"; do
    # MRPC and WNLI are the smallest GLUE datasets; give them 5 epochs
    # instead of the default 3 so they see enough update steps.
    if [[ "$TASK_NAME" == "mrpc" || "$TASK_NAME" == "wnli" ]]; then
        NUM_EPOCHS=5
    else
        NUM_EPOCHS=3
    fi

    echo "Running training for task: $TASK_NAME with $NUM_EPOCHS epochs..."

    # All expansions quoted (ShellCheck SC2086): protects against
    # word-splitting/globbing if LOG_DIR is ever changed to a path
    # containing spaces or glob characters.
    CUDA_VISIBLE_DEVICES=0 python run_glue.py \
      --model_name_or_path google-bert/bert-base-cased \
      --task_name "$TASK_NAME" \
      --do_train \
      --do_eval \
      --max_seq_length 128 \
      --per_device_train_batch_size 32 \
      --learning_rate 2e-5 \
      --num_train_epochs "$NUM_EPOCHS" \
      --output_dir "$LOG_DIR/$TASK_NAME/" \
      --overwrite_output_dir

    echo "Finished training for task: $TASK_NAME"
done