|
DIR="/sml1/datasets/slimpj/hub/datasets--MBZUAI-LLM--SlimPajama-627B-DC/snapshots/fe5ace6d3edb8568b6a4f608a460d3f7aef7bc0b" |
|
DATASET_NAME="RedPajamaArxiv" |
|
TRAIN_DIR="$DIR/train/$DATASET_NAME" |
|
TEST_DIR="$DIR/test/$DATASET_NAME" |
|
OUTPUT_TRAIN_DIR="$DIR/train/$DATASET_NAME-copy" |
|
OUTPUT_TEST_DIR="$DIR/test/$DATASET_NAME-copy" |
|
|
|
# Create the "-copy" output directories for both splits.
mkdir -p -- "$OUTPUT_TRAIN_DIR" "$OUTPUT_TEST_DIR"

# Copy every chunk into the "-copy" directory under its original name,
# dereferencing symlinks (-L) so the copies are regular files.
#
# The previous implementation parsed `ls -lrt` output with awk and ran the
# resulting `cp` through eval: it choked on the "total" summary line
# (producing a stray failing `cp`), broke on any filename with spaces, and
# its `cd`/`cd -` dance left the shell cd'ed into $TRAIN_DIR afterwards.
# `cp -L` over a glob does the same symlink dereferencing without any of
# that, and leaves the working directory untouched.
cp -L -- "$TRAIN_DIR"/* "$OUTPUT_TRAIN_DIR"/
cp -L -- "$TEST_DIR"/* "$OUTPUT_TEST_DIR"/
|
|
|
# Final output tree: /sml1/datasets/<dataset>/{train,test}.
FINAL_DIR="/sml1/datasets/$DATASET_NAME/"

# mkdir -p creates missing parents, so creating the two leaf directories
# is sufficient (the original issued three separate mkdir calls).
mkdir -p -- "$FINAL_DIR/train" "$FINAL_DIR/test"
|
|
|
# Number of chunk files in the train split.
# (Plain `ls | wc -l` — unlike the original `ls -l | wc -l` it does not
# also count the "total" summary line, which inflated the count by one.)
max_files=$(ls "$TRAIN_DIR" | wc -l)

# Number of decimal digits in the file count. Chunks are decompressed in
# batches grouped by the digit length of their index (chunk_[0-9],
# chunk_[1-9][0-9], chunk_[1-9][0-9][0-9], ...), so one pass per digit
# length is needed. ${#var} gives the length directly — no echo/subshell.
mx=${#max_files}

pattern="[0-9]"
for m in $(seq 1 "$mx"); do
  # Left unquoted on purpose: $pattern expands to a glob matching every
  # chunk whose index has exactly $m digits; unzstd decompresses them all
  # to stdout. No eval needed — pathname expansion happens after the
  # variable expands.
  unzstd --stdout "$OUTPUT_TRAIN_DIR"/chunk_$pattern.jsonl.zst > "$FINAL_DIR/train/p_$m.jsonl"
  pattern="[1-9]$pattern"
done

# Concatenate the per-digit-length batches in index order. Iterating with
# seq (instead of the original `p_[1-$mx]` glob) stays correct if mx > 9.
for m in $(seq 1 "$mx"); do
  cat "$FINAL_DIR/train/p_$m.jsonl"
done > "$FINAL_DIR/train/final.jsonl"
|
|
|
# Same batched decompression for the test split (see the train split above).
# Plain `ls | wc -l` avoids counting the "total" line of `ls -l`.
max_files=$(ls "$TEST_DIR" | wc -l)

# One decompression pass per digit length of the chunk index.
mx=${#max_files}

pattern="[0-9]"
for m in $(seq 1 "$mx"); do
  # Unquoted $pattern expands to a glob over all chunks with $m-digit
  # indices; decompress the whole batch to one intermediate file.
  unzstd --stdout "$OUTPUT_TEST_DIR"/chunk_$pattern.jsonl.zst > "$FINAL_DIR/test/p_$m.jsonl"
  pattern="[1-9]$pattern"
done

# Concatenate the batches in index order; seq-based iteration stays
# correct even when mx exceeds a single digit.
for m in $(seq 1 "$mx"); do
  cat "$FINAL_DIR/test/p_$m.jsonl"
done > "$FINAL_DIR/test/final.jsonl"
|
|
|
# Merge the per-split corpora into one file (glob sorts alphabetically,
# so test/ comes before train/). Expansion quoted to be splitting-safe.
cat "$FINAL_DIR"/*/final.jsonl > "$FINAL_DIR/final.jsonl"
|
|
|
# Tokenize the merged corpus with Megatron's GPT-2 BPE preprocessor,
# writing the mmap-format output under $FINAL_DIR/tokenizer/.
mkdir -p -- "$FINAL_DIR/tokenizer/"

# Logs go next to the outputs under $FINAL_DIR instead of being written
# relative to the current working directory (the original's bare
# `>tokenizer.out` landed wherever the earlier `cd` calls left the shell).
python3 /sml1/Megatron-LLaMA/tools/preprocess_data.py \
  --input "$FINAL_DIR/final.jsonl" \
  --output-prefix "$FINAL_DIR/tokenizer/" \
  --vocab-file /sml1/datasets/gpt2/vocab.json \
  --merge-file /sml1/datasets/gpt2/merges.txt \
  --dataset-impl mmap \
  --tokenizer-type GPT2BPETokenizer \
  --append-eod --workers 8 --chunk-size 50 \
  > "$FINAL_DIR/tokenizer.out" 2> "$FINAL_DIR/tokenizer.err"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|