File size: 3,153 Bytes
c9ae6ee
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
#!/bin/bash

###############################################################################
# Copyright (C) 2023 Habana Labs, Ltd. an Intel Company
###############################################################################

# This file only defines functions plus MLPERF_INFERENCE_CODE_DIR, so it must
# be sourced; warn (on stderr, like any diagnostic) when executed directly.
[[ "$0" != "$BASH_SOURCE" ]] || echo "This script must be sourced!" >&2

# Absolute, symlink-resolved directory containing this script. Both
# expansions are quoted so paths containing spaces survive word-splitting.
export MLPERF_INFERENCE_CODE_DIR=$(realpath "$(dirname "$BASH_SOURCE")")

function mlperf_inference_usage()
{
    # Print the help text for build_mlperf_inference.
    # A quoted heredoc keeps the text literal (no expansion) and in one place.
    cat <<'EOF'

 usage: build_mlperf_inference [options]

options:

  --output-dir                Path to save logs, results and summary; optional
  --skip-reqs                 Skip installing requirements, downloading MLCommons Inference and building loadgen; optional
  --compliance                Create a submission package compliant with MLCommons submission checker; optional
  --submission                List of scenarios to run; optional
  -h,  --help                 Prints this help
EOF
}

build_mlperf_inference()
{
    # Build loadgen, run MLPerf inference scenarios and optionally package
    # an MLCommons-compliant submission.
    #
    # Options:
    #   --output-dir DIR          Where logs/results go (default: $PWD/results)
    #   --skip-reqs               Skip pip requirements, MLCommons clone and loadgen build
    #   --compliance              Also prepare/check a compliant submission package
    #   --submission SCENARIOS…   All remaining args are the scenario list
    #   --precommit, --promotion  Shorthand for a quick gptj-99 run
    #   -h, --help                Print usage and return 0
    #
    # Returns 0 on success; 1 on bad arguments or a failed setup step.
    local output_dir
    output_dir=$(pwd)/results
    local submission_args=""
    local compliance=false
    local skip_reqs=false
    # Empty until created below; used as a guard before the final cleanup.
    local BUILD_DIR=""

    while [ -n "${1:-}" ];
    do
        case "$1" in

            -h  | --help )
                mlperf_inference_usage
                return 0
            ;;
            --output-dir )
                if [ -z "${2:-}" ]; then
                    echo "--output-dir requires a value" >&2
                    return 1
                fi
                output_dir=$2
                shift 2
            ;;
            --compliance )
                compliance=true
                shift 1
            ;;
            --skip-reqs )
                shift
                skip_reqs=true
            ;;
            --submission )
                shift
                submission_args=$*
                break
            ;;
            --precommit )
                shift
                submission_args="gptj-99-quick"
                break
            ;;
            --promotion )
                shift
                submission_args="gptj-99-quick"
                break
            ;;
            # Without a default arm an unknown option never shifts and the
            # while-loop spins forever; fail fast instead.
            * )
                echo "build_mlperf_inference: unknown option: $1" >&2
                return 1
            ;;
        esac
    done

    if [ "$skip_reqs" == "false" ]; then
        pip install -r "$MLPERF_INFERENCE_CODE_DIR/gpt-j/requirements.txt"

        BUILD_DIR=$(mktemp -d -t mlperf.XXXX) || return 1
        # Fail fast on pushd/cd: continuing after a failed directory change
        # would run the clone/build in whatever directory we happen to be in.
        pushd "$BUILD_DIR" || return 1
        if ! git clone --depth 1 --recurse-submodules https://github.com/mlcommons/inference.git mlcommons_inference; then
            popd
            return 1
        fi
        cd mlcommons_inference/loadgen || { popd; return 1; }
        CFLAGS="-std=c++14 -O3" python setup.py bdist_wheel
        cd ..
        # Globs sort ascending, so the last entry is the lexicographically
        # greatest file — same pick as the old `ls -r | head -n1`, without
        # parsing ls output.
        local wheels=(loadgen/dist/*)
        pip install --force-reinstall "${wheels[-1]}"
        popd
    fi

    if [ -n "$submission_args" ]; then
        pushd "$MLPERF_INFERENCE_CODE_DIR" || return 1
        # $submission_args is intentionally unquoted: it is a space-separated
        # scenario list that must word-split into separate arguments.
        if [ "$compliance" == "true"  ]; then
            # NOTE(review): with --skip-reqs, BUILD_DIR is empty and
            # --mlperf-path resolves to "/mlcommons_inference" — confirm the
            # --skip-reqs + --compliance combination is intended/supported.
            python run_mlperf_scenarios.py $submission_args --output-dir "$output_dir" --mlperf-path "$BUILD_DIR/mlcommons_inference"
            python prepare_and_check_submission.py $submission_args --output-dir "$output_dir" --mlperf-path "$BUILD_DIR/mlcommons_inference" --systems-dir-path "$MLPERF_INFERENCE_CODE_DIR/../systems" --measurements-dir-path "$MLPERF_INFERENCE_CODE_DIR/../measurements"
        else
            python run_mlperf_scenarios.py $submission_args --output-dir "$output_dir"
        fi
        popd

    fi

    # Only remove the temp tree we actually created; an unguarded
    # `rm -rf $BUILD_DIR` with an empty variable is a wipe hazard.
    if [ -n "$BUILD_DIR" ]; then
        rm -rf -- "$BUILD_DIR"
    fi
}