import evaluate
from datasets.features import Features, Value
from evaluate.evaluation_suite import SubTask
from evaluate.module import EvaluationModuleInfo


class Suite(evaluate.EvaluationSuite):
    """Dummy evaluation suite: text-classification accuracy on IMDB and SST-2."""

    def _info(self):
        return EvaluationModuleInfo(
            description="dummy evaluation suite for tests",
            citation="insert citation here",
            features=Features({"predictions": Value("int64"), "references": Value("int64")}))

    # Optional per-example preprocessing applied before evaluation, e.g.
    # `lambda x: {"text": x["text"].lower()}` to lower-case the inputs
    # (the function must return a dict so it can be passed to Dataset.map).
    preprocessor = None

    # Each SubTask pairs a dataset with an evaluator configuration; run()
    # evaluates the given model or pipeline on every entry below.
    suite = [
        SubTask(
            task_type="text-classification",
            data="imdb",
            split="test",
            data_preprocessor=preprocessor,
            args_for_task={
                "metric": "accuracy",
                "input_column": "text",
                "label_column": "label",
                "label_mapping": {
                    "LABEL_0": 0.0,
                    "LABEL_1": 1.0
                }
            }
        ),
        SubTask(
            task_type="text-classification",
            data="sst2",
            # The public SST-2 test split is unlabeled, so score on validation examples.
            split="validation[:10]",
            data_preprocessor=preprocessor,
            args_for_task={
                "metric": "accuracy",
                "input_column": "sentence",
                "label_column": "label",
                "label_mapping": {
                    "LABEL_0": 0.0,
                    "LABEL_1": 1.0
                }
            }
        )
    ]
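

# A minimal usage sketch, not part of the suite definition itself. It assumes this
# file is saved as a loadable suite module and that the model passed to run() is a
# text-classification model whose labels are LABEL_0/LABEL_1, matching the
# label_mapping above; both identifiers below are placeholders.
if __name__ == "__main__":
    # Load the suite from a local module path or a Hub repository id (placeholder path).
    suite = evaluate.EvaluationSuite.load("path/to/this_suite.py")

    # Run every SubTask against one model or pipeline; returns one result dict per
    # SubTask, including the computed accuracy and the task name.
    results = suite.run("some-org/some-text-classification-model")  # placeholder model id
    print(results)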