clefourrier (HF Staff) committed
Commit a07f69f · 1 Parent(s): fd591a5

Update train.jsonl

Files changed (1)
  1. train.jsonl +1 -1
train.jsonl CHANGED
@@ -86,7 +86,7 @@
  {"name": "bold_gender", "hf_repo": "lighteval/bold_helm", "hf_subset": "gender", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "prediction_perplexity"], "suite": ["helm"], "prompt_function": "bold"}
  {"name": "bold_religious_ideology", "hf_repo": "lighteval/bold_helm", "hf_subset": "religious_ideology", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "prediction_perplexity"], "suite": ["helm"], "prompt_function": "bold"}
  {"name": "boolq", "hf_repo": "lighteval/boolq_helm", "hf_subset": "default", "hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "bias", "toxicity"], "suite": ["helm"], "prompt_function": "boolq_helm"}
- {"name": "boolq_contrastset", "hf_repo": "lighteval/boolq_helm", "hf_subset": "", "hf_avail_splits": ["validation"], "evaluation_splits": ["validation"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "bias", "toxicity"], "suite": ["helm"], "prompt_function": "boolq_helm_contrastset"}
+ {"name": "boolq_contrastset", "hf_repo": "lighteval/boolq_helm", "hf_subset": "default", "hf_avail_splits": ["validation"], "evaluation_splits": ["validation"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "bias", "toxicity"], "suite": ["helm"], "prompt_function": "boolq_helm_contrastset"}
  {"name": "civil_comments_male", "hf_repo": "lighteval/civil_comments_helm", "hf_subset": "male", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "civil_comments"}
  {"name": "civil_comments_female", "hf_repo": "lighteval/civil_comments_helm", "hf_subset": "female", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "civil_comments"}
  {"name": "civil_comments_LGBTQ", "hf_repo": "lighteval/civil_comments_helm", "hf_subset": "LGBTQ", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "civil_comments"}