Column schema (28 columns):

| column | dtype | observed values |
|---|---|---|
| Unnamed: 0 | int64 | 0 to 245k |
| repo_id | string | lengths 4 to 122 |
| author | string | lengths 2 to 42 |
| model_type | string | lengths 2 to 34 |
| files_per_repo | int64 | 0 to 77k |
| downloads_30d | int64 | 0 to 55.9M |
| library | string | lengths 2 to 37 |
| likes | int64 | 0 to 8.48k |
| pipeline | string | lengths 5 to 30 |
| pytorch | bool | 2 classes |
| tensorflow | bool | 2 classes |
| jax | bool | 2 classes |
| license | string | lengths 2 to 33 |
| languages | string | lengths 2 to 1.63k |
| datasets | string | lengths 2 to 5.05k |
| co2 | string | lengths 3 to 342 |
| prs_count | int64 | 0 to 168 |
| prs_open | int64 | 0 to 121 |
| prs_merged | int64 | 0 to 167 |
| prs_closed | int64 | 0 to 35 |
| discussions_count | int64 | 0 to 226 |
| discussions_open | int64 | 0 to 155 |
| discussions_closed | int64 | 0 to 76 |
| tags | string | lengths 2 to 7.26k |
| has_model_index | bool | 2 classes |
| has_metadata | bool | 2 classes |
| has_text | bool | 2 classes |
| text_length | int64 | 0 to 849k |
| Unnamed: 0 | repo_id | author | model_type | files_per_repo | downloads_30d | library | likes | pipeline | pytorch | tensorflow | jax | license | languages | datasets | co2 | prs_count | prs_open | prs_merged | prs_closed | discussions_count | discussions_open | discussions_closed | tags | has_model_index | has_metadata | has_text | text_length |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 23,600 | mrm8488/vit-base-patch16-224_finetuned-kvasirv2-colonoscopy | mrm8488 | vit | 7 | 12 | transformers | 4 | image-classification | true | false | false | null | null | null | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'safetensors', 'vit', 'image-classification', 'transformers', 'medical', 'colon', 'autotrain_compatible'] | false | true | true | 2,168 |
| 23,601 | mrm8488/vit-base-patch16-224_finetuned-pneumothorax | mrm8488 | vit | 5 | 14 | transformers | 1 | image-classification | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'vit', 'image-classification', 'transformers', 'autotrain_compatible'] | false | false | false | 0 |
| 23,602 | mrm8488/wav2vec2-large-xls-r-300m-spanish | mrm8488 | null | 5 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,603 | mrm8488/wav2vec2-large-xlsr-53-breton | mrm8488 | wav2vec2 | 11 | 7 | transformers | 0 | automatic-speech-recognition | true | false | true | apache-2.0 | ['br'] | ['common_voice'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'wav2vec2', 'automatic-speech-recognition', 'br', 'dataset:common_voice', 'transformers', 'audio', 'speech', 'xlsr-fine-tuning-week', 'license:apache-2.0', 'model-index'] | true | true | true | 3,321 |
| 23,604 | mrm8488/wav2vec2-large-xlsr-53-esperanto | mrm8488 | wav2vec2 | 10 | 7 | transformers | 1 | automatic-speech-recognition | true | false | true | apache-2.0 | ['eo'] | ['common_voice'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'wav2vec2', 'automatic-speech-recognition', 'eo', 'dataset:common_voice', 'transformers', 'audio', 'speech', 'xlsr-fine-tuning-week', 'license:apache-2.0', 'model-index'] | true | true | true | 3,375 |
| 23,605 | mrm8488/wav2vec2-large-xlsr-53-euskera | mrm8488 | wav2vec2 | 11 | 7 | transformers | 0 | automatic-speech-recognition | true | false | true | apache-2.0 | ['eu'] | ['common_voice'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'wav2vec2', 'automatic-speech-recognition', 'eu', 'dataset:common_voice', 'transformers', 'audio', 'speech', 'xlsr-fine-tuning-week', 'license:apache-2.0', 'model-index'] | true | true | true | 3,328 |
| 23,606 | mrm8488/wav2vec2-large-xlsr-53-spanish | mrm8488 | wav2vec2 | 12 | 21 | transformers | 2 | automatic-speech-recognition | true | false | true | apache-2.0 | ['es'] | ['common_voice'] | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'wav2vec2', 'automatic-speech-recognition', 'es', 'dataset:common_voice', 'transformers', 'audio', 'speech', 'xlsr-fine-tuning-week', 'license:apache-2.0', 'model-index'] | true | true | true | 3,344 |
| 23,607 | mrm8488/wav2vec2-large-xlsr-53-ukrainian | mrm8488 | wav2vec2 | 11 | 74 | transformers | 1 | automatic-speech-recognition | true | false | true | apache-2.0 | ['uk'] | ['common_voice'] | null | 2 | 2 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'wav2vec2', 'automatic-speech-recognition', 'uk', 'dataset:common_voice', 'transformers', 'audio', 'speech', 'xlsr-fine-tuning-week', 'license:apache-2.0', 'model-index'] | true | true | true | 3,330 |
| 23,608 | mrm8488/wav2vec2-large-xlsr-53-ukranian | mrm8488 | null | 2 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['xlsr-fine-tuning-week'] | false | true | true | 11 |
| 23,609 | mrm8488/wav2vec2-xls-r-300m-es | mrm8488 | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,610 | mrm8488/wav2vec2-xls-r-300m-peninsular-2 | mrm8488 | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,611 | mrm8488/wav2vec2-xls-r-300m-peninsular | mrm8488 | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,612 | mrm8488/xlm-multi-finetuned-xquadv1 | mrm8488 | xlm | 11 | 15 | transformers | 0 | question-answering | true | false | false | null | ['multilingual'] | null | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'xlm', 'question-answering', 'multilingual', 'arxiv:1901.07291', 'arxiv:1910.11856', 'transformers', 'autotrain_compatible'] | false | true | true | 4,971 |
| 23,613 | mrm8488/xlm-roberta-large-finetuned-tydiqa-multilingual-qa | mrm8488 | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,614 | mrojas/bio-bert-base-spanish-wwm-cased | mrojas | bert | 7 | 5 | transformers | 0 | fill-mask | true | false | false | null | null | null | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0 |
| 23,615 | mromero/prueba | mromero | null | 3 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,616 | mrp/bert-finetuned-squad | mrp | bert | 14 | 10 | transformers | 0 | question-answering | true | false | false | apache-2.0 | null | ['squad'] | null | 4 | 2 | 2 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'bert', 'question-answering', 'dataset:squad', 'transformers', 'generated_from_trainer', 'license:apache-2.0', 'model-index', 'autotrain_compatible'] | true | true | true | 955 |
| 23,617 | mrp/distilbert-base-uncased-finetuned-imdb-accelerate | mrp | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,618 | mrp/distilbert-base-uncased-finetuned-imdb | mrp | distilbert | 10 | 6 | transformers | 0 | fill-mask | true | false | false | apache-2.0 | null | ['imdb'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'distilbert', 'fill-mask', 'dataset:imdb', 'transformers', 'generated_from_trainer', 'license:apache-2.0', 'autotrain_compatible'] | true | true | true | 1,319 |
| 23,619 | mrp/marian-finetuned-kde4-en-to-fr | mrp | marian | 14 | 10 | transformers | 0 | translation | true | false | false | apache-2.0 | null | ['kde4'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'marian', 'text2text-generation', 'dataset:kde4', 'transformers', 'translation', 'generated_from_trainer', 'license:apache-2.0', 'model-index', 'autotrain_compatible'] | true | true | true | 1,076 |
| 23,620 | mrp/simcse-model-distil-m-bert | mrp | distilbert | 12 | 15 | sentence-transformers | 0 | sentence-similarity | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'distilbert', 'arxiv:2104.08821', 'sentence-transformers', 'feature-extraction', 'sentence-similarity', 'transformers'] | false | true | true | 1,013 |
| 23,621 | mrp/simcse-model-m-bert-thai-cased | mrp | bert | 12 | 452 | sentence-transformers | 3 | sentence-similarity | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'bert', 'arxiv:2104.08821', 'sentence-transformers', 'feature-extraction', 'sentence-similarity', 'transformers'] | false | true | true | 1,009 |
| 23,622 | mrp/simcse-model-roberta-base-thai | mrp | xlm-roberta | 12 | 37 | sentence-transformers | 1 | sentence-similarity | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'xlm-roberta', 'arxiv:2104.08821', 'sentence-transformers', 'feature-extraction', 'sentence-similarity', 'transformers'] | false | true | true | 1,009 |
| 23,623 | mrphilip/NLP | mrphilip | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,624 | mrshu/wav2vec2-large-xlsr-slovene | mrshu | wav2vec2 | 13 | 7 | transformers | 2 | automatic-speech-recognition | true | false | true | apache-2.0 | ['sl'] | ['common_voice'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'wav2vec2', 'automatic-speech-recognition', 'sl', 'dataset:common_voice', 'transformers', 'audio', 'speech', 'xlsr-fine-tuning-week', 'license:apache-2.0', 'model-index'] | true | true | true | 3,353 |
| 23,625 | mrsinghania/asr-question-detection | mrsinghania | bert | 8 | 4,719 | transformers | 4 | text-classification | true | false | false | null | null | null | null | 1 | 1 | 0 | 0 | 1 | 1 | 0 | ['pytorch', 'bert', 'text-classification', 'transformers'] | false | false | true | 427 |
| 23,626 | ms0697310/farmtest | ms0697310 | bert | 6 | 4 | transformers | 0 | token-classification | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['bert', 'token-classification', 'transformers', 'autotrain_compatible'] | false | false | false | 0 |
| 23,627 | ms29315/distilbert-base-uncased-finetuned-cola | ms29315 | distilbert | 12 | 4 | transformers | 0 | text-classification | false | true | false | apache-2.0 | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['tf', 'tensorboard', 'distilbert', 'text-classification', 'transformers', 'generated_from_keras_callback', 'license:apache-2.0'] | true | true | true | 1,336 |
| 23,628 | msakthiganesh/TabQGen-Base | msakthiganesh | t5 | 9 | 13 | transformers | 0 | text2text-generation | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 't5', 'text2text-generation', 'transformers', 'autotrain_compatible'] | false | false | true | 181 |
| 23,629 | msakthiganesh/TabQGen-Large | msakthiganesh | t5 | 9 | 9 | transformers | 0 | text2text-generation | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 't5', 'text2text-generation', 'transformers', 'autotrain_compatible'] | false | false | true | 181 |
| 23,630 | msakthiganesh/TabQGen-Small | msakthiganesh | t5 | 9 | 9 | transformers | 0 | text2text-generation | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 't5', 'text2text-generation', 'transformers', 'autotrain_compatible'] | false | false | true | 180 |
| 23,631 | msarmi9/multi30k | msarmi9 | null | 42 | 0 | null | 2 | translation | true | false | false | mit | ['de', 'en'] | ['multi30k'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['tensorboard', 'de', 'en', 'dataset:multi30k', 'arxiv:1409.0473', 'translation', 'pytorch', 'license:mit', 'model-index', 'has_space'] | true | true | true | 414 |
| 23,632 | msavel-prnt/distilbert-base-uncased-finetuned-clinc | msavel-prnt | distilbert | 12 | 8 | transformers | 0 | text-classification | true | false | false | apache-2.0 | null | ['clinc_oos'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'distilbert', 'text-classification', 'dataset:clinc_oos', 'transformers', 'generated_from_trainer', 'license:apache-2.0'] | false | true | true | 1,479 |
| 23,633 | msavel-prnt/distilbert-base-uncased-finetuned-emotion | msavel-prnt | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,634 | msavel-prnt/xlm-roberta-base-finetuned-panx-de | msavel-prnt | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,635 | mschulzer/NLP_v1 | mschulzer | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,636 | mschwab/va_bert_classification | mschwab | bert | 9 | 28 | transformers | 0 | text-classification | true | false | false | apache-2.0 | ['en'] | ['custom'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'bert', 'text-classification', 'en', 'dataset:custom', 'transformers', 'sentence classification', 'vossian antonomasia', 'license:apache-2.0'] | false | true | true | 1,283 |
| 23,637 | mse30/bart-base-finetuned-arxiv | mse30 | bart | 11 | 12 | transformers | 1 | text2text-generation | true | false | false | apache-2.0 | null | ['scientific_papers'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'bart', 'text2text-generation', 'dataset:scientific_papers', 'transformers', 'generated_from_trainer', 'license:apache-2.0', 'model-index', 'autotrain_compatible'] | true | true | true | 1,762 |
| 23,638 | mse30/bart-base-finetuned-cnn | mse30 | bart | 10 | 8 | transformers | 0 | text2text-generation | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'bart', 'text2text-generation', 'transformers', 'autotrain_compatible'] | false | false | false | 0 |
| 23,639 | mse30/bart-base-finetuned-gigaword | mse30 | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,640 | mse30/bart-base-finetuned-multinews | mse30 | bart | 10 | 30 | transformers | 0 | text2text-generation | true | false | false | null | null | null | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'bart', 'text2text-generation', 'transformers', 'autotrain_compatible'] | false | false | false | 0 |
| 23,641 | mse30/bart-base-finetuned-pubmed | mse30 | bart | 11 | 72 | transformers | 2 | text2text-generation | true | false | false | apache-2.0 | null | ['scientific_papers'] | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'bart', 'text2text-generation', 'dataset:scientific_papers', 'transformers', 'generated_from_trainer', 'license:apache-2.0', 'model-index', 'autotrain_compatible'] | true | true | true | 1,749 |
| 23,642 | mse30/bart-base-finetuned-xsum | mse30 | bart | 10 | 9 | transformers | 0 | text2text-generation | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'bart', 'text2text-generation', 'transformers', 'autotrain_compatible'] | false | false | false | 0 |
| 23,643 | mse30/bart-large-finetuned-cnn | mse30 | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,644 | mse30/bart-large-finetuned-gigaword | mse30 | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,645 | mse30/bart_cnn | mse30 | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,646 | mse30/pegasus-large-finetuned-xsum | mse30 | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,647 | msharma95/joke-generator | msharma95 | gpt2 | 7 | 22 | transformers | 0 | text-generation | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['gpt2', 'text-generation', 'transformers'] | false | false | false | 0 |
| 23,648 | msimon16/model_name | msimon16 | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,649 | msintaha/bert-base-uncased-copa-kb-17 | msintaha | bert | 12 | 3 | transformers | 0 | multiple-choice | true | false | false | apache-2.0 | null | ['super_glue'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'bert', 'multiple-choice', 'dataset:super_glue', 'transformers', 'generated_from_trainer', 'license:apache-2.0'] | true | true | true | 1,274 |
| 23,650 | msintaha/bert-base-uncased-copa-kb-27 | msintaha | bert | 12 | 3 | transformers | 0 | multiple-choice | true | false | false | apache-2.0 | null | ['super_glue'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'bert', 'multiple-choice', 'dataset:super_glue', 'transformers', 'generated_from_trainer', 'license:apache-2.0'] | true | true | true | 1,274 |
| 23,651 | msintaha/bert-base-uncased-finetuned-copa-data-new | msintaha | bert | 12 | 3 | transformers | 0 | multiple-choice | true | false | false | apache-2.0 | null | ['super_glue'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'bert', 'multiple-choice', 'dataset:super_glue', 'transformers', 'generated_from_trainer', 'license:apache-2.0'] | true | true | true | 1,287 |
| 23,652 | msintaha/gpt2-finetuned-rocstories | msintaha | gpt2 | 8 | 16 | transformers | 0 | text-generation | true | false | false | null | null | null | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'gpt2', 'text-generation', 'transformers'] | false | false | false | 0 |
| 23,653 | msintaha/gpt2-rocstories | msintaha | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,654 | msivanes/code-search-net-tokenizer | msivanes | null | 6 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,655 | mso/mso | mso | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,656 | mtglearn/roberta-mtg-cards | mtglearn | null | 2 | 0 | null | 0 | null | false | false | false | apache-2.0 | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['license:apache-2.0'] | false | true | false | 0 |
| 23,657 | mtr0930/i-manual_integrated_tokenizer | mtr0930 | electra | 8 | 8 | transformers | 0 | question-answering | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'electra', 'question-answering', 'transformers', 'autotrain_compatible'] | false | false | false | 0 |
| 23,658 | mtr0930/i-manual_tokenizer_updated | mtr0930 | electra | 8 | 8 | transformers | 0 | question-answering | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'electra', 'question-answering', 'transformers', 'autotrain_compatible'] | false | false | false | 0 |
| 23,659 | mtr0930/i-manual_trained_tokenizer | mtr0930 | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,660 | mtr0930/koelectra-base-v3_epoch-10 | mtr0930 | electra | 8 | 9 | transformers | 0 | question-answering | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'electra', 'question-answering', 'transformers', 'autotrain_compatible'] | false | false | true | 27 |
| 23,661 | mtr0930/koelectra-base-v3_epoch-100 | mtr0930 | electra | 7 | 16 | transformers | 0 | question-answering | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'electra', 'question-answering', 'transformers', 'autotrain_compatible'] | false | false | false | 0 |
| 23,662 | mudes/en-base | mudes | bert | 13 | 10 | transformers | 1 | token-classification | true | false | true | apache-2.0 | ['en'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'token-classification', 'en', 'arxiv:2102.09665', 'arxiv:2104.04630', 'transformers', 'mudes', 'license:apache-2.0', 'autotrain_compatible'] | false | true | true | 1,549 |
| 23,663 | mudes/en-large | mudes | roberta | 14 | 64 | transformers | 0 | token-classification | true | false | true | apache-2.0 | ['en'] | null | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'roberta', 'token-classification', 'en', 'arxiv:2102.09665', 'arxiv:2104.04630', 'transformers', 'mudes', 'license:apache-2.0', 'autotrain_compatible', 'has_space'] | false | true | true | 1,550 |
| 23,664 | mudes/multilingual-base | mudes | xlm-roberta | 12 | 12 | transformers | 0 | token-classification | true | false | false | apache-2.0 | ['multilingual'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'xlm-roberta', 'token-classification', 'multilingual', 'arxiv:2102.09665', 'arxiv:2104.04630', 'transformers', 'mudes', 'license:apache-2.0', 'autotrain_compatible'] | false | true | true | 1,559 |
| 23,665 | mudes/multilingual-large | mudes | xlm-roberta | 12 | 13 | transformers | 1 | token-classification | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'xlm-roberta', 'token-classification', 'transformers', 'autotrain_compatible'] | false | false | true | 1,369 |
| 23,666 | muellerzr/dummy | muellerzr | null | 2 | 0 | null | 0 | null | false | false | false | apache-2.0 | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['license:apache-2.0'] | false | true | false | 0 |
| 23,667 | muellerzr/dummyv2 | muellerzr | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,668 | muellerzr/fastai-pets-resnet-34 | muellerzr | null | 5 | 0 | null | 1 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['has_space'] | false | false | true | 1,177 |
| 23,669 | muelletm/mpnet-base-snli-mnli | muelletm | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,670 | muhardianab/DialoGPT-small-theoffice | muhardianab | gpt2 | 21 | 12 | transformers | 0 | conversational | true | false | false | null | null | null | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'gpt2', 'text-generation', 'transformers', 'conversational'] | false | true | true | 34 |
| 23,671 | muhtasham/TajBERTo | muhtasham | roberta | 23 | 16 | transformers | 3 | fill-mask | true | false | false | null | ['tg'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'roberta', 'fill-mask', 'tg', 'transformers', 'generated_from_trainer', 'autotrain_compatible', 'has_space'] | false | true | true | 1,189 |
| 23,672 | muhtasham/autonlp-Doctor_DE-24595544 | muhtasham | distilbert | 9 | 10 | transformers | 0 | text-classification | true | false | false | null | ['de'] | ['muhtasham/autonlp-data-Doctor_DE'] | 92.87363201770962 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'distilbert', 'text-classification', 'de', 'dataset:muhtasham/autonlp-data-Doctor_DE', 'transformers', 'autonlp', 'co2_eq_emissions'] | false | true | true | 1,012 |
| 23,673 | muhtasham/autonlp-Doctor_DE-24595545 | muhtasham | bert | 9 | 10 | transformers | 0 | text-classification | true | false | false | null | ['de'] | ['muhtasham/autonlp-data-Doctor_DE'] | 203.30658367993382 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'bert', 'text-classification', 'de', 'dataset:muhtasham/autonlp-data-Doctor_DE', 'transformers', 'autonlp', 'co2_eq_emissions'] | false | true | true | 1,015 |
| 23,674 | muhtasham/autonlp-Doctor_DE-24595546 | muhtasham | bert | 9 | 10 | transformers | 0 | text-classification | true | false | false | null | ['de'] | ['muhtasham/autonlp-data-Doctor_DE'] | 210.5957437893554 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'bert', 'text-classification', 'de', 'dataset:muhtasham/autonlp-data-Doctor_DE', 'transformers', 'autonlp', 'co2_eq_emissions'] | false | true | true | 1,012 |
| 23,675 | muhtasham/autonlp-Doctor_DE-24595547 | muhtasham | electra | 9 | 10 | transformers | 0 | text-classification | true | false | false | null | ['de'] | ['muhtasham/autonlp-data-Doctor_DE'] | 396.5529429198159 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'electra', 'text-classification', 'de', 'dataset:muhtasham/autonlp-data-Doctor_DE', 'transformers', 'autonlp', 'co2_eq_emissions'] | false | true | true | 999 |
| 23,676 | muhtasham/autonlp-Doctor_DE-24595548 | muhtasham | roberta | 10 | 10 | transformers | 0 | text-classification | true | false | false | null | ['de'] | ['muhtasham/autonlp-data-Doctor_DE'] | 183.88911013564527 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'roberta', 'text-classification', 'de', 'dataset:muhtasham/autonlp-data-Doctor_DE', 'transformers', 'autonlp', 'co2_eq_emissions'] | false | true | true | 1,011 |
| 23,677 | muhtasham/distilbert-base-uncased-finetuned-cuad | muhtasham | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,678 | muhtasham/wav2vec2-large-xls-r-1b-turkish | muhtasham | null | 5 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,679 | muhtasham/wav2vec2-large-xls-r-2b-turkish | muhtasham | null | 5 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,680 | muirkat/tolkien-mythopoeic-gen | muirkat | gpt2 | 15 | 10 | transformers | 0 | text-generation | true | false | false | null | null | null | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'gpt2', 'text-generation', 'transformers', 'generated_from_trainer', 'license:mit'] | false | false | false | 0 |
| 23,681 | mujeensung/albert-base-v2_mnli_bc | mujeensung | albert | 12 | 8 | transformers | 0 | text-classification | true | false | false | apache-2.0 | ['en'] | ['glue'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'albert', 'text-classification', 'en', 'dataset:glue', 'transformers', 'generated_from_trainer', 'license:apache-2.0', 'model-index'] | true | true | true | 1,368 |
| 23,682 | mujeensung/bert-base-cased_mnli_bc | mujeensung | bert | 12 | 32 | transformers | 0 | text-classification | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'bert', 'text-classification', 'transformers'] | false | false | false | 0 |
| 23,683 | mujeensung/roberta-base_mnli_bc | mujeensung | roberta | 14 | 95 | transformers | 0 | text-classification | true | false | false | mit | ['en'] | ['glue'] | null | 0 | 0 | 0 | 0 | 1 | 1 | 0 | ['pytorch', 'roberta', 'text-classification', 'en', 'dataset:glue', 'transformers', 'generated_from_trainer', 'license:mit', 'model-index'] | true | true | true | 1,362 |
| 23,684 | mujerry/bert-base-uncased-finetuned-QnA-v1 | mujerry | bert | 9 | 8 | transformers | 0 | fill-mask | true | false | false | apache-2.0 | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'bert', 'fill-mask', 'transformers', 'generated_from_trainer', 'license:apache-2.0', 'autotrain_compatible'] | true | true | true | 2,127 |
| 23,685 | mujerry/bert-base-uncased-finetuned-QnA | mujerry | bert | 11 | 8 | transformers | 0 | fill-mask | true | false | false | apache-2.0 | null | [] | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'bert', 'fill-mask', 'transformers', 'generated_from_trainer', 'license:apache-2.0', 'autotrain_compatible'] | false | true | true | 1,613 |
| 23,686 | mujiatong/first-model | mujiatong | null | 2 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,687 | mukherjeearnab/opsolBERT | mukherjeearnab | roberta | 8 | 10 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'roberta', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | true | 6 |
| 23,688 | mukund/privbert | mukund | roberta | 9 | 592 | transformers | 0 | fill-mask | true | true | false | null | null | null | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'roberta', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | true | 831 |
| 23,689 | munezah/DialoGPT-small-aot | munezah | gpt2 | 91 | 12 | transformers | 0 | conversational | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'gpt2', 'text-generation', 'transformers', 'conversational'] | false | true | true | 22 |
| 23,690 | munezah/DialoGPT-small-sherlock | munezah | gpt2 | 31 | 12 | transformers | 0 | conversational | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'gpt2', 'text-generation', 'transformers', 'conversational'] | false | true | true | 26 |
| 23,691 | acul3/bert-large-mc4 | acul3 | bert | 11 | 3 | transformers | 0 | fill-mask | false | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['jax', 'tensorboard', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0 |
| 23,692 | acul3/dalle-mini-indo-base | acul3 | t5 | 9 | 5 | transformers | 0 | text2text-generation | false | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['jax', 't5', 'text2text-generation', 'transformers', 'autotrain_compatible'] | false | false | false | 0 |
| 23,693 | acul3/dalle-mini-indo | acul3 | t5 | 9 | 5 | transformers | 0 | text2text-generation | false | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['jax', 't5', 'text2text-generation', 'transformers', 'autotrain_compatible'] | false | false | false | 0 |
| 23,694 | acul3/image-captioning-marian | acul3 | clip-vision-marian | 5 | 7 | transformers | 0 | text2text-generation | false | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['jax', 'clip-vision-marian', 'text2text-generation', 'transformers', 'autotrain_compatible'] | false | false | false | 0 |
| 23,695 | acul3/image-captioning | acul3 | vit-gpt2 | 3 | 11 | transformers | 0 | text2text-generation | false | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['jax', 'vit-gpt2', 'text2text-generation', 'transformers', 'autotrain_compatible'] | false | false | false | 0 |
| 23,696 | acul3/mt5-large-id-qgen-qa | acul3 | t5 | 7 | 11 | transformers | 0 | text2text-generation | true | false | false | mit | ['id'] | ['Squad', 'XQuad', 'Tydiqa'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 't5', 'text2text-generation', 'id', 'dataset:Squad', 'dataset:XQuad', 'dataset:Tydiqa', 'transformers', 'license:mit', 'autotrain_compatible'] | false | true | true | 383 |
| 23,697 | acul3/mt5-translate-en-id | acul3 | t5 | 7 | 60 | transformers | 1 | translation | true | false | false | mit | ['id'] | ['OPUS', 'CC-aligned'] | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 't5', 'text2text-generation', 'id', 'dataset:OPUS', 'dataset:CC-aligned', 'transformers', 'translation', 'license:mit', 'autotrain_compatible'] | false | true | true | 227 |
| 23,698 | acul3/roberta-large-mc4-id | acul3 | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0 |
| 23,699 | acul3/xlsr_indonesia | acul3 | wav2vec2 | 12 | 8 | transformers | 0 | automatic-speech-recognition | true | false | false | apache-2.0 | ['id'] | ['common_voice'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'wav2vec2', 'automatic-speech-recognition', 'id', 'dataset:common_voice', 'transformers', 'speech', 'audio', 'xlsr-fine-tuning-week', 'license:apache-2.0'] | false | true | true | 1,693 |
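As a rough illustration of how the columns described above could be queried, here is a minimal pandas sketch. It assumes the rows have been exported to a CSV file; the file name `hf_model_metadata.csv` is hypothetical, and the comma thousands separators and lowercase true/false values match the table as displayed here.

```python
import pandas as pd

# Hypothetical export of the table above; adjust the path to the actual file.
# thousands="," handles values like "4,719" and "23,600".
df = pd.read_csv("hf_model_metadata.csv", thousands=",")

# Lowercase "true"/"false" load as strings, so normalise the flag columns to bool.
flag_cols = ["pytorch", "tensorflow", "jax", "has_model_index", "has_metadata", "has_text"]
for col in flag_cols:
    df[col] = df[col].astype(str).str.lower() == "true"

# Example query: PyTorch repos with a model index, ranked by 30-day downloads.
subset = df[df["pytorch"] & df["has_model_index"]]
print(subset.sort_values("downloads_30d", ascending=False)[
    ["repo_id", "pipeline", "downloads_30d", "likes"]
])
```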