Schema (28 columns):

Column | Dtype | Range / classes
Unnamed: 0 | int64 | 0-245k
repo_id | stringlengths | 4-122
author | stringlengths | 2-42
model_type | stringlengths | 2-34
files_per_repo | int64 | 0-77k
downloads_30d | int64 | 0-55.9M
library | stringlengths | 2-37
likes | int64 | 0-8.48k
pipeline | stringlengths | 5-30
pytorch | bool | 2 classes
tensorflow | bool | 2 classes
jax | bool | 2 classes
license | stringlengths | 2-33
languages | stringlengths | 2-1.63k
datasets | stringlengths | 2-5.05k
co2 | stringlengths | 3-342
prs_count | int64 | 0-168
prs_open | int64 | 0-121
prs_merged | int64 | 0-167
prs_closed | int64 | 0-35
discussions_count | int64 | 0-226
discussions_open | int64 | 0-155
discussions_closed | int64 | 0-76
tags | stringlengths | 2-7.26k
has_model_index | bool | 2 classes
has_metadata | bool | 2 classes
has_text | bool | 2 classes
text_length | int64 | 0-849k
Rows 16,700-16,799 (one record per line, columns in schema order):

Unnamed: 0 | repo_id | author | model_type | files_per_repo | downloads_30d | library | likes | pipeline | pytorch | tensorflow | jax | license | languages | datasets | co2 | prs_count | prs_open | prs_merged | prs_closed | discussions_count | discussions_open | discussions_closed | tags | has_model_index | has_metadata | has_text | text_length
16,700 | hfl/chinese-electra-base-generator | hfl | electra | 10 | 17 | transformers | 0 | fill-mask | true | true | false | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'electra', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0', 'fill-mask'] | false | true | true | 1,963
16,701 | hfl/chinese-electra-large-discriminator | hfl | electra | 10 | 40 | transformers | 1 | null | true | true | false | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'electra', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0'] | false | true | true | 1,963
16,702 | hfl/chinese-electra-large-generator | hfl | electra | 10 | 8 | transformers | 0 | fill-mask | true | true | false | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'electra', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0', 'fill-mask'] | false | true | true | 1,963
16,703 | hfl/chinese-electra-small-discriminator | hfl | electra | 10 | 102 | transformers | 1 | null | true | true | false | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'electra', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0'] | false | true | true | 1,963
16,704 | hfl/chinese-electra-small-ex-discriminator | hfl | null | 10 | 12 | transformers | 2 | null | true | true | false | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0'] | false | true | true | 1,963
16,705 | hfl/chinese-electra-small-ex-generator | hfl | null | 10 | 8 | transformers | 0 | fill-mask | true | true | false | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0', 'fill-mask'] | false | true | true | 1,963
16,706 | hfl/chinese-electra-small-generator | hfl | electra | 10 | 18 | transformers | 0 | fill-mask | true | true | false | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'electra', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0', 'fill-mask'] | false | true | true | 1,963
16,707 | hfl/chinese-legal-electra-base-discriminator | hfl | electra | 9 | 73 | transformers | 1 | null | true | true | false | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'electra', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0'] | false | true | true | 1,881
16,708 | hfl/chinese-legal-electra-base-generator | hfl | electra | 10 | 51 | transformers | 6 | fill-mask | true | true | false | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'electra', 'fill-mask', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0', 'autotrain_compatible'] | false | true | true | 1,881
16,709 | hfl/chinese-legal-electra-large-discriminator | hfl | electra | 9 | 23 | transformers | 3 | null | true | true | false | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'electra', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0'] | false | true | true | 1,881
16,710 | hfl/chinese-legal-electra-large-generator | hfl | electra | 10 | 17 | transformers | 7 | fill-mask | true | true | false | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'electra', 'fill-mask', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0', 'autotrain_compatible'] | false | true | true | 1,881
16,711 | hfl/chinese-legal-electra-small-discriminator | hfl | electra | 9 | 51 | transformers | 1 | null | true | true | false | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'electra', 'pretraining', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0'] | false | true | true | 1,881
16,712 | hfl/chinese-legal-electra-small-generator | hfl | electra | 10 | 14 | transformers | 4 | fill-mask | true | true | false | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'electra', 'fill-mask', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0', 'autotrain_compatible'] | false | true | true | 1,881
16,713 | hfl/chinese-macbert-base | hfl | bert | 11 | 30,360 | transformers | 81 | fill-mask | true | true | true | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'jax', 'bert', 'fill-mask', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0', 'autotrain_compatible', 'has_space'] | false | true | true | 3,779
16,714 | hfl/chinese-macbert-large | hfl | bert | 11 | 2,808 | transformers | 18 | fill-mask | true | true | true | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'jax', 'bert', 'fill-mask', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0', 'autotrain_compatible'] | false | true | true | 3,779
16,715 | hfl/chinese-pert-base | hfl | bert | 10 | 3,939 | transformers | 9 | feature-extraction | true | true | false | cc-by-nc-sa-4.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'bert', 'feature-extraction', 'zh', 'transformers', 'license:cc-by-nc-sa-4.0', 'has_space'] | false | true | true | 168
16,716 | hfl/chinese-pert-large | hfl | bert | 10 | 106 | transformers | 7 | feature-extraction | true | true | false | cc-by-nc-sa-4.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'bert', 'feature-extraction', 'zh', 'transformers', 'license:cc-by-nc-sa-4.0'] | false | true | true | 168
16,717 | hfl/chinese-roberta-wwm-ext-large | hfl | bert | 11 | 12,053 | transformers | 43 | fill-mask | true | true | true | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'jax', 'bert', 'fill-mask', 'zh', 'arxiv:1906.08101', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0', 'autotrain_compatible', 'has_space'] | false | true | true | 2,007
16,718 | hfl/chinese-roberta-wwm-ext | hfl | bert | 11 | 57,073 | transformers | 152 | fill-mask | true | true | true | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'jax', 'bert', 'fill-mask', 'zh', 'arxiv:1906.08101', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0', 'autotrain_compatible', 'has_space'] | false | true | true | 2,007
16,719 | hfl/chinese-xlnet-base | hfl | xlnet | 10 | 1,624 | transformers | 17 | text-generation | true | true | false | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'xlnet', 'text-generation', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0', 'has_space'] | false | true | true | 1,503
16,720 | hfl/chinese-xlnet-mid | hfl | xlnet | 10 | 298 | transformers | 6 | text-generation | true | true | false | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'xlnet', 'text-generation', 'zh', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0'] | false | true | true | 1,503
16,721 | hfl/cino-base-v2 | hfl | xlm-roberta | 8 | 191 | transformers | 5 | fill-mask | true | true | false | apache-2.0 | ['zh', 'bo', 'kk', 'ko', 'mn', 'ug', 'yue'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'xlm-roberta', 'fill-mask', 'zh', 'bo', 'kk', 'ko', 'mn', 'ug', 'yue', 'transformers', 'license:apache-2.0', 'autotrain_compatible'] | false | true | true | 1,388
16,722 | hfl/cino-large-v2 | hfl | xlm-roberta | 8 | 145 | transformers | 8 | fill-mask | true | true | false | apache-2.0 | ['zh', 'bo', 'kk', 'ko', 'mn', 'ug', 'yue'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'xlm-roberta', 'fill-mask', 'zh', 'bo', 'kk', 'ko', 'mn', 'ug', 'yue', 'transformers', 'license:apache-2.0', 'autotrain_compatible'] | false | true | true | 1,388
16,723 | hfl/cino-large | hfl | xlm-roberta | 8 | 69 | transformers | 6 | fill-mask | true | true | false | apache-2.0 | ['zh', 'bo', 'kk', 'ko', 'mn', 'ug', 'yue'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'xlm-roberta', 'fill-mask', 'zh', 'bo', 'kk', 'ko', 'mn', 'ug', 'yue', 'transformers', 'license:apache-2.0', 'autotrain_compatible'] | false | true | true | 1,388
16,724 | hfl/cino-small-v2 | hfl | xlm-roberta | 8 | 825 | transformers | 6 | fill-mask | true | true | false | apache-2.0 | ['zh', 'bo', 'kk', 'ko', 'mn', 'ug', 'yue'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'xlm-roberta', 'fill-mask', 'zh', 'bo', 'kk', 'ko', 'mn', 'ug', 'yue', 'transformers', 'license:apache-2.0', 'autotrain_compatible'] | false | true | true | 1,388
16,725 | hfl/english-pert-base | hfl | bert | 8 | 37 | transformers | 4 | feature-extraction | true | true | false | cc-by-nc-sa-4.0 | ['en'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'bert', 'feature-extraction', 'en', 'transformers', 'license:cc-by-nc-sa-4.0'] | false | true | true | 219
16,726 | hfl/english-pert-large | hfl | bert | 8 | 9 | transformers | 3 | feature-extraction | true | true | false | cc-by-nc-sa-4.0 | ['en'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'bert', 'feature-extraction', 'en', 'transformers', 'license:cc-by-nc-sa-4.0'] | false | true | true | 219
16,727 | hfl/rbt3 | hfl | bert | 11 | 3,107 | transformers | 9 | fill-mask | true | true | true | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'jax', 'bert', 'fill-mask', 'zh', 'arxiv:1906.08101', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0', 'autotrain_compatible'] | false | true | true | 2,004
16,728 | hfl/rbt4 | hfl | bert | 11 | 15 | transformers | 4 | fill-mask | true | true | true | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'jax', 'bert', 'fill-mask', 'zh', 'arxiv:1906.08101', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0', 'autotrain_compatible'] | false | true | true | 2,003
16,729 | hfl/rbt6 | hfl | bert | 11 | 1,214 | transformers | 5 | fill-mask | true | true | true | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'jax', 'bert', 'fill-mask', 'zh', 'arxiv:1906.08101', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0', 'autotrain_compatible'] | false | true | true | 2,003
16,730 | hfl/rbtl3 | hfl | bert | 11 | 2,409 | transformers | 3 | fill-mask | true | true | true | apache-2.0 | ['zh'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tf', 'jax', 'bert', 'fill-mask', 'zh', 'arxiv:1906.08101', 'arxiv:2004.13922', 'transformers', 'license:apache-2.0', 'autotrain_compatible'] | false | true | true | 2,009
16,731 | hgarg/fruits | hgarg | vit | 11 | 24 | transformers | 2 | image-classification | true | false | false | null | null | null | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'vit', 'image-classification', 'transformers', 'huggingpics', 'model-index', 'autotrain_compatible'] | false | false | false | 0
16,732 | hgarg/indian-snacks | hgarg | vit | 11 | 15 | transformers | 0 | image-classification | true | false | false | null | null | null | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'vit', 'image-classification', 'transformers', 'huggingpics', 'model-index', 'autotrain_compatible'] | false | false | false | 0
16,733 | hgharibi/wav2vec2-xls-r-300m-fa-colab | hgharibi | wav2vec2 | 15 | 8 | transformers | 0 | automatic-speech-recognition | true | false | false | apache-2.0 | null | ['common_voice'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'wav2vec2', 'automatic-speech-recognition', 'dataset:common_voice', 'transformers', 'generated_from_trainer', 'license:apache-2.0'] | true | true | true | 1,539
16,734 | hgharibi/wav2vec2-xls-r-300m-fa | hgharibi | wav2vec2 | 12 | 6 | transformers | 0 | automatic-speech-recognition | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'wav2vec2', 'automatic-speech-recognition', 'transformers'] | false | false | false | 0
16,735 | hgiyt/ar-mbertmodel-mberttok | hgiyt | bert | 7 | 9 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,736 | hgiyt/ar-mbertmodel-monotok-adapter | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,737 | hgiyt/ar-mbertmodel-monotok | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,738 | hgiyt/ar-monomodel-mberttok | hgiyt | bert | 7 | 11 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,739 | hgiyt/ar-monomodel-monotok | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,740 | hgiyt/fi-mbertmodel-mberttok | hgiyt | bert | 7 | 8 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,741 | hgiyt/fi-mbertmodel-monotok-adapter | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,742 | hgiyt/fi-mbertmodel-monotok | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,743 | hgiyt/fi-monomodel-mberttok | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,744 | hgiyt/fi-monomodel-monotok | hgiyt | bert | 7 | 6 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,745 | hgiyt/id-mbertmodel-mberttok | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,746 | hgiyt/id-mbertmodel-monotok-adapter | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,747 | hgiyt/id-mbertmodel-monotok | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,748 | hgiyt/id-monomodel-mberttok | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,749 | hgiyt/id-monomodel-monotok | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,750 | hgiyt/ko-mbertmodel-mberttok | hgiyt | bert | 7 | 11 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,751 | hgiyt/ko-mbertmodel-monotok-adapter | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,752 | hgiyt/ko-mbertmodel-monotok | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,753 | hgiyt/ko-monomodel-mberttok | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,754 | hgiyt/ko-monomodel-monotok | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,755 | hgiyt/tr-mbertmodel-mberttok | hgiyt | bert | 7 | 9 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,756 | hgiyt/tr-mbertmodel-monotok-adapter | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,757 | hgiyt/tr-mbertmodel-monotok | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,758 | hgiyt/tr-monomodel-mberttok | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,759 | hgiyt/tr-monomodel-monotok | hgiyt | bert | 7 | 7 | transformers | 0 | fill-mask | true | false | true | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'jax', 'bert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,760 | hgw3lss/gpt-j-6B-Buckland | hgw3lss | gptj | 8 | 12 | transformers | 1 | text-generation | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'gptj', 'text-generation', 'transformers', 'has_space'] | false | false | false | 0
16,761 | hhou435/chinese_roberta_L-2_H-128 | hhou435 | null | 0 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0
16,762 | hide-JP/distilbert-base-uncased-finetuned-imdb-accelerate | hide-JP | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0
16,763 | hide-JP/distilbert-base-uncased-finetuned-imdb | hide-JP | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0
16,764 | higgzy/paraphrase-distilroberta-base-v1-finetuned-journal-conference | higgzy | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0
16,765 | highnoon/longformer-base-4096-finetuned-ner-finetuned-ner | highnoon | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0
16,766 | highnoon/longformer-base-4096-finetuned-squadv2-finetuned-ner | highnoon | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0
16,767 | highnoon/longformerwikiannen-finetuned-ner | highnoon | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0
16,768 | higopires/roB3rta | higopires | roberta | 6 | 13 | transformers | 0 | fill-mask | true | false | false | null | null | null | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'roberta', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,769 | hiiamsid/BETO_es_binary_classification | hiiamsid | bert | 7 | 27 | transformers | 2 | text-classification | true | false | false | apache-2.0 | ['es'] | ['self made to classify whether text is related to technology or not.'] | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'bert', 'text-classification', 'es', 'dataset:self made to classify whether text is related to technology or not.', 'transformers', 'ticket classification', 'license:apache-2.0', 'has_space'] | false | true | true | 905
16,770 | hiiamsid/autonlp-Summarization-20684327 | hiiamsid | mt5 | 9 | 10 | transformers | 0 | text2text-generation | true | false | false | null | ['es'] | ['hiiamsid/autonlp-data-Summarization'] | 437.2441955971972 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'mt5', 'text2text-generation', 'es', 'dataset:hiiamsid/autonlp-data-Summarization', 'transformers', 'autonlp', 'co2_eq_emissions', 'autotrain_compatible'] | false | true | true | 526
16,771 | hiiamsid/autonlp-Summarization-20684328 | hiiamsid | mt5 | 9 | 13 | transformers | 0 | text2text-generation | true | false | false | null | ['es'] | ['hiiamsid/autonlp-data-Summarization'] | 1133.9679082840014 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'mt5', 'text2text-generation', 'es', 'dataset:hiiamsid/autonlp-data-Summarization', 'transformers', 'autonlp', 'co2_eq_emissions', 'autotrain_compatible'] | false | true | true | 524
16,772 | hiiamsid/est5-base-qg | hiiamsid | t5 | 11 | 8 | transformers | 0 | text2text-generation | true | false | false | mit | ['es'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 't5', 'text2text-generation', 'es', 'transformers', 'spanish', 'question generation', 'qg', 'license:mit', 'autotrain_compatible'] | false | true | true | 1,221
16,773 | hiiamsid/est5-base | hiiamsid | t5 | 7 | 11 | transformers | 0 | text2text-generation | true | false | false | mit | ['es'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 't5', 'text2text-generation', 'es', 'transformers', 'spanish', 'license:mit', 'autotrain_compatible'] | false | true | true | 694
16,774 | hiiamsid/hit5-base | hiiamsid | t5 | 7 | 8 | transformers | 0 | text2text-generation | true | false | false | mit | ['hi'] | null | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 't5', 'text2text-generation', 'hi', 'transformers', 'hindi', 'license:mit', 'autotrain_compatible'] | false | true | true | 617
16,775 | hiiamsid/sentence_similarity_hindi | hiiamsid | bert | 14 | 937 | sentence-transformers | 6 | sentence-similarity | true | false | false | null | ['hi'] | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'bert', 'hi', 'sentence-transformers', 'feature-extraction', 'sentence-similarity', 'transformers'] | false | true | true | 4,074
16,776 | hiiamsid/sentence_similarity_spanish_es | hiiamsid | bert | 15 | 8,157 | sentence-transformers | 19 | sentence-similarity | true | false | false | null | ['es'] | null | null | 0 | 0 | 0 | 0 | 2 | 2 | 0 | ['pytorch', 'bert', 'es', 'sentence-transformers', 'feature-extraction', 'sentence-similarity', 'transformers', 'has_space'] | false | true | true | 4,311
16,777 | hiiii23/Qtest | hiiii23 | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0
16,778 | hiiii23/distilbert-base-uncased-finetuned-squad | hiiii23 | distilbert | 12 | 11 | transformers | 0 | question-answering | true | false | false | apache-2.0 | null | ['squad'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'distilbert', 'question-answering', 'dataset:squad', 'transformers', 'generated_from_trainer', 'license:apache-2.0', 'autotrain_compatible'] | true | true | true | 928
16,779 | hiking1/model1 | hiking1 | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0
16,780 | himanshu-dutta/pycoder-gpt2 | himanshu-dutta | gpt2 | 10 | 15 | transformers | 0 | text-generation | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'gpt2', 'text-generation', 'transformers'] | false | false | true | 4,329
16,781 | hiraki/wav2vec2-base-timit-demo-colab | hiraki | wav2vec2 | 14 | 6 | transformers | 0 | automatic-speech-recognition | true | false | false | apache-2.0 | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'wav2vec2', 'automatic-speech-recognition', 'transformers', 'generated_from_trainer', 'license:apache-2.0'] | true | true | true | 1,937
16,782 | hireddivas/DialoGPT-small-ray | hireddivas | gpt2 | 9 | 12 | transformers | 0 | conversational | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'gpt2', 'text-generation', 'transformers', 'conversational'] | false | true | true | 37
16,783 | hireddivas/DialoGPT-small-scully | hireddivas | gpt2 | 9 | 12 | transformers | 0 | conversational | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'gpt2', 'text-generation', 'transformers', 'conversational'] | false | true | true | 46
16,784 | hireddivas/dialoGPT-small-mulder | hireddivas | gpt2 | 9 | 12 | transformers | 0 | conversational | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'gpt2', 'text-generation', 'transformers', 'conversational'] | false | true | true | 35
16,785 | hireddivas/dialoGPT-small-phil | hireddivas | gpt2 | 9 | 12 | transformers | 0 | conversational | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'gpt2', 'text-generation', 'transformers', 'conversational'] | false | true | true | 44
16,786 | hireddivas/dialoGPT-small-sonic | hireddivas | gpt2 | 9 | 25 | transformers | 0 | conversational | true | false | false | null | null | null | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'gpt2', 'text-generation', 'transformers', 'conversational'] | false | true | true | 30
16,787 | hiroshi-matsuda-rit/bert-base-japanese-basic-char-v2 | hiroshi-matsuda-rit | bert | 6 | 10 | transformers | 0 | fill-mask | true | false | false | cc-by-sa-4.0 | ['ja'] | ['wikipedia'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'bert', 'fill-mask', 'ja', 'dataset:wikipedia', 'transformers', 'license:cc-by-sa-4.0', 'autotrain_compatible'] | false | true | true | 404
16,788 | hiroshi-matsuda-rit/ja_gsd | hiroshi-matsuda-rit | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0
16,789 | hiroshi-matsuda-rit/ja_gsd_bert_wwm_unidic_lite | hiroshi-matsuda-rit | null | 22 | 10 | spacy | 0 | token-classification | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['ja', 'spacy', 'token-classification', 'license:cc-by-sa-4.0', 'model-index'] | false | false | false | 0
16,790 | histinct7002/distilbert-base-uncased-finetuned-cola | histinct7002 | distilbert | 13 | 8 | transformers | 0 | text-classification | true | false | false | apache-2.0 | null | ['glue'] | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'distilbert', 'text-classification', 'dataset:glue', 'transformers', 'generated_from_trainer', 'license:apache-2.0', 'model-index'] | true | true | true | 1,565
16,791 | histinct7002/distilbert-base-uncased-finetuned-ner | histinct7002 | distilbert | 16 | 9 | transformers | 0 | token-classification | true | false | false | apache-2.0 | null | ['conll2003'] | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'distilbert', 'token-classification', 'dataset:conll2003', 'transformers', 'generated_from_trainer', 'license:apache-2.0', 'model-index', 'autotrain_compatible'] | true | true | true | 1,555
16,792 | histinct7002/distilroberta-base-finetuned-wikitext2 | histinct7002 | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0
16,793 | hivemind/gpt-j-6B-8bit | hivemind | gptj | 6 | 7,786 | transformers | 119 | text-generation | true | false | false | apache-2.0 | ['en'] | ['The Pile'] | null | 1 | 0 | 1 | 0 | 15 | 12 | 3 | ['pytorch', 'gptj', 'text-generation', 'en', 'dataset:The Pile', 'arxiv:2106.09685', 'arxiv:2110.02861', 'transformers', 'causal-lm', 'license:apache-2.0', 'has_space'] | false | true | true | 4,720
16,794 | hizella/aBERT | hizella | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0
16,795 | hizella/aBERT_test | hizella | albert | 37 | 6 | transformers | 0 | fill-mask | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'tensorboard', 'albert', 'fill-mask', 'transformers', 'autotrain_compatible'] | false | false | false | 0
16,796 | hizella/aBERT_v2 | hizella | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0
16,797 | hjgwak/ViBE | hjgwak | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0
16,798 | hjj/hh | hjj | null | 1 | 0 | null | 0 | null | false | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | [] | false | false | false | 0
16,799 | hksenpai/DialoGPT-small-rick | hksenpai | null | 9 | 5 | transformers | 0 | null | true | false | false | null | null | null | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ['pytorch', 'transformers', 'converstional'] | false | true | false | 0
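
To work with this table programmatically, it can be loaded back into a dataframe. The sketch below is a minimal example under stated assumptions: the rows above are exported to a local CSV file (model_repos_stats.csv is a hypothetical filename) with the 28 columns from the schema, and the languages, datasets, and tags columns are stored as stringified Python lists or null, as they appear above.

```python
# Minimal sketch, assuming the table has been exported to a local CSV named
# "model_repos_stats.csv" (hypothetical filename) with the 28 schema columns.
import ast

import pandas as pd

# thousands="," handles numbers printed with separators (e.g. 30,360).
df = pd.read_csv("model_repos_stats.csv", index_col=0, thousands=",")


def parse_list(cell):
    """Turn a stringified list such as "['zh']" into a real list; treat
    missing values and a literal "null" as empty lists."""
    if pd.isna(cell) or cell == "null":
        return []
    return ast.literal_eval(cell)


# languages, datasets, and tags appear above as stringified lists or null.
for col in ["languages", "datasets", "tags"]:
    df[col] = df[col].apply(parse_list)

# Example queries over the slice shown above (rows 16,700-16,799):
hfl_models = df[df["author"] == "hfl"]                      # all hfl/* repos
top_downloads = df.nlargest(5, "downloads_30d")[["repo_id", "downloads_30d"]]
zh_fill_mask = df[(df["pipeline"] == "fill-mask")
                  & df["languages"].apply(lambda langs: "zh" in langs)]
print(len(hfl_models), top_downloads, len(zh_fill_mask), sep="\n")
```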