| id | author | lastModified | downloads | downloadsAllTime | tags | pipeline_tag | createdAt | dataset | license | architectures | base_model | base_model_relation |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| amagzari/bart-large-xsum-finetuned-samsum-v2 | amagzari | 2022-12-13 20:44:48 | 149 | 1,587 | ['transformers', 'pytorch', 'tensorboard', 'bart', 'text2text-generation', 'generated_from_trainer', 'model-index', 'autotrain_compatible', 'endpoints_compatible'] | text2text-generation | 2022-12-13 19:50:22 | samsum | mit | BartForConditionalGeneration | null | unknown |
| arbml/whisper-medium-ar | arbml | 2024-08-31 15:11:26 | 390 | 2,221 | ['transformers', 'pytorch', 'tensorboard', 'whisper', 'automatic-speech-recognition', 'whisper-event', 'generated_from_trainer', 'hf-asr-leaderboard', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-13 21:16:15 | arbml/mgb2 | apache-2.0 | WhisperForConditionalGeneration | null | unknown |
| clu-ling/whisper-small-spanish | clu-ling | 2023-03-03 21:24:33 | 244 | 1,812 | ['transformers', 'pytorch', 'tensorboard', 'whisper', 'automatic-speech-recognition', 'generated_from_trainer', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-14 06:37:42 | unknown | apache-2.0 | WhisperForConditionalGeneration | null | unknown |
| stanford-crfm/BioMedLM | stanford-crfm | 2024-03-28 13:57:14 | 11,829 | 106,575 | ['transformers', 'pytorch', 'gpt2', 'text-generation', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text-generation | 2022-12-14 08:14:59 | pubmed | bigscience-bloom-rail-1.0 | GPT2LMHeadModel | null | unknown |
| pierreguillou/whisper-medium-portuguese | pierreguillou | 2022-12-16 09:08:10 | 813 | 7,348 | ['transformers', 'pytorch', 'tensorboard', 'whisper', 'automatic-speech-recognition', 'generated_from_trainer', 'whisper-event', 'pt', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-15 09:59:20 | mozilla-foundation/common_voice_11_0 | apache-2.0 | WhisperForConditionalGeneration | null | unknown |
| DigitalUmuganda/whisper_small_kinyarwanda | DigitalUmuganda | 2024-12-05 14:20:21 | 32 | 528 | ['transformers', 'pytorch', 'whisper', 'automatic-speech-recognition', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-15 10:31:44 | unknown | unknown | WhisperForConditionalGeneration | null | unknown |
| Conflictx/CGI_Animation | Conflictx | 2022-12-15 19:34:05 | 0 | 0 | ['text-to-image', 'v2.0', 'Embedding'] | text-to-image | 2022-12-15 19:09:27 | unknown | creativeml-openrail-m | unknown | null | unknown |
| koheiduck/bert-japanese-finetuned-sentiment | koheiduck | 2022-12-20 07:21:09 | 97,156 | 2,844,833 | ['transformers', 'pytorch', 'bert', 'text-classification', 'autotrain_compatible', 'endpoints_compatible'] | text-classification | 2022-12-16 04:15:00 | unknown | unknown | BertForSequenceClassification | null | unknown |
| timm/mobilenetv3_small_100.lamb_in1k | timm | 2025-01-21 18:21:16 | 77,102,767 | 288,751,036 | ['timm', 'pytorch', 'safetensors', 'image-classification', 'transformers'] | image-classification | 2022-12-16 05:38:36 | imagenet-1k | apache-2.0 | unknown | null | source |
| lyua1225/clip-huge-zh-75k-steps-bs4096 | lyua1225 | 2022-12-16 09:29:21 | 108 | 1,896 | ['transformers', 'pytorch', 'clip', 'zero-shot-image-classification', 'zh', 'Chinese', 'endpoints_compatible'] | zero-shot-image-classification | 2022-12-16 06:36:19 | unknown | creativeml-openrail-m | CLIPModel | null | unknown |
| snehalyelmati/mt5-hindi-to-english | snehalyelmati | 2022-12-16 09:31:09 | 323 | 1,754 | ['transformers', 'pytorch', 'mt5', 'text2text-generation', 'google/mt5-small', 'machine_translation', 'en', 'autotrain_compatible', 'endpoints_compatible'] | text2text-generation | 2022-12-16 08:32:48 | unknown | apache-2.0 | MT5ForConditionalGeneration | null | unknown |
| caidas/swin2SR-classical-sr-x2-64 | caidas | 2024-03-27 10:32:24 | 8,533 | 2,635,627 | ['transformers', 'pytorch', 'safetensors', 'swin2sr', 'image-to-image', 'vision'] | image-to-image | 2022-12-16 14:05:18 | unknown | apache-2.0 | Swin2SRForImageSuperResolution | null | unknown |
| caidas/swin2SR-lightweight-x2-64 | caidas | 2024-10-26 15:50:08 | 2,011 | 32,176 | ['transformers', 'pytorch', 'safetensors', 'swin2sr', 'image-to-image', 'vision'] | image-to-image | 2022-12-16 14:11:39 | unknown | apache-2.0 | Swin2SRForImageSuperResolution | null | unknown |
| caidas/swin2SR-realworld-sr-x4-64-bsrgan-psnr | caidas | 2023-01-21 12:08:28 | 54,348 | 478,877 | ['transformers', 'pytorch', 'swin2sr', 'image-to-image', 'vision'] | image-to-image | 2022-12-16 14:13:44 | unknown | apache-2.0 | Swin2SRForImageSuperResolution | null | unknown |
| uclanlp/newsbert | uclanlp | 2022-12-28 14:45:41 | 429 | 799 | ['transformers', 'pytorch', 'tf', 'bert', 'fill-mask', 'autotrain_compatible', 'endpoints_compatible'] | fill-mask | 2022-12-16 22:54:47 | unknown | unknown | BertForMaskedLM | null | unknown |
| congazverse/worldBuilder | congazverse | 2023-01-16 09:56:29 | 0 | 0 | ['stable-diffusion', 'text-to-image', 'image-to-image', 'hdri', '360 VR', 'en'] | text-to-image | 2022-12-17 10:42:26 | unknown | openrail++ | unknown | null | unknown |
| kohbanye/pixel-art-style | kohbanye | 2023-01-27 11:30:59 | 232 | 14,443 | ['diffusers', 'stable-diffusion', 'text-to-image', 'stable-diffusion-diffusers', 'en', 'autotrain_compatible', 'endpoints_compatible'] | text-to-image | 2022-12-18 07:27:52 | unknown | unknown | unknown | null | unknown |
| jonatasgrosman/whisper-large-zh-cv11 | jonatasgrosman | 2022-12-22 23:51:35 | 312 | 11,813 | ['transformers', 'pytorch', 'tensorboard', 'whisper', 'automatic-speech-recognition', 'whisper-event', 'generated_from_trainer', 'zh', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-18 07:28:34 | mozilla-foundation/common_voice_11_0 | apache-2.0 | WhisperForConditionalGeneration | ['openai/whisper-large-v2'] | unknown_annotated |
| emmajoanne/models | emmajoanne | 2023-08-15 01:32:57 | 0 | 0 | [] | unknown | 2022-12-18 08:49:26 | unknown | unknown | unknown | null | unknown |
| bofenghuang/whisper-small-cv11-german | bofenghuang | 2022-12-27 10:46:47 | 200 | 4,930 | ['transformers', 'pytorch', 'tensorboard', 'whisper', 'automatic-speech-recognition', 'whisper-event', 'de', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-18 13:54:46 | mozilla-foundation/common_voice_11_0 | apache-2.0 | WhisperForConditionalGeneration | null | unknown |
| bofenghuang/whisper-large-v2-cv11-german | bofenghuang | 2023-03-28 10:31:22 | 98 | 5,408 | ['transformers', 'pytorch', 'tensorboard', 'whisper', 'automatic-speech-recognition', 'whisper-event', 'de', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-18 13:55:00 | mozilla-foundation/common_voice_11_0 | apache-2.0 | WhisperForConditionalGeneration | null | unknown |
| jstoone/whisper-medium-da | jstoone | 2023-10-24 11:37:28 | 54 | 1,262 | ['transformers', 'pytorch', 'tensorboard', 'whisper', 'automatic-speech-recognition', 'generated_from_trainer', 'hf-asr-leaderboard', 'whisper-event', 'da', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-18 19:11:05 | mozilla-foundation/common_voice_11_0 | apache-2.0 | WhisperForConditionalGeneration | ['openai/whisper-medium'] | finetune |
| xmzhu/whisper-tiny-zh | xmzhu | 2022-12-19 05:51:53 | 295 | 1,895 | ['transformers', 'pytorch', 'tensorboard', 'whisper', 'automatic-speech-recognition', 'whisper-event', 'generated_from_trainer', 'zh', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-18 20:21:09 | mozilla-foundation/common_voice_11_0 | apache-2.0 | WhisperForConditionalGeneration | null | unknown |
| kakaobrain/karlo-v1-alpha | kakaobrain | 2023-02-06 18:23:45 | 1,430 | 137,442 | ['diffusers', 'safetensors', 'text-to-image'] | text-to-image | 2022-12-18 22:57:09 | unknown | creativeml-openrail-m | unknown | null | unknown |
| lmqg/t5-base-squad-qag | lmqg | 2023-01-10 03:08:25 | 180 | 12,556 | ['transformers', 'pytorch', 't5', 'text2text-generation', 'questions and answers generation', 'en', 'model-index', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text2text-generation | 2022-12-19 02:49:52 | lmqg/qag_squad | cc-by-4.0 | T5ForConditionalGeneration | null | unknown |
| microsoft/Promptist | microsoft | 2023-01-24 17:21:42 | 9,381 | 126,659 | ['transformers', 'pytorch', 'gpt2', 'text-generation', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text-generation | 2022-12-19 07:43:54 | unknown | unknown | GPT2LMHeadModel | null | unknown |
| BlueRaccoon/whisper-small-kab | BlueRaccoon | 2022-12-19 16:03:45 | 116 | 369 | ['transformers', 'pytorch', 'tensorboard', 'whisper', 'automatic-speech-recognition', 'whisper-event', 'generated_from_trainer', 'ka', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-19 07:44:45 | mozilla-foundation/common_voice_11_0 | apache-2.0 | WhisperForConditionalGeneration | null | unknown |
| vumichien/whisper-large-v2-mix-jp | vumichien | 2023-09-11 12:38:04 | 64 | 1,748 | ['transformers', 'pytorch', 'tensorboard', 'whisper', 'automatic-speech-recognition', 'generated_from_trainer', 'whisper-event', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-19 07:54:17 | vumichien/preprocessed_jsut_jsss_css10_common_voice_11 | apache-2.0 | WhisperForConditionalGeneration | ['openai/whisper-large-v2'] | finetune |
| vasista22/whisper-kannada-tiny | vasista22 | 2023-04-24 20:21:41 | 237 | 2,668 | ['transformers', 'pytorch', 'jax', 'whisper', 'automatic-speech-recognition', 'whisper-event', 'kn', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-19 17:11:31 | unknown | apache-2.0 | WhisperForConditionalGeneration | null | unknown |
| teticio/audio-encoder | teticio | 2023-06-01 18:19:25 | 20 | 6,164 | ['diffusers', 'audio', 'music'] | unknown | 2022-12-19 18:13:56 | unknown | gpl-3.0 | unknown | null | unknown |
| bayartsogt/whisper-large-v2-mn-13 | bayartsogt | 2022-12-22 02:36:39 | 96 | 1,077 | ['transformers', 'pytorch', 'tensorboard', 'whisper', 'automatic-speech-recognition', 'whisper-event', 'hf-asr-leaderboard', 'generated_from_multiple_datasets', 'mn', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-20 05:20:02 | mozilla-foundation/common_voice_11_0_google/fleurs_bayartsogt/ulaanbal-v0_bayartsogt/youtube-mongolian-v1 | apache-2.0 | WhisperForConditionalGeneration | null | unknown |
| hkunlp/instructor-base | hkunlp | 2023-01-21 06:31:16 | 11,212 | 578,675 | ['sentence-transformers', 'pytorch', 't5', 'text-embedding', 'embeddings', 'information-retrieval', 'beir', 'text-classification', 'language-model', 'text-clustering', 'text-semantic-similarity', 'text-evaluation', 'prompt-retrieval', 'text-reranking', 'feature-extraction', 'sentence-similarity', 'transformers', 'English', 'Sentence Similarity', 'natural_questions', 'ms_marco', 'fever', 'hotpot_qa', 'mteb', 'en', 'model-index', 'autotrain_compatible', 'text-generation-inference'] | sentence-similarity | 2022-12-20 05:59:40 | unknown | apache-2.0 | T5EncoderModel | null | unknown |
| DrishtiSharma/whisper-large-v2-hausa | DrishtiSharma | 2022-12-21 07:32:21 | 43 | 805 | ['transformers', 'pytorch', 'tensorboard', 'whisper', 'automatic-speech-recognition', 'whisper-event', 'generated_from_trainer', 'ha', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-20 14:18:00 | mozilla-foundation/common_voice_11_0 | apache-2.0 | WhisperForConditionalGeneration | null | unknown |
| faisalraza/layoutlm-invoices | faisalraza | 2022-12-20 16:23:25 | 311 | 3,446 | ['transformers', 'pytorch', 'layoutlm', 'document-question-answering', 'pdf', 'invoices', 'en', 'endpoints_compatible'] | document-question-answering | 2022-12-20 14:28:28 | unknown | cc-by-nc-sa-4.0 | LayoutLMForQuestionAnswering | null | unknown |
| bitextor/bicleaner-ai-full-en-sq | bitextor | 2023-01-10 10:10:15 | 29 | 420 | ['transformers', 'tf', 'xlm-roberta', 'bicleaner-ai', 'en', 'sq', 'multilingual', 'endpoints_compatible'] | unknown | 2022-12-20 16:47:22 | unknown | gpl-3.0 | XLMRBicleanerAI | null | unknown |
| vasista22/whisper-telugu-base | vasista22 | 2023-04-24 20:30:34 | 144 | 15,333 | ['transformers', 'pytorch', 'jax', 'whisper', 'automatic-speech-recognition', 'whisper-event', 'te', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-20 19:08:58 | unknown | apache-2.0 | WhisperForConditionalGeneration | null | unknown |
| DrishtiSharma/whisper-large-v2-kazakh | DrishtiSharma | 2022-12-20 21:04:01 | 128 | 590 | ['transformers', 'pytorch', 'tensorboard', 'whisper', 'automatic-speech-recognition', 'whisper-event', 'generated_from_trainer', 'kk', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-20 20:09:25 | mozilla-foundation/common_voice_11_0 | apache-2.0 | WhisperForConditionalGeneration | null | unknown |
| Fake-Person/Friends_stolen_Mixes | Fake-Person | 2023-01-07 04:04:50 | 0 | 0 | [] | unknown | 2022-12-20 23:27:34 | unknown | unknown | unknown | null | unknown |
| steja/whisper-small-yoruba | steja | 2022-12-21 06:14:26 | 122 | 7,144 | ['transformers', 'pytorch', 'whisper', 'automatic-speech-recognition', 'whisper-event', 'generated_from_trainer', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-21 06:06:09 | google/fleurs | apache-2.0 | WhisperForConditionalGeneration | null | unknown |
| steja/whisper-large-persian | steja | 2024-11-28 10:36:18 | 973 | 3,893 | ['transformers', 'pytorch', 'whisper', 'automatic-speech-recognition', 'whisper-event', 'generated_from_trainer', 'fa', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-21 10:50:21 | mozilla-foundation/common_voice_11_0 | apache-2.0 | WhisperForConditionalGeneration | null | unknown |
| Hosioka/Baka-Diffusion | Hosioka | 2023-12-18 11:38:16 | 465 | 7,682 | ['diffusers', 'text-to-image', 'stable-Diffusion', 'stable-diffusion-diffusers', 'safetensors', 'en', 'my', 'autotrain_compatible', 'endpoints_compatible'] | text-to-image | 2022-12-21 20:52:12 | unknown | cc-by-nc-4.0 | unknown | null | unknown |
| sallyanndelucia/resnet_weather_model | sallyanndelucia | 2022-12-22 05:31:54 | 279 | 669 | ['transformers', 'pytorch', 'tensorboard', 'resnet', 'image-classification', 'generated_from_trainer', 'model-index', 'autotrain_compatible', 'endpoints_compatible'] | image-classification | 2022-12-22 02:47:49 | imagefolder | unknown | ResNetForImageClassification | null | unknown |
| timm/vit_base_patch8_224.augreg2_in21k_ft_in1k | timm | 2025-01-21 19:14:27 | 182,542 | 1,469,483 | ['timm', 'pytorch', 'safetensors', 'image-classification', 'transformers'] | image-classification | 2022-12-22 07:22:31 | imagenet-1k_imagenet-21k | apache-2.0 | unknown | null | unknown |
| timm/vit_base_patch16_224.augreg_in21k | timm | 2025-01-21 19:14:36 | 100,221 | 2,823,359 | ['timm', 'pytorch', 'safetensors', 'image-classification', 'transformers'] | image-classification | 2022-12-22 07:25:23 | imagenet-21k | apache-2.0 | unknown | null | unknown |
| timm/vit_base_patch32_224.augreg_in21k_ft_in1k | timm | 2025-01-21 19:15:28 | 7,844 | 247,314 | ['timm', 'pytorch', 'safetensors', 'image-classification', 'transformers'] | image-classification | 2022-12-22 07:33:47 | imagenet-1k_imagenet-21k | apache-2.0 | unknown | null | unknown |
| timm/vit_small_patch16_224.dino | timm | 2025-01-21 21:13:37 | 20,380 | 677,680 | ['timm', 'pytorch', 'safetensors', 'image-feature-extraction', 'transformers'] | image-feature-extraction | 2022-12-22 07:54:20 | unknown | apache-2.0 | unknown | null | unknown |
| wavymulder/modelshoot | wavymulder | 2023-05-05 21:59:00 | 347 | 43,713 | ['diffusers', 'safetensors', 'stable-diffusion', 'stable-diffusion-diffusers', 'text-to-image', 'en', 'autotrain_compatible', 'endpoints_compatible'] | text-to-image | 2022-12-22 20:56:53 | unknown | creativeml-openrail-m | unknown | null | unknown |
| u-haru/log-inspector | u-haru | 2023-01-22 16:06:23 | 126 | 795 | ['transformers', 'pytorch', 'bert', 'text-classification', 'en', 'autotrain_compatible', 'endpoints_compatible'] | text-classification | 2022-12-23 00:31:14 | unknown | apache-2.0 | BertForSequenceClassification | null | unknown |
| timm/vit_small_r26_s32_384.augreg_in21k_ft_in1k | timm | 2025-01-21 19:17:10 | 1,332 | 45,018 | ['timm', 'pytorch', 'safetensors', 'image-classification', 'transformers'] | image-classification | 2022-12-23 00:34:03 | imagenet-1k_imagenet-21k | apache-2.0 | unknown | null | unknown |
| wavymulder/portraitplus | wavymulder | 2023-05-05 21:59:07 | 281,658 | 1,361,321 | ['diffusers', 'safetensors', 'stable-diffusion', 'stable-diffusion-diffusers', 'text-to-image', 'en', 'autotrain_compatible', 'endpoints_compatible'] | text-to-image | 2022-12-23 16:04:26 | unknown | creativeml-openrail-m | unknown | null | unknown |
| philschmid/flan-t5-base-samsum | philschmid | 2022-12-23 19:32:18 | 15,964 | 619,704 | ['transformers', 'pytorch', 'tensorboard', 't5', 'text2text-generation', 'generated_from_trainer', 'model-index', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text2text-generation | 2022-12-23 19:26:30 | samsum | apache-2.0 | T5ForConditionalGeneration | null | unknown |
| anujn/forkedanythingv32 | anujn | 2022-12-23 22:49:57 | 0 | 0 | [] | unknown | 2022-12-23 22:48:46 | unknown | unknown | unknown | null | unknown |
| camenduru/plushies-pt | camenduru | 2023-01-27 18:01:27 | 54 | 2,738 | ['diffusers', 'TPU', 'JAX', 'Flax', 'stable-diffusion', 'text-to-image', 'en', 'autotrain_compatible'] | text-to-image | 2022-12-24 07:02:01 | camenduru/plushies | openrail | unknown | null | unknown |
| ProGamerGov/winter-cat-embeddings-sd-v2-1 | ProGamerGov | 2022-12-25 01:33:26 | 0 | 0 | ['stable-diffusion', 'text-to-image'] | text-to-image | 2022-12-24 21:36:38 | unknown | creativeml-openrail-m | unknown | null | unknown |
| admruul/anything-v3.0 | admruul | 2023-05-16 09:40:18 | 11,211 | 120,735 | ['diffusers', 'safetensors', 'stable-diffusion', 'stable-diffusion-diffusers', 'text-to-image', 'en', 'autotrain_compatible', 'endpoints_compatible'] | text-to-image | 2022-12-25 09:53:44 | unknown | creativeml-openrail-m | unknown | null | unknown |
| acheong08/nsfw | acheong08 | 2022-12-25 11:48:41 | 0 | 0 | [] | unknown | 2022-12-25 11:22:49 | unknown | unlicense | unknown | null | unknown |
| emre/whisper-medium-turkish-2 | emre | 2023-09-11 12:50:53 | 352 | 9,865 | ['transformers', 'pytorch', 'whisper', 'automatic-speech-recognition', 'tr', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-25 12:35:23 | mozilla-foundation/common_voice_11_0 | apache-2.0 | WhisperForConditionalGeneration | ['openai/whisper-medium'] | finetune |
| dream-textures/texture-diffusion | dream-textures | 2023-01-27 13:15:28 | 1,161 | 50,983 | ['diffusers', 'text-to-image', 'stable-diffusion', 'autotrain_compatible', 'endpoints_compatible'] | text-to-image | 2022-12-25 21:29:56 | unknown | openrail++ | unknown | null | unknown |
| intfloat/e5-large | intfloat | 2023-08-07 04:59:49 | 13,943 | 544,619 | ['sentence-transformers', 'pytorch', 'safetensors', 'bert', 'mteb', 'Sentence Transformers', 'sentence-similarity', 'en', 'model-index', 'autotrain_compatible', 'text-embeddings-inference', 'endpoints_compatible'] | sentence-similarity | 2022-12-26 06:03:12 | unknown | mit | BertModel | null | unknown |
| Siddu0406/article-generator | Siddu0406 | 2024-11-27 17:34:24 | 171 | 1,625 | ['transformers', 'pytorch', 'tensorboard', 'gpt2', 'text-generation', 'generated_from_trainer', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text-generation | 2022-12-26 07:58:52 | unknown | mit | GPT2LMHeadModel | null | unknown |
| facebook/mask2former-swin-small-coco-instance | facebook | 2023-09-07 15:39:26 | 8,785 | 170,750 | ['transformers', 'pytorch', 'safetensors', 'mask2former', 'vision', 'image-segmentation', 'endpoints_compatible'] | image-segmentation | 2022-12-26 13:27:43 | coco | other | Mask2FormerForUniversalSegmentation | null | unknown |
| keras-sd/diffusion-model-tflite | keras-sd | 2023-01-24 14:38:49 | 0 | 0 | ['keras', 'tflite', 'diffusion model', 'stable diffusion', 'v1.4', 'text-to-image'] | text-to-image | 2022-12-27 00:41:53 | unknown | apache-2.0 | unknown | null | unknown |
| clu-ling/whisper-large-v2-spanish | clu-ling | 2023-03-03 21:23:03 | 264 | 2,106 | ['transformers', 'pytorch', 'tensorboard', 'whisper', 'automatic-speech-recognition', 'generated_from_trainer', 'endpoints_compatible'] | automatic-speech-recognition | 2022-12-27 01:02:03 | unknown | apache-2.0 | WhisperForConditionalGeneration | null | unknown |
| syaimu/7th_Layer | syaimu | 2023-10-06 13:47:47 | 0 | 0 | [] | unknown | 2022-12-27 06:10:12 | unknown | other | unknown | null | unknown |
| thiros/YuzuLemonTea | thiros | 2023-01-22 02:34:40 | 0 | 0 | ['stable-diffusion', 'text-to-image'] | text-to-image | 2022-12-27 07:55:19 | unknown | cc0-1.0 | unknown | null | unknown |
| MohamedRashad/diffusion_fashion | MohamedRashad | 2023-06-15 13:42:47 | 308 | 18,487 | ['diffusers', 'safetensors', 'stable-diffusion', 'text-to-image', 'fashion', 'diffusion', 'openjourney', 'en', 'autotrain_compatible', 'endpoints_compatible'] | text-to-image | 2022-12-27 18:16:03 | unknown | openrail | unknown | null | unknown |
| arpanghoshal/EkmanClassifier | arpanghoshal | 2022-12-28 00:26:21 | 291 | 167,442 | ['transformers', 'pytorch', 'bert', 'text-classification', 'en', 'endpoints_compatible'] | text-classification | 2022-12-27 23:58:27 | go_emotions | mit | BertForMultiLabelClassification | null | unknown |
| bongsoo/bert-small-kor-v1 | bongsoo | 2022-12-28 00:34:34 | 141 | 461 | ['transformers', 'pytorch', 'bert', 'pretraining', 'fill-mask', 'en', 'ko', 'endpoints_compatible'] | fill-mask | 2022-12-28 00:26:15 | unknown | apache-2.0 | BertForPreTraining | null | unknown |
| DucHaiten/DucHaitenAIart | DucHaiten | 2024-05-17 16:50:42 | 1,544 | 138,510 | ['diffusers', 'safetensors', 'stable-diffusion', 'text-to-image', 'image-to-image', 'en', 'autotrain_compatible', 'endpoints_compatible'] | text-to-image | 2022-12-28 10:37:53 | unknown | creativeml-openrail-m | unknown | null | unknown |
| heegyu/kogpt-j-350m | heegyu | 2023-03-05 08:25:08 | 215 | 27,785 | ['transformers', 'pytorch', 'jax', 'gptj', 'text-generation', 'ko', 'autotrain_compatible', 'endpoints_compatible'] | text-generation | 2022-12-28 12:47:33 | heegyu/korean-petitions_heegyu/namuwiki-extracted_heegyu/kowikitext | mit | GPTJForCausalLM | null | unknown |
| Kuaaangwen/Setfit-few-shot-classifier | Kuaaangwen | 2022-12-29 02:33:54 | 34 | 698 | ['sentence-transformers', 'pytorch', 'mpnet', 'feature-extraction', 'sentence-similarity', 'transformers', 'autotrain_compatible', 'endpoints_compatible'] | sentence-similarity | 2022-12-28 15:02:47 | unknown | unknown | MPNetModel | null | unknown |
| Jean-Baptiste/roberta-large-financial-news-sentiment-en | Jean-Baptiste | 2023-03-22 02:27:09 | 3,447 | 61,755 | ['transformers', 'pytorch', 'onnx', 'safetensors', 'roberta', 'text-classification', 'financial', 'stocks', 'sentiment', 'en', 'autotrain_compatible', 'endpoints_compatible'] | text-classification | 2022-12-28 16:07:04 | Jean-Baptiste/financial_news_sentiment_mixte_with_phrasebank_75 | mit | RobertaForSequenceClassification | null | unknown |
| keremberke/yolov5n-football | keremberke | 2022-12-30 20:49:33 | 386 | 18,004 | ['yolov5', 'tensorboard', 'yolo', 'vision', 'object-detection', 'pytorch', 'model-index'] | object-detection | 2022-12-28 20:39:20 | keremberke/football-object-detection | unknown | unknown | null | unknown |
| m-a-p/MERT-v0 | m-a-p | 2023-06-02 13:49:06 | 6,476 | 53,975 | ['transformers', 'pytorch', 'mert_model', 'feature-extraction', 'music', 'custom_code'] | feature-extraction | 2022-12-29 03:01:08 | unknown | cc-by-nc-4.0 | MERTModel | null | unknown |
| nicky007/stable-diffusion-logo-fine-tuned | nicky007 | 2023-01-14 11:26:53 | 2,071 | 92,416 | ['diffusers', 'text-to-image', 'stable-diffusion', 'autotrain_compatible', 'endpoints_compatible'] | text-to-image | 2022-12-29 12:47:32 | unknown | creativeml-openrail-m | unknown | null | unknown |
| SirVeggie/nixeu_embeddings | SirVeggie | 2022-12-29 20:48:05 | 0 | 0 | [] | unknown | 2022-12-29 16:53:07 | unknown | creativeml-openrail-m | unknown | null | unknown |
| keremberke/yolov5n-construction-safety | keremberke | 2022-12-30 20:48:33 | 366 | 20,437 | ['yolov5', 'tensorboard', 'yolo', 'vision', 'object-detection', 'pytorch', 'model-index'] | object-detection | 2022-12-29 20:42:37 | keremberke/construction-safety-object-detection | unknown | unknown | null | unknown |
| keremberke/yolov5s-construction-safety | keremberke | 2022-12-30 20:48:25 | 623 | 16,979 | ['yolov5', 'tensorboard', 'yolo', 'vision', 'object-detection', 'pytorch', 'model-index'] | object-detection | 2022-12-29 21:36:32 | keremberke/construction-safety-object-detection | unknown | unknown | null | unknown |
| Akumetsu971/SD_Anime_Futuristic_Armor | Akumetsu971 | 2022-12-30 05:53:39 | 0 | 0 | ['stable-diffusion', 'text-to-image', 'en'] | text-to-image | 2022-12-30 00:30:40 | unknown | creativeml-openrail-m | unknown | null | unknown |
| nanashisan/DBLora | nanashisan | 2023-01-02 11:27:22 | 0 | 0 | [] | unknown | 2022-12-30 12:27:53 | unknown | unknown | unknown | null | unknown |
| shailja/fine-tuned-codegen-16B-Verilog | shailja | 2023-08-30 16:57:18 | 192 | 4,024 | ['transformers', 'pytorch', 'codegen', 'text-generation', 'code', 'model-index', 'autotrain_compatible', 'endpoints_compatible'] | text-generation | 2022-12-30 16:46:58 | shailja/Verilog_GitHub | bigcode-openrail-m | CodeGenForCausalLM | null | unknown |
| anujn/model21 | anujn | 2022-12-31 01:45:23 | 0 | 0 | [] | unknown | 2022-12-31 01:45:22 | unknown | unknown | unknown | null | unknown |
| Sygil/Sygil-Diffusion | Sygil | 2023-09-10 01:46:55 | 2,534 | 43,016 | ['diffusers', 'stable-diffusion', 'sygil-diffusion', 'text-to-image', 'sygil-devs', 'finetune', 'stable-diffusion-1.5', 'en', 'ja', 'es', 'zh', 'autotrain_compatible', 'endpoints_compatible'] | text-to-image | 2022-12-31 12:09:07 | unknown | openrail++ | unknown | ['runwayml/stable-diffusion-v1-5'] | unknown_annotated |
| keremberke/yolov5s-license-plate | keremberke | 2023-01-01 09:59:41 | 747 | 21,525 | ['yolov5', 'tensorboard', 'yolo', 'vision', 'object-detection', 'pytorch', 'model-index'] | object-detection | 2023-01-01 03:56:07 | keremberke/license-plate-object-detection | unknown | unknown | null | unknown |
| keremberke/yolov5m-license-plate | keremberke | 2023-01-01 09:59:05 | 14,305 | 309,744 | ['yolov5', 'tensorboard', 'yolo', 'vision', 'object-detection', 'pytorch', 'model-index'] | object-detection | 2023-01-01 06:01:39 | keremberke/license-plate-object-detection | unknown | unknown | null | unknown |
| RavenOnur/Sign-Language | RavenOnur | 2023-01-01 21:08:11 | 626 | 2,946 | ['transformers', 'pytorch', 'tensorboard', 'vit', 'image-classification', 'huggingpics', 'model-index', 'autotrain_compatible', 'endpoints_compatible'] | image-classification | 2023-01-01 21:07:48 | unknown | unknown | ViTForImageClassification | null | unknown |
| microsoft/git-large-coco | microsoft | 2023-06-26 19:50:47 | 17,869 | 594,389 | ['transformers', 'pytorch', 'safetensors', 'git', 'image-text-to-text', 'vision', 'image-captioning', 'image-to-text', 'en', 'endpoints_compatible'] | image-to-text | 2023-01-02 10:44:21 | unknown | mit | GitForCausalLM | null | unknown |
| microsoft/git-base-msrvtt-qa | microsoft | 2024-04-04 07:37:26 | 93 | 6,962 | ['transformers', 'pytorch', 'safetensors', 'git', 'image-text-to-text', 'vision', 'image-to-text', 'en'] | image-to-text | 2023-01-02 10:55:17 | unknown | mit | GitForCausalLM | null | unknown |
| coltekin/berturk-tremo | coltekin | 2025-02-10 11:57:55 | 143 | 446 | ['transformers', 'pytorch', 'tf', 'safetensors', 'bert', 'text-classification', 'tr', 'autotrain_compatible', 'endpoints_compatible'] | text-classification | 2023-01-02 13:20:43 | tremo | unknown | BertForSequenceClassification | null | unknown |
| laion/CLIP-ViT-B-16-laion2B-s34B-b88K | laion | 2023-04-19 18:55:10 | 1,130,114 | 70,255,959 | ['open_clip', 'safetensors', 'zero-shot-image-classification'] | zero-shot-image-classification | 2023-01-03 00:16:18 | unknown | mit | unknown | null | unknown |
| laion/CLIP-convnext_base_w_320-laion_aesthetic-s13B-b82K | laion | 2023-04-18 22:03:39 | 10,608 | 131,065 | ['open_clip', 'tensorboard', 'safetensors', 'clip', 'zero-shot-image-classification'] | zero-shot-image-classification | 2023-01-03 00:25:48 | unknown | mit | unknown | null | unknown |
| BM-K/KoChatBART | BM-K | 2023-04-26 04:21:11 | 604 | 1,809 | ['transformers', 'pytorch', 'safetensors', 'bart', 'text2text-generation', 'autotrain_compatible', 'endpoints_compatible'] | text2text-generation | 2023-01-03 04:30:17 | unknown | unknown | BartForConditionalGeneration | null | unknown |
| ckpt/stable-diffusion-2-1 | ckpt | 2023-01-26 13:25:00 | 0 | 0 | [] | unknown | 2023-01-03 09:53:37 | unknown | unknown | unknown | null | unknown |
| puzzz21/sci-sentiment-classify | puzzz21 | 2024-01-21 17:44:50 | 117 | 1,022 | ['transformers', 'pytorch', 'safetensors', 'bert', 'text-classification', 'en', 'autotrain_compatible', 'endpoints_compatible'] | text-classification | 2023-01-03 11:10:00 | unknown | unknown | BertForSequenceClassification | null | unknown |
| facebook/mask2former-swin-large-cityscapes-panoptic | facebook | 2023-09-07 18:57:04 | 1,829 | 40,236 | ['transformers', 'pytorch', 'safetensors', 'mask2former', 'vision', 'image-segmentation', 'endpoints_compatible'] | image-segmentation | 2023-01-03 11:42:47 | coco | other | Mask2FormerForUniversalSegmentation | null | unknown |
| dreamlike-art/dreamlike-photoreal-2.0 | dreamlike-art | 2023-03-13 01:05:06 | 26,091 | 2,645,542 | ['diffusers', 'safetensors', 'stable-diffusion', 'stable-diffusion-diffusers', 'text-to-image', 'photorealistic', 'photoreal', 'en', 'autotrain_compatible'] | text-to-image | 2023-01-04 03:01:40 | unknown | other | unknown | null | unknown |
| tomopari/test | tomopari | 2024-11-10 16:30:18 | 0 | 432 | [] | unknown | 2023-01-04 06:28:50 | unknown | openrail | unknown | null | unknown |
| nguyendangsonlam/lsg-ner-vietnamese-electra-base-1024 | nguyendangsonlam | 2023-01-04 07:52:33 | 217 | 919 | ['transformers', 'pytorch', 'electra', 'token-classification', 'named-entity-recognition', 'custom_code', 'vi', 'autotrain_compatible'] | token-classification | 2023-01-04 06:50:13 | unknown | unknown | LSGElectraForTokenClassification | null | unknown |
| UchihaMadara/ABSA-MaskedLM-BERTbase-finetuned-sentihood | UchihaMadara | 2023-01-05 03:14:58 | 209 | 336 | ['transformers', 'pytorch', 'tensorboard', 'bert', 'fill-mask', 'generated_from_trainer', 'autotrain_compatible', 'endpoints_compatible'] | fill-mask | 2023-01-04 13:51:43 | unknown | apache-2.0 | BertForMaskedLM | null | unknown |
| keremberke/yolov5s-smoke | keremberke | 2023-01-04 22:14:32 | 337 | 15,545 | ['yolov5', 'tensorboard', 'yolo', 'vision', 'object-detection', 'pytorch', 'model-index'] | object-detection | 2023-01-04 22:13:56 | keremberke/smoke-object-detection | unknown | unknown | null | unknown |