id | author | lastModified | downloads | downloadsAllTime | tags | pipeline_tag | createdAt | dataset | license | architectures | base_model | base_model_relation |
---|---|---|---|---|---|---|---|---|---|---|---|---|
nlpaueb/bert-base-uncased-contracts | nlpaueb | 2022-04-28 14:43:56 | 35,114 | 677,936 | ['transformers', 'pytorch', 'tf', 'jax', 'bert', 'legal', 'fill-mask', 'en', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | unknown | cc-by-sa-4.0 | unknown | null | unknown |
nlpaueb/bert-base-uncased-echr | nlpaueb | 2022-04-28 14:44:26 | 164 | 13,577 | ['transformers', 'pytorch', 'tf', 'jax', 'bert', 'legal', 'fill-mask', 'en', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | unknown | cc-by-sa-4.0 | unknown | null | unknown |
nlpaueb/bert-base-uncased-eurlex | nlpaueb | 2022-04-28 14:44:15 | 360 | 160,645 | ['transformers', 'pytorch', 'tf', 'jax', 'bert', 'legal', 'fill-mask', 'en', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | unknown | cc-by-sa-4.0 | unknown | null | unknown |
nlpaueb/legal-bert-small-uncased | nlpaueb | 2022-04-28 14:43:32 | 27,390 | 7,938,613 | ['transformers', 'pytorch', 'tf', 'jax', 'bert', 'legal', 'fill-mask', 'en', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | unknown | cc-by-sa-4.0 | unknown | null | unknown |
nlpaueb/sec-bert-base | nlpaueb | 2022-04-28 14:46:31 | 757 | 86,109 | ['transformers', 'pytorch', 'tf', 'bert', 'pretraining', 'finance', 'financial', 'fill-mask', 'en', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | unknown | cc-by-sa-4.0 | BertForPreTraining | null | unknown |
nlpaueb/sec-bert-num | nlpaueb | 2022-04-28 14:46:16 | 171 | 2,249 | ['transformers', 'pytorch', 'tf', 'bert', 'pretraining', 'finance', 'financial', 'fill-mask', 'en', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | unknown | cc-by-sa-4.0 | BertForPreTraining | null | unknown |
nlpaueb/sec-bert-shape | nlpaueb | 2022-04-28 14:46:51 | 207 | 5,601 | ['transformers', 'pytorch', 'tf', 'bert', 'pretraining', 'finance', 'financial', 'fill-mask', 'en', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | unknown | cc-by-sa-4.0 | BertForPreTraining | null | unknown |
nlpconnect/vit-gpt2-image-captioning | nlpconnect | 2023-02-27 15:00:09 | 1,714,464 | 47,602,343 | ['transformers', 'pytorch', 'vision-encoder-decoder', 'image-text-to-text', 'image-to-text', 'image-captioning', 'endpoints_compatible'] | image-to-text | 2022-03-02 23:29:05 | unknown | apache-2.0 | VisionEncoderDecoderModel | null | unknown |
novakat/nerkor-cars-onpp-hubert | novakat | 2023-09-14 19:28:41 | 4,909 | 260,657 | ['transformers', 'pytorch', 'safetensors', 'bert', 'token-classification', 'hu', 'autotrain_compatible', 'endpoints_compatible'] | token-classification | 2022-03-02 23:29:05 | unknown | gpl | BertForTokenClassification | null | unknown |
nreimers/MiniLM-L6-H384-uncased | nreimers | 2021-08-30 20:05:29 | 1,325 | 523,806 | ['transformers', 'pytorch', 'jax', 'bert', 'feature-extraction', 'endpoints_compatible'] | feature-extraction | 2022-03-02 23:29:05 | unknown | mit | BertModel | ['microsoft/MiniLM-L12-H384-uncased'] | unknown_annotated |
nreimers/mMiniLMv2-L6-H384-distilled-from-XLMR-Large | nreimers | 2021-06-20 19:03:02 | 437 | 155,052 | ['transformers', 'pytorch', 'xlm-roberta', 'fill-mask', 'autotrain_compatible', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | unknown | unknown | XLMRobertaForMaskedLM | null | unknown |
nsi319/legal-led-base-16384 | nsi319 | 2021-03-01 12:33:48 | 723 | 34,858 | ['transformers', 'pytorch', 'led', 'text2text-generation', 'summarization', 'en', 'autotrain_compatible'] | summarization | 2022-03-02 23:29:05 | unknown | mit | LEDForConditionalGeneration | null | unknown |
ntu-spml/distilhubert | ntu-spml | 2023-07-24 18:30:45 | 5,498 | 7,155,028 | ['transformers', 'pytorch', 'safetensors', 'hubert', 'feature-extraction', 'speech', 'en', 'endpoints_compatible'] | feature-extraction | 2022-03-02 23:29:05 | librispeech_asr | apache-2.0 | HubertModel | null | source |
nvidia/mit-b0 | nvidia | 2023-11-15 07:49:03 | 84,346 | 2,181,371 | ['transformers', 'pytorch', 'tf', 'segformer', 'image-classification', 'vision', 'autotrain_compatible', 'endpoints_compatible'] | image-classification | 2022-03-02 23:29:05 | imagenet_1k | other | SegformerForImageClassification | null | unknown |
nvidia/segformer-b0-finetuned-cityscapes-1024-1024 | nvidia | 2022-08-08 13:43:30 | 6,285 | 69,414 | ['transformers', 'pytorch', 'tf', 'segformer', 'vision', 'image-segmentation', 'endpoints_compatible'] | image-segmentation | 2022-03-02 23:29:05 | cityscapes | other | SegformerForSemanticSegmentation | null | unknown |
nvidia/segformer-b1-finetuned-cityscapes-1024-1024 | nvidia | 2022-08-09 11:33:04 | 10,922 | 230,456 | ['transformers', 'pytorch', 'tf', 'segformer', 'vision', 'image-segmentation', 'endpoints_compatible'] | image-segmentation | 2022-03-02 23:29:05 | cityscapes | other | SegformerForSemanticSegmentation | null | unknown |
obi/deid_roberta_i2b2 | obi | 2025-02-23 03:36:26 | 588,074 | 12,084,051 | ['transformers', 'pytorch', 'safetensors', 'roberta', 'token-classification', 'deidentification', 'medical notes', 'ehr', 'phi', 'en', 'autotrain_compatible', 'endpoints_compatible'] | token-classification | 2022-03-02 23:29:05 | I2B2 | mit | RobertaForTokenClassification | null | unknown |
oliverguhr/fullstop-punctuation-multilang-large | oliverguhr | 2023-11-16 09:35:35 | 317,211 | 10,032,982 | ['transformers', 'pytorch', 'tf', 'onnx', 'safetensors', 'xlm-roberta', 'token-classification', 'punctuation prediction', 'punctuation', 'en', 'de', 'fr', 'it', 'multilingual', 'autotrain_compatible', 'endpoints_compatible'] | token-classification | 2022-03-02 23:29:05 | wmt/europarl | mit | XLMRobertaForTokenClassification | null | unknown |
openai/clip-vit-base-patch16 | openai | 2022-10-04 09:42:28 | 4,594,375 | 187,773,162 | ['transformers', 'pytorch', 'jax', 'clip', 'zero-shot-image-classification', 'vision', 'endpoints_compatible'] | zero-shot-image-classification | 2022-03-02 23:29:05 | unknown | unknown | CLIPModel | null | source |
opensource/extract_names | opensource | 2021-01-19 04:59:04 | 86 | 4,178 | ['transformers', 'tf', 'xlm-roberta', 'token-classification', 'Extract Names', 'multilingual', 'autotrain_compatible', 'endpoints_compatible'] | token-classification | 2022-03-02 23:29:05 | unknown | apache-2.0 | XLMRobertaForTokenClassification | null | unknown |
ozcangundes/mt5-small-turkish-summarization | ozcangundes | 2021-09-22 09:31:27 | 862 | 16,007 | ['transformers', 'pytorch', 'jax', 'mt5', 'text2text-generation', 'summarization', 'tr', 'autotrain_compatible', 'endpoints_compatible'] | summarization | 2022-03-02 23:29:05 | MLSUM | mit | MT5ForConditionalGeneration | null | unknown |
patrickvonplaten/bert2bert_cnn_daily_mail | patrickvonplaten | 2022-06-25 17:06:49 | 1,239 | 39,262 | ['transformers', 'pytorch', 'encoder-decoder', 'text2text-generation', 'summarization', 'en', 'model-index', 'autotrain_compatible', 'endpoints_compatible'] | summarization | 2022-03-02 23:29:05 | cnn_dailymail | apache-2.0 | EncoderDecoderModel | null | unknown |
pdelobelle/robbert-v2-dutch-base | pdelobelle | 2023-12-04 15:14:12 | 16,267 | 1,692,365 | ['transformers', 'pytorch', 'tf', 'jax', 'safetensors', 'roberta', 'fill-mask', 'Dutch', 'Flemish', 'RoBERTa', 'RobBERT', 'BERT', 'nl', 'autotrain_compatible', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | oscar_dbrd_lassy-ud_europarl-mono_conll2002 | mit | RobertaForMaskedLM | null | unknown |
peterhsu/marian-finetuned-kde4-en-to-zh_TW | peterhsu | 2022-02-28 11:26:43 | 347 | 1,023 | ['transformers', 'pytorch', 'tensorboard', 'marian', 'text2text-generation', 'translation', 'generated_from_trainer', 'model-index', 'autotrain_compatible', 'endpoints_compatible'] | translation | 2022-03-02 23:29:05 | kde4 | apache-2.0 | MarianMTModel | null | unknown |
philschmid/bart-large-cnn-samsum | philschmid | 2022-12-23 19:48:57 | 197,506 | 20,759,846 | ['transformers', 'pytorch', 'bart', 'text2text-generation', 'sagemaker', 'summarization', 'en', 'model-index', 'autotrain_compatible', 'endpoints_compatible'] | summarization | 2022-03-02 23:29:05 | samsum | mit | BartForConditionalGeneration | null | unknown |
phueb/BabyBERTa-1 | phueb | 2022-01-18 14:44:02 | 202 | 14,805 | ['transformers', 'pytorch', 'roberta', 'fill-mask', 'BabyBERTa', 'en', 'autotrain_compatible', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | CHILDES | unknown | RobertaForMaskedLM | null | unknown |
pierreguillou/bert-base-cased-squad-v1.1-portuguese | pierreguillou | 2022-01-04 09:57:53 | 4,463 | 224,328 | ['transformers', 'pytorch', 'tf', 'jax', 'bert', 'question-answering', 'bert-base', 'pt', 'endpoints_compatible'] | question-answering | 2022-03-02 23:29:05 | brWaC_squad_squad_v1_pt | mit | BertForQuestionAnswering | null | unknown |
pierreguillou/bert-large-cased-squad-v1.1-portuguese | pierreguillou | 2022-01-04 09:57:00 | 775 | 156,566 | ['transformers', 'pytorch', 'tf', 'bert', 'question-answering', 'bert-large', 'pt', 'endpoints_compatible'] | question-answering | 2022-03-02 23:29:05 | brWaC_squad_squad_v1_pt | mit | BertForQuestionAnswering | null | unknown |
pierreguillou/gpt2-small-portuguese | pierreguillou | 2021-05-23 10:59:56 | 7,787 | 174,123 | ['transformers', 'pytorch', 'tf', 'jax', 'gpt2', 'text-generation', 'pt', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text-generation | 2022-03-02 23:29:05 | wikipedia | mit | GPT2LMHeadModel | null | unknown |
pierreguillou/ner-bert-base-cased-pt-lenerbr | pierreguillou | 2021-12-29 19:32:39 | 25,728 | 1,479,840 | ['transformers', 'pytorch', 'bert', 'token-classification', 'generated_from_trainer', 'pt', 'model-index', 'autotrain_compatible', 'endpoints_compatible'] | token-classification | 2022-03-02 23:29:05 | lener_br | unknown | BertForTokenClassification | null | unknown |
ponteineptique/latin-classical-small | ponteineptique | 2020-04-24 16:05:14 | 142 | 660 | ['transformers', 'pytorch', 'xlm', 'feature-extraction', 'endpoints_compatible'] | feature-extraction | 2022-03-02 23:29:05 | unknown | unknown | XLMModel | null | unknown |
prajjwal1/bert-medium | prajjwal1 | 2021-10-27 18:30:16 | 92,533 | 1,417,851 | ['transformers', 'pytorch', 'BERT', 'MNLI', 'NLI', 'transformer', 'pre-training', 'en', 'endpoints_compatible'] | unknown | 2022-03-02 23:29:05 | unknown | mit | unknown | null | unknown |
princeton-nlp/sup-simcse-bert-base-uncased | princeton-nlp | 2021-05-20 02:54:31 | 468,423 | 7,086,093 | ['transformers', 'pytorch', 'jax', 'bert', 'feature-extraction', 'endpoints_compatible'] | feature-extraction | 2022-03-02 23:29:05 | unknown | unknown | BertModel | null | unknown |
princeton-nlp/sup-simcse-roberta-large | princeton-nlp | 2022-11-11 20:04:02 | 88,758 | 24,470,221 | ['transformers', 'pytorch', 'jax', 'roberta', 'feature-extraction', 'endpoints_compatible'] | feature-extraction | 2022-03-02 23:29:05 | unknown | unknown | RobertaModel | null | unknown |
prithivida/parrot_paraphraser_on_T5 | prithivida | 2021-05-18 07:53:27 | 944,963 | 28,181,828 | ['transformers', 'pytorch', 't5', 'text2text-generation', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text2text-generation | 2022-03-02 23:29:05 | unknown | unknown | T5ForConditionalGeneration | null | unknown |
pszemraj/led-base-book-summary | pszemraj | 2023-11-28 19:11:49 | 3,605 | 233,478 | ['transformers', 'pytorch', 'safetensors', 'led', 'text2text-generation', 'summarization', 'summary', 'longformer', 'booksum', 'long-document', 'long-form', 'model-index', 'autotrain_compatible', 'endpoints_compatible'] | summarization | 2022-03-02 23:29:05 | kmfoda/booksum | apache-2.0_bsd-3-clause | LEDForConditionalGeneration | null | unknown |
pszemraj/led-large-book-summary | pszemraj | 2023-11-28 19:16:42 | 4,355 | 395,612 | ['transformers', 'pytorch', 'safetensors', 'led', 'text2text-generation', 'summarization', 'summary', 'longformer', 'booksum', 'long-document', 'long-form', 'en', 'model-index', 'autotrain_compatible', 'endpoints_compatible'] | summarization | 2022-03-02 23:29:05 | kmfoda/booksum | apache-2.0_bsd-3-clause | LEDForConditionalGeneration | null | unknown |
pucpr/biobertpt-all | pucpr | 2022-11-27 16:54:34 | 2,564 | 245,407 | ['transformers', 'pytorch', 'tf', 'jax', 'bert', 'fill-mask', 'pt', 'autotrain_compatible', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | unknown | unknown | BertForMaskedLM | null | unknown |
pucpr/biobertpt-clin | pucpr | 2022-11-27 16:55:14 | 120 | 15,061 | ['transformers', 'pytorch', 'tf', 'jax', 'bert', 'fill-mask', 'pt', 'autotrain_compatible', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | unknown | unknown | BertForMaskedLM | null | unknown |
pucpr/clinicalnerpt-medical | pucpr | 2021-10-13 09:28:28 | 162 | 11,281 | ['transformers', 'pytorch', 'bert', 'token-classification', 'pt', 'autotrain_compatible', 'endpoints_compatible'] | token-classification | 2022-03-02 23:29:05 | SemClinBr | unknown | BertForTokenClassification | null | unknown |
pucpr/gpt2-bio-pt | pucpr | 2022-11-23 12:33:37 | 160 | 8,691 | ['transformers', 'pytorch', 'tf', 'jax', 'gpt2', 'text-generation', 'pt', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text-generation | 2022-03-02 23:29:05 | unknown | unknown | GPT2LMHeadModel | null | unknown |
pyannote/embedding | pyannote | 2024-05-10 19:36:51 | 377,351 | 15,124,561 | ['pyannote-audio', 'pytorch', 'tensorboard', 'pyannote', 'pyannote-audio-model', 'audio', 'voice', 'speech', 'speaker', 'speaker-recognition', 'speaker-verification', 'speaker-identification', 'speaker-embedding'] | unknown | 2022-03-02 23:29:05 | voxceleb | mit | unknown | null | unknown |
pysentimiento/robertuito-base-uncased | pysentimiento | 2023-05-22 14:06:59 | 454 | 35,428 | ['transformers', 'pytorch', 'safetensors', 'roberta', 'fill-mask', 'twitter', 'masked-lm', 'es', 'autotrain_compatible', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | unknown | unknown | RobertaForMaskedLM | null | unknown |
pysentimiento/robertuito-hate-speech | pysentimiento | 2023-02-20 19:04:44 | 5,693 | 886,958 | ['pysentimiento', 'pytorch', 'roberta', 'twitter', 'hate-speech', 'es'] | unknown | 2022-03-02 23:29:05 | unknown | unknown | RobertaForSequenceClassification | null | unknown |
qanastek/pos-french | qanastek | 2024-04-09 15:42:08 | 179 | 95,198 | ['flair', 'pytorch', 'token-classification', 'sequence-tagger-model', 'fr'] | token-classification | 2022-03-02 23:29:05 | qanastek/ANTILLES | unknown | unknown | null | unknown |
raynardj/wenyanwen-ancient-translate-to-modern | raynardj | 2022-01-08 04:22:30 | 514 | 5,293 | ['transformers', 'pytorch', 'encoder-decoder', 'text2text-generation', 'translation', '古文', '文言文', 'ancient', 'classical', 'zh', 'autotrain_compatible', 'endpoints_compatible'] | translation | 2022-03-02 23:29:05 | unknown | unknown | EncoderDecoderModel | null | unknown |
raynardj/wenyanwen-chinese-translate-to-ancient | raynardj | 2021-11-29 14:42:25 | 2,473 | 17,082 | ['transformers', 'pytorch', 'encoder-decoder', 'text2text-generation', 'translation', '文言文', 'ancient', 'zh', 'autotrain_compatible', 'endpoints_compatible'] | translation | 2022-03-02 23:29:05 | unknown | apache-2.0 | EncoderDecoderModel | null | unknown |
recobo/agri-sentence-transformer | recobo | 2022-10-15 21:04:01 | 179 | 1,684 | ['sentence-transformers', 'pytorch', 'bert', 'feature-extraction', 'sentence-similarity', 'transformers', 'en', 'autotrain_compatible', 'text-embeddings-inference', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | unknown | unknown | BertModel | null | unknown |
rinna/japanese-gpt-1b | rinna | 2024-07-20 07:52:31 | 2,272 | 732,621 | ['transformers', 'pytorch', 'safetensors', 'gpt2', 'text-generation', 'gpt', 'lm', 'nlp', 'ja', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text-generation | 2022-03-02 23:29:05 | cc100_wikipedia_c4 | mit | GPT2LMHeadModel | null | unknown |
rinna/japanese-gpt2-medium | rinna | 2024-07-20 07:50:47 | 7,762 | 602,756 | ['transformers', 'pytorch', 'tf', 'jax', 'safetensors', 'gpt2', 'text-generation', 'lm', 'nlp', 'ja', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text-generation | 2022-03-02 23:29:05 | cc100_wikipedia | mit | GPT2LMHeadModel | null | unknown |
saattrupdan/nbailab-base-ner-scandi | saattrupdan | 2024-12-16 16:21:04 | 73,213 | 440,778 | ['transformers', 'pytorch', 'safetensors', 'bert', 'token-classification', 'da', 'no', 'nb', 'nn', 'sv', 'fo', 'is', 'autotrain_compatible', 'endpoints_compatible'] | token-classification | 2022-03-02 23:29:05 | dane_norne_wikiann_suc3.0 | mit | BertForTokenClassification | null | unknown |
Saibo-creator/legal-roberta-base | Saibo-creator | 2021-08-31 15:36:35 | 243 | 120,022 | ['transformers', 'pytorch', 'tf', 'jax', 'roberta', 'fill-mask', 'legal', 'en', 'autotrain_compatible', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | unknown | apache-2.0 | RobertaForMaskedLM | null | unknown |
sakares/wav2vec2-large-xlsr-thai-demo | sakares | 2021-03-22 07:15:18 | 952 | 23,628 | ['transformers', 'pytorch', 'wav2vec2', 'automatic-speech-recognition', 'audio', 'speech', 'xlsr-fine-tuning-week', 'th', 'model-index', 'endpoints_compatible'] | automatic-speech-recognition | 2022-03-02 23:29:05 | common_voice | apache-2.0 | Wav2Vec2ForCTC | null | unknown |
Ashishkr/query_wellformedness_score | Ashishkr | 2024-03-30 11:51:12 | 979 | 73,426,480 | ['transformers', 'pytorch', 'jax', 'safetensors', 'roberta', 'text-classification', 'autotrain_compatible'] | text-classification | 2022-03-02 23:29:05 | google_wellformed_query | apache-2.0 | RobertaForSequenceClassification | null | unknown |
savasy/bert-base-turkish-ner-cased | savasy | 2024-02-01 09:21:04 | 98,434 | 284,627 | ['transformers', 'pytorch', 'jax', 'safetensors', 'bert', 'token-classification', 'tr', 'autotrain_compatible', 'endpoints_compatible'] | token-classification | 2022-03-02 23:29:05 | unknown | unknown | BertForTokenClassification | null | unknown |
savasy/bert-base-turkish-sentiment-cased | savasy | 2024-02-01 09:18:53 | 6,640 | 1,128,334 | ['transformers', 'pytorch', 'jax', 'safetensors', 'bert', 'text-classification', 'tr', 'autotrain_compatible', 'endpoints_compatible'] | text-classification | 2022-03-02 23:29:05 | unknown | unknown | BertForSequenceClassification | null | unknown |
savasy/bert-base-turkish-squad | savasy | 2024-02-01 09:17:38 | 1,318 | 117,651 | ['transformers', 'pytorch', 'jax', 'safetensors', 'bert', 'question-answering', 'tr', 'endpoints_compatible'] | question-answering | 2022-03-02 23:29:05 | unknown | unknown | BertForQuestionAnswering | null | unknown |
ai-forever/Real-ESRGAN | ai-forever | 2022-09-25 13:17:44 | 0 | 0 | ['PyTorch', 'ru', 'en'] | unknown | 2022-03-02 23:29:05 | unknown | unknown | unknown | null | unknown |
ai-forever/ruT5-base | ai-forever | 2023-12-11 17:45:55 | 5,939 | 70,625 | ['transformers', 'pytorch', 't5', 'text2text-generation', 'PyTorch', 'Transformers', 'ru', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text2text-generation | 2022-03-02 23:29:05 | unknown | unknown | T5ForConditionalGeneration | null | unknown |
ai-forever/ruT5-large | ai-forever | 2023-12-28 09:01:22 | 3,025 | 31,382 | ['transformers', 'pytorch', 't5', 'text2text-generation', 'PyTorch', 'Transformers', 'ru', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text2text-generation | 2022-03-02 23:29:05 | unknown | unknown | T5ForConditionalGeneration | null | unknown |
ai-forever/rugpt3large_based_on_gpt2 | ai-forever | 2023-12-04 14:43:51 | 16,893 | 406,205 | ['transformers', 'pytorch', 'jax', 'gpt2', 'text-generation', 'PyTorch', 'Transformers', 'ru', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text-generation | 2022-03-02 23:29:05 | unknown | unknown | GPT2LMHeadModel | null | unknown |
ai-forever/rugpt3medium_based_on_gpt2 | ai-forever | 2023-12-05 08:43:34 | 5,798 | 233,529 | ['transformers', 'pytorch', 'gpt2', 'text-generation', 'PyTorch', 'Transformers', 'ru', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text-generation | 2022-03-02 23:29:05 | unknown | unknown | GPT2LMHeadModel | null | unknown |
ai-forever/rugpt3small_based_on_gpt2 | ai-forever | 2023-12-05 09:19:39 | 29,624 | 346,849 | ['transformers', 'pytorch', 'jax', 'gpt2', 'text-generation', 'PyTorch', 'Transformers', 'ru', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text-generation | 2022-03-02 23:29:05 | unknown | unknown | GPT2LMHeadModel | null | unknown |
ai-forever/sbert_large_mt_nlu_ru | ai-forever | 2024-06-13 07:29:25 | 3,309 | 402,631 | ['transformers', 'safetensors', 'bert', 'feature-extraction', 'PyTorch', 'Transformers', 'ru', 'text-embeddings-inference', 'endpoints_compatible'] | feature-extraction | 2022-03-02 23:29:05 | unknown | unknown | BertModel | null | unknown |
ai-forever/sbert_large_nlu_ru | ai-forever | 2024-10-07 11:09:02 | 870,047 | 12,808,600 | ['transformers', 'pytorch', 'safetensors', 'bert', 'feature-extraction', 'PyTorch', 'Transformers', 'ru', 'text-embeddings-inference', 'endpoints_compatible'] | feature-extraction | 2022-03-02 23:29:05 | unknown | unknown | BertModel | null | unknown |
sdadas/polish-roberta-large-v2 | sdadas | 2024-04-23 15:32:52 | 1,250 | 57,057 | ['transformers', 'pytorch', 'safetensors', 'roberta', 'fill-mask', 'pl', 'autotrain_compatible', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | unknown | lgpl-3.0 | RobertaForMaskedLM | null | unknown |
seidel/plsum-base-ptt5 | seidel | 2022-04-28 16:59:49 | 133 | 1,345 | ['transformers', 'pytorch', 't5', 'text2text-generation', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text2text-generation | 2022-03-02 23:29:05 | unknown | unknown | T5ForConditionalGeneration | null | unknown |
sentence-transformers/LaBSE | sentence-transformers | 2025-03-06 13:40:25 | 588,072 | 9,223,941 | ['sentence-transformers', 'pytorch', 'tf', 'jax', 'onnx', 'safetensors', 'bert', 'feature-extraction', 'sentence-similarity', 'multilingual', 'af', 'sq', 'am', 'ar', 'hy', 'as', 'az', 'eu', 'be', 'bn', 'bs', 'bg', 'my', 'ca', 'ceb', 'zh', 'co', 'hr', 'cs', 'da', 'nl', 'en', 'eo', 'et', 'fi', 'fr', 'fy', 'gl', 'ka', 'de', 'el', 'gu', 'ht', 'ha', 'haw', 'he', 'hi', 'hmn', 'hu', 'is', 'ig', 'id', 'ga', 'it', 'ja', 'jv', 'kn', 'kk', 'km', 'rw', 'ko', 'ku', 'ky', 'lo', 'la', 'lv', 'lt', 'lb', 'mk', 'mg', 'ms', 'ml', 'mt', 'mi', 'mr', 'mn', 'ne', 'no', 'ny', 'or', 'fa', 'pl', 'pt', 'pa', 'ro', 'ru', 'sm', 'gd', 'sr', 'st', 'sn', 'si', 'sk', 'sl', 'so', 'es', 'su', 'sw', 'sv', 'tl', 'tg', 'ta', 'tt', 'te', 'th', 'bo', 'tr', 'tk', 'ug', 'uk', 'ur', 'uz', 'vi', 'cy', 'wo', 'xh', 'yi', 'yo', 'zu', 'autotrain_compatible', 'text-embeddings-inference', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | unknown | apache-2.0 | BertModel | null | unknown |
sentence-transformers/all-mpnet-base-v1 | sentence-transformers | 2025-03-06 13:19:48 | 14,790 | 1,238,396 | ['sentence-transformers', 'pytorch', 'onnx', 'safetensors', 'openvino', 'mpnet', 'fill-mask', 'feature-extraction', 'sentence-similarity', 'transformers', 'en', 'autotrain_compatible', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | unknown | apache-2.0 | MPNetForMaskedLM | null | unknown |
sentence-transformers/average_word_embeddings_glove.6B.300d | sentence-transformers | 2025-03-06 13:40:43 | 0 | 0 | ['sentence-transformers', 'feature-extraction', 'sentence-similarity', 'autotrain_compatible', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | unknown | apache-2.0 | unknown | null | unknown |
sentence-transformers/clip-ViT-B-32-multilingual-v1 | sentence-transformers | 2024-11-05 16:39:09 | 123,936 | 3,135,782 | ['sentence-transformers', 'pytorch', 'tf', 'onnx', 'safetensors', 'openvino', 'distilbert', 'feature-extraction', 'sentence-similarity', 'multilingual', 'autotrain_compatible', 'text-embeddings-inference', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | unknown | apache-2.0 | DistilBertModel | null | unknown |
sentence-transformers/clip-ViT-B-32 | sentence-transformers | 2024-02-12 10:14:11 | 0 | 0 | ['sentence-transformers', 'feature-extraction', 'sentence-similarity', 'autotrain_compatible', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | unknown | unknown | unknown | null | unknown |
sentence-transformers/gtr-t5-base | sentence-transformers | 2025-03-06 13:39:57 | 158,312 | 1,045,731 | ['sentence-transformers', 'pytorch', 'safetensors', 't5', 'feature-extraction', 'sentence-similarity', 'en', 'autotrain_compatible', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | unknown | apache-2.0 | T5EncoderModel | null | unknown |
sentence-transformers/gtr-t5-xxl | sentence-transformers | 2025-03-06 13:38:08 | 979 | 35,509 | ['sentence-transformers', 'pytorch', 'safetensors', 't5', 'feature-extraction', 'sentence-similarity', 'en', 'autotrain_compatible', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | unknown | apache-2.0 | T5EncoderModel | null | unknown |
sentence-transformers/msmarco-MiniLM-L6-v3 | sentence-transformers | 2025-03-06 13:17:32 | 23,725 | 2,036,563 | ['sentence-transformers', 'pytorch', 'tf', 'jax', 'onnx', 'safetensors', 'openvino', 'bert', 'feature-extraction', 'sentence-similarity', 'transformers', 'autotrain_compatible', 'text-embeddings-inference', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | unknown | apache-2.0 | BertModel | null | unknown |
sentence-transformers/multi-qa-MiniLM-L6-cos-v1 | sentence-transformers | 2024-11-05 17:17:16 | 17,035,956 | 79,722,672 | ['sentence-transformers', 'pytorch', 'tf', 'onnx', 'safetensors', 'openvino', 'bert', 'feature-extraction', 'sentence-similarity', 'transformers', 'en', 'autotrain_compatible', 'text-embeddings-inference', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | flax-sentence-embeddings/stackexchange_xml_ms_marco_gooaq_yahoo_answers_topics_search_qa_eli5_natural_questions_trivia_qa_embedding-data/QQP_embedding-data/PAQ_pairs_embedding-data/Amazon-QA_embedding-data/WikiAnswers | unknown | BertModel | ['nreimers/MiniLM-L6-H384-uncased'] | unknown_annotated |
sentence-transformers/multi-qa-distilbert-cos-v1 | sentence-transformers | 2024-11-05 17:18:43 | 107,566 | 3,781,387 | ['sentence-transformers', 'pytorch', 'onnx', 'safetensors', 'openvino', 'distilbert', 'fill-mask', 'feature-extraction', 'sentence-similarity', 'transformers', 'en', 'autotrain_compatible', 'text-embeddings-inference', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | flax-sentence-embeddings/stackexchange_xml_ms_marco_gooaq_yahoo_answers_topics_search_qa_eli5_natural_questions_trivia_qa_embedding-data/QQP_embedding-data/PAQ_pairs_embedding-data/Amazon-QA_embedding-data/WikiAnswers | unknown | DistilBertForMaskedLM | null | unknown |
sentence-transformers/multi-qa-mpnet-base-cos-v1 | sentence-transformers | 2024-11-05 17:21:14 | 192,406 | 4,242,743 | ['sentence-transformers', 'pytorch', 'onnx', 'safetensors', 'openvino', 'mpnet', 'fill-mask', 'feature-extraction', 'sentence-similarity', 'transformers', 'en', 'autotrain_compatible', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | unknown | unknown | MPNetForMaskedLM | null | unknown |
sentence-transformers/multi-qa-mpnet-base-dot-v1 | sentence-transformers | 2024-11-05 15:15:42 | 1,693,788 | 28,435,893 | ['sentence-transformers', 'pytorch', 'onnx', 'safetensors', 'openvino', 'mpnet', 'fill-mask', 'feature-extraction', 'sentence-similarity', 'transformers', 'en', 'autotrain_compatible', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | flax-sentence-embeddings/stackexchange_xml_ms_marco_gooaq_yahoo_answers_topics_search_qa_eli5_natural_questions_trivia_qa_embedding-data/QQP_embedding-data/PAQ_pairs_embedding-data/Amazon-QA_embedding-data/WikiAnswers | unknown | MPNetForMaskedLM | null | unknown |
sentence-transformers/paraphrase-MiniLM-L6-v2 | sentence-transformers | 2025-03-06 13:26:35 | 7,458,416 | 92,938,663 | ['sentence-transformers', 'pytorch', 'tf', 'onnx', 'safetensors', 'openvino', 'bert', 'feature-extraction', 'sentence-similarity', 'transformers', 'autotrain_compatible', 'text-embeddings-inference', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | unknown | apache-2.0 | BertModel | null | unknown |
sentence-transformers/sentence-t5-base | sentence-transformers | 2025-03-06 13:39:02 | 485,213 | 3,160,724 | ['sentence-transformers', 'pytorch', 'rust', 'safetensors', 't5', 'feature-extraction', 'sentence-similarity', 'en', 'autotrain_compatible', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | unknown | apache-2.0 | T5EncoderModel | null | unknown |
sentence-transformers/sentence-t5-large | sentence-transformers | 2025-03-06 13:37:56 | 19,555 | 1,009,256 | ['sentence-transformers', 'pytorch', 'onnx', 'safetensors', 't5', 'feature-extraction', 'sentence-similarity', 'en', 'autotrain_compatible', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | unknown | apache-2.0 | T5EncoderModel | null | unknown |
sentence-transformers/stsb-roberta-base-v2 | sentence-transformers | 2025-03-06 13:36:30 | 14,415 | 691,785 | ['sentence-transformers', 'pytorch', 'tf', 'jax', 'onnx', 'safetensors', 'openvino', 'roberta', 'feature-extraction', 'sentence-similarity', 'transformers', 'autotrain_compatible', 'text-embeddings-inference', 'endpoints_compatible'] | sentence-similarity | 2022-03-02 23:29:05 | unknown | apache-2.0 | RobertaModel | null | unknown |
seokho/gpt2-emotion | seokho | 2021-07-06 06:07:33 | 172 | 2,144 | ['transformers', 'pytorch', 'gpt2', 'text-generation', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text-generation | 2022-03-02 23:29:05 | unknown | unknown | GPT2LMHeadModel | null | unknown |
serdarakyol/interpress-turkish-news-classification | serdarakyol | 2022-03-10 16:06:33 | 217 | 3,262 | ['transformers', 'pytorch', 'tf', 'jax', 'bert', 'text-classification', 'tr', 'autotrain_compatible', 'endpoints_compatible'] | text-classification | 2022-03-02 23:29:05 | unknown | unknown | BertForSequenceClassification | null | unknown |
seyonec/ChemBERTa-zinc-base-v1 | seyonec | 2021-05-20 20:55:33 | 255,206 | 3,686,376 | ['transformers', 'pytorch', 'jax', 'roberta', 'fill-mask', 'chemistry', 'autotrain_compatible', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | unknown | unknown | RobertaForMaskedLM | null | unknown |
seyonec/ChemBERTa_zinc250k_v2_40k | seyonec | 2021-05-20 20:57:42 | 772 | 183,895 | ['transformers', 'pytorch', 'jax', 'roberta', 'fill-mask', 'autotrain_compatible', 'endpoints_compatible'] | fill-mask | 2022-03-02 23:29:05 | unknown | unknown | RobertaForMaskedLM | null | unknown |
shahp7575/gpt2-horoscopes | shahp7575 | 2021-08-24 02:34:10 | 643 | 28,330 | ['transformers', 'pytorch', 'gpt2', 'text-generation', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text-generation | 2022-03-02 23:29:05 | unknown | unknown | GPT2LMHeadModel | null | unknown |
shahrukhx01/bert-mini-finetune-question-detection | shahrukhx01 | 2023-03-29 22:00:48 | 7,686 | 1,227,014 | ['transformers', 'pytorch', 'safetensors', 'bert', 'text-classification', 'neural-search-query-classification', 'neural-search', 'en', 'autotrain_compatible', 'endpoints_compatible'] | text-classification | 2022-03-02 23:29:05 | unknown | unknown | BertForSequenceClassification | null | unknown |
shahrukhx01/question-vs-statement-classifier | shahrukhx01 | 2023-03-29 22:01:12 | 65,309 | 1,976,779 | ['transformers', 'pytorch', 'safetensors', 'bert', 'text-classification', 'neural-search-query-classification', 'neural-search', 'en', 'autotrain_compatible', 'endpoints_compatible'] | text-classification | 2022-03-02 23:29:05 | unknown | unknown | BertForSequenceClassification | null | unknown |
shibing624/code-autocomplete-distilgpt2-python | shibing624 | 2024-02-19 09:34:30 | 346 | 376,473 | ['transformers', 'pytorch', 'safetensors', 'gpt2', 'text-generation', 'code', 'autocomplete', 'en', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text-generation | 2022-03-02 23:29:05 | unknown | apache-2.0 | GPT2LMHeadModel | null | unknown |
shibing624/macbert4csc-base-chinese | shibing624 | 2024-09-27 06:55:18 | 32,677 | 1,507,210 | ['transformers', 'pytorch', 'onnx', 'safetensors', 'bert', 'fill-mask', 'zh', 'pycorrector', 'text2text-generation', 'autotrain_compatible', 'endpoints_compatible'] | text2text-generation | 2022-03-02 23:29:05 | shibing624/CSC | apache-2.0 | BertForMaskedLM | null | unknown |
siebert/sentiment-roberta-large-english | siebert | 2024-06-11 16:40:11 | 178,937 | 6,704,022 | ['transformers', 'pytorch', 'tf', 'jax', 'roberta', 'text-classification', 'sentiment', 'twitter', 'reviews', 'siebert', 'en', 'autotrain_compatible', 'endpoints_compatible'] | text-classification | 2022-03-02 23:29:05 | unknown | unknown | RobertaForSequenceClassification | null | unknown |
skt/ko-gpt-trinity-1.2B-v0.5 | skt | 2021-09-23 16:29:25 | 1,841 | 169,114 | ['transformers', 'pytorch', 'gpt2', 'text-generation', 'gpt3', 'ko', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text-generation | 2022-03-02 23:29:05 | unknown | cc-by-nc-sa-4.0 | GPT2LMHeadModel | null | unknown |
skt/kobert-base-v1 | skt | 2021-07-01 07:16:05 | 80,878 | 5,202,484 | ['transformers', 'pytorch', 'bert', 'feature-extraction', 'endpoints_compatible'] | feature-extraction | 2022-03-02 23:29:05 | unknown | unknown | BertModel | null | unknown |
skt/kogpt2-base-v2 | skt | 2021-09-23 16:29:28 | 27,956 | 2,219,083 | ['transformers', 'pytorch', 'jax', 'gpt2', 'text-generation', 'ko', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible'] | text-generation | 2022-03-02 23:29:05 | unknown | cc-by-nc-sa-4.0 | GPT2LMHeadModel | null | unknown |
snunlp/KR-FinBert-SC | snunlp | 2022-04-28 05:07:18 | 16,011 | 970,724 | ['transformers', 'pytorch', 'bert', 'text-classification', 'ko', 'autotrain_compatible', 'endpoints_compatible'] | text-classification | 2022-03-02 23:29:05 | unknown | unknown | BertForSequenceClassification | null | unknown |
sonoisa/sentence-bert-base-ja-mean-tokens-v2 | sonoisa | 2024-04-17 11:39:38 | 253,911 | 8,266,890 | ['sentence-transformers', 'pytorch', 'safetensors', 'bert', 'sentence-bert', 'feature-extraction', 'sentence-similarity', 'ja', 'autotrain_compatible', 'endpoints_compatible'] | feature-extraction | 2022-03-02 23:29:05 | unknown | cc-by-sa-4.0 | BertModel | null | unknown |
sonoisa/t5-base-japanese | sonoisa | 2024-12-12 13:36:45 | 7,665 | 615,096 | ['transformers', 'pytorch', 'jax', 'safetensors', 't5', 'feature-extraction', 'text2text-generation', 'seq2seq', 'ja', 'text-generation-inference', 'endpoints_compatible'] | text2text-generation | 2022-03-02 23:29:05 | wikipedia_oscar_cc100 | cc-by-sa-4.0 | T5Model | null | unknown |
spacy/en_core_web_lg | spacy | 2023-11-21 08:13:57 | 265 | 17,215 | ['spacy', 'token-classification', 'en', 'model-index'] | token-classification | 2022-03-02 23:29:05 | unknown | mit | unknown | null | unknown |
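
For reference, a minimal sketch of how rows like the ones above could be queried once the table is exported to a flat file. The file name `model_metadata.csv` is a hypothetical export path (the viewer does not specify one); only the column names from the header row are taken from the table itself.

```python
import pandas as pd

# Hypothetical CSV export of the metadata table above; the path is an
# assumption, not something the page provides.
df = pd.read_csv("model_metadata.csv")

# Example query: token-classification models released under the MIT license,
# ranked by all-time downloads.
subset = (
    df[(df["pipeline_tag"] == "token-classification") & (df["license"] == "mit")]
    .sort_values("downloadsAllTime", ascending=False)
)
print(subset[["id", "author", "downloadsAllTime"]].head())
```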