model_id | downloads | likes | tags | created_at | architectures | model_type | num_attention_heads | vocab_size | pad_token_id | hidden_size | intermediate_size | num_hidden_layers | hidden_act | layer_norm_eps | max_position_embeddings | activation_function | rms_norm_eps | attention_probs_dropout_prob | hidden_dropout_prob | year | month | day |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Alibaba-NLP/gte-Qwen2-7B-instruct | 130,545 | 341 | ['sentence-transformers', 'safetensors', 'qwen2', 'text-generation', 'mteb', 'transformers', 'Qwen2', 'sentence-similarity', 'custom_code', 'arxiv:2308.03281', 'license:apache-2.0', 'model-index', 'autotrain_compatible', 'text-generation-inference', 'text-embeddings-inference', 'endpoints_compatible', 'region:us'] | "2024-06-15T11:24:21Z" | ['Qwen2ForCausalLM'] | qwen2 | 28 | 151,646 | null | 3,584 | 18,944 | 28 | silu | null | 131,072 | null | 0.000001 | null | null | 2,024 | 6 | 15 |
nvidia/Llama3-ChatQA-1.5-70B | 228 | 332 | ['transformers', 'safetensors', 'llama', 'text-generation', 'nvidia', 'chatqa-1.5', 'chatqa', 'llama-3', 'pytorch', 'conversational', 'en', 'arxiv:2401.10225', 'license:llama3', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-04-28T21:44:57Z" | ['LlamaForCausalLM'] | llama | 64 | 128,256 | null | 8,192 | 28,672 | 80 | silu | null | 8,192 | null | 0.00001 | null | null | 2,024 | 4 | 28 |
openai-community/gpt2-xl | 334,386 | 331 | ['transformers', 'pytorch', 'tf', 'jax', 'rust', 'safetensors', 'gpt2', 'text-generation', 'en', 'arxiv:1910.09700', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2022-03-02T23:29:04Z" | ['GPT2LMHeadModel'] | gpt2 | null | 50,257 | null | null | null | null | null | null | null | gelu_new | null | null | null | 2,022 | 3 | 2 |
Qwen/CodeQwen1.5-7B-Chat | 3,168 | 329 | ['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2309.16609', 'license:other', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-04-15T07:17:06Z" | ['Qwen2ForCausalLM'] | qwen2 | 32 | 92,416 | null | 4,096 | 13,440 | 32 | silu | null | 65,536 | null | 0.00001 | null | null | 2,024 | 4 | 15 |
Phind/Phind-CodeLlama-34B-v1 | 2,388 | 324 | ['transformers', 'pytorch', 'llama', 'text-generation', 'code llama', 'license:llama2', 'model-index', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-08-25T20:16:25Z" | ['LlamaForCausalLM'] | llama | 64 | 32,000 | null | 8,192 | 22,016 | 48 | silu | null | 16,384 | null | 0.00001 | null | null | 2,023 | 8 | 25 |
huggyllama/llama-7b | 171,501 | 320 | ['transformers', 'pytorch', 'safetensors', 'llama', 'text-generation', 'conversational', 'license:other', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-04-03T23:16:48Z" | ['LlamaForCausalLM'] | llama | 32 | 32,000 | 0 | 4,096 | 11,008 | 32 | silu | null | 2,048 | null | 0.000001 | null | null | 2,023 | 4 | 3 |
TheBloke/Wizard-Vicuna-13B-Uncensored-GPTQ | 4,024 | 317 | ['transformers', 'safetensors', 'llama', 'text-generation', 'uncensored', 'en', 'dataset:ehartford/wizard_vicuna_70k_unfiltered', 'base_model:cognitivecomputations/Wizard-Vicuna-13B-Uncensored', 'base_model:quantized:cognitivecomputations/Wizard-Vicuna-13B-Uncensored', 'license:other', 'autotrain_compatible', 'text-generation-inference', '4-bit', 'gptq', 'region:us'] | "2023-05-13T08:18:23Z" | ['LlamaForCausalLM'] | llama | 40 | 32,000 | 0 | 5,120 | 13,824 | 40 | silu | null | 2,048 | null | 0.000001 | null | null | 2,023 | 5 | 13 |
LinkSoul/Chinese-Llama-2-7b | 2,814 | 316 | ['transformers', 'pytorch', 'llama', 'text-generation', 'zh', 'en', 'dataset:LinkSoul/instruction_merge_set', 'license:openrail', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-07-20T08:23:15Z" | ['LlamaForCausalLM'] | llama | 32 | 32,000 | 0 | 4,096 | 11,008 | 32 | silu | null | 2,048 | null | 0.00001 | null | null | 2,023 | 7 | 20 |
codellama/CodeLlama-70b-hf | 825 | 316 | ['transformers', 'pytorch', 'safetensors', 'llama', 'text-generation', 'llama-2', 'code', 'arxiv:2308.12950', 'license:llama2', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-01-29T10:59:03Z" | ['LlamaForCausalLM'] | llama | 64 | 32,016 | null | 8,192 | 28,672 | 80 | silu | null | 16,384 | null | 0.00001 | null | null | 2,024 | 1 | 29 |
stabilityai/stablelm-3b-4e1t | 17,455 | 310 | ['transformers', 'safetensors', 'stablelm', 'text-generation', 'causal-lm', 'en', 'dataset:tiiuae/falcon-refinedweb', 'dataset:togethercomputer/RedPajama-Data-1T', 'dataset:CarperAI/pilev2-dev', 'dataset:bigcode/starcoderdata', 'dataset:allenai/peS2o', 'arxiv:2307.09288', 'arxiv:2104.09864', 'arxiv:2204.06745', 'arxiv:1607.06450', 'arxiv:1910.07467', 'arxiv:2101.00027', 'arxiv:2305.06161', 'arxiv:1910.02054', 'license:cc-by-sa-4.0', 'model-index', 'autotrain_compatible', 'endpoints_compatible', 'region:us'] | "2023-09-29T06:05:21Z" | ['StableLmForCausalLM'] | stablelm | 32 | 50,304 | null | 2,560 | 6,912 | 32 | silu | 0.00001 | 4,096 | null | null | null | null | 2,023 | 9 | 29 |
NousResearch/Nous-Hermes-Llama2-13b | 23,321 | 309 | ['transformers', 'pytorch', 'safetensors', 'llama', 'text-generation', 'llama-2', 'self-instruct', 'distillation', 'synthetic instruction', 'en', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-07-20T23:25:25Z" | ['LlamaForCausalLM'] | llama | 40 | 32,032 | 0 | 5,120 | 13,824 | 40 | silu | null | 4,096 | null | 0.00001 | null | null | 2,023 | 7 | 20 |
NousResearch/Hermes-3-Llama-3.1-8B | 107,485 | 302 | ['transformers', 'safetensors', 'llama', 'text-generation', 'Llama-3', 'instruct', 'finetune', 'chatml', 'gpt4', 'synthetic data', 'distillation', 'function calling', 'json mode', 'axolotl', 'roleplaying', 'chat', 'conversational', 'en', 'arxiv:2408.11857', 'base_model:meta-llama/Llama-3.1-8B', 'base_model:finetune:meta-llama/Llama-3.1-8B', 'license:llama3', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-07-28T06:00:57Z" | ['LlamaForCausalLM'] | llama | 32 | 128,256 | null | 4,096 | 14,336 | 32 | silu | null | 131,072 | null | 0.00001 | null | null | 2,024 | 7 | 28 |
TheBloke/dolphin-2.5-mixtral-8x7b-GGUF | 5,686 | 301 | ['transformers', 'gguf', 'mixtral', 'en', 'dataset:ehartford/dolphin', 'dataset:jondurbin/airoboros-2.2.1', 'dataset:ehartford/dolphin-coder', 'dataset:migtissera/Synthia-v1.3', 'dataset:teknium/openhermes', 'dataset:ise-uiuc/Magicoder-OSS-Instruct-75K', 'dataset:ise-uiuc/Magicoder-Evol-Instruct-110K', 'dataset:LDJnr/Pure-Dove', 'base_model:cognitivecomputations/dolphin-2.5-mixtral-8x7b', 'base_model:quantized:cognitivecomputations/dolphin-2.5-mixtral-8x7b', 'license:apache-2.0', 'region:us', 'conversational'] | "2023-12-14T10:34:15Z" | null | mixtral | null | null | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 12 | 14 |
togethercomputer/GPT-JT-6B-v1 | 6,277 | 300 | ['transformers', 'pytorch', 'gptj', 'text-generation', 'en', 'dataset:natural_instructions', 'dataset:the_pile', 'dataset:cot', 'dataset:Muennighoff/P3', 'arxiv:2205.05131', 'arxiv:2210.11399', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us'] | "2022-11-24T06:09:34Z" | ['GPTJForCausalLM'] | gptj | null | 50,400 | null | null | null | null | null | null | null | gelu_new | null | null | null | 2,022 | 11 | 24 |
openai-community/gpt2-large | 1,187,194 | 299 | ['transformers', 'pytorch', 'tf', 'jax', 'rust', 'onnx', 'safetensors', 'gpt2', 'text-generation', 'en', 'arxiv:1910.09700', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2022-03-02T23:29:04Z" | ['GPT2LMHeadModel'] | gpt2 | null | 50,257 | null | null | null | null | null | null | null | gelu_new | null | null | null | 2,022 | 3 | 2 |
succinctly/text2image-prompt-generator | 50,539 | 298 | ['transformers', 'pytorch', 'gpt2', 'text-generation', 'text2image', 'prompting', 'en', 'dataset:succinctly/midjourney-prompts', 'license:cc-by-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2022-07-21T22:17:43Z" | ['GPT2LMHeadModel'] | gpt2 | null | 50,257 | null | null | null | null | null | null | null | gelu_new | null | null | null | 2,022 | 7 | 21 |
nomic-ai/gpt4all-j | 2,713 | 297 | ['transformers', 'pytorch', 'safetensors', 'gptj', 'text-generation', 'en', 'dataset:nomic-ai/gpt4all-j-prompt-generations', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us'] | "2023-04-11T15:39:16Z" | ['GPTJForCausalLM'] | gptj | null | 50,400 | null | null | null | null | null | null | null | gelu_new | null | null | null | 2,023 | 4 | 11 |
NousResearch/DeepHermes-3-Llama-3-8B-Preview | 22,628 | 296 | ['transformers', 'safetensors', 'llama', 'text-generation', 'Llama-3', 'instruct', 'finetune', 'chatml', 'gpt4', 'synthetic data', 'distillation', 'function calling', 'json mode', 'axolotl', 'roleplaying', 'chat', 'reasoning', 'r1', 'vllm', 'conversational', 'en', 'base_model:meta-llama/Llama-3.1-8B', 'base_model:finetune:meta-llama/Llama-3.1-8B', 'license:llama3', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2025-02-12T04:09:45Z" | ['LlamaForCausalLM'] | llama | 32 | 128,256 | 128,004 | 4,096 | 14,336 | 32 | silu | null | 131,072 | null | 0.00001 | null | null | 2,025 | 2 | 12 |
EleutherAI/gpt-neo-1.3B | 270,507 | 290 | ['transformers', 'pytorch', 'jax', 'rust', 'safetensors', 'gpt_neo', 'text-generation', 'text generation', 'causal-lm', 'en', 'dataset:EleutherAI/pile', 'arxiv:2101.00027', 'license:mit', 'autotrain_compatible', 'endpoints_compatible', 'region:us'] | "2022-03-02T23:29:04Z" | ['GPTNeoForCausalLM'] | gpt_neo | null | 50,257 | null | 2,048 | null | null | null | null | 2,048 | gelu_new | null | null | null | 2,022 | 3 | 2 |
codellama/CodeLlama-34b-Instruct-hf | 15,938 | 283 | ['transformers', 'pytorch', 'safetensors', 'llama', 'text-generation', 'llama-2', 'conversational', 'code', 'arxiv:2308.12950', 'license:llama2', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-08-24T16:58:22Z" | ['LlamaForCausalLM'] | llama | 64 | 32,000 | null | 8,192 | 22,016 | 48 | silu | null | 16,384 | null | 0.00001 | null | null | 2,023 | 8 | 24 |
Qwen/Qwen2.5-14B-Instruct-1M | 56,257 | 283 | ['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2501.15383', 'base_model:Qwen/Qwen2.5-14B', 'base_model:finetune:Qwen/Qwen2.5-14B', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2025-01-23T13:23:24Z" | ['Qwen2ForCausalLM'] | qwen2 | 40 | 152,064 | null | 5,120 | 13,824 | 48 | silu | null | 1,010,000 | null | 0.00001 | null | null | 2,025 | 1 | 23 |
Qwen/Qwen2-VL-72B-Instruct | 169,897 | 281 | ['transformers', 'safetensors', 'qwen2_vl', 'image-text-to-text', 'multimodal', 'conversational', 'en', 'arxiv:2409.12191', 'arxiv:2308.12966', 'base_model:Qwen/Qwen2-VL-72B', 'base_model:finetune:Qwen/Qwen2-VL-72B', 'license:other', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-09-17T04:25:34Z" | ['Qwen2VLForConditionalGeneration'] | qwen2_vl | 64 | 152,064 | null | 8,192 | 29,568 | 80 | silu | null | 32,768 | null | 0.000001 | null | null | 2,024 | 9 | 17 |
OpenAssistant/oasst-sft-1-pythia-12b | 4,907 | 278 | ['transformers', 'pytorch', 'gpt_neox', 'text-generation', 'sft', 'en', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-03-09T16:47:26Z" | ['GPTNeoXForCausalLM'] | gpt_neox | 40 | 50,288 | null | 5,120 | 20,480 | 36 | gelu | 0.00001 | 2,048 | null | null | null | null | 2,023 | 3 | 9 |
ai-forever/ruGPT-3.5-13B | 3,208 | 278 | ['transformers', 'pytorch', 'gpt2', 'text-generation', 'gpt3', 'en', 'ru', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-05-02T12:53:36Z" | ['GPT2LMHeadModel'] | gpt2 | null | 50,272 | 0 | null | null | null | null | null | null | gelu_new | null | null | null | 2,023 | 5 | 2 |
microsoft/DialoGPT-large | 42,589 | 276 | ['transformers', 'pytorch', 'tf', 'jax', 'gpt2', 'text-generation', 'conversational', 'arxiv:1911.00536', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2022-03-02T23:29:05Z" | ['GPT2LMHeadModel'] | gpt2 | null | 50,257 | null | null | null | null | null | null | null | gelu_new | null | null | null | 2,022 | 3 | 2 |
SciPhi/Triplex | 1,108 | 276 | ['transformers', 'safetensors', 'gguf', 'phi3', 'text-generation', 'conversational', 'custom_code', 'license:cc-by-nc-sa-4.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-07-10T21:58:18Z" | ['Phi3ForCausalLM'] | phi3 | 32 | 32,064 | 32,000 | 3,072 | 8,192 | 32 | silu | null | 131,072 | null | 0.00001 | null | null | 2,024 | 7 | 10 |
IDEA-CCNL/Ziya-LLaMA-13B-v1 | 2,292 | 274 | ['transformers', 'pytorch', 'llama', 'text-generation', 'en', 'zh', 'arxiv:2210.08590', 'license:gpl-3.0', 'autotrain_compatible', 'text-generation-inference', 'region:us'] | "2023-05-16T10:32:58Z" | ['LlamaForCausalLM'] | llama | 40 | 39,424 | 0 | 5,120 | 13,824 | 40 | silu | null | 2,048 | null | 0.000001 | null | null | 2,023 | 5 | 16 |
FlagAlpha/Llama2-Chinese-13b-Chat | 2,215 | 274 | ['transformers', 'pytorch', 'llama', 'text-generation', 'question-answering', 'zh', 'en', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-07-24T12:10:46Z" | ['LlamaForCausalLM'] | llama | 40 | 32,000 | 0 | 5,120 | 13,824 | 40 | silu | null | 4,096 | null | 0.00001 | null | null | 2,023 | 7 | 24 |
Groq/Llama-3-Groq-8B-Tool-Use | 819 | 274 | ['transformers', 'safetensors', 'llama', 'text-generation', 'facebook', 'meta', 'pytorch', 'llama-3', 'groq', 'tool-use', 'function-calling', 'conversational', 'en', 'base_model:meta-llama/Meta-Llama-3-8B', 'base_model:finetune:meta-llama/Meta-Llama-3-8B', 'license:llama3', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-06-24T20:06:59Z" | ['LlamaForCausalLM'] | llama | 32 | 128,262 | null | 4,096 | 14,336 | 32 | silu | null | 8,192 | null | 0.00001 | null | null | 2,024 | 6 | 24 |
Salesforce/SFR-Embedding-Mistral | 31,352 | 272 | ['sentence-transformers', 'safetensors', 'mistral', 'feature-extraction', 'mteb', 'transformers', 'en', 'arxiv:2210.07316', 'arxiv:2310.06825', 'arxiv:2401.00368', 'arxiv:2104.08663', 'license:cc-by-nc-4.0', 'model-index', 'autotrain_compatible', 'text-generation-inference', 'text-embeddings-inference', 'endpoints_compatible', 'region:us'] | "2024-01-24T22:29:26Z" | ['MistralModel'] | mistral | 32 | 32,000 | 2 | 4,096 | 14,336 | 32 | silu | null | 32,768 | null | 0.00001 | null | null | 2,024 | 1 | 24 |
Qwen/Qwen2.5-7B-Instruct-1M | 329,624 | 268 | ['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2501.15383', 'base_model:Qwen/Qwen2.5-7B', 'base_model:finetune:Qwen/Qwen2.5-7B', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2025-01-23T13:36:32Z" | ['Qwen2ForCausalLM'] | qwen2 | 28 | 152,064 | null | 3,584 | 18,944 | 28 | silu | null | 1,010,000 | null | 0.00001 | null | null | 2,025 | 1 | 23 |
Qwen/Qwen2.5-VL-3B-Instruct | 1,035,947 | 267 | ['transformers', 'safetensors', 'qwen2_5_vl', 'image-text-to-text', 'multimodal', 'conversational', 'en', 'arxiv:2309.00071', 'arxiv:2409.12191', 'arxiv:2308.12966', 'base_model:Qwen/Qwen2.5-VL-3B-Instruct', 'base_model:finetune:Qwen/Qwen2.5-VL-3B-Instruct', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2025-01-26T09:25:35Z" | ['Qwen2_5_VLForConditionalGeneration'] | qwen2_5_vl | 16 | 151,936 | null | 2,048 | 11,008 | 36 | silu | null | 128,000 | null | 0.000001 | null | null | 2,025 | 1 | 26 |
TheBloke/guanaco-65B-GPTQ | 2,065 | 266 | ['transformers', 'safetensors', 'llama', 'text-generation', 'base_model:timdettmers/guanaco-65b', 'base_model:quantized:timdettmers/guanaco-65b', 'license:other', 'autotrain_compatible', 'text-generation-inference', '4-bit', 'gptq', 'region:us'] | "2023-05-25T16:14:59Z" | ['LlamaForCausalLM'] | llama | 64 | 32,000 | 0 | 8,192 | 22,016 | 80 | silu | null | 2,048 | null | 0.00001 | null | null | 2,023 | 5 | 25 |
Qwen/Qwen2.5-0.5B-Instruct | 1,009,630 | 266 | ['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2407.10671', 'base_model:Qwen/Qwen2.5-0.5B', 'base_model:finetune:Qwen/Qwen2.5-0.5B', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-09-16T11:52:46Z" | ['Qwen2ForCausalLM'] | qwen2 | 14 | 151,936 | null | 896 | 4,864 | 24 | silu | null | 32,768 | null | 0.000001 | null | null | 2,024 | 9 | 16 |
DiscoResearch/mixtral-7b-8expert | 47,842 | 264 | ['transformers', 'pytorch', 'mistral', 'text-generation', 'custom_code', 'en', 'fr', 'it', 'es', 'de', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-12-08T23:33:07Z" | ['MixtralForCausalLM'] | mistral | 32 | 32,000 | null | 4,096 | 14,336 | 32 | silu | null | 32,768 | null | 0.00001 | null | null | 2,023 | 12 | 8 |
TheBloke/Llama-2-7B-Chat-GPTQ | 21,972 | 263 | ['transformers', 'safetensors', 'llama', 'text-generation', 'facebook', 'meta', 'pytorch', 'llama-2', 'en', 'arxiv:2307.09288', 'base_model:meta-llama/Llama-2-7b-chat-hf', 'base_model:quantized:meta-llama/Llama-2-7b-chat-hf', 'license:llama2', 'autotrain_compatible', 'text-generation-inference', '4-bit', 'gptq', 'region:us'] | "2023-07-18T17:38:15Z" | ['LlamaForCausalLM'] | llama | 32 | 32,000 | 0 | 4,096 | 11,008 | 32 | silu | null | 4,096 | null | 0.000001 | null | null | 2,023 | 7 | 18 |
TheBloke/Llama-2-70B-Chat-GPTQ | 7,313 | 261 | ['transformers', 'safetensors', 'llama', 'text-generation', 'facebook', 'meta', 'pytorch', 'llama-2', 'en', 'arxiv:2307.09288', 'base_model:meta-llama/Llama-2-70b-chat-hf', 'base_model:quantized:meta-llama/Llama-2-70b-chat-hf', 'license:llama2', 'autotrain_compatible', 'text-generation-inference', '4-bit', 'gptq', 'region:us'] | "2023-07-18T23:33:13Z" | ['LlamaForCausalLM'] | llama | 64 | 32,000 | 0 | 8,192 | 28,672 | 80 | silu | null | 4,096 | null | 0.00001 | null | null | 2,023 | 7 | 18 |
shenzhi-wang/Llama3.1-8B-Chinese-Chat | 8,159 | 261 | ['transformers', 'safetensors', 'gguf', 'llama', 'text-generation', 'llama-factory', 'orpo', 'conversational', 'en', 'zh', 'arxiv:2403.07691', 'base_model:meta-llama/Llama-3.1-8B-Instruct', 'base_model:quantized:meta-llama/Llama-3.1-8B-Instruct', 'doi:10.57967/hf/2779', 'license:llama3.1', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-07-24T07:28:39Z" | ['LlamaForCausalLM'] | llama | 32 | 128,256 | null | 4,096 | 14,336 | 32 | silu | null | 131,072 | null | 0.00001 | null | null | 2,024 | 7 | 24 |
ai-forever/mGPT | 10,276 | 260 | ['transformers', 'pytorch', 'gpt2', 'text-generation', 'multilingual', 'PyTorch', 'Transformers', 'gpt3', 'Deepspeed', 'Megatron', 'ar', 'he', 'vi', 'id', 'jv', 'ms', 'tl', 'lv', 'lt', 'eu', 'ml', 'ta', 'te', 'hy', 'bn', 'mr', 'hi', 'ur', 'af', 'da', 'en', 'de', 'sv', 'fr', 'it', 'pt', 'ro', 'es', 'el', 'os', 'tg', 'fa', 'ja', 'ka', 'ko', 'th', 'bxr', 'xal', 'mn', 'sw', 'yo', 'be', 'bg', 'ru', 'uk', 'pl', 'my', 'uz', 'ba', 'kk', 'ky', 'tt', 'az', 'cv', 'tr', 'tk', 'tyv', 'sax', 'et', 'fi', 'hu', 'dataset:mc4', 'dataset:wikipedia', 'arxiv:2112.10668', 'arxiv:2204.07580', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2022-04-07T09:13:42Z" | ['GPT2LMHeadModel'] | gpt2 | null | 100,000 | 1 | null | null | null | null | null | null | gelu_new | null | null | null | 2,022 | 4 | 7 |
gradientai/Llama-3-8B-Instruct-262k | 32,477 | 258 | ['transformers', 'safetensors', 'llama', 'text-generation', 'meta', 'llama-3', 'conversational', 'en', 'arxiv:2309.00071', 'arxiv:2402.08268', 'arxiv:2305.14233', 'license:llama3', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-04-25T06:24:10Z" | ['LlamaForCausalLM'] | llama | 32 | 128,256 | null | 4,096 | 14,336 | 32 | silu | null | 262,144 | null | 0.00001 | null | null | 2,024 | 4 | 25 |
TheBloke/Mistral-7B-v0.1-GGUF | 16,485 | 257 | ['transformers', 'gguf', 'mistral', 'pretrained', 'text-generation', 'base_model:mistralai/Mistral-7B-v0.1', 'base_model:quantized:mistralai/Mistral-7B-v0.1', 'license:apache-2.0', 'region:us'] | "2023-09-27T16:17:24Z" | null | mistral | null | null | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 9 | 27 |
cognitivecomputations/dolphin-2.1-mistral-7b | 6,537 | 256 | ['transformers', 'pytorch', 'safetensors', 'mistral', 'text-generation', 'conversational', 'en', 'dataset:ehartford/dolphin', 'dataset:jondurbin/airoboros-2.2.1', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-10-11T04:42:16Z" | ['MistralForCausalLM'] | mistral | 32 | 32,002 | null | 4,096 | 14,336 | 32 | silu | null | 32,768 | null | 0.00001 | null | null | 2,023 | 10 | 11 |
teknium/OpenHermes-2-Mistral-7B | 6,629 | 255 | ['transformers', 'pytorch', 'mistral', 'text-generation', 'instruct', 'finetune', 'chatml', 'gpt4', 'synthetic data', 'distillation', 'conversational', 'en', 'base_model:mistralai/Mistral-7B-v0.1', 'base_model:finetune:mistralai/Mistral-7B-v0.1', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-10-12T20:07:15Z" | ['MistralForCausalLM'] | mistral | 32 | 32,002 | null | 4,096 | 14,336 | 32 | silu | null | 32,768 | null | 0.00001 | null | null | 2,023 | 10 | 12 |
stabilityai/stablelm-zephyr-3b | 12,312 | 253 | ['transformers', 'safetensors', 'stablelm', 'text-generation', 'causal-lm', 'conversational', 'en', 'dataset:HuggingFaceH4/ultrachat_200k', 'dataset:HuggingFaceH4/ultrafeedback_binarized', 'dataset:meta-math/MetaMathQA', 'dataset:WizardLM/WizardLM_evol_instruct_V2_196k', 'dataset:Intel/orca_dpo_pairs', 'arxiv:2305.18290', 'arxiv:2306.05685', 'license:other', 'model-index', 'autotrain_compatible', 'endpoints_compatible', 'region:us'] | "2023-11-21T16:25:10Z" | ['StableLmForCausalLM'] | stablelm | 32 | 50,304 | null | 2,560 | 6,912 | 32 | silu | 0.00001 | 4,096 | null | null | null | null | 2,023 | 11 | 21 |
Phind/Phind-CodeLlama-34B-Python-v1 | 2,297 | 252 | ['transformers', 'pytorch', 'llama', 'text-generation', 'code llama', 'license:llama2', 'model-index', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-08-25T20:33:09Z" | ['LlamaForCausalLM'] | llama | 64 | 32,000 | null | 8,192 | 22,016 | 48 | silu | null | 16,384 | null | 0.00001 | null | null | 2,023 | 8 | 25 |
openai-community/openai-gpt | 36,144 | 250 | ['transformers', 'pytorch', 'tf', 'rust', 'safetensors', 'openai-gpt', 'text-generation', 'en', 'arxiv:1705.11168', 'arxiv:1803.02324', 'arxiv:1910.09700', 'license:mit', 'autotrain_compatible', 'endpoints_compatible', 'region:us'] | "2022-03-02T23:29:04Z" | ['OpenAIGPTLMHeadModel'] | openai-gpt | null | 40,478 | null | null | null | null | null | null | null | null | null | null | null | 2,022 | 3 | 2 |
microsoft/biogpt | 48,459 | 245 | ['transformers', 'pytorch', 'biogpt', 'text-generation', 'en', 'license:mit', 'autotrain_compatible', 'endpoints_compatible', 'region:us'] | "2022-11-20T13:20:45Z" | ['BioGptForCausalLM'] | biogpt | 16 | 42,384 | 1 | 1,024 | 4,096 | 24 | gelu | 0 | 1,024 | null | null | 0.1 | 0.1 | 2,022 | 11 | 20 |
TheBloke/Mistral-7B-OpenOrca-GGUF | 7,884 | 245 | ['transformers', 'gguf', 'mistral', 'text-generation', 'en', 'dataset:Open-Orca/OpenOrca', 'arxiv:2306.02707', 'arxiv:2301.13688', 'base_model:Open-Orca/Mistral-7B-OpenOrca', 'base_model:quantized:Open-Orca/Mistral-7B-OpenOrca', 'license:apache-2.0', 'region:us'] | "2023-10-02T14:27:59Z" | null | mistral | null | null | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 10 | 2 |
cyberagent/DeepSeek-R1-Distill-Qwen-32B-Japanese | 5,404 | 244 | ['safetensors', 'qwen2', 'japanese', 'text-generation', 'conversational', 'ja', 'arxiv:2501.12948', 'base_model:deepseek-ai/DeepSeek-R1-Distill-Qwen-32B', 'base_model:finetune:deepseek-ai/DeepSeek-R1-Distill-Qwen-32B', 'license:mit', 'region:us'] | "2025-01-27T06:53:14Z" | ['Qwen2ForCausalLM'] | qwen2 | 40 | 152,064 | null | 5,120 | 27,648 | 64 | silu | null | 131,072 | null | 0.00001 | null | null | 2,025 | 1 | 27 |
KoboldAI/OPT-13B-Erebus | 7,487 | 239 | ['transformers', 'pytorch', 'opt', 'text-generation', 'en', 'arxiv:2205.01068', 'license:other', 'autotrain_compatible', 'text-generation-inference', 'region:us'] | "2022-09-09T09:11:05Z" | ['OPTForCausalLM'] | opt | 40 | 50,265 | 1 | 5,120 | null | 40 | null | null | 2,048 | relu | null | null | null | 2,022 | 9 | 9 |
liuhaotian/llava-v1.6-mistral-7b | 37,461 | 236 | ['transformers', 'safetensors', 'llava_mistral', 'text-generation', 'image-text-to-text', 'conversational', 'license:apache-2.0', 'autotrain_compatible', 'region:us'] | "2024-01-31T04:20:00Z" | ['LlavaMistralForCausalLM'] | llava_mistral | 32 | 32,000 | null | 4,096 | 14,336 | 32 | silu | null | 32,768 | null | 0.00001 | null | null | 2,024 | 1 | 31 |
Qwen/Qwen2.5-32B-Instruct | 512,360 | 236 | ['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2309.00071', 'arxiv:2407.10671', 'base_model:Qwen/Qwen2.5-32B', 'base_model:finetune:Qwen/Qwen2.5-32B', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-09-17T04:17:55Z" | ['Qwen2ForCausalLM'] | qwen2 | 40 | 152,064 | null | 5,120 | 27,648 | 64 | silu | null | 32,768 | null | 0.000001 | null | null | 2,024 | 9 | 17 |
Qwen/Qwen-VL | 17,955 | 234 | ['transformers', 'pytorch', 'qwen', 'text-generation', 'custom_code', 'zh', 'en', 'arxiv:2308.12966', 'autotrain_compatible', 'region:us'] | "2023-08-18T02:20:59Z" | ['QWenLMHeadModel'] | qwen | 32 | 151,936 | null | 4,096 | 22,016 | 32 | null | null | 8,192 | null | null | null | null | 2,023 | 8 | 18 |
TheBloke/OpenHermes-2.5-Mistral-7B-GGUF | 10,184 | 233 | ['transformers', 'gguf', 'mistral', 'instruct', 'finetune', 'chatml', 'gpt4', 'synthetic data', 'distillation', 'en', 'base_model:teknium/OpenHermes-2.5-Mistral-7B', 'base_model:quantized:teknium/OpenHermes-2.5-Mistral-7B', 'license:apache-2.0', 'region:us'] | "2023-11-02T21:44:04Z" | null | mistral | null | null | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 11 | 2 |
tiiuae/falcon-mamba-7b | 19,124 | 233 | ['transformers', 'safetensors', 'falcon_mamba', 'text-generation', 'en', 'dataset:tiiuae/falcon-refinedweb', 'dataset:HuggingFaceFW/fineweb-edu', 'arxiv:2312.00752', 'arxiv:2410.05355', 'license:other', 'model-index', 'autotrain_compatible', 'endpoints_compatible', 'region:us'] | "2024-07-17T07:06:26Z" | ['FalconMambaForCausalLM'] | falcon_mamba | null | 65,024 | 11 | 4,096 | 8,192 | 64 | silu | null | null | null | null | null | null | 2,024 | 7 | 17 |
mistral-community/Mistral-7B-v0.2 | 31,485 | 232 | ['transformers', 'safetensors', 'mistral', 'text-generation', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-03-23T22:27:12Z" | ['MistralForCausalLM'] | mistral | 32 | 32,000 | null | 4,096 | 14,336 | 32 | silu | null | 32,768 | null | 0.00001 | null | null | 2,024 | 3 | 23 |
Qwen/Qwen2.5-0.5B | 541,351 | 232 | ['transformers', 'safetensors', 'qwen2', 'text-generation', 'conversational', 'en', 'arxiv:2407.10671', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-09-15T12:15:39Z" | ['Qwen2ForCausalLM'] | qwen2 | 14 | 151,936 | null | 896 | 4,864 | 24 | silu | null | 32,768 | null | 0.000001 | null | null | 2,024 | 9 | 15 |
defog/llama-3-sqlcoder-8b | 10,645 | 226 | ['transformers', 'safetensors', 'llama', 'text-generation', 'code', 'conversational', 'license:cc-by-sa-4.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-05-09T15:58:49Z" | ['LlamaForCausalLM'] | llama | 32 | 128,256 | null | 4,096 | 14,336 | 32 | silu | null | 8,192 | null | 0.00001 | null | null | 2,024 | 5 | 9 |
codellama/CodeLlama-7b-Instruct-hf | 43,471 | 225 | ['transformers', 'pytorch', 'safetensors', 'llama', 'text-generation', 'llama-2', 'conversational', 'code', 'arxiv:2308.12950', 'license:llama2', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-08-24T16:33:37Z" | ['LlamaForCausalLM'] | llama | 32 | 32,016 | null | 4,096 | 11,008 | 32 | silu | null | 16,384 | null | 0.00001 | null | null | 2,023 | 8 | 24 |
cognitivecomputations/dolphin-2.8-mistral-7b-v02 | 618 | 224 | ['transformers', 'safetensors', 'mistral', 'text-generation', 'conversational', 'en', 'dataset:cognitivecomputations/dolphin', 'dataset:cognitivecomputations/dolphin-coder', 'dataset:cognitivecomputations/samantha-data', 'dataset:jondurbin/airoboros-2.2.1', 'dataset:teknium/openhermes-2.5', 'dataset:m-a-p/Code-Feedback', 'dataset:m-a-p/CodeFeedback-Filtered-Instruction', 'base_model:mistral-community/Mistral-7B-v0.2', 'base_model:finetune:mistral-community/Mistral-7B-v0.2', 'license:apache-2.0', 'model-index', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-03-28T06:24:16Z" | ['MistralForCausalLM'] | mistral | 32 | 32,002 | null | 4,096 | 14,336 | 32 | silu | null | 32,768 | null | 0.00001 | null | null | 2,024 | 3 | 28 |
TheBloke/Llama-2-7B-GGML | 526 | 220 | ['transformers', 'llama', 'facebook', 'meta', 'pytorch', 'llama-2', 'text-generation', 'en', 'arxiv:2307.09288', 'base_model:meta-llama/Llama-2-7b-hf', 'base_model:finetune:meta-llama/Llama-2-7b-hf', 'license:llama2', 'region:us'] | "2023-07-18T17:06:01Z" | null | llama | null | null | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 7 | 18 |
FlagAlpha/Llama2-Chinese-7b-Chat | 2,623 | 220 | ['transformers', 'pytorch', 'llama', 'text-generation', 'question-answering', 'zh', 'en', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-07-23T10:12:21Z" | ['LlamaForCausalLM'] | llama | 32 | 32,000 | 0 | 4,096 | 11,008 | 32 | silu | null | 4,096 | null | 0.000001 | null | null | 2,023 | 7 | 23 |
bosonai/Higgs-Llama-3-70B | 281 | 220 | ['transformers', 'safetensors', 'llama', 'text-generation', 'conversational', 'base_model:meta-llama/Meta-Llama-3-70B', 'base_model:finetune:meta-llama/Meta-Llama-3-70B', 'license:other', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-06-05T20:10:04Z" | ['LlamaForCausalLM'] | llama | 64 | 128,256 | null | 8,192 | 28,672 | 80 | silu | null | 8,192 | null | 0.00001 | null | null | 2,024 | 6 | 5 |
TheBloke/stable-vicuna-13B-GPTQ | 167 | 219 | ['transformers', 'safetensors', 'llama', 'text-generation', 'causal-lm', 'en', 'dataset:OpenAssistant/oasst1', 'dataset:nomic-ai/gpt4all_prompt_generations', 'dataset:tatsu-lab/alpaca', 'arxiv:2302.13971', 'license:cc-by-nc-sa-4.0', 'autotrain_compatible', 'text-generation-inference', '4-bit', 'gptq', 'region:us'] | "2023-04-28T21:14:29Z" | ['LlamaForCausalLM'] | llama | 40 | 32,001 | 0 | 5,120 | 13,824 | 40 | silu | null | 2,048 | null | 0.000001 | null | null | 2,023 | 4 | 28 |
Qwen/Qwen1.5-72B-Chat | 2,340 | 218 | ['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2309.16609', 'base_model:Qwen/Qwen1.5-72B', 'base_model:finetune:Qwen/Qwen1.5-72B', 'license:other', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-01-30T17:20:46Z" | ['Qwen2ForCausalLM'] | qwen2 | 64 | 152,064 | null | 8,192 | 24,576 | 80 | silu | null | 32,768 | null | 0.000001 | null | null | 2,024 | 1 | 30 |
microsoft/Phi-3-medium-4k-instruct | 29,698 | 217 | ['transformers', 'safetensors', 'phi3', 'text-generation', 'nlp', 'code', 'conversational', 'custom_code', 'multilingual', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-05-07T15:27:19Z" | ['Phi3ForCausalLM'] | phi3 | 40 | 32,064 | 32,000 | 5,120 | 17,920 | 40 | silu | null | 4,096 | null | 0.00001 | null | null | 2,024 | 5 | 7 |
teknium/Mistral-Trismegistus-7B | 203 | 214 | ['transformers', 'pytorch', 'mistral', 'text-generation', 'mistral-7b', 'instruct', 'finetune', 'gpt4', 'synthetic data', 'distillation', 'en', 'base_model:mistralai/Mistral-7B-v0.1', 'base_model:finetune:mistralai/Mistral-7B-v0.1', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-10-07T00:21:46Z" | ['MistralForCausalLM'] | mistral | 32 | 32,000 | null | 4,096 | 14,336 | 32 | silu | null | 32,768 | null | 0.00001 | null | null | 2,023 | 10 | 7 |
Qwen/Qwen2.5-3B-Instruct | 651,558 | 214 | ['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2407.10671', 'base_model:Qwen/Qwen2.5-3B', 'base_model:finetune:Qwen/Qwen2.5-3B', 'license:other', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-09-17T14:08:52Z" | ['Qwen2ForCausalLM'] | qwen2 | 16 | 151,936 | null | 2,048 | 11,008 | 36 | silu | null | 32,768 | null | 0.000001 | null | null | 2,024 | 9 | 17 |
McGill-NLP/Llama-3-8B-Web | 286 | 213 | ['transformers', 'pytorch', 'safetensors', 'llama', 'text-generation', 'agents', 'agent', 'llm', 'conversational', 'en', 'dataset:McGill-NLP/WebLINX', 'arxiv:2402.05930', 'license:llama3', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-04-22T20:48:36Z" | ['LlamaForCausalLM'] | llama | 32 | 128,256 | null | 4,096 | 14,336 | 32 | silu | null | 8,192 | null | 0.00001 | null | null | 2,024 | 4 | 22 |
mlabonne/phixtral-4x2_8 | 227 | 212 | ['transformers', 'safetensors', 'phi-msft', 'text-generation', 'moe', 'nlp', 'code', 'cognitivecomputations/dolphin-2_6-phi-2', 'lxuechen/phi-2-dpo', 'Yhyu13/phi-2-sft-dpo-gpt4_en-ep1', 'mrm8488/phi-2-coder', 'conversational', 'custom_code', 'en', 'license:mit', 'autotrain_compatible', 'region:us'] | "2024-01-08T00:05:45Z" | ['PhiForCausalLM'] | phi-msft | null | 51,200 | null | null | null | null | null | null | null | gelu_new | null | null | null | 2,024 | 1 | 8 |
tiiuae/falcon-11B | 27,361 | 212 | ['transformers', 'safetensors', 'falcon', 'text-generation', 'conversational', 'custom_code', 'en', 'de', 'es', 'fr', 'it', 'nl', 'pl', 'pt', 'ro', 'cs', 'dataset:tiiuae/falcon-refinedweb', 'arxiv:2407.14885', 'arxiv:2005.14165', 'arxiv:2104.09864', 'arxiv:1911.02150', 'arxiv:2307.08691', 'arxiv:2311.16867', 'license:unknown', 'autotrain_compatible', 'text-generation-inference', 'region:us'] | "2024-05-09T08:11:59Z" | ['FalconForCausalLM'] | falcon | 32 | 65,024 | null | 4,096 | null | 60 | null | null | 8,192 | null | null | null | null | 2,024 | 5 | 9 |
microsoft/phi-1 | 7,298 | 211 | ['transformers', 'safetensors', 'phi', 'text-generation', 'code', 'en', 'arxiv:2306.11644', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-09-10T04:10:57Z" | ['PhiForCausalLM'] | phi | 32 | 51,200 | null | 2,048 | 8,192 | 24 | gelu_new | 0.00001 | 2,048 | null | null | null | null | 2,023 | 9 | 10 |
stabilityai/stablelm-base-alpha-7b | 2,655 | 209 | ['transformers', 'pytorch', 'gpt_neox', 'text-generation', 'causal-lm', 'en', 'license:cc-by-sa-4.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-04-11T05:55:14Z" | ['GPTNeoXForCausalLM'] | gpt_neox | 48 | 50,432 | null | 6,144 | 24,576 | 16 | gelu | 0.00001 | 4,096 | null | null | null | null | 2,023 | 4 | 11 |
cognitivecomputations/dolphin-2.6-mixtral-8x7b | 2,285 | 209 | ['transformers', 'pytorch', 'mixtral', 'text-generation', 'conversational', 'en', 'dataset:ehartford/dolphin', 'dataset:jondurbin/airoboros-2.2.1', 'dataset:ehartford/dolphin-coder', 'dataset:teknium/openhermes', 'dataset:ise-uiuc/Magicoder-OSS-Instruct-75K', 'dataset:ise-uiuc/Magicoder-Evol-Instruct-110K', 'dataset:LDJnr/Capybara', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2023-12-21T22:34:09Z" | ['MixtralForCausalLM'] | mixtral | 32 | 32,002 | null | 4,096 | 14,336 | 32 | silu | null | 32,768 | null | 0.00001 | null | null | 2,023 | 12 | 21 |
TheBloke/Vicuna-13B-1.1-GPTQ | 361 | 208 | ['transformers', 'llama', 'text-generation', 'conversational', 'license:other', 'autotrain_compatible', 'region:us'] | "2023-04-12T22:01:51Z" | ['LlamaForCausalLM'] | llama | 40 | 32,000 | 0 | 5,120 | 13,824 | 40 | silu | null | 2,048 | null | 0.000001 | null | null | 2,023 | 4 | 12 |
Qwen/Qwen2.5-14B-Instruct | 500,909 | 208 | ['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2309.00071', 'arxiv:2407.10671', 'base_model:Qwen/Qwen2.5-14B', 'base_model:finetune:Qwen/Qwen2.5-14B', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-09-16T11:56:10Z" | ['Qwen2ForCausalLM'] | qwen2 | 40 | 152,064 | null | 5,120 | 13,824 | 48 | silu | null | 32,768 | null | 0.000001 | null | null | 2,024 | 9 | 16 |
Qwen/Qwen-14B | 7,004 | 207 | ['transformers', 'safetensors', 'qwen', 'text-generation', 'custom_code', 'zh', 'en', 'arxiv:2309.16609', 'autotrain_compatible', 'region:us'] | "2023-09-24T03:28:41Z" | ['QWenLMHeadModel'] | qwen | 40 | 152,064 | null | 5,120 | 27,392 | 40 | null | null | 8,192 | null | null | null | null | 2,023 | 9 | 24 |
codellama/CodeLlama-70b-Instruct-hf | 6,341 | 207 | ['transformers', 'pytorch', 'safetensors', 'llama', 'text-generation', 'llama-2', 'conversational', 'code', 'arxiv:2308.12950', 'license:llama2', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-01-29T11:00:04Z" | ['LlamaForCausalLM'] | llama | 64 | 32,016 | null | 8,192 | 28,672 | 80 | silu | null | 4,096 | null | 0.00001 | null | null | 2,024 | 1 | 29 |
NousResearch/Hermes-3-Llama-3.1-405B | 1,302 | 207 | ['transformers', 'safetensors', 'llama', 'text-generation', 'Llama-3', 'instruct', 'finetune', 'chatml', 'gpt4', 'synthetic data', 'distillation', 'function calling', 'json mode', 'axolotl', 'roleplaying', 'chat', 'conversational', 'en', 'arxiv:2408.11857', 'base_model:meta-llama/Llama-3.1-405B', 'base_model:finetune:meta-llama/Llama-3.1-405B', 'license:llama3', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-08-13T04:57:53Z" | ['LlamaForCausalLM'] | llama | 128 | 128,256 | null | 16,384 | 53,248 | 126 | silu | null | 131,072 | null | 0.00001 | null | null | 2,024 | 8 | 13 |
Orenguteng/Llama-3-8B-Lexi-Uncensored | 27,007 | 205 | ['transformers', 'safetensors', 'llama', 'text-generation', 'uncensored', 'llama3', 'instruct', 'open', 'conversational', 'license:llama3', 'model-index', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-04-23T21:14:40Z" | ['LlamaForCausalLM'] | llama | 32 | 128,256 | null | 4,096 | 14,336 | 32 | silu | null | 8,192 | null | 0.00001 | null | null | 2,024 | 4 | 23 |
huihui-ai/DeepSeek-R1-Distill-Qwen-32B-abliterated | 5,369 | 204 | ['transformers', 'safetensors', 'qwen2', 'text-generation', 'abliterated', 'uncensored', 'conversational', 'base_model:deepseek-ai/DeepSeek-R1-Distill-Qwen-32B', 'base_model:finetune:deepseek-ai/DeepSeek-R1-Distill-Qwen-32B', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2025-01-22T12:43:29Z" | ['Qwen2ForCausalLM'] | qwen2 | 40 | 152,064 | null | 5,120 | 27,648 | 64 | silu | null | 131,072 | null | 0.00001 | null | null | 2,025 | 1 | 22 |
bigscience/bloom-7b1 | 22,817 | 203 | ['transformers', 'pytorch', 'jax', 'safetensors', 'bloom', 'text-generation', 'ak', 'ar', 'as', 'bm', 'bn', 'ca', 'code', 'en', 'es', 'eu', 'fon', 'fr', 'gu', 'hi', 'id', 'ig', 'ki', 'kn', 'lg', 'ln', 'ml', 'mr', 'ne', 'nso', 'ny', 'or', 'pa', 'pt', 'rn', 'rw', 'sn', 'st', 'sw', 'ta', 'te', 'tn', 'ts', 'tum', 'tw', 'ur', 'vi', 'wo', 'xh', 'yo', 'zh', 'zhs', 'zht', 'zu', 'arxiv:1909.08053', 'arxiv:2110.02861', 'arxiv:2108.12409', 'doi:10.57967/hf/2655', 'license:bigscience-bloom-rail-1.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2022-05-19T11:53:18Z" | ['BloomForCausalLM'] | bloom | null | 250,880 | 3 | 4,096 | null | null | null | null | null | null | null | null | null | 2,022 | 5 | 19 |
mosaicml/mpt-30b-chat | 3,066 | 203 | ['transformers', 'pytorch', 'mpt', 'text-generation', 'Composer', 'MosaicML', 'llm-foundry', 'conversational', 'custom_code', 'dataset:camel-ai/code', 'dataset:ehartford/wizard_vicuna_70k_unfiltered', 'dataset:anon8231489123/ShareGPT_Vicuna_unfiltered', 'dataset:timdettmers/openassistant-guanaco', 'dataset:camel-ai/math', 'dataset:camel-ai/biology', 'dataset:camel-ai/chemistry', 'dataset:camel-ai/ai_society', 'dataset:jondurbin/airoboros-gpt4-1.2', 'dataset:LongConversations', 'dataset:camel-ai/physics', 'arxiv:2205.14135', 'arxiv:2108.12409', 'arxiv:2010.04245', 'license:cc-by-nc-sa-4.0', 'autotrain_compatible', 'text-generation-inference', 'region:us'] | "2023-06-09T20:01:17Z" | ['MPTForCausalLM'] | mpt | null | 50,432 | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 6 | 9 |
NousResearch/Hermes-2-Theta-Llama-3-8B | 8,804 | 201 | ['transformers', 'safetensors', 'llama', 'text-generation', 'Llama-3', 'instruct', 'finetune', 'chatml', 'DPO', 'RLHF', 'gpt4', 'synthetic data', 'distillation', 'function calling', 'json mode', 'axolotl', 'merges', 'conversational', 'en', 'dataset:teknium/OpenHermes-2.5', 'base_model:NousResearch/Hermes-2-Pro-Llama-3-8B', 'base_model:finetune:NousResearch/Hermes-2-Pro-Llama-3-8B', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us'] | "2024-05-05T09:14:29Z" | ['LlamaForCausalLM'] | llama | 32 | 128,256 | null | 4,096 | 14,336 | 32 | silu | null | 8,192 | null | 0.00001 | null | null | 2,024 | 5 | 5 |
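
The rows above are one slice of a model-metadata table. As a minimal sketch of how the numeric config columns can be queried, the snippet below assumes the table has been exported to a hypothetical local CSV file `model_configs.csv` with the same column names; it is not tied to any particular dataset ID.

```python
# Sketch: filter the model-metadata table with pandas (assumes a local CSV
# export named "model_configs.csv" whose columns match the header above).
import pandas as pd

# thousands="," handles values rendered like "130,545" and "131,072".
df = pd.read_csv("model_configs.csv", thousands=",")

# Rows whose config exposes a context length, sorted longest-context first.
with_ctx = df.dropna(subset=["max_position_embeddings"])
long_ctx = with_ctx.sort_values("max_position_embeddings", ascending=False)
print(long_ctx[["model_id", "model_type", "max_position_embeddings", "likes"]].head(10))

# How many entries each architecture family contributes to this slice.
print(df["model_type"].value_counts().head(10))
```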