initial commit
app.py CHANGED
@@ -3,6 +3,9 @@ import gradio as gr
 import transformers
 from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
 import torch
+import os
+
+HF_TOKEN = os.environ.get("HF_TOKEN")  # Make sure to set this in your Space secrets
 
 title = """
 # Welcome to 馃専Tonic's馃Command-A
@@ -11,8 +14,8 @@ Join us : 馃専TeamTonic馃専 is always making cool demos! Join our active builder
 """
 
 model_id = "Tonic/c4ai-command-a-03-2025-4bit_fp4"
-tokenizer = AutoTokenizer.from_pretrained(model_id)
-model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", torch_dtype=torch.bfloat16)
+tokenizer = AutoTokenizer.from_pretrained(model_id, token=HF_TOKEN)
+model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", torch_dtype=torch.bfloat16, token=HF_TOKEN)
 
 
 @spaces.GPU
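For context, here is a minimal, self-contained sketch of the loading path this commit introduces, followed by an illustrative generation call. Only the token handling and the two from_pretrained calls come from the diff above; the chat-template usage, the example messages, and max_new_tokens=64 are assumptions added for demonstration and are not part of the commit.

import os
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Read the Hugging Face access token from the environment.
# On a Space, add HF_TOKEN under Settings -> Secrets so it is available at runtime.
HF_TOKEN = os.environ.get("HF_TOKEN")

model_id = "Tonic/c4ai-command-a-03-2025-4bit_fp4"

# Pass the token to both calls so gated or private repos can be downloaded.
tokenizer = AutoTokenizer.from_pretrained(model_id, token=HF_TOKEN)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",
    torch_dtype=torch.bfloat16,
    token=HF_TOKEN,
)

# Illustrative usage only (not in the commit): build a chat prompt and generate.
messages = [{"role": "user", "content": "Hello, how are you?"}]  # assumed example input
input_ids = tokenizer.apply_chat_template(
    messages, tokenize=True, add_generation_prompt=True, return_tensors="pt"
).to(model.device)
output = model.generate(input_ids, max_new_tokens=64)  # assumed generation length
print(tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True))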