File size: 1,271 Bytes
e62171b
 
1ecbc96
 
 
 
e62171b
 
 
 
 
 
 
 
 
1ecbc96
 
e62171b
1ecbc96
 
 
 
 
 
 
e62171b
1ecbc96
 
e62171b
1ecbc96
 
 
 
e62171b
1ecbc96
e62171b
 
 
 
 
1ecbc96
 
e62171b
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
import os
from huggingface_hub import login
import torch
from transformers import pipeline
import gradio as gr

# Read the Hugging Face access token from the environment.
token = os.getenv("HF_TOKEN")

# Fail fast when the token is absent so authentication problems surface
# at startup rather than on the first model download.
if token is None:
    raise ValueError("Hugging Face token is not set in the environment variable.")

# Authenticate this process against the Hugging Face Hub.
login(token=token)

# Define the model ID
# NOTE(review): Llama-3.2-1B-Instruct is a causal (generative) LM checkpoint,
# not a sequence-classification model. Loading it through the
# "text-classification" pipeline attaches a freshly-initialized classification
# head, so predictions will be effectively arbitrary without fine-tuning —
# confirm this is intended (a zero-shot-classification pipeline or a
# fine-tuned classifier checkpoint may be what was meant).
model_id = "meta-llama/Llama-3.2-1B-Instruct"

# Load the pipeline with the model
pipe = pipeline(
    "text-classification", 
    model=model_id, 
    torch_dtype=torch.bfloat16,  # halves memory vs. fp32; needs bf16-capable hardware
    device_map="auto"  # let accelerate place the model on available devices
)

# Define custom labels for classification
# NOTE(review): overriding id2label only renames output indices; the newly
# created head presumably has the default number of labels, so mapping three
# ids here may not match the head's actual num_labels — verify.
pipe.model.config.id2label = {0: 'greeting', 1: 'farewell', 2: 'other'}

def classify_text(text):
    """Run the classification pipeline on *text* and return the top label."""
    predictions = pipe(text)
    top_prediction = predictions[0]
    return top_prediction['label']

# Build the Gradio UI: a single textbox in, a label widget out.
iface = gr.Interface(
    title="Text Classifier",
    description="Classify your text as 'greeting', 'farewell', or 'other'.",
    fn=classify_text,
    inputs=gr.Textbox(label="Enter Text"),
    outputs=gr.Label(label="Classification"),
)

# Start the web server and serve the app.
iface.launch()