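# Dependencies (assumed, based on the imports below): the transformers and
# huggingface_hub packages, plus a model backend such as torch,
# e.g. `pip install transformers torch huggingface_hub`.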
from transformers import pipeline
from huggingface_hub import login

# Optional: authenticate with a Hugging Face access token
# (needed only if the model repo is gated or private).
# login("your_huggingface_api_token_here")

# Load the Mistral-7B-Instruct model (the Hub repo IDs are versioned, e.g. v0.2)
model_name = "mistralai/Mistral-7B-Instruct-v0.2"
generator = pipeline("text-generation", model=model_name, device="cpu")  # change "cpu" to "cuda" to run on a GPU

def generate_response(prompt):
    # Generate up to 200 new tokens; the returned text includes the prompt followed by the completion
    response = generator(prompt, max_new_tokens=200)
    return response[0]["generated_text"]

# Example Usage
if __name__ == "__main__":
    user_prompt = "How can I reuse a plastic bottle?"
    print("AI Suggestion:", generate_response(user_prompt))