from transformers import pipeline
from huggingface_hub import login

# OPTIONAL: Authenticate with your Hugging Face API token (if needed)
# login("your_huggingface_api_token_here")

# Load the Mistral-7B-Instruct model.
# NOTE: the un-versioned repo id "mistralai/Mistral-7B-Instruct" does not
# exist on the Hub — the released checkpoints are versioned (v0.1/v0.2/v0.3).
MODEL_NAME = "mistralai/Mistral-7B-Instruct-v0.2"

# Change device to "cuda" (or a device index) if a GPU is available.
generator = pipeline("text-generation", model=MODEL_NAME, device="cpu")


def generate_response(prompt, max_new_tokens=200):
    """Generate a text completion for *prompt* with the loaded model.

    Args:
        prompt: The user prompt to complete.
        max_new_tokens: Maximum number of NEW tokens to generate. Using
            ``max_new_tokens`` instead of the deprecated ``max_length``
            avoids the prompt length eating into the generation budget.

    Returns:
        The generated text (which, by pipeline default, includes the
        original prompt as a prefix).
    """
    response = generator(prompt, max_new_tokens=max_new_tokens)
    return response[0]["generated_text"]


# Example usage
if __name__ == "__main__":
    user_prompt = "How can I reuse a plastic bottle?"
    print("AI Suggestion:", generate_response(user_prompt))