# groq_llm.py
"""Thin wrapper around Groq's OpenAI-compatible chat-completions API."""
import os

import openai

openai.api_key = os.getenv("GROQ_API_KEY")
# Groq serves an OpenAI-compatible endpoint. Without overriding the base
# URL, the client would send the Groq key (and the Groq-only model name
# "llama3-70b-8192") to api.openai.com, where every request would fail.
openai.api_base = "https://api.groq.com/openai/v1"


def get_groq_response(user_input, system_prompt, language="English"):
    """Return the model's reply to *user_input* as a stripped string.

    Args:
        user_input: The end-user message sent as the "user" turn.
        system_prompt: Extra instructions appended to the system message.
        language: Language the model is asked to respond in
            (defaults to "English").

    Returns:
        The first completion choice's message content with surrounding
        whitespace removed.

    Raises:
        openai.error.OpenAIError: On authentication or API failures
            (e.g. when GROQ_API_KEY is unset).
    """
    messages = [
        {
            "role": "system",
            "content": f"You are an AI assistant. Respond in {language}. {system_prompt}",
        },
        {"role": "user", "content": user_input},
    ]
    # NOTE(review): openai.ChatCompletion was removed in openai>=1.0;
    # this module assumes a pinned pre-1.0 SDK — confirm the installed version.
    response = openai.ChatCompletion.create(
        model="llama3-70b-8192",
        messages=messages,
        temperature=0.7,
    )
    return response.choices[0].message.content.strip()