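"""Factory helpers for constructing LangChain LLM clients across providers
(Azure OpenAI, Ollama, Google Gemini, Groq), selected by name via get_llm()."""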
import os
from getpass import getpass

from langchain_groq import ChatGroq
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_openai import AzureChatOpenAI
from langchain_community.llms import Ollama


def azure_openai_service(key, max_retries=3):
    """Return an AzureChatOpenAI client pointed at the Michelin Azure OpenAI endpoint."""
    os.environ["AZURE_OPENAI_API_KEY"] = key
    os.environ["AZURE_OPENAI_ENDPOINT"] = "https://indus.api.michelin.com/openai-key-weu"
    model = AzureChatOpenAI(
        azure_deployment="gpt-4o",         # or your deployment name
        api_version="2023-06-01-preview",  # or your API version
        temperature=0,
        max_tokens=None,
        timeout=None,
        max_retries=max_retries,
    )
    return model


def get_ollama():
    """Return a local Ollama model (start the server first with `ollama serve`)."""
    llm = Ollama(base_url="http://localhost:11434", model="mistral")
    return llm


def get_googleGemini(key):
    """Return a Google Gemini chat model (gemini-1.5-pro) via langchain-google-genai."""
    os.environ["GOOGLE_API_KEY"] = key
    llm = ChatGoogleGenerativeAI(
        model="gemini-1.5-pro",
        temperature=0,
        max_tokens=None,
        timeout=None,
        max_retries=2,
    )
    return llm


def get_groq_model(key, model_name="gemma2-9b-it"):
    """Return a Groq-hosted chat model; defaults to gemma2-9b-it."""
    os.environ["GROQ_API_KEY"] = key
    llm_groq = ChatGroq(model=model_name)
    return llm_groq


# Groq-hosted models exposed as options; they are all built through get_groq_model.
GROQ_MODELS = {
    "deepseek-r1-distill-llama-70b",
    "gemma2-9b-it",
    "llama-3.2-3b-preview",
    "llama-3.2-1b-preview",
    "llama3-8b-8192",
}


def get_llm(option, key):
    """Return an LLM client for the selected option, or None if the option is unknown."""
    llm = None
    if option in GROQ_MODELS:
        llm = get_groq_model(key, model_name=option)
    elif option == "Openai":
        llm = azure_openai_service(key)
    elif option == "Google":
        llm = get_googleGemini(key)
    elif option == "Ollama":
        llm = get_ollama()
    return llm
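

if __name__ == "__main__":
    # Minimal usage sketch: assumes a valid Groq API key is entered at the prompt.
    # Chat models return a message object, hence the .content access.
    api_key = getpass("API key: ")
    llm = get_llm("gemma2-9b-it", api_key)
    if llm is not None:
        print(llm.invoke("Reply with a one-sentence greeting.").content)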