import gradio as gr
from transformers import AutoModelForCausalLM

base_model_id = "LeoLM/leo-mistral-hessianai-7b-chat"
# Load the model once at startup instead of on every incoming message.
base_model = AutoModelForCausalLM.from_pretrained(
    base_model_id, device_map="auto", trust_remote_code=True
)

def LMmodel(message, history):
    # Placeholder reply to confirm the interface and the model load work.
    return "works"
gr.ChatInterface(LMmodel).launch()