Nac31 committed
Commit 7139070 · 1 Parent(s): 494b259
Files changed (2)
  1. app.py +14 -15
  2. requirements.txt +2 -1
app.py CHANGED
@@ -1,25 +1,24 @@
 import gradio as gr
-from transformers import AutoTokenizer, pipeline
+from huggingface_hub import InferenceClient
+import os
+from dotenv import load_dotenv

-def initialize_model():
-    tokenizer = AutoTokenizer.from_pretrained("Nac31/Sacha-Mistral-0", trust_remote_code=True)
-    pipe = pipeline(
-        "text-generation",
-        model="Nac31/Sacha-Mistral-0",
-        tokenizer=tokenizer,
-        trust_remote_code=True
-    )
-    return pipe
+load_dotenv()

-def generate_response(message, temperature=0.7, max_length=500):
+# Initialize the client
+client = InferenceClient(
+    model="Nac31/Sacha-Mistral-0",
+    token=os.getenv("HF_TOKEN")  # Your HF token
+)
+
+def generate_response(message, temperature=0.7):
     try:
-        pipe = initialize_model()
-        response = pipe(
+        response = client.text_generation(
             message,
-            max_length=max_length,
+            max_new_tokens=500,
             temperature=temperature,
             do_sample=True
-        )[0]['generated_text']
+        )
         return response
     except Exception as e:
         return f"Une erreur s'est produite : {str(e)}"
requirements.txt CHANGED
@@ -9,4 +9,5 @@ bitsandbytes
 openai
 langchain
 python-dotenv
-langchain-community
+langchain-community
+huggingface_hub
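Since the new app.py calls load_dotenv() and reads HF_TOKEN from the environment, a local .env file along these lines would be expected next to app.py; the file and the token value are placeholders, not part of this commit:

# .env (placeholder value; supply your own Hugging Face access token)
HF_TOKEN=hf_xxxxxxxxxxxxxxxxxxxx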