tferhan commited on
Commit
c43af2c
·
verified ·
1 Parent(s): 06f21ae

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -1
app.py CHANGED
@@ -16,6 +16,7 @@ import chromadb
16
  from unidecode import unidecode
17
 
18
  from transformers import AutoTokenizer
 
19
  import transformers
20
  import torch
21
  import tqdm
@@ -96,6 +97,9 @@ def initialize_llmchain(temperature, max_tokens, top_k, vector_db):
96
 
97
  qa = initialize_llmchain(0.6, 1024, 40, vec_cre) #The model question answer
98
 
 
 
 
99
  def format_chat_history(message, chat_history):
100
  formatted_chat_history = []
101
  for user_message, bot_message in chat_history:
@@ -119,8 +123,13 @@ def conversation(message, history):
119
  response_source1_page = response_sources[0].metadata["page"] + 1
120
  response_source2_page = response_sources[1].metadata["page"] + 1
121
  response_source3_page = response_sources[2].metadata["page"] + 1
 
 
 
 
 
 
122
 
123
- return response_answer
124
 
125
 
126
 
 
16
  from unidecode import unidecode
17
 
18
  from transformers import AutoTokenizer
19
+ from transformers import pipeline
20
  import transformers
21
  import torch
22
  import tqdm
 
97
 
98
  qa = initialize_llmchain(0.6, 1024, 40, vec_cre) #The model question answer
99
 
100
+ pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-en-fr") # This pipeline translates English to French; it isn't advised, as it adds more latency
101
+
102
+
103
  def format_chat_history(message, chat_history):
104
  formatted_chat_history = []
105
  for user_message, bot_message in chat_history:
 
123
  response_source1_page = response_sources[0].metadata["page"] + 1
124
  response_source2_page = response_sources[1].metadata["page"] + 1
125
  response_source3_page = response_sources[2].metadata["page"] + 1
126
+ #If you want the response returned in English, leave it as:
127
+ #return response_answer
128
+
129
+ #If you want the response returned in French
130
+ return pipe(response_answer)[0]['translation_text'] + " (Traduis d'anglais en français)"
131
+
132
 
 
133
 
134
 
135