jayebaku committed
Commit bdc1c30 · verified · 1 parent: 4da2d54

Update qa_summary.py

Files changed (1): qa_summary.py (+5 −5)
qa_summary.py CHANGED
@@ -3,7 +3,7 @@ import torch
  from transformers import AutoModelForCausalLM, AutoTokenizer

  @spaces.GPU(duration=60)
- def generate_answer(llm_name, texts, query, queries, mode='validate'):
+ def generate_answer(llm_name, texts, query, queries, response_lang, mode='validate'):

      if llm_name == 'solar':
          tokenizer = AutoTokenizer.from_pretrained("Upstage/SOLAR-10.7B-Instruct-v1.0", use_fast=True)
@@ -17,7 +17,7 @@ def generate_answer(llm_name, texts, query, queries, mode='validate'):
          tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.2", use_fast=True)
          llm_model = AutoModelForCausalLM.from_pretrained(
              "mistralai/Mistral-7B-Instruct-v0.2",
-             #device_map="auto",
+             # device_map="auto",
              device_map="cuda",
              torch_dtype=torch.float16,
          )
@@ -28,7 +28,7 @@ def generate_answer(llm_name, texts, query, queries, mode='validate'):
              "microsoft/Phi-3-mini-128k-instruct",
              device_map="auto",
              torch_dtype="auto",
-             trust_remote_code=False,
+             trust_remote_code=True,
          )

      template_texts =""
@@ -42,8 +42,8 @@ def generate_answer(llm_name, texts, query, queries, mode='validate'):
      elif mode == 'h_summarize':
          conversation = [ {'role': 'user', 'content': f'The documents below describe a developing disaster event. Based on these documents, write a brief summary in the form of a paragraph, highlighting the most crucial information. \nDocuments: {template_texts}'} ]
      elif mode == "multi_summarize":
-         # conversation = [ {'role': 'user', 'content': f'For the following queries and documents, try to answer the given queries based on the documents. Also, return the top 5 unaltered documents that answer the queries.\nQueries: {queries} \nDocuments: {template_texts}.'} ]
-         conversation = [ {'role': 'user', 'content': f'For the following queries and documents, in a brief paragraph try to answer the given queries based on the documents. Then, return the top 5 documents as provided that answer the queries.\nQueries: {queries} \nDocuments: {template_texts}.'} ]
+         conversation = [ {'role': 'user', 'content': f"""For the following queries and documents, in a brief paragraph try to answer the given queries based on the documents.
+         Then, return the top 5 documents as provided that answer the queries.\nQueries: {queries} \nDocuments: {template_texts}. Give your response in {response_lang} language"""} ]


      prompt = tokenizer.apply_chat_template(conversation, tokenize=False, add_generation_prompt=True)
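
The substance of the commit is the new response_lang parameter, threaded from the function signature into the multi_summarize prompt so the model answers in the requested language. A minimal sketch of how a caller might use the updated signature; the document list, the queries, and the llm_name value for the Mistral branch are not visible in this diff and are assumptions:

    # Hypothetical call site for the updated signature. The llm_name value
    # selecting the Mistral branch is not shown in this diff, so 'mistral'
    # is an assumption; the sample data is illustrative only.
    docs = [
        "Flooding reported along the river embankment at 14:00.",
        "Evacuations are under way in the eastern district.",
    ]
    user_queries = ["Where is the flooding?", "What response is under way?"]

    summary = generate_answer(
        llm_name="mistral",
        texts=docs,
        query=None,                  # not used by multi_summarize mode
        queries=user_queries,
        response_lang="French",      # new argument: interpolated into the prompt
        mode="multi_summarize",
    )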
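Flipping trust_remote_code from False to True matters for this particular model: microsoft/Phi-3-mini-128k-instruct shipped custom modeling code in its repository, and on transformers versions without native Phi-3 support the load fails unless that code is allowed to run. A minimal sketch of the affected load path; only the from_pretrained arguments are taken from the diff, the rest is illustrative:

    import torch
    from transformers import AutoModelForCausalLM, AutoTokenizer

    # Sketch of the Phi-3 branch after this commit.
    tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-128k-instruct")
    llm_model = AutoModelForCausalLM.from_pretrained(
        "microsoft/Phi-3-mini-128k-instruct",
        device_map="auto",
        torch_dtype="auto",
        trust_remote_code=True,  # run the repo's custom modeling code
    )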
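On the prompt side, apply_chat_template(..., tokenize=False, add_generation_prompt=True) renders the conversation list into the model's chat format and appends the assistant-turn header, so generation continues as the reply. The generate and decode steps below are not part of this diff; they are a plausible continuation, sketched under that assumption:

    conversation = [{'role': 'user', 'content': 'Summarize the documents ...'}]
    prompt = tokenizer.apply_chat_template(conversation, tokenize=False, add_generation_prompt=True)

    inputs = tokenizer(prompt, return_tensors="pt").to(llm_model.device)
    output_ids = llm_model.generate(**inputs, max_new_tokens=256)
    # Decode only the newly generated tokens, skipping the echoed prompt.
    answer = tokenizer.decode(output_ids[0][inputs['input_ids'].shape[1]:], skip_special_tokens=True)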