Ali2206 committed
Commit 095998d · verified · 1 Parent(s): 7e55ae2

Update app.py

Files changed (1)
  1. app.py +18 -11
app.py CHANGED
@@ -9,7 +9,7 @@ import gc
 import re
 import torch
 from typing import List, Tuple, Dict
-from concurrent.futures import ThreadPoolExecutor
+from concurrent.futures import ThreadPoolExecutor, as_completed
 
 # Directories
 persistent_dir = "/data/hf_cache"
@@ -100,18 +100,21 @@ def init_agent() -> TxAgent:
     agent.init_model()
     return agent
 
-def analyze_batches(agent, batches: List[List[str]]) -> List[str]:
+
+
+def analyze_batches(agent, batches: List[List[str]], max_workers: int = 3) -> List[str]:
     results = []
-    for batch in batches:
-        prompt = "\n\n".join(build_prompt(c) for c in batch)
+
+    def process_single_batch(batch):
+        prompt = "\n\n".join(build_prompt(chunk) for chunk in batch)
+        response = ""
         try:
-            response = ""
             for r in agent.run_gradio_chat(
                 message=prompt,
                 history=[],
                 temperature=0.0,
-                max_new_tokens=MAX_NEW_TOKENS,
-                max_token=MAX_MODEL_TOKENS,
+                max_new_tokens=4096,
+                max_token=131072,
                 call_agent=False,
                 conversation=[]
             ):
@@ -123,11 +126,15 @@ def analyze_batches(agent, batches: List[List[str]]) -> List[str]:
                             response += m.content
                 elif hasattr(r, "content"):
                     response += r.content
-            results.append(clean_response(response))
         except Exception as e:
-            results.append(f"❌ Error: {str(e)}")
-        torch.cuda.empty_cache()
-        gc.collect()
+            response = f"❌ Error: {str(e)}"
+        return clean_response(response)
+
+    with ThreadPoolExecutor(max_workers=max_workers) as executor:
+        futures = [executor.submit(process_single_batch, batch) for batch in batches]
+        for future in as_completed(futures):
+            results.append(future.result())
+
     return results
 
 def generate_final_summary(agent, combined: str) -> str:
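
The commit replaces the sequential per-batch loop with the standard concurrent.futures fan-out pattern: each batch is submitted to a ThreadPoolExecutor and results are collected as they complete. A minimal, self-contained sketch of the same pattern follows; the helper names and the demo callable are illustrative and are not part of app.py.

from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import Callable, List

def run_batches_parallel(
    process: Callable[[List[str]], str],
    batches: List[List[str]],
    max_workers: int = 3,
) -> List[str]:
    """Fan batches out to a thread pool, collecting in *completion* order."""
    results: List[str] = []
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        # Submit one task per batch; as_completed yields whichever
        # future finishes first, so output order may differ from input.
        futures = [executor.submit(process, batch) for batch in batches]
        for future in as_completed(futures):
            results.append(future.result())
    return results

def run_batches_parallel_ordered(
    process: Callable[[List[str]], str],
    batches: List[List[str]],
    max_workers: int = 3,
) -> List[str]:
    """Same fan-out, but results are written back to their input slots."""
    results: List[str] = [""] * len(batches)
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        # Map each future to its submission index so results stay aligned
        # with the input order regardless of completion order.
        future_to_idx = {
            executor.submit(process, batch): i for i, batch in enumerate(batches)
        }
        for future in as_completed(future_to_idx):
            results[future_to_idx[future]] = future.result()
    return results

if __name__ == "__main__":
    # Toy stand-in for the agent call: join each batch into one string.
    demo = lambda batch: " | ".join(batch)
    print(run_batches_parallel_ordered(demo, [["a", "b"], ["c"], ["d", "e"]]))

One caveat worth noting: because as_completed() yields futures in completion order, the list returned by the new analyze_batches is not guaranteed to match the order of batches; the second helper above shows one way to preserve input order. The commit also drops the per-batch torch.cuda.empty_cache()/gc.collect() calls, trading explicit memory cleanup for throughput.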