Ali2206 committed on
Commit 585f453 · verified · 1 Parent(s): f719cf7

Update app.py

Files changed (1):
  1. app.py +123 -52
app.py CHANGED
@@ -126,20 +126,63 @@ def init_agent():
     return agent
 
 
-def stream_final_report(agent, file) -> Generator[Tuple[List[Dict[str, str]], str], None, None]:
+def stream_final_report(agent, file) -> Generator[Tuple[List[Dict[str, str]], Union[str, None]], None, None]:
+    # Initialize with empty values
+    messages = []
+    report_path = None
+
     if file is None or not hasattr(file, "name"):
-        yield ([{"role": "assistant", "content": "❌ Please upload a valid Excel file before analyzing."}], "")
+        messages = [{"role": "assistant", "content": "❌ Please upload a valid Excel file before analyzing."}]
+        yield messages, None
         return
 
-    extracted_text = extract_text_from_excel(file.name)
-    chunks = split_text_into_chunks(extracted_text)
-    chunk_responses = []
-
-    for chunk in chunks:
-        prompt = build_prompt_from_text(chunk)
-        response = ""
+    try:
+        # Initial processing message
+        messages = [{"role": "user", "content": f"Processing Excel file: {file.name}"},
+                    {"role": "assistant", "content": "⏳ Extracting and analyzing data..."}]
+        yield messages, None
+
+        extracted_text = extract_text_from_excel(file.name)
+        chunks = split_text_into_chunks(extracted_text)
+        chunk_responses = []
+
+        # Process each chunk
+        for i, chunk in enumerate(chunks):
+            messages.append({"role": "assistant", "content": f"🔍 Analyzing chunk {i+1}/{len(chunks)}..."})
+            yield messages, None
+
+            prompt = build_prompt_from_text(chunk)
+            response = ""
+            for result in agent.run_gradio_chat(
+                message=prompt,
+                history=[],
+                temperature=0.2,
+                max_new_tokens=MAX_NEW_TOKENS,
+                max_token=MAX_TOKENS,
+                call_agent=False,
+                conversation=[],
+            ):
+                if isinstance(result, str):
+                    response += result
+                elif hasattr(result, "content"):
+                    response += result.content
+                elif isinstance(result, list):
+                    for r in result:
+                        if hasattr(r, "content"):
+                            response += r.content
+
+            chunk_responses.append(clean_response(response))
+            messages.append({"role": "assistant", "content": f"✅ Chunk {i+1} analysis complete"})
+            yield messages, None
+
+        # Final summarization
+        final_prompt = "\n\n".join(chunk_responses) + "\n\nSummarize the key findings above."
+        messages.append({"role": "assistant", "content": "📊 Generating final report..."})
+        yield messages, None
+
+        stream_text = ""
         for result in agent.run_gradio_chat(
-            message=prompt,
+            message=final_prompt,
             history=[],
             temperature=0.2,
             max_new_tokens=MAX_NEW_TOKENS,
@@ -148,57 +191,84 @@ def stream_final_report(agent, file) -> Generator[Tuple[List[Dict[str, str]], st
             conversation=[],
         ):
             if isinstance(result, str):
-                response += result
+                stream_text += result
             elif hasattr(result, "content"):
-                response += result.content
+                stream_text += result.content
             elif isinstance(result, list):
                 for r in result:
                     if hasattr(r, "content"):
-                        response += r.content
-        chunk_responses.append(clean_response(response))
-
-    final_prompt = "\n\n".join(chunk_responses) + "\n\nSummarize the key findings above."
-    messages = [{"role": "user", "content": f"[Excel Uploaded: {file.name}]"}]
-    stream_text = ""
-    for result in agent.run_gradio_chat(
-        message=final_prompt,
-        history=[],
-        temperature=0.2,
-        max_new_tokens=MAX_NEW_TOKENS,
-        max_token=MAX_TOKENS,
-        call_agent=False,
-        conversation=[],
-    ):
-        if isinstance(result, str):
-            stream_text += result
-        elif hasattr(result, "content"):
-            stream_text += result.content
-        elif isinstance(result, list):
-            for r in result:
-                if hasattr(r, "content"):
-                    stream_text += r.content
-        messages.append({"role": "assistant", "content": clean_response(stream_text)})
-        yield (messages, None)
-
-    final_report = f"# \U0001f9e0 Final Patient Report\n\n{clean_response(stream_text)}"
-    report_path = os.path.join(report_dir, f"report_{datetime.now().strftime('%Y%m%d_%H%M%S')}.md")
-    with open(report_path, 'w') as f:
-        f.write(final_report)
-
-    messages.append({"role": "assistant", "content": final_report})
-    yield (messages, report_path)
+                        stream_text += r.content
+
+        messages[-1]["content"] = f"📊 Generating final report...\n\n{clean_response(stream_text)}"
+        yield messages, None
+
+        # Save final report
+        final_report = f"# \U0001f9e0 Final Patient Report\n\n{clean_response(stream_text)}"
+        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
+        report_path = os.path.join(report_dir, f"report_{timestamp}.md")
+
+        with open(report_path, 'w') as f:
+            f.write(final_report)
+
+        messages.append({"role": "assistant", "content": f"✅ Report generated and saved: report_{timestamp}.md"})
+        yield messages, report_path
+
+    except Exception as e:
+        messages.append({"role": "assistant", "content": f"❌ Error: {str(e)}"})
+        yield messages, None
+
 
 def create_ui(agent):
-    with gr.Blocks(title="Patient History Chat") as demo:
-        chatbot = gr.Chatbot(label="Clinical Assistant", show_copy_button=True, type="messages")
-        file_upload = gr.File(label="Upload Excel File", file_types=[".xlsx"])
-        analyze_btn = gr.Button("🧠 Analyze Patient History")
-        report_output = gr.File(label="Download Report")
+    with gr.Blocks(title="Patient History Chat", css=".gradio-container {max-width: 900px !important}") as demo:
+        gr.Markdown("## 🏥 Patient History Analysis Tool")
+
+        with gr.Row():
+            with gr.Column(scale=3):
+                chatbot = gr.Chatbot(
+                    label="Clinical Assistant",
+                    show_copy_button=True,
+                    height=600,
+                    avatar_images=(
+                        None,  # User avatar
+                        "https://i.imgur.com/6wX7Zb4.png"  # Bot avatar
+                    )
+                )
+            with gr.Column(scale=1):
+                file_upload = gr.File(
+                    label="Upload Excel File",
+                    file_types=[".xlsx"],
+                    height=100
+                )
+                analyze_btn = gr.Button(
+                    "🧠 Analyze Patient History",
+                    variant="primary"
+                )
+                report_output = gr.File(
+                    label="Download Report",
+                    visible=False,
+                    interactive=False
+                )
+                gr.Examples(
+                    examples=["sample_data/sample_patient_history.xlsx"],
+                    inputs=file_upload,
+                    label="Sample Files"
+                )
 
         analyze_btn.click(
             fn=lambda file: stream_final_report(agent, file),
             inputs=[file_upload],
-            outputs=[chatbot, report_output]
+            outputs=[chatbot, report_output],
+            api_name="analyze"
+        )
+
+        def show_report(report_path):
+            if report_path:
+                return gr.File(visible=True, value=report_path)
+            return gr.File(visible=False)
+
+        demo.load(
+            lambda: None,
+            outputs=report_output
        )
 
     return demo
@@ -212,7 +282,8 @@ if __name__ == "__main__":
             server_name="0.0.0.0",
             server_port=7860,
             show_error=True,
-            allowed_paths=["/data/hf_cache/reports"]
+            allowed_paths=["/data/hf_cache/reports"],
+            share=False
         )
     except Exception as e:
         print(f"Error: {str(e)}")