Ali2206 committed on
Commit
72dc45e
Β·
verified Β·
1 Parent(s): 6e63754

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +52 -49
app.py CHANGED
@@ -108,60 +108,63 @@ def init_agent():
108
  agent.init_model()
109
  return agent
110
 
111
- def stream_report(agent, file: Union[str, 'file'], full_output: str) -> Generator:
112
- accumulated_text = ""
113
- try:
114
- if file is None:
115
- yield "❌ Please upload a valid Excel file.", None, ""
116
- return
117
-
118
- if hasattr(file, "read"):
119
- text = extract_text_from_excel(file)
120
- elif isinstance(file, str) and os.path.exists(file):
121
- text = extract_text_from_excel(file)
122
- else:
123
- raise ValueError("❌ Invalid or missing file.")
124
-
125
- chunks = split_text_into_chunks(text)
126
-
127
- for i, chunk in enumerate(chunks):
128
- prompt = build_prompt_from_text(chunk)
129
- partial = ""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
130
  for res in agent.run_gradio_chat(
131
- message=prompt, history=[], temperature=0.2,
132
  max_new_tokens=MAX_NEW_TOKENS, max_token=MAX_MODEL_TOKENS,
133
  call_agent=False, conversation=[]
134
  ):
135
  if isinstance(res, str):
136
- partial += res
137
  elif hasattr(res, "content"):
138
- partial += res.content
139
- cleaned = clean_response(partial)
140
- accumulated_text += f"\n\nπŸ“„ **Chunk {i+1}**:\n{cleaned}"
141
- yield accumulated_text, None, ""
142
-
143
- summary_prompt = f"Summarize this analysis in a final structured report:\n\n" + accumulated_text
144
- final_report = ""
145
- for res in agent.run_gradio_chat(
146
- message=summary_prompt, history=[], temperature=0.2,
147
- max_new_tokens=MAX_NEW_TOKENS, max_token=MAX_MODEL_TOKENS,
148
- call_agent=False, conversation=[]
149
- ):
150
- if isinstance(res, str):
151
- final_report += res
152
- elif hasattr(res, "content"):
153
- final_report += res.content
154
-
155
- cleaned = clean_response(final_report)
156
- accumulated_text += f"\n\nπŸ“Š **Final Summary**:\n{cleaned}"
157
- report_path = os.path.join(report_dir, f"report_{datetime.now().strftime('%Y%m%d_%H%M%S')}.md")
158
- with open(report_path, 'w') as f:
159
- f.write(f"# 🧠 Final Patient Report\n\n{cleaned}")
160
-
161
- yield accumulated_text, report_path, cleaned
162
 
163
- except Exception as e:
164
- yield f"❌ Error: {str(e)}", None, ""
 
 
 
 
 
 
 
 
 
 
165
 
166
  def create_ui(agent):
167
  with gr.Blocks(css="""
@@ -210,7 +213,7 @@ Upload clinical Excel records below and click **Analyze** to generate a medical
210
  full_output = gr.State(value="")
211
 
212
  analyze_btn.click(
213
- fn=lambda file, state: stream_report(agent, file, state),
214
  inputs=[file_upload, full_output],
215
  outputs=[report_output_markdown, report_file, full_output]
216
  )
@@ -224,4 +227,4 @@ if __name__ == "__main__":
224
  demo.launch(server_name="0.0.0.0", server_port=7860, allowed_paths=["/data/hf_cache/reports"], share=False)
225
  except Exception as e:
226
  print(f"Error: {str(e)}")
227
- sys.exit(1)
 
108
  agent.init_model()
109
  return agent
110
 
111
def stream_report_wrapper(agent):
    """Bind *agent* into a Gradio-compatible streaming callback.

    Returns a generator function with the (file, full_output) signature that
    `analyze_btn.click` expects. Each yield is a 3-tuple feeding the three
    outputs: (markdown_text, report_file_path_or_None, cleaned_summary).
    """

    def _generate(prompt: str) -> str:
        # Run one model pass and accumulate the streamed pieces into a string.
        # Shared by the per-chunk analysis and the final summary pass.
        output = ""
        for res in agent.run_gradio_chat(
            message=prompt, history=[], temperature=0.2,
            max_new_tokens=MAX_NEW_TOKENS, max_token=MAX_MODEL_TOKENS,
            call_agent=False, conversation=[]
        ):
            if isinstance(res, str):
                output += res
            elif hasattr(res, "content"):
                output += res.content
        return output

    def wrapped(file: Union[str, object], full_output: str) -> Generator:
        accumulated_text = ""
        try:
            if file is None:
                yield "❌ Please upload a valid Excel file.", None, ""
                return

            # Accept either a file-like object or an existing path on disk;
            # both go through the same extraction helper.
            if hasattr(file, "read") or (isinstance(file, str) and os.path.exists(file)):
                text = extract_text_from_excel(file)
            else:
                raise ValueError("❌ Invalid or missing file.")

            chunks = split_text_into_chunks(text)

            # Analyze chunk by chunk, streaming progress to the UI after each.
            for i, chunk in enumerate(chunks):
                cleaned = clean_response(_generate(build_prompt_from_text(chunk)))
                accumulated_text += f"\n\nπŸ“„ **Chunk {i+1}**:\n{cleaned}"
                yield accumulated_text, None, ""

            # Final consolidation pass over everything produced so far.
            summary_prompt = "Summarize this analysis in a final structured report:\n\n" + accumulated_text
            cleaned = clean_response(_generate(summary_prompt))
            accumulated_text += f"\n\nπŸ“Š **Final Summary**:\n{cleaned}"

            report_path = os.path.join(report_dir, f"report_{datetime.now().strftime('%Y%m%d_%H%M%S')}.md")
            # Explicit encoding: the report contains emoji, which a non-UTF-8
            # locale default encoding would fail to write.
            with open(report_path, 'w', encoding='utf-8') as f:
                f.write(f"# 🧠 Final Patient Report\n\n{cleaned}")

            yield accumulated_text, report_path, cleaned

        except Exception as e:
            # UI boundary: surface the failure to the user instead of crashing.
            yield f"❌ Error: {str(e)}", None, ""

    return wrapped
168
 
169
  def create_ui(agent):
170
  with gr.Blocks(css="""
 
213
  full_output = gr.State(value="")
214
 
215
  analyze_btn.click(
216
+ fn=stream_report_wrapper(agent),
217
  inputs=[file_upload, full_output],
218
  outputs=[report_output_markdown, report_file, full_output]
219
  )
 
227
  demo.launch(server_name="0.0.0.0", server_port=7860, allowed_paths=["/data/hf_cache/reports"], share=False)
228
  except Exception as e:
229
  print(f"Error: {str(e)}")
230
+ sys.exit(1)