Ali2206 committed
Commit 05f1ef4 · verified
1 Parent(s): f2840bd

Update app.py

Files changed (1)
  1. app.py +10 -133
app.py CHANGED
@@ -1,141 +1,18 @@
+# app.py (Gradio UI)
 import os
 import sys
 import gradio as gr
 from multiprocessing import freeze_support
-import importlib
-import inspect
-import json
-import logging
 
-# === Fix path to include src/txagent
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), "src"))
+from ui.ui_core import create_ui
+from backend.agent_instance import init_agent
 
-# === Import and reload to ensure correct file
-import txagent.txagent
-importlib.reload(txagent.txagent)
-from txagent.txagent import TxAgent
-
-# === Debug info
-print(">>> TxAgent loaded from:", inspect.getfile(TxAgent))
-print(">>> TxAgent has run_gradio_chat:", hasattr(TxAgent, "run_gradio_chat"))
-
-# === Logging
-logging.basicConfig(level=logging.INFO)
-
-# === Environment
-current_dir = os.path.abspath(os.path.dirname(__file__))
-os.environ["MKL_THREADING_LAYER"] = "GNU"
-os.environ["TOKENIZERS_PARALLELISM"] = "false"
-
-# === Model config
-model_name = "mims-harvard/TxAgent-T1-Llama-3.1-8B"
-rag_model_name = "mims-harvard/ToolRAG-T1-GTE-Qwen2-1.5B"
-new_tool_files = {
-    "new_tool": os.path.join(current_dir, "data", "new_tool.json")
-}
-
-# === Example prompts
-question_examples = [
-    ["Given a patient with WHIM syndrome on prophylactic antibiotics, is it advisable to co-administer Xolremdi with fluconazole?"],
-    ["What treatment options exist for HER2+ breast cancer resistant to trastuzumab?"]
-]
-
-# === Extract tool name and format output
-def extract_tool_name_and_clean_content(msg):
-    tool_name = "Tool Result"
-    content = msg.get("content") if isinstance(msg, dict) else getattr(msg, "content", "")
-
-    # Attempt to load JSON from tool response
-    try:
-        parsed = json.loads(content)
-        if isinstance(parsed, dict):
-            tool_name = parsed.get("tool_name", tool_name)
-            content = parsed.get("content", content)
-    except Exception as e:
-        # fallback if content is not JSON
-        pass
-
-    if isinstance(content, (dict, list)):
-        content = json.dumps(content, indent=2)
-
-    return f"Tool: {tool_name}", content
-
-# === Format answer in collapsible box
-def format_collapsible(content, title="Answer"):
-    return (
-        f"<details style='border: 1px solid #ccc; border-radius: 8px; padding: 10px; margin-top: 10px;'>"
-        f"<summary style='font-size: 16px; font-weight: bold; color: #3B82F6;'>{title}</summary>"
-        f"<div style='margin-top: 8px; font-size: 15px; line-height: 1.6; white-space: pre-wrap;'>{content}</div></details>"
-    )
-
-# === Build UI
-def create_ui(agent):
-    with gr.Blocks(theme=gr.themes.Soft()) as demo:
-        gr.Markdown("<h1 style='text-align: center;'>💊 TxAgent: Therapeutic Reasoning</h1>")
-        gr.Markdown("Ask biomedical or therapeutic questions. Powered by tool-augmented reasoning.")
-
-        chatbot = gr.Chatbot(label="TxAgent", height=600, type="messages")
-        message_input = gr.Textbox(placeholder="Ask a biomedical question...", show_label=False)
-        send_button = gr.Button("Send", variant="primary")
-        conversation_state = gr.State([])
-
-        def handle_chat(message, history, conversation):
-            generator = agent.run_gradio_chat(
-                message=message,
-                history=history,
-                temperature=0.3,
-                max_new_tokens=1024,
-                max_token=8192,
-                call_agent=False,
-                conversation=conversation,
-                max_round=30
-            )
-            for update in generator:
-                formatted = []
-                for m in update:
-                    role = m.get("role") if isinstance(m, dict) else getattr(m, "role", "assistant")
-                    if role == "assistant":
-                        title, clean = extract_tool_name_and_clean_content(m)
-                        content = format_collapsible(clean, title)
-                    else:
-                        content = m.get("content") if isinstance(m, dict) else getattr(m, "content", "")
-                    formatted.append({"role": role, "content": content})
-                yield formatted
-
-        inputs = [message_input, chatbot, conversation_state]
-        send_button.click(fn=handle_chat, inputs=inputs, outputs=chatbot)
-        message_input.submit(fn=handle_chat, inputs=inputs, outputs=chatbot)
-
-        gr.Examples(examples=question_examples, inputs=message_input)
-        gr.Markdown("<small style='color: gray;'>DISCLAIMER: This demo is for research purposes only and does not provide medical advice.</small>")
-
-    return demo
-
-# === Main
 if __name__ == "__main__":
     freeze_support()
-    try:
-        agent = TxAgent(
-            model_name=model_name,
-            rag_model_name=rag_model_name,
-            tool_files_dict=new_tool_files,
-            force_finish=True,
-            enable_checker=True,
-            step_rag_num=10,
-            seed=100,
-            additional_default_tools=[]
-        )
-        agent.init_model()
-
-        if not hasattr(agent, "run_gradio_chat"):
-            raise AttributeError("❌ TxAgent is missing `run_gradio_chat`.")
-
-        demo = create_ui(agent)
-        demo.queue().launch(
-            server_name="0.0.0.0",
-            server_port=7860,
-            show_error=True
-        )
-    except Exception as e:
-        print(f"❌ App failed to start: {e}")
-        raise
+    agent = init_agent()
+    demo = create_ui(agent)
+    demo.queue().launch(
+        server_name="0.0.0.0",
+        server_port=7860,
+        show_error=True
+    )
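
The two modules the new app.py imports, ui.ui_core and backend.agent_instance, are not part of this commit. Below is a minimal sketch of what backend/agent_instance.py's init_agent() could look like, assuming it simply wraps the TxAgent construction and init_model() call removed from app.py above; the module path, constant names, and the ROOT_DIR/src path handling are assumptions, and the actual file may differ.

# backend/agent_instance.py -- hypothetical sketch, not part of this commit.
# Assumes init_agent() wraps the TxAgent construction and init_model() call
# that the diff above removes from app.py.
import os
import sys

# The old app.py imported txagent from a local src/ directory; if the
# refactored layout keeps that, the path tweak has to happen before the
# import (repo layout is an assumption here).
ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
sys.path.insert(0, os.path.join(ROOT_DIR, "src"))

from txagent.txagent import TxAgent

# Constants carried over from the old app.py.
MODEL_NAME = "mims-harvard/TxAgent-T1-Llama-3.1-8B"
RAG_MODEL_NAME = "mims-harvard/ToolRAG-T1-GTE-Qwen2-1.5B"
NEW_TOOL_FILES = {
    "new_tool": os.path.join(ROOT_DIR, "data", "new_tool.json"),
}


def init_agent() -> TxAgent:
    """Build the TxAgent used by the Gradio UI and load its models."""
    agent = TxAgent(
        model_name=MODEL_NAME,
        rag_model_name=RAG_MODEL_NAME,
        tool_files_dict=NEW_TOOL_FILES,
        force_finish=True,
        enable_checker=True,
        step_rag_num=10,
        seed=100,
        additional_default_tools=[],
    )
    agent.init_model()
    return agent

Likewise, create_ui(agent), handle_chat, and the formatting helpers presumably moved into ui/ui_core.py much as they appear in the removed block, leaving app.py as thin startup wiring.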