karthikeyan-r committed
Commit 402b346 · verified · 1 Parent(s): d7d1a81

Update app.py

Files changed (1)
  1. app.py +21 -17
app.py CHANGED
@@ -92,36 +92,30 @@ if clear_conversation_button:
     st.session_state["conversation"] = []
     st.success("Conversation cleared.")
 
-# ----- Display Chat Conversation -----
+# ----- Title -----
 st.title("Chat Conversation UI")
 
-# Loop through existing conversation in session_state and display it
-for message in st.session_state["conversation"]:
-    if message["role"] == "user":
-        with st.chat_message("user"):
-            st.write(message["content"])
-    else:
-        with st.chat_message("assistant"):
-            st.write(message["content"])
 
-# ----- Chat Input Logic -----
+user_input = None  # We'll collect it below
+
 # If we have a T5 pipeline (general QA model):
 if st.session_state["qa_pipeline"]:
-    # Use the new Streamlit chat input
     user_input = st.chat_input("Enter your query:")
     if user_input:
-        # 1) Save user message
+        # 1) Save user message to conversation immediately
        st.session_state["conversation"].append({"role": "user", "content": user_input})
 
-        # 2) Generate response
+        # 2) Generate assistant response
         with st.chat_message("assistant"):
             with st.spinner("Generating response..."):
                 try:
-                    response = st.session_state["qa_pipeline"](f"Q: {user_input}", max_length=250)
+                    response = st.session_state["qa_pipeline"](
+                        f"Q: {user_input}",
+                        max_length=250
+                    )
                     generated_text = response[0]["generated_text"]
                 except Exception as e:
                     generated_text = f"Error: {str(e)}"
-
                 st.write(generated_text)
 
         # 3) Save assistant message
@@ -134,7 +128,7 @@ elif st.session_state["model"] and (model_choice == model_options["1"]):
         # 1) Save user message
         st.session_state["conversation"].append({"role": "user", "content": user_input})
 
-        # 2) Generate response
+        # 2) Generate assistant response
         with st.chat_message("assistant"):
             with st.spinner("Generating response..."):
                 try:
@@ -176,6 +170,16 @@ elif st.session_state["model"] and (model_choice == model_options["1"]):
 
         # 3) Save assistant message
         st.session_state["conversation"].append({"role": "assistant", "content": answer})
+
+# If no model is loaded at all:
 else:
-    # If no model is loaded at all
     st.info("No model is loaded. Please select a model and click 'Load Model' from the sidebar.")
+
+
+for message in st.session_state["conversation"]:
+    if message["role"] == "user":
+        with st.chat_message("user"):
+            st.write(message["content"])
+    else:
+        with st.chat_message("assistant"):
+            st.write(message["content"])
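
The net effect of the change is to move the conversation-rendering loop from before the chat-input handlers to after them, so a turn submitted through st.chat_input is saved and then drawn in the same rerun. A minimal runnable sketch of that pattern follows; the fake_responder stub is hypothetical and stands in for the app's loaded qa_pipeline / model:

import streamlit as st

st.title("Chat Conversation UI")

if "conversation" not in st.session_state:
    st.session_state["conversation"] = []

def fake_responder(prompt: str) -> str:
    # Hypothetical stand-in for the real st.session_state["qa_pipeline"] call.
    return f"Echo: {prompt}"

user_input = st.chat_input("Enter your query:")
if user_input:
    # 1) Save the user message immediately.
    st.session_state["conversation"].append({"role": "user", "content": user_input})
    # 2) Generate the assistant response.
    with st.spinner("Generating response..."):
        try:
            answer = fake_responder(f"Q: {user_input}")
        except Exception as e:
            answer = f"Error: {str(e)}"
    # 3) Save the assistant message.
    st.session_state["conversation"].append({"role": "assistant", "content": answer})

# Rendering happens last, so the newest user/assistant turn appears in this rerun.
for message in st.session_state["conversation"]:
    with st.chat_message(message["role"]):
        st.write(message["content"])

Saved, for example, as app_sketch.py, this runs with "streamlit run app_sketch.py" and exercises the same save-then-render ordering the commit adopts.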