xuandin committed on
Commit a4443df · verified · 1 Parent(s): a48f89a

Update app.py

Files changed (1)
  1. app.py +59 -58
app.py CHANGED
@@ -200,66 +200,67 @@ with st.container():
     # Placeholder for displaying result/loading
     with st.spinner("Verifying..."):
         start_time = time.time()
-
-        with torch.no_grad():
-            # Extract evidence
-            evidence_start_time = time.time()
-            evidence = extract_evidence_tfidf_qatc(
-                claim, context, model_qatc, tokenizer_qatc,
-                "cuda" if torch.cuda.is_available() else "cpu",
-                confidence_threshold=tfidf_threshold,
-                length_ratio_threshold=length_ratio_threshold
-            )
-            evidence_time = time.time() - evidence_start_time
-
-            # Classify the claim
-            verdict_start_time = time.time()
-            verdict = "NEI"
-            details = ""
-            prob3class, pred_tc = classify_claim(
-                claim, evidence, model_tc, tokenizer_tc,
+
+        # Extract evidence
+        evidence_start_time = time.time()
+        evidence = extract_evidence_tfidf_qatc(
+            claim, context, model_qatc, tokenizer_qatc,
+            "cuda" if torch.cuda.is_available() else "cpu",
+            confidence_threshold=tfidf_threshold,
+            length_ratio_threshold=length_ratio_threshold
+        )
+        evidence_time = time.time() - evidence_start_time
+
+        # Classify the claim
+        verdict_start_time = time.time()
+        verdict = "NEI"
+        details = ""
+        prob3class, pred_tc = classify_claim(
+            claim, evidence, model_tc, tokenizer_tc,
+            "cuda" if torch.cuda.is_available() else "cpu"
+        )
+        if pred_tc == 1:
+            verdict = "SUPPORTED"
+        elif pred_tc == 2:
+            verdict = "REFUTED"
+        else:
+            prob2class, pred_bc = classify_claim(
+                claim, evidence, model_bc, tokenizer_bc,
                 "cuda" if torch.cuda.is_available() else "cpu"
             )
-            if pred_tc != 0:
-                prob2class, pred_bc = classify_claim(
-                    claim, evidence, model_bc, tokenizer_bc,
-                    "cuda" if torch.cuda.is_available() else "cpu"
-                )
-                if pred_bc == 0:
-                    verdict = "SUPPORTED"
-                elif prob2class > prob3class:
-                    verdict = "REFUTED"
-                else:
-                    verdict = ["NEI", "SUPPORTED", "REFUTED"][pred_tc]
-                if show_details:
-                    details = f"""
-                    <p><strong>3-Class Probability:</strong> {prob3class.item():.2f}</p>
-                    <p><strong>3-Class Predicted Label:</strong> {['NEI', 'SUPPORTED', 'REFUTED'][pred_tc]}</p>
-                    <p><strong>2-Class Probability:</strong> {prob2class.item():.2f}</p>
-                    <p><strong>2-Class Predicted Label:</strong> {['SUPPORTED', 'REFUTED'][pred_bc]}</p>
-                    """
-            verdict_time = time.time() - verdict_start_time
-
-            # Store verification history and the latest result
-            st.session_state.history.append({
-                "claim": claim,
-                "evidence": evidence,
-                "verdict": verdict,
-                "evidence_time": evidence_time,
-                "verdict_time": verdict_time,
-                "details": details
-            })
-            st.session_state.latest_result = {
-                "claim": claim,
-                "evidence": evidence,
-                "verdict": verdict,
-                "evidence_time": evidence_time,
-                "verdict_time": verdict_time,
-                "details": details
-            }
-
-            if torch.cuda.is_available():
-                torch.cuda.empty_cache()
+            if pred_bc == 0:
+                verdict = "SUPPORTED"
+            else:
+                verdict = "REFUTED"
+        if show_details:
+            details = f"""
+            <p><strong>3-Class Probability:</strong> {prob3class.item():.2f}</p>
+            <p><strong>3-Class Predicted Label:</strong> {['NEI', 'SUPPORTED', 'REFUTED'][pred_tc]}</p>
+            <p><strong>2-Class Probability:</strong> {prob2class.item():.2f}</p>
+            <p><strong>2-Class Predicted Label:</strong> {['SUPPORTED', 'REFUTED'][pred_bc]}</p>
+            """
+        verdict_time = time.time() - verdict_start_time
+
+        # Store verification history and the latest result
+        st.session_state.history.append({
+            "claim": claim,
+            "evidence": evidence,
+            "verdict": verdict,
+            "evidence_time": evidence_time,
+            "verdict_time": verdict_time,
+            "details": details
+        })
+        st.session_state.latest_result = {
+            "claim": claim,
+            "evidence": evidence,
+            "verdict": verdict,
+            "evidence_time": evidence_time,
+            "verdict_time": verdict_time,
+            "details": details
+        }
+
+        if torch.cuda.is_available():
+            torch.cuda.empty_cache()
 
     # Display the result after verification
     res = st.session_state.latest_result
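
The hunk above replaces the old cascade, which ran the binary classifier whenever the 3-class model predicted anything other than NEI and then compared prob2class against prob3class, with a simpler rule: trust the 3-class prediction for SUPPORTED/REFUTED and fall back to the binary classifier only when it predicts NEI. A minimal standalone sketch of that new decision rule, using the label orders ['NEI', 'SUPPORTED', 'REFUTED'] and ['SUPPORTED', 'REFUTED'] that appear in app.py; the helper name decide_verdict is introduced here for illustration and is not part of the commit:

# Hypothetical helper mirroring the verdict logic added in this commit.
# pred_tc: 3-class prediction (0 = NEI, 1 = SUPPORTED, 2 = REFUTED)
# pred_bc: binary prediction  (0 = SUPPORTED, otherwise REFUTED);
#          in app.py it is only computed when pred_tc == 0.
def decide_verdict(pred_tc: int, pred_bc: int) -> str:
    if pred_tc == 1:
        return "SUPPORTED"
    if pred_tc == 2:
        return "REFUTED"
    # 3-class model says NEI: defer to the binary classifier.
    return "SUPPORTED" if pred_bc == 0 else "REFUTED"

Note that with this rule the probability comparison prob2class > prob3class from the old code is no longer used; the probabilities are only surfaced in the optional details panel.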