radub23 committed
Commit eceb545 · 1 parent: f438e63

Simplify tensor handling with more robust type checking

Files changed (1)
  1. app.py +45 -56
app.py CHANGED
@@ -46,66 +46,55 @@ def detect_warning_lamp(image, history: list[tuple[str, str]], system_message):
         history.append((None, "Please upload an image first."))
         return history

-    # Maximum number of retries
-    max_retries = 3
-    retry_count = 0
-
-    while retry_count < max_retries:
+    try:
+        # Convert PIL image to FastAI compatible format
+        img = PILImage(image)
+
+        # Get model prediction
+        pred_class, pred_idx, probs = learn_inf.predict(img)
+
+        # Convert tensors to Python types safely
+        pred_class_str = str(pred_class)  # Convert class name to string
+
+        # Format the prediction results
+        response = f"Detected Warning Lamp: {pred_class_str}"
+
+        # Try to add confidence if possible
+        try:
+            # Get the index as an integer
+            if isinstance(pred_idx, torch.Tensor):
+                idx = pred_idx.item()
+            else:
+                idx = int(pred_idx)
+
+            # Get the confidence value
+            if isinstance(probs, torch.Tensor) and idx < len(probs):
+                confidence = probs[idx].item()
+                response += f"\nConfidence: {confidence:.2%}"
+        except Exception as conf_error:
+            print(f"Could not calculate confidence: {conf_error}")
+
+        # Add probabilities for all classes if possible
         try:
-            # Convert PIL image to FastAI compatible format
-            img = PILImage(image)
-
-            # Get model prediction
-            pred_class, pred_idx, probs = learn_inf.predict(img)
-
-            # Try different approaches to handle tensor conversion
-            try:
-                # First approach - direct conversion
-                confidence = float(probs[pred_idx])
-            except Exception as e1:
-                print(f"First conversion approach failed: {e1}")
-                try:
-                    # Second approach - convert index first
-                    idx = int(pred_idx)
-                    confidence = float(probs[idx])
-                except Exception as e2:
-                    print(f"Second conversion approach failed: {e2}")
-                    # Third approach - use item() method if available
-                    if hasattr(probs[pred_idx], 'item'):
-                        confidence = probs[pred_idx].item()
-                    else:
-                        # Last resort - use the max probability
-                        confidence = float(max(probs))
-
-            # Format the prediction results
-            response = f"Detected Warning Lamp: {pred_class}\nConfidence: {confidence:.2%}"
-
-            # Add probabilities for all classes
             response += "\n\nProbabilities for all classes:"
-            for i, (cls, prob) in enumerate(zip(learn_inf.dls.vocab, probs)):
-                try:
-                    prob_value = float(prob)
+            for i, cls in enumerate(learn_inf.dls.vocab):
+                if i < len(probs):
+                    if isinstance(probs, torch.Tensor):
+                        prob_value = probs[i].item()
+                    else:
+                        prob_value = float(probs[i])
                     response += f"\n- {cls}: {prob_value:.2%}"
-                except Exception as prob_error:
-                    print(f"Error converting probability for {cls}: {prob_error}")
-                    response += f"\n- {cls}: N/A"
-
-            # Update chat history
-            history.append((None, response))
-            return history
+        except Exception as prob_error:
+            print(f"Could not list all probabilities: {prob_error}")

-        except Exception as e:
-            retry_count += 1
-            print(f"Attempt {retry_count} failed with error: {e}")
-
-            if retry_count < max_retries:
-                print(f"Retrying in 1 second...")
-                time.sleep(1)  # Wait a bit before retrying
-            else:
-                error_msg = f"Error processing image after {max_retries} attempts: {str(e)}"
-                print(f"All retries failed: {error_msg}")
-                history.append((None, error_msg))
-                return history
+        # Update chat history
+        history.append((None, response))
+        return history
+    except Exception as e:
+        error_msg = f"Error processing image: {str(e)}"
+        print(f"Exception in detect_warning_lamp: {e}")
+        history.append((None, error_msg))
+        return history

 # Create a custom interface with image upload
 with gr.Blocks(title="Warning Lamp Detector", theme=gr.themes.Soft()) as demo:
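At its core, the change replaces the retry loop and the cascade of nested try/except conversion attempts with one pattern: check whether a value is a torch.Tensor with isinstance() and, if so, convert it through .item() before formatting. A minimal standalone sketch of that pattern (the to_float helper and the sample tensors are illustrative, not part of app.py):

import torch

def to_float(value):
    """Return a plain Python float from a value that may be a torch.Tensor."""
    if isinstance(value, torch.Tensor):
        return float(value.item())  # .item() handles 0-dim / single-element tensors
    return float(value)

# Illustrative use with a FastAI-style (pred_class, pred_idx, probs) result:
probs = torch.tensor([0.10, 0.70, 0.20])
pred_idx = torch.tensor(1)
idx = int(to_float(pred_idx))
print(f"Confidence: {to_float(probs[idx]):.2%}")  # Confidence: 70.00%

Because the type is checked up front, a conversion either succeeds immediately or fails once with a logged message, which is what lets the new code drop max_retries and time.sleep entirely.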