radub23 committed
Commit · b426f35
1 Parent(s): 3ff490d
Update Gradio interface for warning lamp detector with image upload functionality
app.py CHANGED
@@ -1,64 +1,82 @@
import gradio as gr
from huggingface_hub import InferenceClient

"""
- For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
- client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")


- def respond(
-     message,
-     history: list[tuple[str, str]],
-     system_message,
-     max_tokens,
-     temperature,
-     top_p,
- ):
    messages = [{"role": "system", "content": system_message}]
-
-     for val in history:
-         if val[0]:
-             messages.append({"role": "user", "content": val[0]})
-         if val[1]:
-             messages.append({"role": "assistant", "content": val[1]})
-
-     messages.append({"role": "user", "content": message})

    response = ""
-
    for message in client.chat_completion(
        messages,
-         max_tokens=max_tokens,
        stream=True,
-         temperature=temperature,
-         top_p=top_p,
    ):
        token = message.choices[0].delta.content
-
        response += token
        yield response

-
- """
- For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
- """
- demo = gr.ChatInterface(
-     respond,
-     additional_inputs=[
-         gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
-         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-         gr.Slider(minimum=0.1, maximum=4.0, value=1.0, step=0.1, label="Temperature"),
-         gr.Slider(
-             minimum=0.1,
-             maximum=1.0,
-             value=0.95,
-             step=0.05,
-             label="Top-p (nucleus sampling)",
-         ),
-     ],
- )
-

if __name__ == "__main__":
    demo.launch()

import gradio as gr
from huggingface_hub import InferenceClient
+ import os

"""
+ Warning Lamp Detector using Hugging Face Inference API
+ This application allows users to upload images of warning lamps and get classification results.
"""

+ # Initialize the client with your model
+ client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

+ def detect_warning_lamp(image, history: list[tuple[str, str]], system_message):
+     """
+     Process the uploaded image and return detection results
+     """
+     # TODO: Replace with actual model inference
+     # This is a placeholder response - you'll need to integrate your actual model
    messages = [{"role": "system", "content": system_message}]
+
+     # Add the image analysis request
+     messages.append({
+         "role": "user",
+         "content": f"Please analyze this warning lamp image and provide a detailed classification."
+     })

    response = ""
    for message in client.chat_completion(
        messages,
+         max_tokens=512,
        stream=True,
+         temperature=0.7,
+         top_p=0.95,
    ):
        token = message.choices[0].delta.content
        response += token
        yield response

+ # Create a custom interface with image upload
+ with gr.Blocks(title="Warning Lamp Detector", theme=gr.themes.Soft()) as demo:
+     gr.Markdown("""
+     # 🚨 Warning Lamp Detector
+     Upload an image of a warning lamp to get its classification.
+
+     ### Instructions:
+     1. Upload a clear image of the warning lamp
+     2. Wait for the analysis
+     3. View the detailed classification results
+     """)
+
+     with gr.Row():
+         with gr.Column(scale=1):
+             image_input = gr.Image(
+                 label="Upload Warning Lamp Image",
+                 type="pil",
+                 tool="select"
+             )
+             system_message = gr.Textbox(
+                 value="You are an expert in warning lamp classification. Analyze the image and provide detailed information about the type, color, and status of the warning lamp.",
+                 label="System Message",
+                 lines=3
+             )
+
+         with gr.Column(scale=1):
+             chatbot = gr.Chatbot(
+                 [],
+                 elem_id="chatbot",
+                 bubble_full_width=False,
+                 avatar_images=(None, "🚨"),
+                 height=400
+             )
+
+     # Add a submit button
+     submit_btn = gr.Button("Analyze Warning Lamp", variant="primary")
+     submit_btn.click(
+         detect_warning_lamp,
+         inputs=[image_input, chatbot, system_message],
+         outputs=chatbot
+     )

if __name__ == "__main__":
    demo.launch()
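
As committed, detect_warning_lamp never reads the uploaded image: the in-code TODO says actual model inference still has to be integrated, and the placeholder only sends a text prompt to the zephyr-7b-beta chat model. Below is a minimal sketch of one way to wire the image in, not part of this commit. It assumes an image-classification model hosted on the Hub is called through InferenceClient.image_classification; the model id "your-username/warning-lamp-classifier" is a placeholder.

import io

from huggingface_hub import InferenceClient

client = InferenceClient()  # picks up a configured HF token from the environment, if any


def detect_warning_lamp(image, history, system_message):  # same inputs as the committed click wiring
    """Classify the uploaded warning lamp image and yield a short text report."""
    if image is None:
        yield "Please upload an image first."
        return

    # image_classification accepts bytes, a file path, or a URL, so serialize the PIL image first.
    buffer = io.BytesIO()
    image.save(buffer, format="PNG")

    results = client.image_classification(
        buffer.getvalue(),
        model="your-username/warning-lamp-classifier",  # placeholder model id, not part of this commit
    )

    # Recent huggingface_hub releases return items exposing .label and .score attributes.
    top = sorted(results, key=lambda r: r.score, reverse=True)[:3]
    yield "Top matches:\n" + "\n".join(f"{r.label}: {r.score:.1%}" for r in top)

If this sketch replaced the committed handler, the output wiring would also need attention: gr.Chatbot expects (user, assistant) message pairs rather than the bare string yielded here, so the click callback would have to append its result to the history before returning it.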