Update app.py
app.py CHANGED
@@ -4,6 +4,8 @@ from transformers.image_utils import load_image
 from threading import Thread
 import torch
 import spaces
+from serve_constants import html_header, bibtext, learn_more_markdown, tos_markdown
+
 
 MODEL_ID = "TIGER-Lab/VL-Rethinker-7B"
 processor = AutoProcessor.from_pretrained(MODEL_ID, trust_remote_code=True)
@@ -90,6 +92,8 @@ examples = [
     [{"text": "Solve this question.", "files": ["example_images/document.png"]}]
 ]
 
+gr.HTML(html_header)
+
 demo = gr.ChatInterface(
     fn=model_inference,
     description="# **VL-Rethinker-7B**",
@@ -101,4 +105,8 @@ demo = gr.ChatInterface(
     cache_examples=False,
 )
 
+gr.Markdown(tos_markdown)
+gr.Markdown(learn_more_markdown)
+gr.Markdown(bibtext)
+
 demo.launch(debug=True)
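The new code imports four display strings (html_header, bibtext, learn_more_markdown, tos_markdown) from a serve_constants module that is not part of this diff. Below is a minimal sketch of what such a module could look like, assuming it only needs to expose those four module-level strings; the actual serve_constants.py in the Space may define richer HTML and markdown.

# serve_constants.py, a hypothetical sketch rather than the file shipped with the Space.
# app.py only requires these four string constants.

html_header = """
<div align="center">
  <h1>VL-Rethinker-7B</h1>
</div>
"""

tos_markdown = (
    "### Terms of Use\n"
    "This demo is provided for research purposes only."
)

learn_more_markdown = (
    "### Learn More\n"
    "See the TIGER-Lab/VL-Rethinker-7B model card for training and evaluation details."
)

bibtext = (
    "### Citation\n"
    "Please cite the VL-Rethinker work if you build on this demo."
)

With constants like these in place, the added gr.HTML(html_header) and gr.Markdown(...) calls in app.py are intended to render a page header above the chat interface and the terms, learn-more, and citation sections below it.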