Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -83,7 +83,7 @@ def do_web_search(query: str) -> str:
 # Model/processor loading
 ##############################################################################
 MAX_CONTENT_CHARS = 4000
-model_id = os.getenv("MODEL_ID", "
+model_id = os.getenv("MODEL_ID", "VIDraft/Gemma3-R1945-27B")
 
 processor = AutoProcessor.from_pretrained(model_id, padding_side="left")
 model = Gemma3ForConditionalGeneration.from_pretrained(
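For orientation, here is a minimal sketch of how this loading block typically continues in a Transformers-based Space; the dtype, device_map, and eval() call below are assumptions for illustration, not lines taken from app.py:

import os
import torch
from transformers import AutoProcessor, Gemma3ForConditionalGeneration

# The default checkpoint can be overridden with the MODEL_ID environment
# variable, matching the os.getenv() pattern in the diff above.
model_id = os.getenv("MODEL_ID", "VIDraft/Gemma3-R1945-27B")

# Left padding keeps batched prompts aligned for generation.
processor = AutoProcessor.from_pretrained(model_id, padding_side="left")

# Assumed loading arguments: bfloat16 weights placed automatically on the
# available GPU(s).
model = Gemma3ForConditionalGeneration.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # assumption: half precision for a 27B model
    device_map="auto",           # assumption: let accelerate handle placement
)
model.eval()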
@@ -618,15 +618,15 @@ button:hover, .btn:hover {
 """
 
 title_html = """
-<h1 align="center" style="margin-bottom: 0.2em; font-size: 1.6em;"> 🤗
+<h1 align="center" style="margin-bottom: 0.2em; font-size: 1.6em;"> 🤗 Gemma3-R1945-27B </h1>
 <p align="center" style="font-size:1.1em; color:#555;">
-
-Operates on an NVIDIA A100 GPU as an independent local server, enhancing security and preventing information leakage.<br>
+✅ Agentic AI Platform ✅ Reasoning & Uncensored ✅ Multimodal & VLM ✅ Deep-Research & RAG <br>
+Operates on an ✅ 'NVIDIA A100 GPU' as an independent local server, enhancing security and preventing information leakage.<br>
 @Based by 'MS Gemma-3-27b' / @Powered by 'MOUSE-II'(VIDRAFT)
 </p>
 """
 
-with gr.Blocks(css=css, title="Vidraft-G3-27B") as demo:
+with gr.Blocks(css=css, title="Gemma3-R1945-27B") as demo:
 gr.Markdown(title_html)
 
 # Show the web-search option in the UI (but hide the system prompt, token sliders, etc.)
@@ -638,7 +638,7 @@ with gr.Blocks(css=css, title="Vidraft-G3-27B") as demo:
 # Used internally, but kept hidden from the UI
 system_prompt_box = gr.Textbox(
 lines=3,
-value="
+value="You are a deep thinking AI, you may use extremely long chains of thought to deeply consider the problem and deliberate with yourself via systematic reasoning processes to help come to a correct solution prior to answering. You should enclose your thoughts and internal monologue inside tags, and then provide your solution or response to the problem. Please answer in Korean.You have the ability to read English sources, but you **must always speak in Korean**.Even if the search results are in English, answer in Korean.",
 visible=False  # hidden from the UI
 )
 
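To illustrate the hidden system-prompt pattern in the last hunk: a gr.Textbox created with visible=False never renders on the page, but its value is still passed to any event handler that lists it as an input. Below is a minimal, self-contained sketch under that assumption; the component names and the echo-style handler are illustrative, not taken from app.py:

import gradio as gr

def respond(message: str, system_prompt: str) -> str:
    # Illustrative handler: a real app would format system_prompt + message
    # with the chat template and call the model's generate().
    return f"(system prompt: {len(system_prompt)} chars) You said: {message}"

with gr.Blocks(title="Gemma3-R1945-27B") as demo:
    # Used internally but hidden from the page, as in the diff above.
    system_prompt_box = gr.Textbox(
        lines=3,
        value="You are a deep thinking AI ...",  # shortened; see the diff for the full text
        visible=False,
    )
    user_box = gr.Textbox(label="Message")
    out_box = gr.Textbox(label="Response")
    send_btn = gr.Button("Send")

    # The hidden textbox still supplies its value as an input here.
    send_btn.click(respond, inputs=[user_box, system_prompt_box], outputs=out_box)

demo.launch()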