openfree committed
Commit bdad5ad · verified · 1 Parent(s): df610ed

Update app.py

Files changed (1):
  1. app.py +40 -43

app.py CHANGED
@@ -616,16 +616,16 @@ button:hover, .btn:hover {
     transform: scale(1.05);
 }
 """
+
 title_html = """
-<h1 align="center" style="margin-bottom: 0.2em;"> 🤗Vidraft-G3-27B: Reasoning + Multimodal + VLM + DeepResearch + Agent</h1>
+<h1 align="center" style="margin-bottom: 0.2em; font-size: 1.6em;"> 🤗 Vidraft-G3-27B : Reasoning + Multimodal + VLM + DeepResearch + Agent</h1>
 <p align="center" style="font-size:1.1em; color:#555;">
-Operates on an NVIDIA A100 GPU as an independent local server, enhancing security and preventing information leakage.
+Operates on an NVIDIA A100 GPU as an independent local server, enhancing security and preventing information leakage.<br>
 @Based by 'MS Gemma-3-27b' / @Powered by 'MOUSE-II'(VIDRAFT)
 </p>
-"""
-
+"""
 
-with gr.Blocks(css=css, title="Vidraft-G3-27B ") as demo:
+with gr.Blocks(css=css, title="Vidraft-G3-27B") as demo:
     gr.Markdown(title_html)
 
     # Show the web search option on screen (but hide the system prompt, token slider, etc.)
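For reference, the title= argument of gr.Blocks only sets the browser tab title, while title_html is rendered in-page through gr.Markdown. A minimal sketch of that pattern; the css and title_html values here are shortened placeholders, not the app's own:

# Minimal sketch: in-page HTML header vs. browser tab title (placeholder values).
import gradio as gr

css = "h1 { margin-bottom: 0.2em; }"  # stands in for the app's full CSS string

title_html = """
<h1 align="center" style="font-size: 1.6em;">🤗 Vidraft-G3-27B</h1>
<p align="center" style="font-size:1.1em; color:#555;">Demo header</p>
"""

with gr.Blocks(css=css, title="Vidraft-G3-27B") as demo:  # title= sets the browser tab
    gr.Markdown(title_html)                               # HTML header rendered at the top of the page

if __name__ == "__main__":
    demo.launch()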
@@ -638,9 +638,9 @@ with gr.Blocks(css=css, title="Vidraft-G3-27B ") as demo:
     system_prompt_box = gr.Textbox(
         lines=3,
         value="반드시 한글로 출력하여야 한다. You are a deep thinking AI, you may use extremely long chains of thought to deeply consider the problem and deliberate with yourself via systematic reasoning processes to help come to a correct solution prior to answering. You should enclose your thoughts and internal monologue inside tags, and then provide your solution or response to the problem. Please answer in Korean.You have the ability to read English sources, but you **must always speak in Korean**.Even if the search results are in English, answer in Korean.",
-
         visible=False  # hidden from the UI
     )
+
     max_tokens_slider = gr.Slider(
         label="Max New Tokens",
         minimum=100,
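The system prompt box above and the token slider are created with visible=False, so they never appear in the UI; because they are listed in additional_inputs of gr.ChatInterface in the next hunk, their current values are still forwarded to the chat callback on every turn. A minimal sketch of that pattern, assuming a toy echo callback in place of the app's run:

# Sketch: hidden components listed as additional_inputs still feed the callback.
import gradio as gr

def echo(message, history, system_prompt, max_new_tokens):
    # Both extra values arrive even though their widgets are hidden.
    return f"(system prompt {len(system_prompt)} chars, max {max_new_tokens} tokens) {message}"

with gr.Blocks() as demo:
    system_prompt_box = gr.Textbox(value="You are a helpful assistant.", visible=False)
    max_tokens_slider = gr.Slider(minimum=100, maximum=8000, value=1000, visible=False)

    gr.ChatInterface(
        fn=echo,
        type="messages",
        additional_inputs=[system_prompt_box, max_tokens_slider],
    )

if __name__ == "__main__":
    demo.launch()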
@@ -649,52 +649,49 @@
         value=1000,
         visible=False  # hidden from the UI
     )
+
     web_search_text = gr.Textbox(
         lines=1,
         label="(Unused) Web Search Query",
         placeholder="No direct input needed",
         visible=False  # hidden from the UI
     )
-    # Use the chat interface at full screen width
-    # Fix indentation
-    chat = gr.ChatInterface(
-        fn=run,
-        type="messages",
-        chatbot=gr.Chatbot(type="messages", scale=1, allow_tags=["image"]),
-        textbox=gr.MultimodalTextbox(
-            file_types=[
-                ".webp", ".png", ".jpg", ".jpeg", ".gif",
-                ".mp4", ".csv", ".txt", ".pdf"
+
+    # Configure the chat interface
+    chat = gr.ChatInterface(
+        fn=run,
+        type="messages",
+        chatbot=gr.Chatbot(type="messages", scale=1, allow_tags=["image"]),
+        textbox=gr.MultimodalTextbox(
+            file_types=[
+                ".webp", ".png", ".jpg", ".jpeg", ".gif",
+                ".mp4", ".csv", ".txt", ".pdf"
+            ],
+            file_count="multiple",
+            autofocus=True
+        ),
+        multimodal=True,
+        additional_inputs=[
+            system_prompt_box,
+            max_tokens_slider,
+            web_search_checkbox,
+            web_search_text,
         ],
-            file_count="multiple",
-            autofocus=True
-        ),
-        multimodal=True,
-        additional_inputs=[
-            system_prompt_box,
-            max_tokens_slider,
-            web_search_checkbox,
-            web_search_text,
-        ],
-        stop_btn=False,
-        title='<a href="https://discord.gg/openfreeai" target="_blank">https://discord.gg/openfreeai</a>',
-        examples=examples,
-        run_examples_on_click=False,
-        cache_examples=False,
-        css_paths=None,
-        delete_cache=(1800, 1800),
-    )
+        stop_btn=False,
+        title='<a href="https://discord.gg/openfreeai" target="_blank">https://discord.gg/openfreeai</a>',
+        examples=examples,
+        run_examples_on_click=False,
+        cache_examples=False,
+        css_paths=None,
+        delete_cache=(1800, 1800),
+    )
 
-    with gr.Row(elem_id="examples_row"):
-        with gr.Column(scale=12, elem_id="examples_container"):
-            gr.Markdown("### Example Inputs (click to load)")
-            gr.Examples(
-                examples=examples,
-                inputs=[],
-                cache_examples=False
-            )
+    # Example section: examples are already set on the ChatInterface, so only a description is shown here
+    with gr.Row(elem_id="examples_row"):
+        with gr.Column(scale=12, elem_id="examples_container"):
+            gr.Markdown("### Example Inputs (click to load)")
 
 
 if __name__ == "__main__":
     # when running locally only
-    demo.launch()
+    demo.launch()
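The run callback, web_search_checkbox, and examples referenced above are defined earlier in app.py and are not part of this diff. Given the wiring in this hunk (multimodal=True plus four additional_inputs), run presumably receives the multimodal message, the chat history, and then the four extra values in that order; the skeleton below only illustrates that call shape and is not the app's implementation:

# Illustrative skeleton of the expected call shape -- not the real run() from app.py.
def run(message, history, system_prompt, max_new_tokens, use_web_search, web_search_query):
    # With multimodal=True, `message` is a dict: {"text": str, "files": list[str]}
    text = message["text"]
    files = message.get("files", [])
    # ... assemble the prompt, optionally perform the web search, call the model ...
    yield f"(placeholder) {len(files)} file(s) attached; prompt text: {text}"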
 