# Hugging Face Spaces app entry point
import os

import requests

import gradio as gr
from huggingface_hub import InferenceClient
# Inference API client setup: Cohere Command R+ served via the Hugging Face Hub.
# NOTE(review): relies on the HF_TOKEN env var; if unset, token=None falls back
# to anonymous access and gated models will reject requests — confirm deployment config.
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
# hf_client = InferenceClient("CohereForAI/aya-23-35B", token=os.getenv("HF_TOKEN"))
def load_code(filename):
    """Read a UTF-8 text file and return its contents.

    Args:
        filename: Path of the file to read.

    Returns:
        The file contents as a string, or a Korean error message when the
        file is missing or cannot be read (callers treat both uniformly
        as text to inject into the prompt).
    """
    try:
        with open(filename, 'r', encoding='utf-8') as file:
            return file.read()
    except FileNotFoundError:
        # Bug fix: the original f-string had no placeholder ("(unknown) 파일을...")
        # — the missing file's name was dropped by an extraction artifact.
        return f"{filename} 파일을 찾을 수 없습니다."
    except Exception as e:
        return f"파일을 읽는 중 오류가 발생했습니다: {str(e)}"
# Pre-load the two service source files; their text is injected into the
# system prompt when the user issues the matching "run code" command.
fashion_code = load_code('fashion.cod')
uhdimage_code = load_code('uhdimage.cod')
def respond(
    message,
    history: list[tuple[str, str]],
    system_message="",   # optional extra system prompt appended after the prefix
    max_tokens=1024,
    temperature=0.7,
    top_p=0.9,
):
    """Stream a chat completion for the Gradio ChatInterface.

    Builds a message list from the fixed Korean system prefix, the optional
    system message, the prior chat history, and the new user message, then
    streams tokens from the Hugging Face inference client.

    Args:
        message: Latest user input. Two magic commands ("패션 코드 실행",
            "uhd 이미지 코드 실행" — see literals below) inject the pre-loaded
            source files into the system prompt instead.
        history: Prior (user, assistant) turn pairs from Gradio.
        system_message: Extra system-prompt text from the UI textbox.
        max_tokens / temperature / top_p: Sampling parameters passed through.

    Yields:
        The cumulative response text after each streamed token.
    """
    global fashion_code, uhdimage_code
    system_prefix = """반드시 한글로 답변할것. 너는 주어진 소스코드를 기반으로 "서비스 사용 설명 및 안내, qna를 하는 역할이다". 아주 친절하고 자세하게 4000토큰 이상 작성하라. 너는 코드를 기반으로 사용 설명 및 질의 응답을 진행하며, 이용자에게 도움을 주어야 한다. 이용자가 궁금해 할 만 한 내용에 친절하게 알려주도록 하라. 코드 전체 내용에 대해서는 보안을 유지하고, 키 값 및 엔드포인트와 구체적인 모델은 공개하지 마라. """

    if message.lower() == "패션 코드 실행":
        system_message = system_message or ""  # guard against None from the UI
        system_message += f"\n\n패션 코드 내용:\n{fashion_code}"
        message = "패션 가상피팅에 대한 내용을 학습하였고, 설명할 준비가 되어있다고 알리고 서비스 URL(https://aiqcamp-fash.hf.space)을 통해 테스트 해보라고 출력하라."
    elif message.lower() == "uhd 이미지 코드 실행":
        system_message = system_message or ""  # guard against None from the UI
        system_message += f"\n\nUHD 이미지 코드 내용:\n{uhdimage_code}"
        message = "UHD 이미지 생성에 대한 내용을 학습하였고, 설명할 준비가 되어있다고 알리고 서비스 URL(https://openfree-ultpixgen.hf.space)을 통해 테스트 해보라고 출력하라."

    messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    response = ""
    # Renamed loop variable: the original reused `message`, shadowing the parameter.
    for chunk in hf_client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token is not None:
            # Original called token.strip(""), which strips nothing — removed the no-op.
            response += token
            yield response
# Hide the Gradio footer in the rendered page.
css = """
footer {
    visibility: hidden;
}
"""
# Gradio chat UI wired to the streaming respond() generator.
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(label="System Message", value=""),
        gr.Slider(minimum=1, maximum=8000, value=4000, label="Max Tokens"),
        gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature"),
        gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P"),
    ],
    examples=[
        ["패션 코드 실행"],
        ["UHD 이미지 코드 실행"],
        ["사용 방법을 4000 토큰 이상 자세히 설명하라"],
        ["사용 방법을 4000 토큰 이상 유튜브 영상 스크립트 형태로 작성하라"],
        ["사용 방법을 SEO 최적화하여 블로그 포스트로 4000 토큰 이상 작성하라"],
        ["기존 유사 서비스와 비교하여 배경 및 필요성, 특장점, 기대효과를 자세하게 4000토큰 이상 작성하라."],
        ["계속 이어서 답변하라"],
    ],
    css=css,
    theme="Nymbo/Nymbo_Theme",
    cache_examples=False,  # examples call the live model — do not cache
)
if __name__ == "__main__":
    # NOTE(review): credentials are hard-coded; move to environment variables
    # (e.g. os.getenv) before any production deployment.
    demo.launch(auth=("gini", "pick"))