# syai4.1 / app.py
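# Gradio chat app: a LangChain structured-chat agent served by the
# peterpeter8585/syai4.0 HuggingFace endpoint, equipped with shell, Python REPL,
# search, and Wikipedia tools, and Redis-backed per-session chat memory.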
from langchain_huggingface import HuggingFaceEndpoint as HF
from subprocess import Popen, PIPE as P
from langchain_experimental.tools.python.tool import PythonREPLTool as PYT
from langchain.agents import load_tools, initialize_agent as Agent, AgentType as Type
from langchain.agents.agent_toolkits import create_retriever_tool as crt
from langchain_community.agent_toolkits import FileManagementToolkit as FMT
from langchain.tools import Tool, YouTubeSearchTool as YTS
from langchain.memory import ConversationBufferMemory as MEM, RedisChatMessageHistory as HIS
from langchain.schema import SystemMessage as SM, HumanMessage as HM, AIMessage as AM
from langchain import hub
import os
from langchain.retrievers import WikipediaRetriever as Wiki
import gradio as gr
chatbot = gr.Chatbot(
    label="SYAI4.1",
    show_copy_button=True,
    likeable=True,
    layout="panel",
)
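# Shell helper exposed to the agent as the "terminal" tool.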
def terminal(c):
    # Run a shell command and return its combined stdout and stderr as text.
    out, err = Popen(c, shell=True, stdin=P, stdout=P, stderr=P).communicate()
    return (out + err).decode(errors="ignore")
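# HuggingFace Inference endpoint for the syai4.0 model, plus the agent's toolbox:
# file management, Python REPL, YouTube search, HTTP requests, calculator,
# DuckDuckGo search, the shell helper above, and a Korean Wikipedia retriever.
# Depending on the langchain_community version, load_tools(["requests"]) may
# additionally require allow_dangerous_tools=True.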
llm = HF(repo_id="peterpeter8585/syai4.0")
tools = FMT().get_tools()
tools.append(PYT())
tools.append(YTS())
tools.extend(load_tools(["requests"]))
tools.extend(load_tools(["llm-math", "ddg-search"], llm=llm))
tools.append(Tool.from_function(func=terminal, name="terminal", description="Run a shell command and return its output."))
tools.append(crt(name="wiki", description="Search Wikipedia and retrieve information.", retriever=Wiki(lang="ko", top_k_results=1)))
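# Structured-chat prompt pulled from the LangChain hub for reference;
# initialize_agent below builds its own default prompt for this agent type.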
prompt = hub.pull("hwchase17/structured-chat-agent")
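# Chat callback for gr.ChatInterface: rebuilds the conversation as LangChain
# messages, attaches Redis-backed memory keyed by chat_session (the Redis URL
# comes from the URL environment variable), and runs the structured-chat agent.
# The max_tokens / temperature / top_p sliders are accepted but not forwarded
# to the endpoint here.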
def chat(message,
         history: list[tuple[str, str]],
         system_message,
         max_tokens,
         temperature,
         top_p,
         chat_session=""):
    messages = [SM(content=system_message + " And, your name is Chatchat.")]
    for val in history:
        if val[0]:
            messages.append(HM(content=val[0]))
        if val[1]:
            messages.append(AM(content=val[1]))
    messages.append(HM(content=message))
    history1 = HIS(session_id=chat_session, url=os.environ["URL"])
    memory = MEM(chat_memory=history1, memory_key="history")
    agent = Agent(
        tools=tools,
        llm=llm,
        memory=memory,
        agent=Type.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION,
        verbose=True,
        handle_parsing_errors=True,
    )
    # Flatten the rebuilt conversation into a single input string for the agent.
    transcript = "\n".join(f"{m.type}: {m.content}" for m in messages)
    yield agent.invoke({"input": transcript})["output"]
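# Wire the chat() generator into a ChatInterface; the additional inputs map to
# system_message, max_tokens, temperature, top_p, and chat_session in order.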
ai1 = gr.ChatInterface(
    chat,
    chatbot=chatbot,
    additional_inputs=[
        gr.Textbox(value="You are a helpful assistant.", label="System message", interactive=True),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.1, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.1, step=0.05, label="Top-p (nucleus sampling)"),
        gr.Textbox(label="chat_id (please enter the chat id!)"),
    ],
)
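# Wrap the chat interface in a themed Blocks layout with a single tab and launch.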
with gr.Blocks(theme="shivi/calm_seafoam") as ai:
    gr.TabbedInterface([ai1], ["Chatchat"])
ai.launch()