from langchain_huggingface import HuggingFaceEndpoint as HF
from subprocess import Popen, PIPE as P
from langchain_experimental.tools.python.tool import PythonREPLTool as PYT
from langchain.agents import load_tools, initialize_agent as Agent, AgentType as Type
from langchain.agents.agent_toolkits import create_retriever_tool as crt
from langchain_community.agent_toolkits import FileManagementToolkit as FMT
from langchain.tools import Tool
from langchain.memory import ConversationBufferMemory as MEM, RedisChatMessageHistory as HIS
from langchain.schema import SystemMessage as SM, HumanMessage as HM, AIMessage as AM
from langchain import hub
import os
from langchain.retrievers import WikipediaRetriever as Wiki
import gradio as gr
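# Chat display component shared by the ChatInterface defined below.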
chatbot = gr.Chatbot(
    label="SYAI4.1",
    show_copy_button=True,
    layout="panel",
)
def terminal(c):
    """Run a shell command and return its combined stdout/stderr as text."""
    a = Popen(c, shell=True, stdin=P, stdout=P, stderr=P)
    out, err = a.communicate()
    return (out + err).decode(errors="ignore")
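# Tool belt for the agent: file management, a Python REPL, HTTP requests,
# calculator, DuckDuckGo search, a shell, and Korean Wikipedia retrieval.
# The endpoint LLM is created first because the "llm-math" tool needs it.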
llm = HF(repo_id="peterpeter8585/syai4.0")
tools = FMT().get_tools()
tools.append(PYT())
tools.extend(load_tools(["requests"]))
tools.extend(load_tools(["llm-math", "ddg-search"], llm=llm))
tools.append(Tool.from_function(func=terminal, name="terminal", description="Runs a shell command and returns its output"))
tools.append(crt(name="wiki", description="Searches Wikipedia and returns information", retriever=Wiki(lang="ko", top_k_results=1)))
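# Structured-chat prompt pulled from the LangChain hub (kept for reference;
# initialize_agent builds its own prompt for the structured-chat agent type).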
prompt = hub.pull("hwchase17/structured-chat-agent")
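# Chat callback for gr.ChatInterface: called once per user turn. The sampling
# sliders (max_tokens, temperature, top_p) are accepted but not forwarded,
# since the Hugging Face endpoint above is created once at module scope.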
def chat(message,
         history: list[tuple[str, str]],
         system_message,
         max_tokens,
         temperature,
         top_p,
         chat_session=""):
    # Rebuild the visible conversation as LangChain messages; the agent itself
    # receives prior turns through the Redis-backed memory below.
    messages = [SM(content=system_message + " And, your name is Chatchat")]
    for user_turn, ai_turn in history:
        if user_turn:
            messages.append(HM(content=user_turn))
        if ai_turn:
            messages.append(AM(content=ai_turn))
    messages.append(HM(content=message))
    # History is persisted in Redis, keyed by the user-supplied chat id;
    # the connection string is read from the URL environment variable.
    history1 = HIS(session_id=chat_session, url=os.environ["URL"])
    memory = MEM(chat_memory=history1, memory_key="history")
    # initialize_agent already returns an AgentExecutor, so it is used directly.
    agent = Agent(tools=tools, llm=llm, memory=memory,
                  agent=Type.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION,
                  verbose=True, handle_parsing_errors=True)
    # AgentExecutor.invoke expects a dict; only the new message is sent as "input".
    yield agent.invoke({"input": message})["output"]
ai1 = gr.ChatInterface(
    chat,
    chatbot=chatbot,
    additional_inputs=[
        gr.Textbox(value="You are a helpful assistant.", label="System message", interactive=True),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.1, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.1,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
        gr.Textbox(label="chat_id (please enter the chat id!)"),
    ],
)
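# Wrap the chat interface in a tabbed Blocks app and launch it.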
with gr.Blocks(theme="shivi/calm_seafoam") as ai:
    gr.TabbedInterface([ai1], ["Chatchat"])
ai.launch()