import gradio as gr
import os

from langchain.chains.question_answering import load_qa_chain
from langchain.document_loaders import UnstructuredURLLoader
from langchain.llms import OpenAI, HuggingFaceHub

os.environ["HUGGINGFACEHUB_API_TOKEN"] = "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"
os.environ["LANGCHAIN_TRACING_V2"] = "true"
os.environ["LANGCHAIN_ENDPOINT"] = "https://api.smith.langchain.com"
os.environ["LANGCHAIN_API_KEY"] = "ls__ae9b316f4ee9475b84f66c616344d713"
os.environ["LANGCHAIN_PROJECT"] = "Sequential-Chain"
def main():
    with gr.Blocks() as demo:
        with gr.Tab(label="HuggingFaceHub", id="tab1"):  # Tab 1
            input_url1 = gr.Textbox(label="URL to summarize", lines=1)
            text_button = gr.Button("Submit")
            text_output_interpret = gr.TextArea(label="Result")
            text_button.click(fn=my_inference_function,
                              inputs=input_url1,
                              outputs=text_output_interpret)
        with gr.Tab(label="ChatGPT", id="tab2"):  # Tab 2
            input_api_key = gr.Textbox(label="ChatGPT API Key", lines=1)
            input_api_base = gr.Textbox(label="ChatGPT API base URL (empty by default)", lines=1)
            input_url2 = gr.Textbox(label="URL to summarize", lines=1)
            vid_button = gr.Button("Submit")
            vid_output_interpret = gr.TextArea(label="Result")
            vid_button.click(fn=my_chatgpt_function,
                             inputs=[input_api_key, input_api_base, input_url2],
                             outputs=vid_output_interpret)
    demo.launch()
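

# Summarize the article at `url` with OpenAI's gpt-3.5-turbo, using the API
# key (and optional API base URL) entered in the ChatGPT tab.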
def my_chatgpt_function(api_key, api_base, url):
    os.environ["OPENAI_API_KEY"] = api_key
    if api_base:  # only override the API base when one is provided
        os.environ["OPENAI_API_BASE"] = api_base
    llm = OpenAI(temperature=0.7, model_name="gpt-3.5-turbo", max_tokens=1024)
    loader = UnstructuredURLLoader(urls=[url])
    data = loader.load()
    chain = load_qa_chain(llm=llm, chain_type="stuff")
    response = chain.run(
        input_documents=data,
        question="""Please summarize the article in Chinese and present the result with the template below:
The summary of "Article Title" is as follows:
## One-sentence description
Summary of the article
## Article overview
Key points of the article""")
    return response
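

# Summarize the article at `url` with the declare-lab/flan-alpaca-large model
# hosted on the Hugging Face Hub.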
def my_inference_function(url):
    llm = HuggingFaceHub(repo_id="declare-lab/flan-alpaca-large",
                         model_kwargs={
                             "temperature": 0.1,
                             "max_length": 512
                         })
    loader = UnstructuredURLLoader(urls=[url])
    data = loader.load()
    chain = load_qa_chain(llm=llm, chain_type="stuff")
    response = chain.run(input_documents=data,
                         question="Summarize this article in one paragraph")
    return response


if __name__ == '__main__':
    main()