ShawnRu committed
Commit 6a744d9 · verified · 1 Parent(s): dac3612

Delete app.py

Files changed (1)
  1. app.py +0 -103
app.py DELETED
@@ -1,103 +0,0 @@
- import os
- import json
- import gradio as gr
- from typing import Literal
-
- TaskType = Literal["NER", "RE", "EE", "Base"]
- ModeType = Literal["direct", "fast-thinking", "slow-thinking", "customized"]
-
-
- class InterFace:
-     def __init__(self, dm):
-         """
-         Receive the Pipeline instance as a parameter.
-         """
-         self.dm = dm
-
-     def collect_and_process_inputs(self, task_description, file=None):
-         """
-         Collect user inputs and return the processing result directly.
-         """
-         if file is not None:
-             file_path = self.save_file(file)
-             print(f"Your file has been saved, you can find it at: {file_path}")
-         else:
-             file_path = None
-         dm_response, dm_code = self.dm.get_extraction_result(file_path, task_description)
-         return json.dumps(dm_response, indent=2), dm_code
-
-     def save_file(self, file):
-         """
-         Save the file to the server and return the file path.
-         """
-         file_path = os.path.join("saved", file.name)
-         os.makedirs(os.path.dirname(file_path), exist_ok=True)
-         with open(file_path, "wb") as f:
-             f.write(file.read())
-         return file_path
-
-     def run_interface(self):
-         """
-         Run the Gradio interface.
-         """
-         task_options = list(TaskType.__args__)
-         mode_options = list(ModeType.__args__)
-
-         # create the interface
-         with gr.Blocks() as demo:
-             gr.HTML("""
-             <div style="text-align:center;">
-                 <p align="center">
-                     <a href="https://github.com/R10836/DeepKE/blob/main/example/llm/assets/oneke_logo.png">
-                         <img src="https://raw.githubusercontent.com/R10836/DeepKE/refs/heads/main/example/llm/assets/oneke_logo.png" width="240"/>
-                     </a>
-                 </p>
-                 <h1>OneKE: A Dockerized Schema-Guided LLM Agent-based Knowledge Extraction System</h1>
-                 <p>
-                     📑[<a href="https://huggingface.co">Paper</a>]
-                     👨‍💻[<a href="https://github.com" target="_blank"><span class="icon"><i class="fab fa-github"></i></span>Code</a>]
-                     📄[<a href="https://zjunlp.gitbook.io">Docs</a>]
-                     🤗[<a href="https://huggingface.co" target="_blank">Demo</a>]
-                 </p>
-             </div>
-             """)
-
-             # section: input area
-             with gr.Row():
-                 task = gr.Dropdown(choices=task_options, label="Select your Task", value="Base")
-                 mode = gr.Dropdown(choices=mode_options, label="Select your Mode", value="direct")
-
-             task_description = gr.Textbox(label="Instruction", placeholder="Enter task description, e.g., extract key information from this article.")
-
-             text = gr.Textbox(label="Text", placeholder="Or enter text here directly")
-
-             use_file = gr.Checkbox(label="Use File", value=False)
-             file = gr.File(label="Upload File", visible=False, container=True, scale=2)
-             use_file.change(
-                 lambda use_file: gr.update(visible=use_file),
-                 inputs=[use_file],
-                 outputs=[file]
-             )
-
-             # section: output area
-             code_output = gr.Code(label="Generated Schema", language="python", lines=10, interactive=False)
-             model_output = gr.Code(label="Final Answer", language="json", lines=10, interactive=False)
-
-             run_button = gr.Button("Submit")
-             run_button.click(
-                 self.collect_and_process_inputs,
-                 inputs=[task_description, file],
-                 outputs=[model_output, code_output],
-             )
-
-         # launch the interface
-         demo.launch(share=False)
-
-
- if __name__ == "__main__":
-     # You need to replace dm with your Pipeline instance
-     dm_instance = None  # fill in your model instance here
-     app = InterFace(dm_instance)
-     app.run_interface()
-
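
For reference, the deleted interface only assumes that the injected dm object exposes get_extraction_result(file_path, task_description) and returns a (response, schema_code) pair, which collect_and_process_inputs serializes and renders. A minimal sketch of a stand-in pipeline for trying the UI locally, assuming that interface (StubPipeline and its placeholder output are hypothetical, not part of OneKE):

# Hypothetical stand-in pipeline for exercising the deleted Gradio UI locally.
# Only the method name and the (response, schema_code) return shape are taken
# from app.py above; the values returned here are placeholders.
class StubPipeline:
    def get_extraction_result(self, file_path, task_description):
        response = {
            "task_description": task_description,
            "file_path": file_path,
            "result": "placeholder extraction output",
        }
        schema_code = "# schema would be produced by the real pipeline"
        return response, schema_code


if __name__ == "__main__":
    # Assumes the InterFace class from the deleted app.py is defined in scope.
    app = InterFace(StubPipeline())
    app.run_interface()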