dongyubin committed
Commit fdd702f
1 Parent(s): 67b4536
Files changed (1)
  1. app.py +13 -22
app.py CHANGED
@@ -17,17 +17,21 @@ def main():
     input_api_key = gr.inputs.Textbox(label="ChatGPT API Key", lines=1)
     input_api_base = gr.inputs.Textbox(label="ChatGPT API 地址(默认无地址)", lines=1)
     input_url = gr.inputs.Textbox(label="URL", lines=1)
+    def get_inputs(enabled):
+        if enabled:
+            return [input_api_key, input_api_base]
+        else:
+            return [input_url]
+    inputs = [input_checkbox] + get_inputs(input_checkbox.value)
+    outputs = "text"
+    update_interface = lambda enabled: gr.Interface(fn=my_inference_function, inputs=get_inputs(enabled), outputs=outputs)
+    interface = gr.Interface(fn=my_inference_function, inputs=inputs, outputs=outputs, examples=[["enabled", "api_key", "api_base", "url"]], title="ChatGPT")
+    interface.launch()
 
-    def my_inference_function(enabled, api_key, api_base, url):
+    def my_inference_function(enabled, api_key, api_base, url):
         if enabled:
-            if api_key:
-                os.environ["OPENAI_API_KEY"] = api_key
-            else:
-                os.environ.pop("OPENAI_API_KEY", None)
-            if api_base:
-                os.environ['OPENAI_API_BASE'] = api_base
-            else:
-                os.environ.pop('OPENAI_API_BASE', None)
+            os.environ["OPENAI_API_KEY"] = api_key
+            os.environ['OPENAI_API_BASE'] = api_base
             llm = OpenAI(temperature=0.7, model_name="gpt-3.5-turbo", max_tokens=1024)
         else:
             llm = HuggingFaceHub(repo_id="declare-lab/flan-alpaca-large", model_kwargs={"temperature":0.1, "max_length":512})
@@ -42,18 +46,5 @@ def main():
 文章要点""")
         return response
 
-    def get_inputs(enabled):
-        if enabled:
-            return [input_api_key, input_api_base]
-        else:
-            return [input_url]
-
-    inputs = [input_checkbox] + get_inputs(input_checkbox.value)
-    outputs = "text"
-    update_interface = lambda enabled: gr.Interface(fn=my_inference_function, inputs=get_inputs(enabled), outputs=outputs)
-    interface = gr.Interface(fn=my_inference_function, inputs=inputs, outputs=outputs, examples=[["enabled", "api_key", "api_base", "url"]], title="ChatGPT")
-    interface.launch()
-
-
 if __name__ == '__main__':
     main()