DinisCruz committed
Commit efef7be · 1 Parent(s): b618f08

refactored login details to environment variables

Files changed (2)
  1. app.py +8 -12
  2. test_bot/Gradio_Test.py +7 -51
app.py CHANGED
@@ -1,19 +1,15 @@
-# import gradio as gr
-#
-# def greet(name):
-#     return "Hello ...." + name + "!!"
-#
-# iface = gr.Interface(fn=greet, inputs="text", outputs="text")
-# iface.launch()
-
-import gradio as gr
-
+import os
+from dotenv import load_dotenv
 from test_bot.Gradio_Test import Gradio_Test
 
+load_dotenv()
 gradio_test = Gradio_Test()
-
 demo = gradio_test.create_demo()
 
+username = os.getenv('HF_USERNAME')
+password = os.getenv('HF_PASSWORD')
+
 if __name__ == "__main__":
+    demo.launch(auth=(username, password))
     #demo.launch()
-    demo.launch(auth=("admin", "pass1234"))
+    #demo.launch(auth=("admin", "pass1234"))
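
With this change, app.py expects the Gradio login to come from the HF_USERNAME and HF_PASSWORD environment variables, and load_dotenv() will also pick them up from a local .env file if one is present. The snippet below is not part of the commit; it is a minimal sketch with placeholder values showing how those variables could be provided and how a guard could fail fast, since os.getenv() returns None for unset variables:

    # Hypothetical companion snippet (placeholder names/values, not in this commit).
    #
    # Example .env file, kept out of version control:
    #   HF_USERNAME=some-username
    #   HF_PASSWORD=some-strong-password

    import os
    from dotenv import load_dotenv

    load_dotenv()                          # loads key=value pairs from .env into the environment

    username = os.getenv('HF_USERNAME')    # None if the variable is not set
    password = os.getenv('HF_PASSWORD')

    if not username or not password:
        raise RuntimeError("HF_USERNAME and HF_PASSWORD must be set, e.g. via a .env file")
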
test_bot/Gradio_Test.py CHANGED
@@ -9,54 +9,20 @@ from test_bot.api.Open_API import Open_API
 Open_API().setup()
 
 
+TITLE = "# Meet Bobby Tables (head of Application Security). v0.2.0"
 
 class Gradio_Test:
 
 
     def __init__(self):
+        #self.demo = None
         pass
 
     def title(self):
-        return "# Meet Bobby Tables (head of Application Security)"
+        return TITLE
 
 
-
-    # def predict(self, message, history):
-    #     print('--'*50)
-    #     print("Message:", message)
-    #     print("History:", history)
-    #     print('--' * 50)
-    #     history_openai_format = []
-    #     history_openai_format.append(self.default_prompt())
-    #     for human, assistant in history:
-    #         history_openai_format.append({"role": "user", "content": human})
-    #         history_openai_format.append({"role": "assistant", "content": assistant})
-    #     history_openai_format.append({"role": "user", "content": message})
-    #
-    #     #pprint(history_openai_format)
-    #     response = openai.ChatCompletion.create(
-    #         model='gpt-3.5-turbo',
-    #         messages=history_openai_format,
-    #         temperature=1.0,
-    #         stream=True
-    #     )
-    #
-    #     #token_count = list_set(response)
-    #     #print("Number of tokens used:", token_count)
-    #
-    #     partial_message = ""
-    #     for chunk in response:
-    #         if len(chunk['choices'][0]['delta']) != 0:
-    #             next_content = chunk['choices'][0]['delta']['content']
-    #             partial_message = partial_message + next_content
-    #             yield next_content
-    #     yield partial_message
-
     def create_demo(self):
-        # def predict(message, history):
-        #     open_api = Open_API().setup()
-        #     return open_api.create()
-
         #return gr.ChatInterface(self.predict).queue()
         default_text = "Hi, good morning"
         chat_predict = Chat_Predict()
@@ -66,19 +32,9 @@ class Gradio_Test:
         gr.ChatInterface(chat_predict.predict, textbox=textbox_input)
 
         demo.queue()
+        #self.demo = demo
         return demo
-        #return
-
-    def create_demo__2(self):
-        title = self.title()
-
-        with gr.Blocks() as demo:
-            gr.Markdown(title)
-            inp = gr.Textbox(placeholder="What is your name?")
-            out = gr.Textbox()
-
-            inp.change(fn=lambda x: f"Welcome .... , {x}!",
-                       inputs=inp,
-                       outputs=out)
 
-        return demo
+    # def launch(self):
+    #     self.demo.queue()
+    #     #self.demo.launch()
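
The commented-out lines in Gradio_Test.py (#self.demo = demo and the # def launch(self): stub) hint at keeping the demo on the instance and launching it from the class rather than from app.py. Purely as an illustration of that idea, and not something this commit enables, a self-contained sketch could look like the following; Gradio_Test_Sketch and echo are hypothetical stand-ins:

    # Hypothetical sketch only: the class name, echo() and the wiring are illustrative.
    import gradio as gr

    class Gradio_Test_Sketch:

        def __init__(self):
            self.demo = None                     # filled in by create_demo()

        def create_demo(self):
            def echo(message, history):          # stand-in for Chat_Predict().predict
                return f"You said: {message}"

            demo = gr.ChatInterface(echo)
            demo.queue()
            self.demo = demo                     # mirrors the commented-out "#self.demo = demo"
            return demo

        def launch(self, auth=None):
            self.demo.launch(auth=auth)          # auth=(username, password) supplied by app.py
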