# Gradio chat UI for a ChatGPT-backed "CISO bot" demo: streams gpt-3.5-turbo responses.
import gradio as gr
import openai
from osbot_utils.utils.Dev import pprint

from test_bot.api.Open_API import Open_API

Open_API().setup()            # project helper that configures the OpenAI credentials


class Gradio_Test:

    def title(self):
        return "# Chat GPT Powered demo"

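    # Returns the conversation seed as an OpenAI chat message: a single dict with
    # role "system" that frames the model as a CISO (security) assistant.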
    def default_prompt(self):
        system_prompt = """You are an AI-powered CISO (Chief Information Security Officer) bot, designed to provide guidance and answer questions related to cybersecurity and information security. You have extensive knowledge in securing systems, protecting data, implementing best practices, and addressing security concerns. Users will seek your assistance for advice, information, and solutions on a wide range of security topics. Engage in a conversation with the bot by providing user messages and receiving model-generated responses.

                        User: Hi, I have some security concerns and questions. Can you help me?

                        CISO Bot:"""
        return {"role": "system", "content": system_prompt}

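    # Generator used by gr.ChatInterface: rebuilds the full message list from the
    # Gradio (user, assistant) history, then streams the completion back token by token.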
    def predict(self, message, history):
        history_openai_format = [self.default_prompt()]        # always start with the system prompt
        for human, assistant in history:
            history_openai_format.append({"role": "user", "content": human})
            history_openai_format.append({"role": "assistant", "content": assistant})
        history_openai_format.append({"role": "user", "content": message})

        pprint(history_openai_format)                          # debug: log the outgoing payload
        response = openai.ChatCompletion.create(               # legacy (openai<1.0) chat API
            model='gpt-3.5-turbo',
            messages=history_openai_format,
            temperature=1.0,
            stream=True
        )

        partial_message = ""
        for chunk in response:
            content = chunk['choices'][0]['delta'].get('content')   # first/last chunks carry no 'content'
            if content:
                partial_message += content
                yield partial_message                          # yield the growing message so the UI streams

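    # Builds the main demo: a Blocks layout with a markdown title and a ChatInterface
    # wired to predict(); queue() is required so the generator can stream responses.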
    def create_demo(self):
        with gr.Blocks() as demo:
            gr.Markdown(self.title())
            textbox_input = gr.Textbox(value='Hello, good morning', render=False)   # pre-filled input box
            gr.ChatInterface(self.predict, textbox=textbox_input)

        demo.queue()
        return demo

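    # Alternative minimal demo: a textbox whose change() event rewrites the output
    # on every keystroke, with no model call involved.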
    def create_demo__2(self):
        title = self.title()

        with gr.Blocks() as demo:
            gr.Markdown(title)
            inp = gr.Textbox(placeholder="What is your name?")
            out = gr.Textbox()

            inp.change(fn=lambda x: f"Welcome, {x}!",          # fires on every edit of inp
                       inputs=inp,
                       outputs=out)

        return demo
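

# Minimal usage sketch, assuming this module is run directly (the original file
# defines no entry point, so this block is illustrative only):
if __name__ == '__main__':
    Gradio_Test().create_demo().launch()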