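"""GPT MAVPlot Gradio app.

A chat UI for uploading a MAVLink tlog and asking an LLM-backed PlotCreator to
write and run a pymavlink/matplotlib script that plots the requested data.
"""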
import gradio as gr
import os
from llm.gptPlotCreator import PlotCreator
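
# PlotCreator lives in this repo (llm/gptPlotCreator.py). Based on how it is
# called below, it is assumed to expose roughly this interface:
#   set_logfile_name(path)            - remember which tlog to read
#   parse_mavlink_log()               - scan the tlog for available data types
#   find_relevant_data_types(prompt)  - pick the data types relevant to a request
#   create_plot(prompt, data_types)   - generate the plotting script
#   run_script()                      - run it, returning the plot entries and the code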

# A single PlotCreator instance is shared by all sessions of this app.
plot_creator = PlotCreator()

def add_text(history, text):
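    """Append the user's message to the history (with no bot reply yet) and clear the textbox."""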
    history = history + [(text, None)]
    return history, ""

def add_file(history, file):
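    """Append an uploaded file as a (path,) tuple so Gradio renders it as a file in the chat."""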
    history = history + [((file.name,), None)]
    return history

def format_history(history):
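    """Format the history as a plain-text Human/AI transcript (not called by the UI below)."""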
    return "\n".join([f"Human: {entry[0]}\nAI: {entry[1]}" for entry in history])

def bot(history):
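    """Respond to the latest chat entry: generate a plot for a text prompt, or ingest an uploaded tlog.

    Written as a generator so progress messages stream into the Chatbot while each step runs.
    """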
    # Get the last input from the user
    user_input = history[-1][0] if history and history[-1][0] else None

    print(user_input)

    # Check if it is a string
    if isinstance(user_input, str):
        
        history[-1][1] = "I am figuring out what data types are relevant for the plot...\n"
        yield history
        data_types_str = plot_creator.find_relevant_data_types(user_input)

        history[-1][1] += "I am now generating a script to plot the data...\n"
        yield history
        plot_creator.create_plot(user_input, data_types_str)
        
        history[-1][1] += "I am now running the script I just generated...\n"
        yield history
        response = plot_creator.run_script()

        # run_script() is assumed to return (chat entries for the plot output, the generated code text)
        history = history + [(None, "Here is the code used to generate the plot:")]
        history = history + [(None, f"{response[1]}")]
        history = history + response[0]

        yield history
    else:
        # user_input is the (file_path,) tuple added by add_file
        file_path = user_input[0]
        plot_creator.set_logfile_name(file_path)
        
        # get only base name
        filename, extension = os.path.splitext(os.path.basename(file_path))
        
        history[-1][0] = f"user uploaded file: {filename}{extension}"
        history[-1][1] = "processing file..."
        yield history

        data_types = plot_creator.parse_mavlink_log()
        history = history + [(None, "I am done processing the file. Now you can ask me to generate a plot.")]
        yield history

    return history


with gr.Blocks() as demo:
    gr.Markdown("# GPT MAVPlot\n\nThis web-based tool lets users upload MAVLink tlogs, which the chatbot uses to generate plots. It does this by writing a Python script that uses pymavlink and matplotlib. The output includes the plot and the code used to generate it.")
    chatbot = gr.Chatbot([], elem_id="chatbot").style(height=750)
    
    with gr.Row():
        with gr.Column(scale=0.85):
            txt = gr.Textbox(
                show_label=False,
                placeholder="Enter text and press enter, or upload a tlog file",
            ).style(container=False)
        with gr.Column(scale=0.15, min_width=0):
            btn = gr.UploadButton("πŸ“", file_types=["file"])
            
    # Submitting text (or uploading a file) first records the input in the chat,
    # then streams bot()'s progress updates into the Chatbot component.
    txt.submit(add_text, [chatbot, txt], [chatbot, txt]).then(
        bot, chatbot, chatbot
    )
    btn.upload(add_file, [chatbot, btn], [chatbot]).then(
        bot, chatbot, chatbot
    )

if __name__ == "__main__":
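    # The queue enables the generator-based bot() to stream intermediate updates.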
    demo.queue()
    demo.launch()