ericjohnson97 committed on
Commit
1adc43a
·
1 Parent(s): 3fd3bcd

added check to make sure that users uploaded a log before trying to parse it

Browse files
Files changed (2) hide show
  1. app.py +5 -4
  2. llm/gptPlotCreator.py +4 -1
app.py CHANGED
@@ -2,15 +2,15 @@ import gradio as gr
2
  import os
3
  from llm.gptPlotCreator import PlotCreator
4
 
5
- plot_creator = PlotCreator()
6
 
7
def add_text(history, text, plot_creator):
    """Append a new user message (with no bot reply yet) to the chat history.

    Returns the updated history, the plot_creator state unchanged, and an
    empty string used to clear the input textbox.
    """
    return history + [(text, None)], plot_creator, ""
10
 
11
  def add_file(history, file, plot_creator):
12
- print(type(plot_creator))
13
  history = history + [((file.name,), None)]
 
14
  return history, plot_creator
15
 
16
  def format_history(history):
@@ -24,7 +24,7 @@ def bot(history, plot_creator):
24
  print(type(plot_creator))
25
 
26
  # Check if it is a string
27
- if isinstance(user_input, str):
28
 
29
  history[-1][1] = "I am figuring out what data types are relevant for the plot...\n"
30
  yield history, plot_creator
@@ -44,6 +44,8 @@ def bot(history, plot_creator):
44
 
45
 
46
  yield history, plot_creator
 
 
47
  else:
48
  plot_creator = PlotCreator() # access the state variable through `.value`
49
  file_path = user_input[0]
@@ -80,7 +82,6 @@ with gr.Blocks() as demo:
80
  with gr.Column(scale=0.15, min_width=0):
81
  btn = gr.UploadButton("πŸ“", file_types=["file"])
82
 
83
- var = "test"
84
 
85
  txt.submit(add_text, [chatbot, txt, plot_creator], [chatbot, plot_creator, txt]).then(
86
  bot, [chatbot, plot_creator], [chatbot, plot_creator]
 
2
  import os
3
  from llm.gptPlotCreator import PlotCreator
4
 
 
5
 
6
def add_text(history, text, plot_creator):
    """Add the submitted text as a pending chat turn and clear the textbox."""
    pending_turn = (text, None)
    updated_history = history + [pending_turn]
    return updated_history, plot_creator, ""
9
 
10
  def add_file(history, file, plot_creator):
11
+ print(file.name)
12
  history = history + [((file.name,), None)]
13
+ plot_creator.set_logfile_name(file.name)
14
  return history, plot_creator
15
 
16
  def format_history(history):
 
24
  print(type(plot_creator))
25
 
26
  # Check if it is a string
27
+ if isinstance(user_input, str) and plot_creator.logfile_name != "":
28
 
29
  history[-1][1] = "I am figuring out what data types are relevant for the plot...\n"
30
  yield history, plot_creator
 
44
 
45
 
46
  yield history, plot_creator
47
+ elif not plot_creator.logfile_name:
48
+ yield history + [(None, "Please upload a log file before attempting to create a plot.")], plot_creator
49
  else:
50
  plot_creator = PlotCreator() # access the state variable through `.value`
51
  file_path = user_input[0]
 
82
  with gr.Column(scale=0.15, min_width=0):
83
  btn = gr.UploadButton("πŸ“", file_types=["file"])
84
 
 
85
 
86
  txt.submit(add_text, [chatbot, txt, plot_creator], [chatbot, plot_creator, txt]).then(
87
  bot, [chatbot, plot_creator], [chatbot, plot_creator]
llm/gptPlotCreator.py CHANGED
@@ -29,6 +29,9 @@ class PlotCreator:
29
  """
30
 
31
  last_code = "" # stores the last code generated
 
 
 
32
 
33
  def __init__(self):
34
  """
@@ -45,7 +48,7 @@ class PlotCreator:
45
  # define the input variables and template for the prompt to generate Python scripts
46
  mavlink_data_prompt = PromptTemplate(
47
  input_variables=["data_types", "history", "human_input", "file", "output_file"],
48
- template="You are an AI conversation agent that will be used for generating python scripts to plot mavlink data provided by the user. Please create a python script using matplotlib and pymavlink's mavutil to plot the data provided by the user. Please do not explain the code just return the script. Please plot each independent variable over time in seconds. Please save the plot to file named {output_file} with at least 400 dpi. please use blocking=false in your call to recv_match and be sure to break the loop if a msg in None. here are the relevant data types in the log:\n\n{data_types} \n\nChat History:\n{history} \n\nHUMAN: {human_input} \n\nplease read this data from the file {file}.",
49
  )
50
 
51
  # create an instance of LLMChain with the defined prompt and verbosity
 
29
  """
30
 
31
  last_code = "" # stores the last code generated
32
+ logfile_name = ""
33
+ script_path = ""
34
+ plot_path = ""
35
 
36
  def __init__(self):
37
  """
 
48
  # define the input variables and template for the prompt to generate Python scripts
49
  mavlink_data_prompt = PromptTemplate(
50
  input_variables=["data_types", "history", "human_input", "file", "output_file"],
51
+ template="You are an AI conversation agent that will be used for generating python scripts to plot mavlink data provided by the user. Please create a python script using matplotlib and pymavlink's mavutil to plot the data provided by the user. Please do not explain the code just return the script. Please plot each independent variable over time in seconds. Please save the plot to file named {output_file} with at least 400 dpi and do not call plt.show(). please use blocking=false in your call to recv_match and be sure to break the loop if a msg in None. here are the relevant data types in the log:\n\n{data_types} \n\nChat History:\n{history} \n\nHUMAN: {human_input} \n\nplease read this data from the file {file}.",
52
  )
53
 
54
  # create an instance of LLMChain with the defined prompt and verbosity