ericjohnson97 committed
Commit b8270b1 · 1 Parent(s): 3b45de9
Files changed (3)
  1. llm/gptPlotCreator.py +27 -13
  2. llm_plot.py +27 -7
  3. template.env +2 -0
llm/gptPlotCreator.py CHANGED
@@ -19,13 +19,19 @@ from PIL import Image
 
 
 class PlotCreator:
+
+    last_code = ""
+
     def __init__(self):
         load_dotenv()
-        llm = ChatOpenAI(model_name="gpt-3.5-turbo", max_tokens=2000, temperature=0)
+        self.model = os.getenv("OPENAI_MODEL")
+        # llm = ChatOpenAI(model_name="gpt-3.5-turbo", max_tokens=2000, temperature=0)
+        llm = ChatOpenAI(model_name=self.model, max_tokens=2000, temperature=0)
+
 
         mavlink_data_prompt = PromptTemplate(
-            input_variables=["human_input", "file"],
-            template="You are an AI conversation agent that will be used for generating python scripts to plot mavlink data provided by the user. Please create a python script using matplotlib and pymavlink's mavutil to plot the data provided by the user. Please do not explain the code just return the script. Please plot each independent variable over time in seconds. Please save the plot to file named plot.png with at least 400 dpi. \n\nHUMAN: {human_input} \n\nplease read this data from the file {file}.",
+            input_variables=["history", "human_input", "file"],
+            template="You are an AI conversation agent that will be used for generating python scripts to plot mavlink data provided by the user. Please create a python script using matplotlib and pymavlink's mavutil to plot the data provided by the user. Please do not explain the code just return the script. Please plot each independent variable over time in seconds. Please save the plot to file named plot.png in the same directory as plot.py with at least 400 dpi. Also be careful not to write a script that gets stuck in an endless loop.\n\nChat History:\n{history} \n\nHUMAN: {human_input} \n\nplease read this data from the file {file}.",
         )
         self.chain = LLMChain(verbose=True, llm=llm, prompt=mavlink_data_prompt)
 
@@ -55,13 +61,13 @@ class PlotCreator:
         with open(filename, 'w') as file:
             file.write(text)
 
-    @staticmethod
-    def attempt_to_fix_sctript(filename, error_message):
-        llm = ChatOpenAI(model_name="gpt-3.5-turbo", max_tokens=2000, temperature=0)
+    def attempt_to_fix_sctript(self, filename, error_message):
+        # llm = ChatOpenAI(model_name="gpt-3.5-turbo", max_tokens=2000, temperature=0)
+        llm = ChatOpenAI(model_name=self.model , max_tokens=2000, temperature=0)
 
         fix_plot_script_template = PromptTemplate(
             input_variables=["error", "script"],
-            template="You are an AI agent that is designed to debug scripts created to plot mavlink data using matplotlib and pymavlink's mavutil. the following script produced this error: \n\n{script}\n\nThe error is: \n\n{error}\n\nPlease fix the script so that it produces the correct plot.",
+            template="You are an AI agent that is designed to debug scripts created to plot mavlink data using matplotlib and pymavlink's mavutil. the following script produced this error: \n\n{script}\n\nThe error is: \n\n{error}\n\nPlease fix the script so that it produces the correct plot. please return the fixed script in a markdown code block.",
         )
 
         # read script from file
@@ -75,16 +81,22 @@ class PlotCreator:
         PlotCreator.write_plot_script("plot.py", code[0])
 
         # run the script
-        os.system("python plot.py")
+        try:
+            subprocess.check_output(["python", "plot.py"], stderr=subprocess.STDOUT)
+        except:
+            code[0] = "Sorry I was unable to fix the script.\nThis is my attempt to fix it:\n\n" + code[0]
         return code
 
-    def create_plot(self, human_input):
-        file = "data/2023-01-04 20-51-25.tlog"
+    def set_logfile_name(self, filename):
+        self.logfile_name = filename
+
+    def create_plot(self, human_input, history):
 
-        # prompt the user for the what plot they would like to generate
-        # human_input = input("Please enter a description of the plot you would like to generate: ")
+        if self.last_code != "":
+            history = history + "\n\nLast script generated:\n\n" + self.last_code
 
-        response = self.chain.run({"file": file, "human_input": human_input})
+
+        response = self.chain.run({"history" : history, "file": self.logfile_name, "human_input": human_input})
         print(response)
 
         # parse the code from the response
@@ -101,4 +113,6 @@ class PlotCreator:
             print(e)
             code = self.attempt_to_fix_sctript("plot.py", str(e))
 
+        self.last_code = code[0]
+
         return [("plot.png", None), code[0]]
llm_plot.py CHANGED
@@ -1,4 +1,5 @@
 import gradio as gr
+import os
 from llm.gptPlotCreator import PlotCreator
 
 plot_creator = PlotCreator()
@@ -11,21 +12,40 @@ def add_file(history, file):
     history = history + [((file.name,), None)]
     return history
 
+def format_history(history):
+    return "\n".join([f"Human: {entry[0]}\nAI: {entry[1]}" for entry in history ])
+
 def bot(history):
     # Get the last input from the user
-    user_input = history[-1][0]
-
+    user_input = history[-1][0] if history and history[-1][0] else None
+
+    print(user_input)
+
     # Check if it is a string
     if isinstance(user_input, str):
         # Generate the plot
-        response = plot_creator.create_plot(user_input)
-    else:
-        response = "**That's cool!**"
 
-    history[-1][1] = response[0]
-    history = history + [(None, f"Here is the code used to generate the plot:\n```\n{response[1]}```")]
+        print(history)
+
+        history_str = format_history(history)
+        response = plot_creator.create_plot(user_input, history_str)
+        print(response)
+        history[-1][1] = response[0]
+        history = history + [(None, f"Here is the code used to generate the plot:")]
+        history = history + [(None, f"{response[1]}")]
+    else:
+        file_path = user_input[0]
+        plot_creator.set_logfile_name(file_path)
+
+        # get only base name
+        filename, extension = os.path.splitext(os.path.basename(file_path))
+
+        history[-1][0] = f"user uploaded file: {filename}{extension}"
+        history[-1][1] = "I will be using the file you uploaded to generate the plot. Please describe the plot you would like to generate."
+
     return history
 
+
 with gr.Blocks() as demo:
     gr.Markdown("# GPT MAVPlot\n\nThis web-based tool allows users to upload mavlink tlogs in which the chat bot will use to generate plots from. It does this by creating a python script using pymavlink and matplotlib. The output includes the plot and the code used to generate it. ")
     chatbot = gr.Chatbot([], elem_id="chatbot").style(height=750)
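For reference, the new `format_history` helper flattens the Gradio chat history (a list of `(user, bot)` pairs) into the plain-text transcript passed to `create_plot`. A quick sketch of its behavior on a made-up history (the sample entries are illustrative only):

```python
def format_history(history):
    return "\n".join([f"Human: {entry[0]}\nAI: {entry[1]}" for entry in history])

sample_history = [
    ("user uploaded file: flight.tlog",
     "I will be using the file you uploaded to generate the plot. "
     "Please describe the plot you would like to generate."),
    ("plot the altitude over time", None),
]

print(format_history(sample_history))
# Human: user uploaded file: flight.tlog
# AI: I will be using the file you uploaded to generate the plot. Please describe the plot you would like to generate.
# Human: plot the altitude over time
# AI: None    <- the still-pending bot slot renders as "None" in the transcript
```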
template.env ADDED
@@ -0,0 +1,2 @@
+OPENAI_API_KEY=
+OPENAI_MODEL=gpt-3.5-turbo
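The two entries in `template.env` are read at startup via `python-dotenv` (`load_dotenv()` followed by `os.getenv("OPENAI_MODEL")` in `PlotCreator.__init__`). A minimal sketch of that loading pattern, assuming `template.env` has been copied to `.env` next to the code (the fallback model shown here is illustrative, not part of the commit):

```python
import os
from dotenv import load_dotenv

# Load key=value pairs from a local .env file into the process environment.
load_dotenv()

api_key = os.getenv("OPENAI_API_KEY")
model = os.getenv("OPENAI_MODEL", "gpt-3.5-turbo")  # illustrative fallback

if not api_key:
    raise RuntimeError("OPENAI_API_KEY is empty; copy template.env to .env and fill it in.")

print(f"Using OpenAI model: {model}")
```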