Tonic committed on
Commit
522f297
·
1 Parent(s): 21a3eb8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +20 -12
app.py CHANGED
@@ -39,12 +39,15 @@ def handle_image_submission(_chatbot, task_history, file):
39
  print("handle_image_submission called")
40
  if file is None:
41
  print("No file uploaded")
42
- return _chatbot, task_history
43
- file_path = save_image(file, uploaded_file_dir)
 
 
44
  history_item = ((file_path,), None)
45
  _chatbot.append(history_item)
46
  task_history.append(history_item)
47
  return predict(_chatbot, task_history)
 
48
 
49
  def _load_model_tokenizer(args):
50
  model_id = args.checkpoint_path
@@ -104,15 +107,15 @@ def _parse_text(text):
104
  return text
105
 
106
  def save_image(image_file, upload_dir):
 
107
  Path(upload_dir).mkdir(parents=True, exist_ok=True)
108
- if image_file is not None:
109
- filename = secrets.token_hex(10) + Path(image_file.name).suffix
110
- file_path = Path(upload_dir) / filename
111
- with open(file_path, "wb") as f:
112
- f.write(image_file.read())
113
- return str(file_path)
114
- else:
115
- return None
116
 
117
  def add_file(history, task_history, file):
118
  if file is None:
@@ -126,20 +129,26 @@ def _launch_demo(args, model, tokenizer):
126
  uploaded_file_dir = os.environ.get("GRADIO_TEMP_DIR") or str(
127
  Path(tempfile.gettempdir()) / "gradio"
128
  )
129
-
130
  def predict(_chatbot, task_history):
 
131
  if not _chatbot:
132
  return _chatbot
133
  chat_query = _chatbot[-1][0]
 
 
134
  if isinstance(chat_query, tuple):
135
  query = [{'image': chat_query[0]}]
136
  else:
137
  query = [{'text': _parse_text(chat_query)}]
 
 
138
  inputs = tokenizer.from_list_format(query)
139
  tokenized_inputs = tokenizer(inputs, return_tensors='pt')
140
  tokenized_inputs = tokenized_inputs.to(model.device)
 
141
  pred = model.generate(**tokenized_inputs)
142
  response = tokenizer.decode(pred.cpu()[0], skip_special_tokens=False)
 
143
  if 'image' in query[0]:
144
  image = tokenizer.draw_bbox_on_latest_picture(response)
145
  if image is not None:
@@ -151,7 +160,6 @@ def _launch_demo(args, model, tokenizer):
151
  _chatbot[-1] = (chat_query, response)
152
  return _chatbot
153
 
154
-
155
  def save_uploaded_image(image_file, upload_dir):
156
  if image is None:
157
  return None
 
39
  print("handle_image_submission called")
40
  if file is None:
41
  print("No file uploaded")
42
+ return _chatbot, task_history
43
+ print("File received:", file)
44
+ file_path = save_image(file, uploaded_file_dir)
45
+ print("File saved at:", file_path)
46
  history_item = ((file_path,), None)
47
  _chatbot.append(history_item)
48
  task_history.append(history_item)
49
  return predict(_chatbot, task_history)
50
+
51
 
52
  def _load_model_tokenizer(args):
53
  model_id = args.checkpoint_path
 
107
  return text
108
 
109
  def save_image(image_file, upload_dir):
110
+ print("save_image called with:", image_file)
111
  Path(upload_dir).mkdir(parents=True, exist_ok=True)
112
+ filename = secrets.token_hex(10) + Path(image_file.name).suffix
113
+ file_path = Path(upload_dir) / filename
114
+ print("Saving to:", file_path)
115
+ with open(image_file.name, "rb") as f_input, open(file_path, "wb") as f_output:
116
+ f_output.write(f_input.read())
117
+ return str(file_path)
118
+
 
119
 
120
  def add_file(history, task_history, file):
121
  if file is None:
 
129
  uploaded_file_dir = os.environ.get("GRADIO_TEMP_DIR") or str(
130
  Path(tempfile.gettempdir()) / "gradio"
131
  )
 
132
  def predict(_chatbot, task_history):
133
+ print("predict called")
134
  if not _chatbot:
135
  return _chatbot
136
  chat_query = _chatbot[-1][0]
137
+ print("Chat query:", chat_query)
138
+
139
  if isinstance(chat_query, tuple):
140
  query = [{'image': chat_query[0]}]
141
  else:
142
  query = [{'text': _parse_text(chat_query)}]
143
+
144
+ print("Query for model:", query)
145
  inputs = tokenizer.from_list_format(query)
146
  tokenized_inputs = tokenizer(inputs, return_tensors='pt')
147
  tokenized_inputs = tokenized_inputs.to(model.device)
148
+
149
  pred = model.generate(**tokenized_inputs)
150
  response = tokenizer.decode(pred.cpu()[0], skip_special_tokens=False)
151
+ print("Model response:", response)
152
  if 'image' in query[0]:
153
  image = tokenizer.draw_bbox_on_latest_picture(response)
154
  if image is not None:
 
160
  _chatbot[-1] = (chat_query, response)
161
  return _chatbot
162
 
 
163
  def save_uploaded_image(image_file, upload_dir):
164
  if image is None:
165
  return None