Tonic committed on
Commit
1747a23
·
1 Parent(s): 76a70ea

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -22
app.py CHANGED
@@ -109,29 +109,24 @@ def _launch_demo(args, model, tokenizer):
109
  return _chatbot
110
  chat_query = _chatbot[-1][0]
111
  if isinstance(chat_query, tuple):
112
- print("User uploaded an image.")
113
- query = {'image': chat_query[0]}
114
  else:
115
- print("User: " + _parse_text(chat_query))
116
- query = {'text': chat_query}
117
- history_cp = copy.deepcopy(task_history)
118
- full_response = ""
119
- processed_history = []
120
- for item in task_history:
121
- if isinstance(item, tuple) and len(item) == 2:
122
- processed_history.append(item)
123
- else:
124
- print("Invalid history item:", item)
125
- processed_history = process_history_for_model(task_history)
126
- response, history = model.chat(tokenizer, query=query, history=processed_history)
127
- image = tokenizer.draw_bbox_on_latest_picture(response, history)
128
- if image is not None:
129
- image_path = save_image(image)
130
  _chatbot[-1] = (chat_query, (image_path,))
131
- response = process_response(response)
132
  else:
133
  _chatbot[-1] = (chat_query, response)
134
- task_history = update_task_history(task_history, history, response)
135
  return _chatbot
136
 
137
  def regenerate(_chatbot, task_history):
@@ -257,11 +252,8 @@ including hate speech, violence, pornography, deception, etc.
257
 
258
  def main():
259
  args = _get_args()
260
-
261
  model, tokenizer = _load_model_tokenizer(args)
262
-
263
  _launch_demo(args, model, tokenizer)
264
 
265
-
266
  if __name__ == '__main__':
267
  main()
 
109
  return _chatbot
110
  chat_query = _chatbot[-1][0]
111
  if isinstance(chat_query, tuple):
112
+ query = [{'image': chat_query[0]}]
 
113
  else:
114
+ query = [{'text': _parse_text(chat_query)}]
115
+
116
+ inputs = tokenizer.from_list_format(query)
117
+ tokenized_inputs = tokenizer(inputs, return_tensors='pt')
118
+ tokenized_inputs = tokenized_inputs.to(model.device)
119
+
120
+ pred = model.generate(**tokenized_inputs)
121
+ response = tokenizer.decode(pred.cpu()[0], skip_special_tokens=False)
122
+
123
+ if 'image' in query[0]:
124
+ image = tokenizer.draw_bbox_on_latest_picture(response)
125
+ image_path = save_image(image) # Ensure you have a function to save the image
 
 
 
126
  _chatbot[-1] = (chat_query, (image_path,))
 
127
  else:
128
  _chatbot[-1] = (chat_query, response)
129
+
130
  return _chatbot
131
 
132
  def regenerate(_chatbot, task_history):
 
252
 
253
  def main():
254
  args = _get_args()
 
255
  model, tokenizer = _load_model_tokenizer(args)
 
256
  _launch_demo(args, model, tokenizer)
257
 
 
258
  if __name__ == '__main__':
259
  main()