Tonic committed
Commit ed42d17 · 1 Parent(s): 16bb86c

Update app.py

Files changed (1)
  1. app.py +48 -38
app.py CHANGED
@@ -108,48 +108,33 @@ def _launch_demo(args, model, tokenizer):
         if not _chatbot:
             return _chatbot
         chat_query = _chatbot[-1][0]
-        query = task_history[-1][0]
-        print("User: " + _parse_text(query))
-        history_cp = copy.deepcopy(task_history)
-        full_response = ""
-
-        history_filter = []
-        pic_idx = 1
-        pre = ""
-        for i, (q, a) in enumerate(history_cp):
-            if isinstance(q, (tuple, list)):
-                q = f'Picture {pic_idx}: <img>{q[0]}</img>'
-                pre += q + '\n'
-                pic_idx += 1
+        print("User: " + _parse_text(chat_query))
+
+        history = process_history_for_model(task_history)
+
+        if isinstance(chat_query, tuple):  # Image input
+            message = {'image': chat_query[0]}
+        else:  # Text input
+            message = {'text': chat_query}
+
+        response, updated_history = model.chat(tokenizer, query=message, history=history)
+
+        if "<box>" in response:
+            image = tokenizer.draw_bbox_on_latest_picture(response, updated_history)
+            if image is not None:
+                image_path = save_image(image)
+                _chatbot[-1] = (_parse_text(chat_query), (image_path,))
+                response = process_response(response)
             else:
-                pre += q
-            history_filter.append((pre, a))
-            pre = ""
-        history, message = history_filter[:-1], history_filter[-1][0]
-        response, history = model.chat(tokenizer, message, history=history)
-        image = tokenizer.draw_bbox_on_latest_picture(response, history)
-        if image is not None:
-            temp_dir = secrets.token_hex(20)
-            temp_dir = Path(uploaded_file_dir) / temp_dir
-            temp_dir.mkdir(exist_ok=True, parents=True)
-            name = f"tmp{secrets.token_hex(5)}.jpg"
-            filename = temp_dir / name
-            image.save(str(filename))
-            _chatbot[-1] = (_parse_text(chat_query), (str(filename),))
-            chat_response = response.replace("<ref>", "")
-            chat_response = chat_response.replace(r"</ref>", "")
-            chat_response = re.sub(BOX_TAG_PATTERN, "", chat_response)
-            if chat_response != "":
-                _chatbot.append((None, chat_response))
+                _chatbot[-1] = (_parse_text(chat_query), response)
         else:
             _chatbot[-1] = (_parse_text(chat_query), response)
-            full_response = _parse_text(response)
 
-        task_history[-1] = (query, full_response)
-        print("Qwen-VL-Chat: " + _parse_text(full_response))
-        task_history = task_history[-10:]
+        task_history = update_task_history(task_history, updated_history, response)
+
         return _chatbot
 
+
     def regenerate(_chatbot, task_history):
         if not task_history:
             return _chatbot
@@ -182,16 +167,41 @@ def _launch_demo(args, model, tokenizer):
 
     def reset_user_input():
         return gr.update(value="")
+
+    def process_response(response):
+        response = response.replace("<ref>", "").replace(r"</ref>", "")
+        response = re.sub(BOX_TAG_PATTERN, "", response)
+        return response
+    def process_history_for_model(task_history):
+        processed_history = []
+        for query, response in task_history:
+            if isinstance(query, tuple):
+                processed_history.append({'image': query[0]})
+            else:
+                processed_history.append({'text': query})
+            if response:
+                processed_history.append({'text': response})
+        return processed_history
 
     def reset_state(task_history):
         task_history.clear()
         return []
+
+    def save_image(image):
+        temp_dir = secrets.token_hex(20)
+        temp_dir = Path(uploaded_file_dir) / temp_dir
+        temp_dir.mkdir(exist_ok=True, parents=True)
+        name = f"tmp{secrets.token_hex(5)}.jpg"
+        filename = temp_dir / name
+        image.save(str(filename))
+        return str(filename)
+
 
     with gr.Blocks() as demo:
         gr.Markdown("""# Welcome to Tonic's Qwen-VL-Chat Bot""")
         gr.Markdown(
-            """ This WebUI is based on Qwen-VL-Chat, developed by Alibaba Cloud.
-(本WebUI基于Qwen-VL-Chat打造,实现聊天机器人功能。)""")
+            """ Qwen-VL-Chat is a multimodal input model.
+(本WebUI基于Qwen-VL-Chat打造,实现聊天机器人功能 但我必须修复它这么多也许我也得到一些荣誉?)""")
     with gr.Row():
         with gr.Column(scale=1):
             chatbot = gr.Chatbot(label='Qwen-VL-Chat')
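
The refactor above routes the chat history through the new process_history_for_model helper before calling model.chat. A standalone sketch of what that helper produces, using a made-up task_history (the file path and prompts are hypothetical, not from the app):

# Illustration of the process_history_for_model helper added in this commit.
# The sample task_history below is hypothetical.
def process_history_for_model(task_history):
    processed_history = []
    for query, response in task_history:
        if isinstance(query, tuple):
            processed_history.append({'image': query[0]})
        else:
            processed_history.append({'text': query})
        if response:
            processed_history.append({'text': response})
    return processed_history

task_history = [
    (("/tmp/example.jpg",), "I see a cat."),    # image turn: query is a (path,) tuple
    ("What color is it?", "It looks orange."),  # text turn
]

print(process_history_for_model(task_history))
# [{'image': '/tmp/example.jpg'}, {'text': 'I see a cat.'},
#  {'text': 'What color is it?'}, {'text': 'It looks orange.'}]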
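
The new predict path also calls update_task_history, which is not defined in the hunks shown here. Judging from the inline logic it replaces (storing the latest exchange and trimming task_history to the last 10 turns), a plausible sketch could look like the following; the actual helper in app.py may differ:

# Hypothetical sketch of update_task_history, not part of this diff.
# It mirrors the removed inline logic: record the latest response against the
# latest query and keep only the last 10 turns.
def update_task_history(task_history, updated_history, response):
    if task_history:
        task_history[-1] = (task_history[-1][0], response)
    return task_history[-10:]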