ffreemt committed on
Commit
7e17420
·
1 Parent(s): 3c0a531
Files changed (1) hide show
  1. app.py +8 -6
app.py CHANGED
@@ -162,9 +162,11 @@ except Exception:
162
  class DequeCallbackHandler(BaseCallbackHandler):
163
  """Mediate gradio and stream output."""
164
 
165
- def __init__(self, deq: deque = deque()):
166
  """Init deque for FIFO, may need to upgrade to queue.Queue or queue.SimpleQueue."""
167
- self.q = deq
 
 
168
 
169
  def on_llm_start(
170
  self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
@@ -308,8 +310,8 @@ def bot(history):
308
  yield history
309
 
310
 
311
- def predict_api(prompt):
312
- logger.debug(f"{prompt=}")
313
  try:
314
  # user_prompt = prompt
315
  Config(
@@ -330,12 +332,12 @@ def predict_api(prompt):
330
  config=config,
331
  )
332
  # """
333
- conversation = ConversationChain(
334
  llm=LLM,
335
  prompt=prompt,
336
  verbose=True,
337
  )
338
- response = conversation.predict(prompt)
339
  logger.debug(f"api: {response=}")
340
  except Exception as exc:
341
  logger.error(exc)
 
162
  class DequeCallbackHandler(BaseCallbackHandler):
163
  """Mediate gradio and stream output."""
164
 
165
+ def __init__(self, deq_: deque):
166
  """Init deque for FIFO, may need to upgrade to queue.Queue or queue.SimpleQueue."""
167
+ self.q = deq_
168
+
169
+ # def on_chat_model_start(self): self.q.clear()
170
 
171
  def on_llm_start(
172
  self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
 
310
  yield history
311
 
312
 
313
+ def predict_api(user_prompt):
314
+ logger.debug(f"{user_prompt=}")
315
  try:
316
  # user_prompt = prompt
317
  Config(
 
332
  config=config,
333
  )
334
  # """
335
+ conversation1 = ConversationChain(
336
  llm=LLM,
337
  prompt=prompt,
338
  verbose=True,
339
  )
340
+ response = conversation1.predict(user_prompt)
341
  logger.debug(f"api: {response=}")
342
  except Exception as exc:
343
  logger.error(exc)