peinan committed on
Commit
41f3f30
·
1 Parent(s): 90065a6

use streaming for chat

Browse files
Files changed (1) hide show
  1. src/pdfchat/app.py +23 -8
src/pdfchat/app.py CHANGED
@@ -1,3 +1,4 @@
 
1
  from dataclasses import dataclass
2
  from pathlib import Path
3
 
@@ -36,6 +37,9 @@ class ChatHistory:
36
  def add_chat(self, chat: Chat):
37
  self.history.append(chat)
38
 
 
 
 
39
 
40
  def open_file(file_path: str) -> str:
41
  file_path = Path(file_path)
@@ -49,17 +53,28 @@ def open_file(file_path: str) -> str:
49
  return text
50
 
51
 
 
 
 
 
 
 
 
 
 
 
52
  def bot(history: ChatHistory, query: str, file_path: str) -> ChatHistory:
53
  history = ChatHistory(history)
54
- if not file_path:
55
- history.add_chat(Chat(query=query, response=None))
56
- return history
57
- document = open_file(file_path)
58
- history.add_chat(Chat(query=query, response=document))
59
  logger.info(history)
60
 
61
- # TODO: use streaming inference
62
- return history
 
 
 
63
 
64
 
65
  with gr.Blocks() as app:
@@ -122,4 +137,4 @@ with gr.Blocks() as app:
122
  fn=lambda model_name, document: None,
123
  )
124
 
125
- app.queue().launch(debug=True)
 
1
+ import time
2
  from dataclasses import dataclass
3
  from pathlib import Path
4
 
 
37
def add_chat(self, chat: Chat):
    """Append *chat* as the newest turn in the conversation history."""
    self.history.append(chat)
39
 
40
def clear_last_response(self):
    """Wipe the response text of the most recent chat entry.

    Used by the streaming loop in ``bot`` so the response can be
    re-appended one character at a time.
    """
    latest = self.history[-1]
    latest.response = ""
42
+
43
 
44
  def open_file(file_path: str) -> str:
45
  file_path = Path(file_path)
 
53
  return text
54
 
55
 
56
+ def get_response(query: str, document: str | None) -> str:
57
+ response = ""
58
+ if not document:
59
+ response = "No document is uploaded. Please upload a document."
60
+ else:
61
+ response = f"Your document: {document}"
62
+
63
+ return response
64
+
65
+
66
def bot(history: ChatHistory, query: str, file_path: str) -> ChatHistory:
    """Append the response for *query* to *history*, streaming it back.

    This is a generator (despite the annotation naming the yielded
    element type): it yields *history* after each appended character so
    the Gradio UI re-renders the response as it "types" out.

    Args:
        history: Prior chat turns; re-wrapped in a fresh ChatHistory.
        query: The user's question.
        file_path: Path of the uploaded document; falsy when none.
    """
    history = ChatHistory(history)
    document = open_file(file_path) if file_path else None
    response = get_response(query, document)
    history.add_chat(Chat(query=query, response=response))
    logger.info(history)

    # Stream: blank out the stored response, then re-append it one
    # character at a time, yielding after each step.
    history.clear_last_response()
    for char in response:
        # Index the underlying list, as clear_last_response() does —
        # ChatHistory is not shown to implement __getitem__, so
        # history[-1] would raise TypeError at runtime.
        history.history[-1].response += char
        time.sleep(0.02)
        yield history
78
 
79
 
80
  with gr.Blocks() as app:
 
137
  fn=lambda model_name, document: None,
138
  )
139
 
140
+ app.queue().launch()