HanLee committed
Commit 6c98e1c · 1 Parent(s): 99b7cde

feat: 02_05e

Files changed (2):
  1. README.md +1 -1
  2. app/app.py +22 -0
README.md CHANGED
@@ -35,4 +35,4 @@ chainlit run app/app.py -w
 
  - [Langchain PDF Loaders](https://python.langchain.com/docs/modules/data_connection/document_loaders/pdf)
  - [Langchain Text Splitters](https://python.langchain.com/docs/modules/data_connection/document_transformers/#text-splitters)
- - [Chainlit's documentation](https://docs.chainlit.io/get-started/pure-python)
+ - [Chainlit Ask File Message](https://docs.chainlit.io/api-reference/ask/ask-for-file)
app/app.py CHANGED
@@ -72,6 +72,28 @@ def process_file(*, file: AskFileResponse) -> List[Document]:
 
  @cl.on_chat_start
  async def on_chat_start():
+     ######################################################################
+     # Exercise 1c:
+     # At the start of our Chat with PDF app, we will first ask users to
+     # upload the PDF file they want to ask questions against.
+     #
+     # Please use Chainlit's AskFileMessage and get the file from users.
+     # Note for this course, we only want to deal with one single file.
+     ######################################################################
+     files = None
+     while files is None:
+         files = await cl.AskFileMessage(
+             content="Please Upload the PDF file you want to chat with...",
+             accept=["application/pdf"],
+             max_size_mb=20,
+         ).send()
+     file = files[0]
+
+     # Send message to user to let them know we are processing the file
+     msg = cl.Message(content=f"Processing `{file.name}`...")
+     await msg.send()
+     ######################################################################
+
      model = ChatOpenAI(
          model="gpt-3.5-turbo-1106",
          streaming=True
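
Note: the `process_file(*, file: AskFileResponse) -> List[Document]` helper named in the hunk header is defined outside this diff. Purely as a hedged sketch (not the repository's actual implementation), such a helper could combine the Langchain PDF loader and text splitter linked from the README; the import paths, chunk sizes, and the assumption that `AskFileResponse` exposes `path` and `name` attributes are assumptions here.

```python
# Hypothetical sketch only; not the implementation from this repository.
# Assumptions: AskFileResponse exposes `path` and `name`, and the Langchain
# PDF loader / text splitter linked from the README are installed.
from typing import List

from chainlit.types import AskFileResponse
from langchain.document_loaders import PyPDFLoader
from langchain.schema import Document
from langchain.text_splitter import RecursiveCharacterTextSplitter


def process_file(*, file: AskFileResponse) -> List[Document]:
    """Load the uploaded PDF and split it into chunks for retrieval."""
    loader = PyPDFLoader(file.path)  # assumes the upload is saved to a temp path
    documents = loader.load()

    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    docs = splitter.split_documents(documents)

    # Tag each chunk with the original file name so answers can cite it.
    for doc in docs:
        doc.metadata["source"] = file.name

    return docs
```

From `on_chat_start`, such a helper would be called as `process_file(file=file)` right after the progress message is sent, matching the keyword-only signature shown in the hunk header.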