feat: bar
Browse files- app/app.py +39 -0
app/app.py
CHANGED
@@ -55,6 +55,39 @@ def process_file(*, file: AskFileResponse) -> List[Document]:
|
|
55 |
return docs
|
56 |
|
57 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
58 |
@cl.on_chat_start
|
59 |
async def on_chat_start():
|
60 |
"""This function is written to prepare the environments for the chat
|
@@ -77,6 +110,12 @@ async def on_chat_start():
|
|
77 |
msg = cl.Message(content=f"Processing `{file.name}`...")
|
78 |
await msg.send()
|
79 |
|
|
|
|
|
|
|
|
|
|
|
|
|
80 |
model = ChatOpenAI(
|
81 |
model="gpt-3.5-turbo-16k-0613",
|
82 |
streaming=True
|
|
|
55 |
return docs
|
56 |
|
57 |
|
58 |
+
def create_search_engine(*, file: AskFileResponse) -> VectorStore:
    """Build an in-memory Chroma search engine from an uploaded file.

    The file is split into documents via ``process_file`` and cached in the
    Chainlit user session, then embedded with OpenAI's
    ``text-embedding-ada-002`` model into an ephemeral Chroma collection.

    Args:
        file: The file the user uploaded through the Chainlit prompt.

    Returns:
        A ``Chroma`` vector store populated with the file's documents.
    """
    # Process and save data in the user session so later handlers can
    # reuse the parsed documents without re-reading the file.
    docs = process_file(file=file)
    cl.user_session.set("docs", docs)

    encoder = OpenAIEmbeddings(model="text-embedding-ada-002")

    # Initialize Chromadb client and settings, reset to ensure we get a
    # clean search engine. allow_reset must be enabled for reset() below
    # to be permitted by chromadb.
    client = chromadb.EphemeralClient()
    client_settings = Settings(
        allow_reset=True,
        anonymized_telemetry=False,
    )

    # NOTE(review): reset() is reached through a private attribute
    # (``_client``) of the langchain wrapper — confirm no public API
    # exists in the pinned langchain version before relying on this.
    search_engine = Chroma(
        client=client,
        client_settings=client_settings,
    )
    search_engine._client.reset()

    search_engine = Chroma.from_documents(
        client=client,
        documents=docs,
        embedding=encoder,
        client_settings=client_settings,
    )

    return search_engine
|
89 |
+
|
90 |
+
|
91 |
@cl.on_chat_start
|
92 |
async def on_chat_start():
|
93 |
"""This function is written to prepare the environments for the chat
|
|
|
110 |
msg = cl.Message(content=f"Processing `{file.name}`...")
|
111 |
await msg.send()
|
112 |
|
113 |
+
try:
|
114 |
+
search_engine = await cl.make_async(create_search_engine)(file=file)
|
115 |
+
except Exception as e:
|
116 |
+
await cl.Message(content=f"Error: {e}").send()
|
117 |
+
raise SystemError
|
118 |
+
|
119 |
model = ChatOpenAI(
|
120 |
model="gpt-3.5-turbo-16k-0613",
|
121 |
streaming=True
|