Update app.py
app.py CHANGED
@@ -11,6 +11,10 @@ from aimakerspace.openai_utils.embedding import EmbeddingModel
 from aimakerspace.vectordatabase import VectorDatabase
 from aimakerspace.openai_utils.chatmodel import ChatOpenAI
 import chainlit as cl
+from chainlit import user_session
+from chainlit.element import Text
+from chainlit.types import AskFileResponse  # needed by handle_upload below
+from datetime import datetime  # used in the upload summary below
 
 system_template = """\
 Use the following context to answer a user's question. If you cannot find the answer in the context, say you don't know the answer."""
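
The new `user_session` import is Chainlit's per-session key-value store; the upload handler added below uses it to hand the vector database over to the chat handler. A minimal usage sketch (the "vector_db" key matches the diff):

# Store per-session state in one handler...
user_session.set("vector_db", vector_db)

# ...and read it back in a later handler for the same session
vector_db = user_session.get("vector_db")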
@@ -25,6 +27,67 @@ Question:
 """
 user_role_prompt = UserRolePrompt(user_prompt_template)
 
+@cl.on_chat_start
+async def init_sidebar():
+    # Style the sidebar header
+    await cl.Sidebar(
+        cl.Text(content="📁 **File upload section**", style="heading3"),
+        cl.FilePicker(
+            accept=[".pdf", ".txt"],
+            max_size_mb=2,
+            on_upload=handle_upload,
+            label="📤 Upload PDF/TXT",
+            description="Only files up to 2MB can be uploaded"
+        ),
+        cl.Separator(),
+        cl.Text(content="📊 **Document analysis status**", style="heading4"),
+        cl.ProgressRing(id="progress", visible=False),
+        cl.Text(id="status", content="Waiting...", style="caption"),
+        title="📚 Document Q&A System",
+        persistent=True  # 📌 keep the sidebar pinned
+    ).send()
+
+
+async def handle_upload(file: AskFileResponse):
+    # Update the progress indicators
+    status = user_session.get("status")
+    progress = user_session.get("progress")
+
+    await status.update(content=f"🔍 Analyzing {file.name}...")
+    await progress.update(visible=True)
+
+    try:
+        # File processing logic
+        texts = process_file(file)
+
+        # Build the vector database
+        vector_db = VectorDatabase()
+        vector_db = await vector_db.abuild_from_list(texts)
+
+        # Store it in the user session
+        user_session.set("vector_db", vector_db)
+
+        # Update the status
+        await status.update(content=f"✅ {len(texts)} chunks processed!")
+        await progress.update(visible=False)
+
+        # Show a summary of the uploaded file
+        await cl.Accordion(
+            title="📄 Uploaded document info",
+            content=[
+                cl.Text(f"File name: {file.name}"),
+                cl.Text(f"Size: {file.size/1024:.1f}KB"),
+                cl.Text(f"Analyzed at: {datetime.now().strftime('%H:%M:%S')}")
+            ],
+            expanded=False
+        ).send()
+
+    except Exception as e:
+        await cl.Error(
+            title="File processing error",
+            content=f"{str(e)}"
+        ).send()
+
 class RetrievalAugmentedQAPipeline:
     def __init__(self, llm: ChatOpenAI, vector_db_retriever: VectorDatabase) -> None:
         self.llm = llm
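
Note: `handle_upload` calls `process_file`, which is not defined anywhere in this diff. A minimal sketch of such a helper, assuming the project's `aimakerspace.text_utils` loaders (`TextFileLoader`, `CharacterTextSplitter`) and assuming Chainlit's `AskFileResponse` exposes the uploaded bytes as `.content` (both assumptions):

import os
import tempfile

from aimakerspace.text_utils import CharacterTextSplitter, TextFileLoader


def process_file(file: AskFileResponse) -> list[str]:
    # Persist the upload so the loader can read it from disk
    suffix = os.path.splitext(file.name)[1]
    with tempfile.NamedTemporaryFile(suffix=suffix, delete=False) as tmp:
        tmp.write(file.content)
        temp_path = tmp.name

    # Load the document and split it into character chunks for embedding
    documents = TextFileLoader(temp_path).load_documents()
    splitter = CharacterTextSplitter()
    return splitter.split_texts(documents)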
@@ -130,10 +193,28 @@ async def on_chat_start():
 async def main(message):
     chain = cl.user_session.get("chain")
 
-    msg = cl.Message(content="")
-    result = await chain.arun_pipeline(message.content)
 
-    async for stream_resp in result["response"]:
-        await msg.stream_token(stream_resp)
 
-
+    result = await chain.arun_pipeline(message.content)
+
+    # Style the response
+    msg = cl.Message(
+        content="",
+        actions=[
+            cl.Action(name="source", value="📋 View sources"),
+            cl.Action(name="feedback", value="💬 Leave feedback")
+        ]
+    )
+
+    async for token in result["response"]:
+        await msg.stream_token(token, is_final=False)
+
+    # Format the final message
+    final_content = f"""
+🧠 **AI analysis result**
+{msg.content}
+
+📌 Referenced passages:
+{chr(10).join([f'- {ctx[0][:50]}...' for ctx in result['context']])}
+"""
+    await msg.update(content=final_content)
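
For reference, `main` assumes `arun_pipeline` returns a dict with a streaming "response" generator and the retrieved "context" pairs; the class body sits between the hunks above. A sketch of that contract, assuming `system_role_prompt = SystemRolePrompt(system_template)` is defined alongside the templates, that `VectorDatabase.search_by_text(query, k)` returns (text, score) tuples, and that the aimakerspace `ChatOpenAI` exposes an `astream` generator (all assumptions):

    async def arun_pipeline(self, user_query: str) -> dict:
        # Retrieve the most similar chunks as (text, score) pairs
        context_list = self.vector_db_retriever.search_by_text(user_query, k=4)
        context_prompt = "\n".join(text for text, _score in context_list)

        formatted_system_prompt = system_role_prompt.create_message()
        formatted_user_prompt = user_role_prompt.create_message(
            question=user_query, context=context_prompt
        )

        async def generate_response():
            # Yield tokens as the chat model streams them back
            async for chunk in self.llm.astream(
                [formatted_system_prompt, formatted_user_prompt]
            ):
                yield chunk

        return {"response": generate_response(), "context": context_list}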