aiqtech committed
Commit 87dacef · verified · 1 Parent(s): 711ee65

Update app.py

Files changed (1): app.py +85 -5
app.py CHANGED
@@ -11,6 +11,8 @@ from aimakerspace.openai_utils.embedding import EmbeddingModel
 from aimakerspace.vectordatabase import VectorDatabase
 from aimakerspace.openai_utils.chatmodel import ChatOpenAI
 import chainlit as cl
+from chainlit import user_session
+from chainlit.element import Text
 
 system_template = """\
 Use the following context to answer a user's question. If you cannot find the answer in the context, say you don't know the answer."""
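For orientation, the prompt objects visible in the surrounding context are presumably rendered like this, a minimal sketch assuming aimakerspace's `SystemRolePrompt`/`UserRolePrompt` format their template via `create_message(**kwargs)` (as in the AI Makerspace course library this app builds on) and reusing the `system_template`/`user_prompt_template` strings defined above in app.py:

```python
# Sketch only: assumes SystemRolePrompt/UserRolePrompt expose create_message(),
# which fills the template placeholders and returns an OpenAI-style message.
from aimakerspace.openai_utils.prompts import SystemRolePrompt, UserRolePrompt

system_role_prompt = SystemRolePrompt(system_template)
user_role_prompt = UserRolePrompt(user_prompt_template)

# Rendered into chat messages before calling the model:
messages = [
    system_role_prompt.create_message(),
    user_role_prompt.create_message(
        question="What does the uploaded document cover?",
        context="<retrieved chunks would be interpolated here>",
    ),
]
```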
@@ -25,6 +27,67 @@ Question:
 """
 user_role_prompt = UserRolePrompt(user_prompt_template)
 
+@cl.on_chat_start
+async def init_sidebar():
+    # Style the sidebar header
+    await cl.Sidebar(
+        cl.Text(content="📁 **File Upload Section**", style="heading3"),
+        cl.FilePicker(
+            accept=[".pdf", ".txt"],
+            max_size_mb=2,
+            on_upload=handle_upload,
+            label="📤 Upload PDF/TXT",
+            description="Only files up to 2MB can be uploaded"
+        ),
+        cl.Separator(),
+        cl.Text(content="🔍 **Document Analysis Status**", style="heading4"),
+        cl.ProgressRing(id="progress", visible=False),
+        cl.Text(id="status", content="Waiting...", style="caption"),
+        title="📚 Document Query System",
+        persistent=True  # 👈 keep the sidebar pinned
+    ).send()
+
+
+async def handle_upload(file: AskFileResponse):
+    # Update the progress indicators
+    status = user_session.get("status")
+    progress = user_session.get("progress")
+
+    await status.update(content=f"🔍 Analyzing {file.name}...")
+    await progress.update(visible=True)
+
+    try:
+        # File processing logic
+        texts = process_file(file)
+
+        # Build the vector DB
+        vector_db = VectorDatabase()
+        vector_db = await vector_db.abuild_from_list(texts)
+
+        # Store it in the session
+        user_session.set("vector_db", vector_db)
+
+        # Update the status
+        await status.update(content=f"✅ Finished processing {len(texts)} chunks!")
+        await progress.update(visible=False)
+
+        # Show a summary of the uploaded file
+        await cl.Accordion(
+            title="📄 Uploaded Document Info",
+            content=[
+                cl.Text(f"Filename: {file.name}"),
+                cl.Text(f"Size: {file.size/1024:.1f}KB"),
+                cl.Text(f"Analyzed at: {datetime.now().strftime('%H:%M:%S')}")
+            ],
+            expanded=False
+        ).send()
+
+    except Exception as e:
+        await cl.Error(
+            title="File processing error",
+            content=f"{str(e)}"
+        ).send()
+
 class RetrievalAugmentedQAPipeline:
     def __init__(self, llm: ChatOpenAI, vector_db_retriever: VectorDatabase) -> None:
         self.llm = llm
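Note that `handle_upload` calls `process_file`, which this commit never defines. A hypothetical sketch of what it would need to do, assuming `aimakerspace.text_utils` provides `TextFileLoader` and `CharacterTextSplitter` (as in the course library) and that the upload exposes raw bytes via `file.content`; it must return a list of strings, since that is what `VectorDatabase.abuild_from_list` consumes. PDF input would additionally need its own loader:

```python
# Hypothetical process_file helper; referenced by handle_upload above but
# not defined anywhere in this diff.
import os
import tempfile

from aimakerspace.text_utils import CharacterTextSplitter, TextFileLoader

def process_file(file) -> list[str]:
    # Write the uploaded bytes to disk so the loader can read them.
    suffix = os.path.splitext(file.name)[1]
    with tempfile.NamedTemporaryFile(mode="wb", delete=False, suffix=suffix) as tmp:
        tmp.write(file.content)
        path = tmp.name

    # Load the raw text, then split it into chunks sized for embedding.
    documents = TextFileLoader(path).load_documents()
    return CharacterTextSplitter().split_texts(documents)
```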
@@ -130,10 +193,27 @@ async def on_chat_start():
 async def main(message):
     chain = cl.user_session.get("chain")
 
-    msg = cl.Message(content="")
-    result = await chain.arun_pipeline(message.content)
-
-    async for stream_resp in result["response"]:
-        await msg.stream_token(stream_resp)
-
-    await msg.send()
+    result = await chain.arun_pipeline(message.content)
+
+    # Improve the response styling
+    msg = cl.Message(
+        content="",
+        actions=[
+            cl.Action(name="source", value="📑 View sources"),
+            cl.Action(name="feedback", value="💬 Leave feedback")
+        ]
+    )
+
+    async for token in result["response"]:
+        await msg.stream_token(token)
+
+    # Format the final message
+    final_content = f"""
+🧠 **AI Analysis Result**
+{msg.content}
+
+📌 Referenced passages:
+{chr(10).join([f'- {ctx[0][:50]}...' for ctx in result['context']])}
+"""
+    await msg.update(content=final_content)
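The rewritten `main()` consumes `chain.arun_pipeline`, which this commit does not touch. Inferred from usage, it must return a dict whose `"response"` is an async token generator and whose `"context"` is a list of `(text, score)` tuples (the f-string above slices `ctx[0][:50]`). A sketch of that method under those assumptions, using aimakerspace's `VectorDatabase.search_by_text` and `ChatOpenAI.astream`, plus a `system_role_prompt` defined alongside `user_role_prompt`:

```python
# Sketch of RetrievalAugmentedQAPipeline.arun_pipeline, inferred from how
# main() consumes its return value; not shown in this diff.
async def arun_pipeline(self, user_query: str) -> dict:
    # Retrieve the top-k chunks most similar to the query.
    context_list = self.vector_db_retriever.search_by_text(user_query, k=4)
    context_prompt = "\n".join(text for text, _score in context_list)

    formatted_system = system_role_prompt.create_message()
    formatted_user = user_role_prompt.create_message(
        question=user_query, context=context_prompt
    )

    async def generate_response():
        # Yield tokens as the chat model streams them back.
        async for chunk in self.llm.astream([formatted_system, formatted_user]):
            yield chunk

    # main() iterates result["response"] and formats result["context"].
    return {"response": generate_response(), "context": context_list}
```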
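A caveat on the widget calls this commit introduces: `cl.Sidebar`, `cl.FilePicker`, `cl.Separator`, `cl.ProgressRing`, `cl.Accordion`, and `cl.Error` are not part of Chainlit's public API in current releases, and `AskFileResponse` and `datetime` are referenced without being imported. The documented way to gate the chat on an upload is `cl.AskFileMessage`; a sketch of the equivalent flow, reusing this commit's `process_file` and `VectorDatabase` names:

```python
# Sketch using Chainlit's documented AskFileMessage API in place of the
# sidebar widgets above; process_file and VectorDatabase come from app.py.
import chainlit as cl

@cl.on_chat_start
async def start():
    files = None
    # Block until the user uploads a TXT or PDF of at most 2 MB.
    while files is None:
        files = await cl.AskFileMessage(
            content="Upload a PDF or TXT file to query.",
            accept=["text/plain", "application/pdf"],
            max_size_mb=2,
        ).send()

    file = files[0]
    texts = process_file(file)

    # Build the vector index and stash it in the per-user session.
    vector_db = VectorDatabase()
    vector_db = await vector_db.abuild_from_list(texts)
    cl.user_session.set("vector_db", vector_db)

    await cl.Message(content=f"✅ Processed {len(texts)} chunks from {file.name}.").send()
```

Similarly, current Chainlit updates a streamed message by assigning `msg.content` and awaiting `msg.update()` with no arguments, rather than passing `content=` as the last hunk does.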