min24ss commited on
Commit
9e7fc2a
·
verified ·
1 Parent(s): f5e3afe

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +22 -19
app.py CHANGED
@@ -26,7 +26,6 @@ df['text'] = df.apply(
26
  lambda x: f"[{x['에피소드']}] #{x['row_id']} {x['type']} {x['scene_text']}",
27
  axis=1
28
  )
29
- texts = df['text'].tolist()
30
 
31
  # ====== FAISS 안전 로드 ======
32
  embedding_model = HuggingFaceEmbeddings(model_name='jhgan/ko-sroberta-multitask')
@@ -47,7 +46,7 @@ if load_path:
47
  vectorstore = FAISS.load_local(load_path, embedding_model, allow_dangerous_deserialization=True)
48
  print(f"[INFO] FAISS 인덱스 로드 완료 → {load_path}")
49
  else:
50
- raise FileNotFoundError("FAISS index.faiss 파일을 찾을 수 없습니다. 압축 구조를 확인하세요.")
51
 
52
  # ====== 모델 로드 (CPU 전용) ======
53
  model_name = "kakaocorp/kanana-nano-2.1b-instruct"
@@ -70,17 +69,23 @@ qa_chain = RetrievalQA.from_chain_type(
70
  chain_type_kwargs={"prompt": custom_prompt}
71
  )
72
 
73
- # ====== 선택지 ======
74
  choices = [
75
- "황동석 무리를 모두 처치한다.",
76
- "진호를 포함한 황동석 무리를 모두 처치한다.",
77
- "전부 기절 시키고 살려둔다.",
78
- "시스템을 거부하고 그냥 도망친다."
79
  ]
80
 
81
- # ====== Gradio 함수 ======
82
- def run_episode(selection):
83
- user_choice = choices[int(selection) - 1]
 
 
 
 
 
 
84
  result = qa_chain({"query": user_choice})
85
  retrieved_context = "\n".join([doc.page_content for doc in result["source_documents"]])
86
 
@@ -94,15 +99,13 @@ def run_episode(selection):
94
  """
95
 
96
  response = llm_pipeline(prompt)[0]["generated_text"]
97
- return f"[성진우 응답]\n{response}"
98
-
99
- # ====== Gradio UI ======
100
- demo = gr.Interface(
101
- fn=run_episode,
102
- inputs=gr.Dropdown(choices=["1", "2", "3", "4"], label="선택 번호", type="value"),
103
- outputs="text",
104
- title="성진우 선택 시뮬레이션",
105
- description="번호를 선택하면 성진우의 응답이 생성됩니다."
106
  )
107
 
108
  if __name__ == "__main__":
 
26
  lambda x: f"[{x['에피소드']}] #{x['row_id']} {x['type']} {x['scene_text']}",
27
  axis=1
28
  )
 
29
 
30
  # ====== FAISS 안전 로드 ======
31
  embedding_model = HuggingFaceEmbeddings(model_name='jhgan/ko-sroberta-multitask')
 
46
  vectorstore = FAISS.load_local(load_path, embedding_model, allow_dangerous_deserialization=True)
47
  print(f"[INFO] FAISS 인덱스 로드 완료 → {load_path}")
48
  else:
49
+ raise FileNotFoundError("FAISS index.faiss 파일을 찾을 수 없습니다.")
50
 
51
  # ====== 모델 로드 (CPU 전용) ======
52
  model_name = "kakaocorp/kanana-nano-2.1b-instruct"
 
69
  chain_type_kwargs={"prompt": custom_prompt}
70
  )
71
 
72
+ # ====== 대화형 응답 함수 ======
73
  choices = [
74
+ "1. 황동석 무리를 모두 처치한다.",
75
+ "2. 진호를 포함한 황동석 무리를 모두 처치한다.",
76
+ "3. 전부 기절 시키고 살려둔다.",
77
+ "4. 시스템을 거부하고 그냥 도망친다."
78
  ]
79
 
80
+ def respond(message, history):
81
+ try:
82
+ sel_num = int(message.strip())
83
+ if sel_num < 1 or sel_num > len(choices):
84
+ return "❌ 올바른 번호를 입력하세요. (1~4)"
85
+ except ValueError:
86
+ return "❌ 번호를 입력하세요. (예: 1, 2, 3, 4)"
87
+
88
+ user_choice = choices[sel_num - 1]
89
  result = qa_chain({"query": user_choice})
90
  retrieved_context = "\n".join([doc.page_content for doc in result["source_documents"]])
91
 
 
99
  """
100
 
101
  response = llm_pipeline(prompt)[0]["generated_text"]
102
+ return f"[성진우]\n{response}"
103
+
104
+ # ====== Gradio ChatInterface ======
105
+ demo = gr.ChatInterface(
106
+ respond,
107
+ title="성진우 선택 시뮬레이션 (카카오톡 스타일)",
108
+ description="1~4번 중 하나를 입력하면 성진우의 응답이 대화 형식으로 나타납니다."
 
 
109
  )
110
 
111
  if __name__ == "__main__":