seawolf2357 committed on
Commit
c4359cc
·
verified ·
1 Parent(s): b0f79c1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +31 -24
app.py CHANGED
@@ -118,34 +118,41 @@ async def generate_response(message):
118
  {"role": "user", "content": f"์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_input}\n\n๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ:\n{search_results}"}
119
  ]
120
 
121
- # ์ด์ „ ๋Œ€ํ™” ํžˆ์Šคํ† ๋ฆฌ ์ถ”๊ฐ€
122
- messages.extend(conversation_history)
 
 
 
123
 
124
  logging.debug(f'Messages to be sent to the model: {messages}')
125
 
126
  loop = asyncio.get_event_loop()
127
- response = await loop.run_in_executor(None, lambda: hf_client.chat_completion(
128
- messages, max_tokens=1000, stream=True, temperature=0.7, top_p=0.85))
129
-
130
- full_response = []
131
- for part in response:
132
- logging.debug(f'Part received from stream: {part}')
133
- if part.choices and part.choices[0].delta and part.choices[0].delta.content:
134
- full_response.append(part.choices[0].delta.content)
135
-
136
- full_response_text = ''.join(full_response)
137
- logging.debug(f'Full model response: {full_response_text}')
138
-
139
- # ๋Œ€ํ™” ํžˆ์Šคํ† ๋ฆฌ ์—…๋ฐ์ดํŠธ
140
- conversation_history.append({"role": "user", "content": user_input})
141
- conversation_history.append({"role": "assistant", "content": full_response_text})
142
-
143
- # ๋Œ€ํ™” ํžˆ์Šคํ† ๋ฆฌ ๊ธธ์ด ์ œํ•œ (์˜ˆ: ์ตœ๊ทผ 10๊ฐœ ๋ฉ”์‹œ์ง€๋งŒ ์œ ์ง€)
144
- conversation_history = conversation_history[-10:]
145
-
146
- logging.debug(f'Conversation history updated: {conversation_history}')
147
-
148
- return f"{user_mention}, {full_response_text}"
 
 
 
 
149
 
150
  if __name__ == "__main__":
151
  # Discord ํด๋ผ์ด์–ธํŠธ ์‹คํ–‰
 
118
  {"role": "user", "content": f"์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_input}\n\n๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ:\n{search_results}"}
119
  ]
120
 
121
+ # ์ด์ „ ๋Œ€ํ™” ํžˆ์Šคํ† ๋ฆฌ ์ถ”๊ฐ€ (์ตœ๋Œ€ 5๊ฐœ๊นŒ์ง€๋งŒ)
122
+ for i in range(min(len(conversation_history), 10), 0, -2):
123
+ messages.append(conversation_history[-i])
124
+ if i > 1:
125
+ messages.append(conversation_history[-i+1])
126
 
127
  logging.debug(f'Messages to be sent to the model: {messages}')
128
 
129
  loop = asyncio.get_event_loop()
130
+ try:
131
+ response = await loop.run_in_executor(None, lambda: hf_client.chat_completion(
132
+ messages, max_tokens=1000, stream=True, temperature=0.7, top_p=0.85))
133
+
134
+ full_response = []
135
+ for part in response:
136
+ logging.debug(f'Part received from stream: {part}')
137
+ if part.choices and part.choices[0].delta and part.choices[0].delta.content:
138
+ full_response.append(part.choices[0].delta.content)
139
+
140
+ full_response_text = ''.join(full_response)
141
+ logging.debug(f'Full model response: {full_response_text}')
142
+
143
+ # ๋Œ€ํ™” ํžˆ์Šคํ† ๋ฆฌ ์—…๋ฐ์ดํŠธ
144
+ conversation_history.append({"role": "user", "content": user_input})
145
+ conversation_history.append({"role": "assistant", "content": full_response_text})
146
+
147
+ # ๋Œ€ํ™” ํžˆ์Šคํ† ๋ฆฌ ๊ธธ์ด ์ œํ•œ (์ตœ๊ทผ 10๊ฐœ ๋ฉ”์‹œ์ง€๋งŒ ์œ ์ง€)
148
+ conversation_history = conversation_history[-10:]
149
+
150
+ logging.debug(f'Conversation history updated: {conversation_history}')
151
+
152
+ return f"{user_mention}, {full_response_text}"
153
+ except Exception as e:
154
+ logging.error(f"Error in generate_response: {e}")
155
+ return f"{user_mention}, ์ฃ„์†กํ•ฉ๋‹ˆ๋‹ค. ์‘๋‹ต์„ ์ƒ์„ฑํ•˜๋Š” ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค. ๋‹ค์‹œ ์‹œ๋„ํ•ด ์ฃผ์„ธ์š”."
156
 
157
  if __name__ == "__main__":
158
  # Discord ํด๋ผ์ด์–ธํŠธ ์‹คํ–‰