seawolf2357 committed on
Commit
2c3555b
·
verified ·
1 Parent(s): 1350f16

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +28 -27
app.py CHANGED
@@ -45,7 +45,7 @@ def find_most_similar_data(query):
45
 
46
  return most_similar
47
 
48
- async def respond_with_prefix(message, history, max_tokens=2048, temperature=1.0, top_p=1.0) -> Iterator[str]:
49
  global conversation_history
50
 
51
  # 대화 히스토리 업데이트
@@ -110,22 +110,23 @@ async def respond_with_prefix(message, history, max_tokens=2048, temperature=1.0
110
  messages.append({"role": "assistant", "content": f"Related Information: {similar_data}"})
111
 
112
  try:
113
- response = await asyncio.to_thread(
114
- lambda: client.chat.completions.create(
115
- model="gpt-4o-mini",
116
- messages=messages,
117
- response_format={"type": "text"},
118
- temperature=temperature,
119
- max_tokens=max_tokens,
120
- top_p=top_p,
121
- frequency_penalty=0,
122
- presence_penalty=0,
123
- stream=True
124
- )
 
125
  )
126
 
127
  partial_message = ""
128
- async for chunk in response:
129
  if hasattr(chunk.choices[0].delta, 'content') and chunk.choices[0].delta.content:
130
  content = chunk.choices[0].delta.content
131
  partial_message += content
@@ -144,19 +145,19 @@ def clear_history():
144
  conversation_history = []
145
  return None
146
 
147
- demo = gr.ChatInterface(
148
- fn=respond_with_prefix,
149
- additional_inputs=[
150
- gr.Slider(minimum=1, maximum=4096, value=2048, label="Max Tokens"),
151
- gr.Slider(minimum=0.1, maximum=2.0, value=1.0, label="Temperature"),
152
- gr.Slider(minimum=0.1, maximum=1.0, value=1.0, label="Top-P")
153
- ],
154
- theme="Nymbo/Nymbo_Theme"
155
- )
156
-
157
- # Clear 버튼 추가
158
- clear_button = gr.Button("Clear History")
159
- clear_button.click(fn=clear_history)
160
 
161
  if __name__ == "__main__":
162
  demo.queue(max_size=4).launch()
 
45
 
46
  return most_similar
47
 
48
+ async def respond_with_prefix(message, history, max_tokens=3648, temperature=1.0, top_p=1.0) -> Iterator[str]:
49
  global conversation_history
50
 
51
  # 대화 히스토리 업데이트
 
110
  messages.append({"role": "assistant", "content": f"Related Information: {similar_data}"})
111
 
112
  try:
113
+ response = client.chat.completions.create(
114
+ model="gpt-4o-mini",
115
+ messages=[
116
+ {"role": "system", "content": system_prefix},
117
+ *conversation_history
118
+ ],
119
+ response_format={"type": "text"},
120
+ temperature=temperature,
121
+ max_tokens=max_tokens,
122
+ top_p=top_p,
123
+ frequency_penalty=0,
124
+ presence_penalty=0,
125
+ stream=True
126
  )
127
 
128
  partial_message = ""
129
+ for chunk in response:
130
  if hasattr(chunk.choices[0].delta, 'content') and chunk.choices[0].delta.content:
131
  content = chunk.choices[0].delta.content
132
  partial_message += content
 
145
  conversation_history = []
146
  return None
147
 
148
+ with gr.Blocks(theme="Nymbo/Nymbo_Theme") as demo:
149
+ chatbot = gr.ChatInterface(
150
+ fn=respond_with_prefix,
151
+ additional_inputs=[
152
+ gr.Slider(minimum=1, maximum=4096, value=2048, label="Max Tokens"),
153
+ gr.Slider(minimum=0.1, maximum=2.0, value=1.0, label="Temperature"),
154
+ gr.Slider(minimum=0.1, maximum=1.0, value=1.0, label="Top-P")
155
+ ],
156
+ )
157
+
158
+ with gr.Row():
159
+ clear_button = gr.Button("Clear History")
160
+ clear_button.click(fn=clear_history, outputs=chatbot.chatbot)
161
 
162
  if __name__ == "__main__":
163
  demo.queue(max_size=4).launch()