seawolf2357 committed on
Commit
53356dd
·
verified ·
1 Parent(s): 5431d38

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +28 -27
app.py CHANGED
@@ -45,7 +45,7 @@ def find_most_similar_data(query):
45
 
46
  return most_similar
47
 
48
- async def respond_with_prefix(message, history, max_tokens=2048, temperature=1.0, top_p=1.0) -> Iterator[str]:
49
  global conversation_history
50
 
51
  # 대화 히스토리 업데이트
@@ -111,22 +111,23 @@ async def respond_with_prefix(message, history, max_tokens=2048, temperature=1.0
111
  messages.append({"role": "assistant", "content": f"Related Information: {similar_data}"})
112
 
113
  try:
114
- response = await asyncio.to_thread(
115
- lambda: client.chat.completions.create(
116
- model="gpt-4o-mini",
117
- messages=messages,
118
- response_format={"type": "text"},
119
- temperature=temperature,
120
- max_tokens=max_tokens,
121
- top_p=top_p,
122
- frequency_penalty=0,
123
- presence_penalty=0,
124
- stream=True
125
- )
 
126
  )
127
 
128
  partial_message = ""
129
- async for chunk in response:
130
  if hasattr(chunk.choices[0].delta, 'content') and chunk.choices[0].delta.content:
131
  content = chunk.choices[0].delta.content
132
  partial_message += content
@@ -145,19 +146,19 @@ def clear_history():
145
  conversation_history = []
146
  return None
147
 
148
- demo = gr.ChatInterface(
149
- fn=respond_with_prefix,
150
- additional_inputs=[
151
- gr.Slider(minimum=1, maximum=4096, value=2048, label="Max Tokens"),
152
- gr.Slider(minimum=0.1, maximum=2.0, value=1.0, label="Temperature"),
153
- gr.Slider(minimum=0.1, maximum=1.0, value=1.0, label="Top-P")
154
- ],
155
- theme="Nymbo/Nymbo_Theme"
156
- )
157
-
158
- # Clear 버튼 추가
159
- clear_button = gr.Button("Clear History")
160
- clear_button.click(fn=clear_history)
161
 
162
  if __name__ == "__main__":
163
  demo.queue(max_size=4).launch()
 
45
 
46
  return most_similar
47
 
48
+ async def respond_with_prefix(message, history, max_tokens=3648, temperature=1.0, top_p=1.0) -> Iterator[str]:
49
  global conversation_history
50
 
51
  # 대화 히스토리 업데이트
 
111
  messages.append({"role": "assistant", "content": f"Related Information: {similar_data}"})
112
 
113
  try:
114
+ response = client.chat.completions.create(
115
+ model="gpt-4o-mini",
116
+ messages=[
117
+ {"role": "system", "content": system_prefix},
118
+ *conversation_history
119
+ ],
120
+ response_format={"type": "text"},
121
+ temperature=temperature,
122
+ max_tokens=max_tokens,
123
+ top_p=top_p,
124
+ frequency_penalty=0,
125
+ presence_penalty=0,
126
+ stream=True
127
  )
128
 
129
  partial_message = ""
130
+ for chunk in response:
131
  if hasattr(chunk.choices[0].delta, 'content') and chunk.choices[0].delta.content:
132
  content = chunk.choices[0].delta.content
133
  partial_message += content
 
146
  conversation_history = []
147
  return None
148
 
149
+ with gr.Blocks(theme="Nymbo/Nymbo_Theme") as demo:
150
+ chatbot = gr.ChatInterface(
151
+ fn=respond_with_prefix,
152
+ additional_inputs=[
153
+ gr.Slider(minimum=1, maximum=4096, value=2048, label="Max Tokens"),
154
+ gr.Slider(minimum=0.1, maximum=2.0, value=1.0, label="Temperature"),
155
+ gr.Slider(minimum=0.1, maximum=1.0, value=1.0, label="Top-P")
156
+ ],
157
+ )
158
+
159
+ with gr.Row():
160
+ clear_button = gr.Button("Clear History")
161
+ clear_button.click(fn=clear_history, outputs=chatbot.chatbot)
162
 
163
  if __name__ == "__main__":
164
  demo.queue(max_size=4).launch()