Update app.py
app.py
CHANGED
@@ -45,7 +45,7 @@ def find_most_similar_data(query):
 
     return most_similar
 
-async def respond_with_prefix(message, history, max_tokens=
+async def respond_with_prefix(message, history, max_tokens=3648, temperature=1.0, top_p=1.0) -> Iterator[str]:
     global conversation_history
 
     # Update conversation history
@@ -111,22 +111,23 @@ async def respond_with_prefix(message, history, max_tokens=2048, temperature=1.0
     messages.append({"role": "assistant", "content": f"Related Information: {similar_data}"})
 
     try:
-        response =
-
-
-
-
-
-
-
-
-
-
-
+        response = client.chat.completions.create(
+            model="gpt-4o-mini",
+            messages=[
+                {"role": "system", "content": system_prefix},
+                *conversation_history
+            ],
+            response_format={"type": "text"},
+            temperature=temperature,
+            max_tokens=max_tokens,
+            top_p=top_p,
+            frequency_penalty=0,
+            presence_penalty=0,
+            stream=True
         )
 
         partial_message = ""
-
+        for chunk in response:
             if hasattr(chunk.choices[0].delta, 'content') and chunk.choices[0].delta.content:
                 content = chunk.choices[0].delta.content
                 partial_message += content
@@ -145,19 +146,19 @@ def clear_history():
     conversation_history = []
     return None
 
-
-
-
-
-
-
-
-
-    )
-
-
-    clear_button = gr.Button("Clear History")
-    clear_button.click(fn=clear_history)
+with gr.Blocks(theme="Nymbo/Nymbo_Theme") as demo:
+    chatbot = gr.ChatInterface(
+        fn=respond_with_prefix,
+        additional_inputs=[
+            gr.Slider(minimum=1, maximum=4096, value=2048, label="Max Tokens"),
+            gr.Slider(minimum=0.1, maximum=2.0, value=1.0, label="Temperature"),
+            gr.Slider(minimum=0.1, maximum=1.0, value=1.0, label="Top-P")
+        ],
+    )
+
+    with gr.Row():
+        clear_button = gr.Button("Clear History")
+        clear_button.click(fn=clear_history, outputs=chatbot.chatbot)
 
 if __name__ == "__main__":
     demo.queue(max_size=4).launch()
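For context on the block added in the second hunk: with stream=True, the OpenAI Python SDK's client.chat.completions.create(...) returns an iterator of chunks, and each chunk's choices[0].delta.content carries the next piece of generated text. A minimal synchronous sketch of that pattern follows; the client construction, system_prefix, and conversation_history here stand in for definitions that live elsewhere in app.py and are assumptions, not part of this diff.

from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment
system_prefix = "You are a helpful assistant."  # placeholder for the app's real system prompt
conversation_history = []

def respond_with_prefix(message, history, max_tokens=2048, temperature=1.0, top_p=1.0):
    conversation_history.append({"role": "user", "content": message})
    response = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "system", "content": system_prefix}, *conversation_history],
        temperature=temperature,
        max_tokens=max_tokens,
        top_p=top_p,
        stream=True,  # return an iterator of incremental chunks instead of one full reply
    )
    partial_message = ""
    for chunk in response:
        # skip chunks that carry no content delta (e.g. the final stop chunk)
        if chunk.choices and chunk.choices[0].delta.content:
            partial_message += chunk.choices[0].delta.content
            yield partial_message  # Gradio redraws the growing reply on each yield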
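On the Gradio side, the sliders passed via additional_inputs are forwarded positionally after (message, history), so they line up with the handler's max_tokens, temperature, and top_p parameters. Wiring the button as clear_button.click(fn=clear_history, outputs=chatbot.chatbot) targets the Chatbot component inside the ChatInterface; clear_history resets the module-level conversation_history and returns None, which is presumably intended to blank the visible chat as well.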