Update app.py
app.py CHANGED
@@ -135,7 +135,7 @@ def bot(history,message):
 def bot_response(message):
     """
     Generates a response from the LLM model.
-
+    max_new_tokens, temperature and top_p are set to 512, 0.6 and 0.9 respectively.
     """
     messages = [
         {"role": "system", "content": "You are a helpful AI assistant."},
@@ -219,10 +219,9 @@ with gr.Blocks(
         components=[chatbot],
         value="Clear chat",
     )
-
-    chat_msg = chat_input.change(add_message, [chatbot, chat_input], [chatbot, chat_input])
+    chat_msg = chat_input.change(add_message, [chatbot, chat_input], [chatbot, chat_input])
     bot_msg = chat_msg.then(bot, [chatbot, chat_input], chatbot, api_name="bot_response")
-    bot_msg.then(lambda: gr.Textbox(interactive=False), None, [chat_input])
+    # bot_msg.then(lambda: gr.Textbox(interactive=False), None, [chat_input])
 
     submit_button.click(
         fn=transcribe,
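
For the second hunk: in Gradio Blocks, an event listener such as .change() returns a dependency object whose .then() chains the next step, so bot only runs after add_message has appended the user turn, and the commented-out .then() would have swapped the input for a non-interactive textbox afterwards. A stripped-down, runnable sketch of that wiring follows; the handlers, the layout, and the assumption that transcribe writes into chat_input are illustrative, not the app's actual code.

# Illustrative sketch of the event chain from this commit; the handlers and
# layout are simplified stand-ins, not the real app.py.
import gradio as gr

def transcribe(text):
    # Stand-in for the app's transcribe(); it just forwards the text here.
    return text

def add_message(history, text):
    # Ignore the follow-up change event fired when the box is cleared below.
    if not text:
        return history, ""
    return history + [{"role": "user", "content": text}], ""

def bot(history, message):
    # Stand-in for the LLM call; only reply when the last turn is a user turn.
    if not history or history[-1]["role"] != "user":
        return history
    return history + [{"role": "assistant", "content": "Echo: " + history[-1]["content"]}]

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type="messages", value=[])
    source = gr.Textbox(label="Text to hand to transcribe()")
    chat_input = gr.Textbox(label="Transcribed message")
    submit_button = gr.Button("Submit")

    # Writing into chat_input fires .change(); .then() runs bot() only after
    # add_message() has appended the user turn to the chat history.
    chat_msg = chat_input.change(add_message, [chatbot, chat_input], [chatbot, chat_input])
    bot_msg = chat_msg.then(bot, [chatbot, chat_input], chatbot, api_name="bot_response")
    # The commit comments out a final .then() that replaced chat_input with a
    # non-interactive textbox after each reply.

    submit_button.click(fn=transcribe, inputs=source, outputs=chat_input)

demo.launch()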