Update app.py
app.py CHANGED
@@ -1565,8 +1565,8 @@ def create_chat_demo_rag(title=None, description=None):
 
     # with gr.Blocks(title="RAG") as rag_demo:
     additional_inputs = [
-
-        gr.Textbox(value=None, label='Document path', lines=1, interactive=False),
+        gr.File(label='Upload Document', file_count='single', file_types=['pdf', 'docx', 'txt', 'json']),
+        # gr.Textbox(value=None, label='Document path', lines=1, interactive=False),
         gr.Number(value=temperature, label='Temperature (higher -> more random)'),
         gr.Number(value=max_tokens, label='Max generated tokens (increase if want more generation)'),
         # gr.Number(value=frequence_penalty, label='Frequency penalty (> 0 encourage new tokens over repeated tokens)'),
@@ -1575,7 +1575,6 @@ def create_chat_demo_rag(title=None, description=None):
         gr.Number(value=0, label='current_time', visible=False),
     ]
 
-
     demo_rag_chat = gr.ChatInterface(
         chat_response_stream_rag_multiturn,
         chatbot=gr.Chatbot(
@@ -1598,9 +1597,9 @@ def create_chat_demo_rag(title=None, description=None):
         # examples=CHAT_EXAMPLES,
         cache_examples=False
     )
-    with demo_rag_chat:
-        upload_button = gr.UploadButton("Click to Upload document", file_types=['pdf', 'docx', 'txt', 'json'], file_count="single")
-        upload_button.upload(upload_file, upload_button, additional_inputs[0])
+    # with demo_rag_chat:
+    #     upload_button = gr.UploadButton("Click to Upload document", file_types=['pdf', 'docx', 'txt', 'json'], file_count="single")
+    #     upload_button.upload(upload_file, upload_button, additional_inputs[0])
 
     # return demo_chat
     return demo_rag_chat
@@ -1714,13 +1713,13 @@ def launch_demo():
         interface_list=[
             demo_chat,
             demo_chat_rag,
-            demo_free_form
+            demo_free_form,
             # demo_file_upload,
         ],
         tab_names=[
             "Chat Interface",
-            "RAG Chat Interface"
-            "Text completion"
+            "RAG Chat Interface",
+            "Text completion",
             # "Batch Inference",
         ],
         title=f"{model_title}",
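
For reference, the change swaps the old gr.UploadButton wiring (now commented out) for a gr.File component passed through additional_inputs of gr.ChatInterface, so the uploaded document reaches the chat callback directly. Below is a minimal, self-contained sketch of that pattern, assuming Gradio 4.x and a hypothetical echo_rag_response stand-in for the app's chat_response_stream_rag_multiturn; values and labels are illustrative only.

import gradio as gr

# Hypothetical stand-in for the app's chat_response_stream_rag_multiturn.
def echo_rag_response(message, history, doc_path, temperature, max_tokens):
    # With file_count='single' (Gradio 4.x defaults), gr.File passes the uploaded
    # file's path as a string, or None when no document has been uploaded.
    doc_note = f"document: {doc_path}" if doc_path else "no document uploaded"
    yield f"({doc_note}, temperature={temperature}, max_tokens={max_tokens}) {message}"

demo = gr.ChatInterface(
    echo_rag_response,
    additional_inputs=[
        gr.File(label='Upload Document', file_count='single',
                file_types=['pdf', 'docx', 'txt', 'json']),
        gr.Number(value=0.7, label='Temperature (higher -> more random)'),
        gr.Number(value=1024, label='Max generated tokens'),
    ],
)

if __name__ == "__main__":
    demo.launch()

The callback receives the additional inputs in the order they are listed, after message and history; by default they render in an accordion beneath the chat box. The trailing commas added in the final hunk also matter: without them, Python concatenates the adjacent string literals "RAG Chat Interface" "Text completion" into a single tab name, leaving tab_names one entry short of interface_list.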