Update app.py
app.py CHANGED

@@ -70,7 +70,7 @@ def bot_streaming(message, history, max_new_tokens=2048):
     yield buffer
 
 
-demo = gr.ChatInterface(fn=bot_streaming, title="Multimodal Llama", examples=[
+demo = gr.ChatInterface(fn=bot_streaming, title="Document Analyzer", examples=[
       [{"text": "Which era does this piece belong to? Give details about the era.", "files":["./examples/rococo.jpg"]},
        200],
       [{"text": "Where do the droughts happen according to this diagram?", "files":["./examples/weather_events.png"]},
@@ -92,7 +92,7 @@ demo = gr.ChatInterface(fn=bot_streaming, title="Multimodal Llama", examples=[
        )
        ],
     cache_examples=False,
-    description="
+    description="MllM ",
     stop_btn="Stop Generation",
     fill_height=True,
     multimodal=True)
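
For context, a minimal sketch of how the updated `gr.ChatInterface` block reads after this commit. The `bot_streaming` stub, the `additional_inputs` slider, and the second example's token value are illustrative assumptions; the example rows and settings hidden between the two hunks (lines 77-91 of app.py) are omitted, and only the values visible in the diff are taken as-is.

```python
import gradio as gr


def bot_streaming(message, history, max_new_tokens=2048):
    # Stub with the signature shown in the hunk header; the real handler
    # defined earlier in app.py streams partial buffers via `yield buffer`.
    yield "..."


demo = gr.ChatInterface(
    fn=bot_streaming,
    title="Document Analyzer",          # new title introduced by this change
    examples=[
        # Each row pairs a multimodal message with a max_new_tokens value.
        [{"text": "Which era does this piece belong to? Give details about the era.",
          "files": ["./examples/rococo.jpg"]},
         200],
        [{"text": "Where do the droughts happen according to this diagram?",
          "files": ["./examples/weather_events.png"]},
         200],                          # placeholder value; not visible in the diff
        # ...further example rows (diff lines 77-91) omitted...
    ],
    additional_inputs=[                 # assumed slider feeding max_new_tokens
        gr.Slider(minimum=10, maximum=2048, value=512, label="Max new tokens"),
    ],
    cache_examples=False,
    description="MllM ",                # new description string from this change
    stop_btn="Stop Generation",
    fill_height=True,
    multimodal=True,
)

if __name__ == "__main__":
    demo.launch()
```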