Update app.py
app.py CHANGED
@@ -10,6 +10,26 @@ client = AsyncOpenAI(
 
 assistantID = "asst_7xyER9PDcv13UJ22U2zz4x1z"
 
+mytitle = "<h1 align=center>Wat war lass am Land 🇱🇺 an op der Welt 🌎 ?</h1>"
+
+mydescription="""
+<h3 align='center'>Wat fir een Thema interesséiert Iech : 🐶 🏃🏻‍♂️ 🌗 🍇 🌈 🍽️ 🏆 🚘 ✈️ 🩺 </h3>
+<table width=100%>
+<tr>
+<th width=50% bgcolor="Moccasin">Stell deng Froen op Lëtzebuergesch, oder an enger anerer Sprooch :</th>
+<th bgcolor="Khaki">Äntwert vum OpenAI File-Search Assistent : </th>
+</tr>
+</table>
+"""
+
+myarticle ="""
+<h3>Hannergrënn :</h3>
+<p>Hannergrënn : Dës HuggingFace Space Demo gouf vum <a href="https://github.com/mbarnig">Marco Barnig</a> realiséiert. Als kënstlech Intelligenz gëtt, mëttels API, den <a href="https://platform.openai.com/docs/models">OpenAI Modell</a> gpt-4o-mini-2024-07-18 benotzt, deen als Kontext bis 128.000 Tokens ka benotzen, eng Äntwert op eng Fro vu maximal 16.384 Tokens ka ginn a bis zu 200.000 Tokens pro Minutt <a href="https://platform.openai.com/settings/organization/limits">(TPM)</a> ka beaarbechten. Fir dës Demo gëtt nëmmen eng News-JSON-Datei mat enger Gréisst vun 30 MB benotzt. Et ass méiglech bis zu 10.000 Dateien op en OpenAI Assistent opzelueden.</p>
+"""
+
+myinput = gr.Textbox(lines=3, label="Wat wëllt Der wëssen ?")
+
+
 class EventHandler(AsyncAssistantEventHandler):
     def __init__(self) -> None:
         super().__init__()
@@ -98,11 +118,13 @@ async def gradio_chat_interface(user_input):
 # Set up Gradio interface with streaming
 interface = gr.Interface(
     fn=gradio_chat_interface,
-    inputs=
+    inputs=myinput,
     outputs="markdown",
-    title=
-    description=
-
+    title=mytitle,
+    description=mydescription,
+    article=myarticle,
+    live=False,
+    allow_flagging="never"
 )
 
 # Launch the Gradio app
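For context, here is a minimal, self-contained sketch of how the added gr.Interface keyword arguments fit together. The answer() function and the shortened HTML strings are placeholders standing in for the real gradio_chat_interface coroutine and the Luxembourgish title, description and article defined in the diff; this is an illustration, not the Space's actual code.

```python
# Minimal sketch (assumptions: gradio installed; a placeholder fn instead of the
# OpenAI-backed gradio_chat_interface; HTML strings shortened for brevity).
import gradio as gr

mytitle = "<h1 align=center>Demo title</h1>"            # placeholder title HTML
mydescription = "<h3 align='center'>Description</h3>"   # placeholder description HTML
myarticle = "<p>Background text shown below the interface.</p>"
myinput = gr.Textbox(lines=3, label="Your question")    # 3-line input box, as in the diff

def answer(user_input: str) -> str:
    # Placeholder for the streaming OpenAI call in app.py.
    return f"You asked: {user_input}"

interface = gr.Interface(
    fn=answer,
    inputs=myinput,             # custom Textbox instead of the default input
    outputs="markdown",         # render the assistant's answer as Markdown
    title=mytitle,              # HTML rendered above the interface
    description=mydescription,  # HTML rendered under the title
    article=myarticle,          # HTML rendered below the interface
    live=False,                 # run only on submit, not on every keystroke
    allow_flagging="never",     # hide the flag button (newer Gradio uses flagging_mode)
)

if __name__ == "__main__":
    interface.launch()
```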
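The unchanged context lines (class EventHandler(AsyncAssistantEventHandler): ...) belong to the OpenAI Assistants streaming code that produces the Markdown answer. As a hedged illustration of that pattern only, the sketch below assumes OPENAI_API_KEY is set and uses a placeholder assistant ID; the handler body and the ask() helper are assumptions, not the Space's actual implementation.

```python
# Sketch of the Assistants streaming pattern referenced by the diff's context lines.
import asyncio
from openai import AsyncOpenAI, AsyncAssistantEventHandler
from typing_extensions import override

client = AsyncOpenAI()
assistant_id = "asst_..."  # placeholder; the real ID is hard-coded in app.py

class EventHandler(AsyncAssistantEventHandler):
    def __init__(self) -> None:
        super().__init__()
        self.parts: list[str] = []

    @override
    async def on_text_delta(self, delta, snapshot) -> None:
        # Called for every streamed chunk of the assistant's answer.
        self.parts.append(delta.value or "")

async def ask(question: str) -> str:
    # Create a thread, post the user's question, then stream the run through the handler.
    thread = await client.beta.threads.create()
    await client.beta.threads.messages.create(
        thread_id=thread.id, role="user", content=question
    )
    handler = EventHandler()
    async with client.beta.threads.runs.stream(
        thread_id=thread.id, assistant_id=assistant_id, event_handler=handler
    ) as stream:
        await stream.until_done()
    return "".join(handler.parts)

if __name__ == "__main__":
    print(asyncio.run(ask("Wat war lass?")))
```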