Update app.py
app.py CHANGED
@@ -96,12 +96,25 @@ with gr.Blocks(title="Athena Playground Chat", css=css, theme=theme) as demo:
     gr.Markdown("# 🚀 Athena Playground Chat")
     gr.Markdown("*Powered by HuggingFace ZeroGPU*")
 
-    #
-    model_choice = gr.
-
-
+    # 1. Declare config components FIRST
+    model_choice = gr.Dropdown(
+        label="📱 Model",
+        choices=list(MODELS.keys()),
+        value="Athena-R3X 8B",
+        info="Select which Athena model to use"
+    )
+    max_length = gr.Slider(
+        32, 2048, value=512,
+        label="📏 Max Tokens",
+        info="Maximum number of tokens to generate"
+    )
+    temperature = gr.Slider(
+        0.1, 2.0, value=0.7,
+        label="🎨 Creativity",
+        info="Higher values = more creative responses"
+    )
 
-    #
+    # 2. Create the chat interface, passing the controls as additional_inputs
     chat_interface = gr.ChatInterface(
         fn=respond,
         additional_inputs=[model_choice, max_length, temperature],
@@ -124,26 +137,10 @@ with gr.Blocks(title="Athena Playground Chat", css=css, theme=theme) as demo:
         type="messages"
     )
 
-    #
+    # 3. Place the controls in an Accordion for display (they are still linked!)
     with gr.Accordion("Configurations", open=False):
-        model_choice = gr.Dropdown(
-            label="📱 Model",
-            choices=list(MODELS.keys()),
-            value="Athena-R3X 8B",
-            info="Select which Athena model to use"
-        )
-        max_length = gr.Slider(
-            32, 2048, value=512,
-            label="📏 Max Tokens",
-            info="Maximum number of tokens to generate"
-        )
-        temperature = gr.Slider(
-            0.1, 2.0, value=0.7,
-            label="🎨 Creativity",
-            info="Higher values = more creative responses"
-        )
-        # Link the config components to the chat interface's additional_inputs
-        chat_interface.additional_inputs = [model_choice, max_length, temperature]
+        gr.Markdown("### Change Model and Generation Settings")
+        gr.Row([model_choice, max_length, temperature])
 
 if __name__ == "__main__":
     demo.launch()
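For reference, here is a minimal, self-contained sketch of the pattern this change adopts: construct the config components first, then wire them to gr.ChatInterface through additional_inputs. The MODELS mapping, model id, and respond() stub below are placeholders for illustration, not the app's real registry or generation code.

import gradio as gr

# Placeholder registry; the real app maps display names to actual model ids.
MODELS = {"Athena-R3X 8B": "example/athena-r3x-8b"}

def respond(message, history, model_name, max_length, temperature):
    # Stub responder: echoes the message and the settings it received.
    return f"[{model_name}, max_tokens={max_length}, temp={temperature}] {message}"

with gr.Blocks(title="Athena Playground Chat") as demo:
    # 1. Declare the config components first so they exist before the chat interface.
    model_choice = gr.Dropdown(
        choices=list(MODELS.keys()), value="Athena-R3X 8B", label="Model"
    )
    max_length = gr.Slider(32, 2048, value=512, label="Max Tokens")
    temperature = gr.Slider(0.1, 2.0, value=0.7, label="Creativity")

    # 2. Pass the controls as additional_inputs; their current values are
    #    appended to every respond() call after (message, history).
    gr.ChatInterface(
        fn=respond,
        additional_inputs=[model_choice, max_length, temperature],
        type="messages",
    )

if __name__ == "__main__":
    demo.launch()

As of recent Gradio releases (an assumption, not something this diff shows), ChatInterface renders any additional_inputs that are not already rendered in its own accordion below the chat, while components the app has already laid out stay where they were declared; the declare-first ordering in this commit relies on that linking.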