import gradio as gr

# Custom CSS for gradient background and styling
custom_css = """
.gradio-container {
    background: linear-gradient(135deg, #ffecd2 0%, #fcb69f 25%, #fbc2eb 50%, #a6c1ee 75%, #c2e9fb 100%);
    background-size: 400% 400%;
    animation: gradient-animation 15s ease infinite;
    min-height: 100vh;
}

@keyframes gradient-animation {
    0% { background-position: 0% 50%; }
    50% { background-position: 100% 50%; }
    100% { background-position: 0% 50%; }
}

.dark .gradio-container {
    background: linear-gradient(135deg, #2a2a3e 0%, #3a3a5e 25%, #4a4a6e 50%, #5a5a7e 75%, #6a6a8e 100%);
    background-size: 400% 400%;
    animation: gradient-animation 15s ease infinite;
}

/* Style for content areas */
.main-container {
    background-color: rgba(255, 255, 255, 0.92);
    backdrop-filter: blur(10px);
    border-radius: 20px;
    padding: 20px;
    box-shadow: 0 4px 20px 0 rgba(31, 38, 135, 0.15);
    border: 1px solid rgba(255, 255, 255, 0.3);
    margin: 10px;
}

.dark .main-container {
    background-color: rgba(40, 40, 40, 0.95);
    border: 1px solid rgba(255, 255, 255, 0.1);
}
"""
def chat_fn(message, history, model_choice):
    """Placeholder chat function"""
    return f"Response from {model_choice}: {message}"
with gr.Blocks(fill_height=True, theme="soft", css=custom_css) as demo:
    # State variable to track current model
    current_model = gr.State("openai/gpt-oss-120b")

    with gr.Row():
        # Sidebar
        with gr.Column(scale=1):
            with gr.Group(elem_classes="main-container"):
gr.Markdown("# ๐ Inference Provider") | |
gr.Markdown( | |
"This Space showcases OpenAI GPT-OSS models, served by the Cerebras API. " | |
"Sign in with your Hugging Face account to use this API." | |
) | |
                # Model selection
                model_dropdown = gr.Dropdown(
                    choices=[
                        "openai/gpt-oss-120b",
                        "openai/gpt-oss-20b",
                    ],
                    value="openai/gpt-oss-120b",
                    label="🔍 Select Model",
                    info="Choose between different model sizes"
                )

                # Login button
                login_button = gr.LoginButton("Sign in with Hugging Face", size="lg")
                # Additional options
                with gr.Accordion("⚙️ Advanced Options", open=False):
                    gr.Markdown("*These options will be available after model implementation*")
                    temperature = gr.Slider(
                        minimum=0,
                        maximum=2,
                        value=0.7,
                        step=0.1,
                        label="Temperature"
                    )
                    max_tokens = gr.Slider(
                        minimum=1,
                        maximum=4096,
                        value=512,
                        step=1,
                        label="Max Tokens"
                    )
        # Main chat area
        with gr.Column(scale=3):
            with gr.Group(elem_classes="main-container"):
                gr.Markdown("## 💬 Chat Interface")

                # Display current model
                model_display = gr.Markdown("### Model: openai/gpt-oss-120b")

                # Single chat interface that works with both models
                chatbot = gr.Chatbot(
                    height=400,
                    show_label=False,
                    elem_classes="main-container"
                )

                with gr.Row():
                    msg = gr.Textbox(
                        placeholder="Type your message here...",
                        show_label=False,
                        scale=9
                    )
                    submit_btn = gr.Button("Send", variant="primary", scale=1)

                clear_btn = gr.Button("🗑️ Clear Chat", variant="secondary")
    # Update model display when dropdown changes
    def update_model_display(model_choice):
        return f"### Model: {model_choice}", model_choice

    model_dropdown.change(
        fn=update_model_display,
        inputs=[model_dropdown],
        outputs=[model_display, current_model]
    )
    # Chat functionality
    def respond(message, chat_history, model):
        if not message:
            return "", chat_history

        # Add user message to history
        chat_history = chat_history + [[message, None]]

        # Generate bot response (placeholder - replace with actual model call)
        bot_response = (
            f"[{model.split('/')[-1]}]: This is a placeholder response. "
            f"In production, this would connect to {model} via the Cerebras API."
        )

        # Update the last message with bot response
        chat_history[-1][1] = bot_response
        return "", chat_history
    # Submit message on button click or enter
    submit_btn.click(
        fn=respond,
        inputs=[msg, chatbot, current_model],
        outputs=[msg, chatbot]
    )
    msg.submit(
        fn=respond,
        inputs=[msg, chatbot, current_model],
        outputs=[msg, chatbot]
    )

    # Clear chat
    clear_btn.click(
        fn=lambda: (None, ""),
        inputs=[],
        outputs=[chatbot, msg]
    )
    # Show info message on login
    login_button.click(
        fn=lambda: gr.Info("Please authenticate with Hugging Face to access the models"),
        inputs=[],
        outputs=[]
    )

demo.launch()
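
For reference, the following is a minimal sketch of how the placeholder respond() above could be replaced with a real model call through Hugging Face Inference Providers. It assumes a recent huggingface_hub release that accepts a provider argument, a Space with OAuth enabled so Gradio injects a gr.OAuthToken, and "cerebras" as the provider name; none of these details come from the Space itself, so treat respond_with_provider and its parameters as illustrative rather than the actual implementation.

from huggingface_hub import InferenceClient
import gradio as gr

def respond_with_provider(message, chat_history, model, oauth_token: gr.OAuthToken | None = None):
    """Illustrative replacement for respond(); assumes huggingface_hub with
    Inference Providers support and a Space configured for Hugging Face OAuth."""
    if not message:
        return "", chat_history

    # Prefer the signed-in user's token (injected by Gradio on OAuth-enabled
    # Spaces); fall back to the default environment credentials otherwise.
    token = oauth_token.token if oauth_token is not None else None
    client = InferenceClient(provider="cerebras", api_key=token)

    # Route the chat turn through the provider's chat-completion endpoint.
    completion = client.chat_completion(
        [{"role": "user", "content": message}],
        model=model,          # e.g. "openai/gpt-oss-120b"
        max_tokens=512,
        temperature=0.7,
    )
    bot_response = completion.choices[0].message.content

    chat_history = chat_history + [[message, bot_response]]
    return "", chat_history

Swapping fn=respond for fn=respond_with_provider in the submit_btn.click and msg.submit handlers would be enough to wire this in, since Gradio fills the oauth_token parameter automatically on OAuth-enabled Spaces.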