# OpenAI-gpt-oss / app.py
import gradio as gr
# Custom CSS for gradient background and styling
custom_css = """
.gradio-container {
    background: linear-gradient(135deg, #667eea 0%, #764ba2 25%, #f093fb 50%, #4facfe 75%, #00f2fe 100%);
    background-size: 400% 400%;
    animation: gradient-animation 15s ease infinite;
    min-height: 100vh;
}

@keyframes gradient-animation {
    0% { background-position: 0% 50%; }
    50% { background-position: 100% 50%; }
    100% { background-position: 0% 50%; }
}

.dark .gradio-container {
    background: linear-gradient(135deg, #1a1a2e 0%, #16213e 25%, #0f3460 50%, #533483 75%, #e94560 100%);
    background-size: 400% 400%;
    animation: gradient-animation 15s ease infinite;
}

/* Style for the main content area */
.main-container {
    background-color: rgba(255, 255, 255, 0.95);
    backdrop-filter: blur(10px);
    border-radius: 20px;
    padding: 20px;
    box-shadow: 0 8px 32px 0 rgba(31, 38, 135, 0.37);
    border: 1px solid rgba(255, 255, 255, 0.18);
}

.dark .main-container {
    background-color: rgba(30, 30, 30, 0.95);
    border: 1px solid rgba(255, 255, 255, 0.1);
}

/* Sidebar styling */
.sidebar {
    background-color: rgba(255, 255, 255, 0.9);
    backdrop-filter: blur(10px);
    border-radius: 15px;
    padding: 20px;
    margin: 10px;
}

.dark .sidebar {
    background-color: rgba(40, 40, 40, 0.9);
}
"""

def load_model(model_name, signed_in):
    """Function to load different models based on selection"""
    if not signed_in:
        return gr.Info("Please sign in to use the models")
    # Here you would implement the actual model loading logic
    # For now, we'll return a placeholder
    return f"Model {model_name} loaded successfully!"

with gr.Blocks(fill_height=True, theme="Nymbo/Nymbo_Theme", css=custom_css) as demo:
    with gr.Row():
        with gr.Column(scale=1):
            with gr.Group(elem_classes="sidebar"):
                gr.Markdown("# 🚀 Inference Provider")
                gr.Markdown(
                    "This Space showcases the OpenAI GPT-OSS models, served via the "
                    "Fireworks AI inference provider. Sign in with your Hugging Face "
                    "account to use the models."
                )
                # Model selection dropdown
                model_dropdown = gr.Dropdown(
                    choices=[
                        "openai/gpt-oss-120b",
                        "openai/gpt-oss-20b"
                    ],
                    value="openai/gpt-oss-120b",
                    label="Select Model",
                    info="Choose between different model sizes"
                )

                # Login button
                button = gr.LoginButton("Sign in with Hugging Face", size="lg")

                # Additional options (not yet wired to the loaded model interface)
                with gr.Accordion("⚙️ Advanced Options", open=False):
                    temperature = gr.Slider(
                        minimum=0,
                        maximum=2,
                        value=0.7,
                        step=0.1,
                        label="Temperature"
                    )
                    max_tokens = gr.Slider(
                        minimum=1,
                        maximum=4096,
                        value=512,
                        step=1,
                        label="Max Tokens"
                    )
        with gr.Column(scale=3):
            with gr.Group(elem_classes="main-container"):
                # Dynamic model loading based on selection
                @gr.render(inputs=[model_dropdown, button])
                def render_model_interface(selected_model, login_status):
                    gr.load(
                        f"models/{selected_model}",
                        accept_token=login_status,
                        provider="fireworks-ai"
                    )

demo.launch()