add cerebras qwen models
app.py CHANGED
@@ -373,6 +373,11 @@ AVAILABLE_MODELS = [
         "name": "Qwen3-Coder-480B-A35B",
         "id": "Qwen/Qwen3-Coder-480B-A35B-Instruct",
         "description": "Qwen3-Coder-480B-A35B-Instruct model for advanced code generation and programming tasks"
+    },
+    {
+        "name": "Qwen3-32B",
+        "id": "Qwen/Qwen3-32B",
+        "description": "Qwen3-32B model for code generation and general tasks"
     }
 ]
 
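For orientation, this is roughly how the tail of AVAILABLE_MODELS reads once the hunk above is applied; earlier entries are omitted and the four-space indentation is an assumption about the surrounding file.

AVAILABLE_MODELS = [
    # ... earlier model entries omitted ...
    {
        "name": "Qwen3-Coder-480B-A35B",
        "id": "Qwen/Qwen3-Coder-480B-A35B-Instruct",
        "description": "Qwen3-Coder-480B-A35B-Instruct model for advanced code generation and programming tasks"
    },
    {
        "name": "Qwen3-32B",
        "id": "Qwen/Qwen3-32B",
        "description": "Qwen3-32B model for code generation and general tasks"
    }
]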
@@ -448,6 +453,10 @@ def get_inference_client(model_id, provider="auto"):
     """Return an InferenceClient with provider based on model_id and user selection."""
     if model_id == "moonshotai/Kimi-K2-Instruct":
         provider = "groq"
+    elif model_id == "Qwen/Qwen3-235B-A22B":
+        provider = "cerebras"
+    elif model_id == "Qwen/Qwen3-32B":
+        provider = "cerebras"
     return InferenceClient(
         provider=provider,
         api_key=HF_TOKEN,
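A minimal sketch of the provider routing after this commit, condensed for illustration: the real get_inference_client in app.py passes further arguments to InferenceClient that are truncated in the hunk above, and HF_TOKEN is assumed here to come from the environment.

import os
from huggingface_hub import InferenceClient

HF_TOKEN = os.getenv("HF_TOKEN")  # assumption: app.py defines HF_TOKEN elsewhere

def get_inference_client(model_id, provider="auto"):
    """Return an InferenceClient with provider based on model_id and user selection."""
    if model_id == "moonshotai/Kimi-K2-Instruct":
        provider = "groq"
    elif model_id == "Qwen/Qwen3-235B-A22B":
        provider = "cerebras"   # added in this commit
    elif model_id == "Qwen/Qwen3-32B":
        provider = "cerebras"   # added in this commit
    return InferenceClient(provider=provider, api_key=HF_TOKEN)

# Selecting the newly added Qwen3-32B entry now routes through Cerebras:
client = get_inference_client("Qwen/Qwen3-32B")

Since both Qwen branches set the same provider, they could equally be collapsed into a single membership test (model_id in {"Qwen/Qwen3-235B-A22B", "Qwen/Qwen3-32B"}); the diff keeps them as separate elif arms.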