from openai import OpenAI

client = OpenAI(
    api_key="sk-rOjB00dtKBbSYIfgewn_KA",
    base_url="https://internal-skyscriptllm.skyreels.ai",
)

# request sent to model set on litellm proxy, `litellm --model`
response = client.chat.completions.create(
    model="gpt-4o-2024-08-06",
    messages=[
        {
            "role": "user",
            "content": "this is a test request, write a short poem",
        }
    ],
)

print(response.choices[0].message.content)
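
# The same request can also be streamed token-by-token. This is a minimal
# sketch that reuses the client above and assumes the proxy passes
# OpenAI-style streaming chunks through unchanged; the prompt is illustrative.
stream = client.chat.completions.create(
    model="gpt-4o-2024-08-06",
    messages=[{"role": "user", "content": "this is a test request, write a short poem"}],
    stream=True,
)
for chunk in stream:
    # each chunk carries an incremental delta of the assistant's reply
    delta = chunk.choices[0].delta.content
    if delta:
        print(delta, end="", flush=True)
print()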