mcp2-backend / inference.py
# inference.py
import os

import requests

# API key is read from the environment; falls back to an empty string if unset.
API_KEY = os.getenv("DEEPSEEK_KEY", "").strip()


def deepseek_query(prompt):
    """Send a single-turn chat request to the DeepSeek API and return the reply text."""
    url = "https://api.deepseek.com/v1/chat/completions"
    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json",
    }
    data = {
        "model": "deepseek-chat",
        "messages": [
            {"role": "system", "content": "You are a helpful, creative AI agent."},
            {"role": "user", "content": prompt},
        ],
    }
    try:
        response = requests.post(url, json=data, headers=headers, timeout=60)
        result = response.json()
        # Debug aid: return the full payload if the API responded without "choices"
        # (e.g. an authentication or rate-limit error).
        if "choices" not in result:
            return f"[ERROR] choices not found in response: {result}"
        return result["choices"][0]["message"]["content"]
    except Exception as e:
        return f"[ERROR] {str(e)}"
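A minimal usage sketch, assuming the DEEPSEEK_KEY environment variable holds a valid API key and that inference.py is importable from the working directory; the prompt text below is illustrative only and not part of the original file.

# example_usage.py (illustrative sketch, not part of the repository)
from inference import deepseek_query

if __name__ == "__main__":
    # deepseek_query returns either the model's reply or an "[ERROR] ..." string,
    # so printing the result covers both the success and failure paths.
    reply = deepseek_query("Summarize what this backend does in one sentence.")
    print(reply)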