# multi_inference.py

import requests
import os

# API keys (ideally, load these from environment variables or Hugging Face Secrets rather than hardcoding them)
DEEPSEEK_KEYS = [
    "sk-5b4ecbbd66864a88925525fab14430d1",
    "sk-9c216dd03c9945218f12475a0fa6c8c4",
    "sk-79f0d8a98b88447880c01d012c572831",
    "sk-2be33a5e561b4788b9446e4e4e99bc2e"
]

OPENAI_KEYS = [
    "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",
    "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",
    "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",
    "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",
    "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"
]
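
# A minimal sketch of loading the keys from the environment instead of hardcoding them.
# The variable names DEEPSEEK_API_KEYS and OPENAI_API_KEYS are assumptions, not an
# existing convention; each is expected to hold a comma-separated list of keys.
if os.environ.get("DEEPSEEK_API_KEYS"):
    DEEPSEEK_KEYS = [k.strip() for k in os.environ["DEEPSEEK_API_KEYS"].split(",") if k.strip()]
if os.environ.get("OPENAI_API_KEYS"):
    OPENAI_KEYS = [k.strip() for k in os.environ["OPENAI_API_KEYS"].split(",") if k.strip()]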


def try_deepseek(prompt, key):
    """Send the prompt to the DeepSeek chat API with one key; return the reply or an [ERROR] string."""
    try:
        url = "https://api.deepseek.com/v1/chat/completions"
        headers = {
            "Authorization": f"Bearer {key}",
            "Content-Type": "application/json"
        }
        data = {
            "model": "deepseek-chat",
            "messages": [
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": prompt}
            ]
        }
        res = requests.post(url, headers=headers, json=data, timeout=60)
        result = res.json()
        if "choices" in result:
            return result["choices"][0]["message"]["content"]
        return f"[ERROR] DeepSeek: {result.get('error', {}).get('message', 'Unknown Error')}"
    except Exception as e:
        return f"[ERROR] DeepSeek Exception: {str(e)}"


def try_openai(prompt, key):
    """Send the prompt to the OpenAI chat API with one key; return the reply or an [ERROR] string."""
    try:
        url = "https://api.openai.com/v1/chat/completions"
        headers = {
            "Authorization": f"Bearer {key}",
            "Content-Type": "application/json"
        }
        data = {
            "model": "gpt-3.5-turbo",
            "messages": [
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": prompt}
            ]
        }
        res = requests.post(url, headers=headers, json=data, timeout=60)
        result = res.json()
        if "choices" in result:
            return result["choices"][0]["message"]["content"]
        return f"[ERROR] OpenAI: {result.get('error', {}).get('message', 'Unknown Error')}"
    except Exception as e:
        return f"[ERROR] OpenAI Exception: {str(e)}"


# Main fallback wrapper: try every DeepSeek key in order, then every OpenAI key,
# and return the first successful response.
def multi_query(prompt):
    for key in DEEPSEEK_KEYS:
        output = try_deepseek(prompt, key)
        if not output.startswith("[ERROR]"):
            return output

    for key in OPENAI_KEYS:
        output = try_openai(prompt, key)
        if not output.startswith("[ERROR]"):
            return output

    return "[ALL APIs FAILED: Out of quota or network error]"