# multi_inference.py

import requests
import os

# Load DeepSeek keys from Hugging Face Secrets
DEEPSEEK_KEYS = [
    os.getenv("DEEPSEEK_KEY_1", "").strip(),
    os.getenv("DEEPSEEK_KEY_2", "").strip(),
    os.getenv("DEEPSEEK_KEY_3", "").strip(),
    os.getenv("DEEPSEEK_KEY_4", "").strip(),
    os.getenv("DEEPSEEK_KEY_5", "").strip()
]

# Load OpenAI keys named as open1, open2, etc.
OPENAI_KEYS = [
    os.getenv("open1", "").strip(),
    os.getenv("open2", "").strip(),
    os.getenv("open3", "").strip(),
    os.getenv("open4", "").strip(),
    os.getenv("open5", "").strip()
]


def try_deepseek(prompt, key):
    try:
        url = "https://api.deepseek.com/v1/chat/completions"
        headers = {
            "Authorization": f"Bearer {key}",
            "Content-Type": "application/json"
        }
        data = {
            "model": "deepseek-chat",
            "messages": [
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": prompt}
            ]
        }
        # Time out so a stalled request does not hang the whole fallback chain
        res = requests.post(url, headers=headers, json=data, timeout=60)
        result = res.json()
        if "choices" in result:
            return result["choices"][0]["message"]["content"]
        return f"[ERROR] DeepSeek: {result.get('error', {}).get('message', 'Unknown Error')}"
    except Exception as e:
        return f"[ERROR] DeepSeek Exception: {str(e)}"


def try_openai(prompt, key):
    try:
        url = "https://api.openai.com/v1/chat/completions"
        headers = {
            "Authorization": f"Bearer {key}",
            "Content-Type": "application/json"
        }
        data = {
            "model": "gpt-3.5-turbo",
            "messages": [
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": prompt}
            ]
        }
        # Time out so a stalled request does not hang the whole fallback chain
        res = requests.post(url, headers=headers, json=data, timeout=60)
        result = res.json()
        if "choices" in result:
            return result["choices"][0]["message"]["content"]
        return f"[ERROR] OpenAI: {result.get('error', {}).get('message', 'Unknown Error')}"
    except Exception as e:
        return f"[ERROR] OpenAI Exception: {str(e)}"


# Main fallback wrapper: try each DeepSeek key in order, then fall back to the OpenAI keys
def multi_query(prompt):
    for key in DEEPSEEK_KEYS:
        if not key:
            # Skip secrets that are not configured (os.getenv returned "")
            continue
        output = try_deepseek(prompt, key)
        if not output.startswith("[ERROR]"):
            return output

    for key in OPENAI_KEYS:
        if not key:
            continue
        output = try_openai(prompt, key)
        if not output.startswith("[ERROR]"):
            return output

    return "[ALL APIs FAILED: Out of quota or network error]"