Spaces:
Paused
Paused
New API provider: Better
Browse files- g4f/Provider/Providers/Better.py +56 -0
- g4f/Provider/__init__.py +1 -0
- g4f/models.py +6 -6
g4f/Provider/Providers/Better.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
import json
import requests
from typing import Dict, get_type_hints

# Base URL of the OpenAI-compatible proxy this provider talks to.
url = 'https://openai-proxy-api.vercel.app/v1/'

# Model names this provider accepts.
# FIX: the original literal was missing the comma after 'gpt-3.5-turbo-0613',
# so Python's implicit string concatenation produced the bogus single entry
# 'gpt-3.5-turbo-0613gpt-3.5-turbo-16k' and silently dropped two real names.
model = {
    'gpt-3.5-turbo',
    'gpt-3.5-turbo-0613',
    'gpt-3.5-turbo-16k',
    'gpt-3.5-turbo-16k-0613',
    'gpt-4',
}

supports_stream = True  # _create_completion yields tokens as they arrive
needs_auth = False      # the proxy endpoint is called without any API key
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def _create_completion(model: str, messages: list, stream: bool, **kwargs):
    """Yield completion text chunks streamed from the proxy endpoint.

    Args:
        model: model name forwarded to the backend (one of the module-level
            ``model`` set).
        messages: OpenAI-style chat messages,
            e.g. ``[{'role': 'user', 'content': ...}]``.
        stream: forwarded to the backend in the request body; the HTTP
            response is always consumed as a stream either way.
        **kwargs: accepted for provider-interface compatibility; ignored.

    Yields:
        str: non-empty content deltas, in order of arrival.
    """
    headers = {
        'Content-Type': 'application/json',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36 Edg/114.0.1823.58',
        'Referer': 'https://chat.ylokh.xyz/',
        'Origin': 'https://chat.ylokh.xyz',
    }

    json_data = {
        'messages': messages,
        'temperature': 1.0,
        'model': model,
        'stream': stream,
    }

    # Reuse the module-level base URL instead of duplicating it as a literal.
    response = requests.post(
        url + 'chat/completions', headers=headers, json=json_data, stream=True
    )

    for line in response.iter_lines():
        decoded = line.decode('utf-8')
        if not decoded.startswith('data: '):
            continue
        # Slice off the SSE prefix only; the original used str.replace, which
        # would also mangle any 'data: ' occurring inside the payload itself.
        data_str = decoded[len('data: '):]
        # OpenAI-style streams terminate with a literal "[DONE]" sentinel,
        # which is not JSON and would crash json.loads.
        if data_str.strip() == '[DONE]':
            break
        data = json.loads(data_str)  # (removed leftover debug print(data))
        if 'choices' in data and 'delta' in data['choices'][0]:
            delta = data['choices'][0]['delta']
            content = delta.get('content', '')
            # NOTE(review): the standard OpenAI schema puts finish_reason on
            # the choice, not the delta; kept reading the delta as the
            # original did — confirm against the proxy's actual payloads.
            if delta.get('finish_reason', '') == 'stop':
                break
            if content:
                yield content
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
# Human-readable summary of the positional parameters _create_completion
# accepts, with their annotated types, e.g.
# "g4f.Providers.Better supports: (model: str, messages: list, stream: bool)".
_arg_names = _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]
_hints = get_type_hints(_create_completion)
params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
    '(%s)' % ', '.join(f'{_name}: {_hints[_name].__name__}' for _name in _arg_names)
|
g4f/Provider/__init__.py
CHANGED
|
@@ -2,6 +2,7 @@ from . import Provider
|
|
| 2 |
from .Providers import (
|
| 3 |
Aichat,
|
| 4 |
Bard,
|
|
|
|
| 5 |
Bing,
|
| 6 |
ChatgptAi,
|
| 7 |
ChatgptLogin,
|
|
|
|
| 2 |
from .Providers import (
|
| 3 |
Aichat,
|
| 4 |
Bard,
|
| 5 |
+
Better,
|
| 6 |
Bing,
|
| 7 |
ChatgptAi,
|
| 8 |
ChatgptLogin,
|
g4f/models.py
CHANGED
|
@@ -10,24 +10,24 @@ class Model:
|
|
| 10 |
class gpt_35_turbo:
|
| 11 |
name: str = 'gpt-3.5-turbo'
|
| 12 |
base_provider: str = 'openai'
|
| 13 |
-
best_provider: Provider.Provider = Provider.
|
| 14 |
-
best_providers: list = [Provider.
|
| 15 |
|
| 16 |
class gpt_35_turbo_0613:
|
| 17 |
name: str = 'gpt-3.5-turbo-0613'
|
| 18 |
base_provider: str = 'openai'
|
| 19 |
-
best_provider: Provider.Provider = Provider.
|
| 20 |
|
| 21 |
class gpt_35_turbo_16k_0613:
|
| 22 |
name: str = 'gpt-3.5-turbo-16k-0613'
|
| 23 |
base_provider: str = 'openai'
|
| 24 |
-
best_provider: Provider.Provider = Provider.
|
| 25 |
-
best_providers: list = [Provider.Easychat, Provider.Ezcht, Provider.
|
| 26 |
|
| 27 |
class gpt_35_turbo_16k:
|
| 28 |
name: str = 'gpt-3.5-turbo-16k'
|
| 29 |
base_provider: str = 'openai'
|
| 30 |
-
best_provider: Provider.Provider = Provider.
|
| 31 |
|
| 32 |
class gpt_4_dev:
|
| 33 |
name: str = 'gpt-4-for-dev'
|
|
|
|
| 10 |
class gpt_35_turbo:
    """Descriptor for gpt-3.5-turbo: canonical name, backing service, and provider ranking."""
    name: str = 'gpt-3.5-turbo'
    base_provider: str = 'openai'
    # Preferred single provider, plus an ordered fallback list.
    best_provider: Provider.Provider = Provider.Better
    best_providers: list = [Provider.Better, Provider.Lockchat, Provider.Yqcloud, Provider.Forefront]
|
| 15 |
|
| 16 |
class gpt_35_turbo_0613:
    """Descriptor for gpt-3.5-turbo-0613: canonical name, backing service, and best provider."""
    name: str = 'gpt-3.5-turbo-0613'
    base_provider: str = 'openai'
    best_provider: Provider.Provider = Provider.Better
|
| 20 |
|
| 21 |
class gpt_35_turbo_16k_0613:
    """Descriptor for gpt-3.5-turbo-16k-0613: canonical name, backing service, and provider ranking."""
    name: str = 'gpt-3.5-turbo-16k-0613'
    base_provider: str = 'openai'
    # Preferred single provider, plus an ordered fallback list.
    best_provider: Provider.Provider = Provider.Better
    best_providers: list = [Provider.Easychat, Provider.Ezcht, Provider.Better]
|
| 26 |
|
| 27 |
class gpt_35_turbo_16k:
    """Descriptor for gpt-3.5-turbo-16k: canonical name, backing service, and best provider."""
    name: str = 'gpt-3.5-turbo-16k'
    base_provider: str = 'openai'
    best_provider: Provider.Provider = Provider.Better
|
| 31 |
|
| 32 |
class gpt_4_dev:
|
| 33 |
name: str = 'gpt-4-for-dev'
|