# ddosxd's picture
# Update main.py
# c1c1cd2 verified
#Meow
import openai as closeai
from flask import Flask, request, Response, jsonify
import os
from rich import print
import json
import requests
from time import sleep
# Runtime configuration, read once at import time from environment variables.
# Grouped by concern: node identity, upstream API, client auth, Telegram
# notifications, and the Flask web server.
settings = {
    'node': {
        'id': os.environ.get('nodeId'),         # node identifier shown on "/"
        'models': os.environ.get('nodeModel'),  # model list shown on "/"
    },
    'api': {
        'host': os.environ.get('apibase'),  # upstream OpenAI-compatible base URL
        'key': os.environ.get('apikey'),    # upstream API key
    },
    'security': {
        'passw': os.environ.get('apipassw'),  # bearer token clients must present
    },
    'tg': {
        'token': os.environ.get('tgtoken'),  # bot token for error notifications
        'chat': os.environ.get('tgchat'),    # destination chat id
        'topic': os.environ.get('tgtopic'),  # forum topic (message_thread_id)
    },
    'web': {
        # Environment variables are always strings; coerce here so that
        # app.run(**settings['web']) receives the types Flask expects.
        # Previously 'debug' could become the truthy string "False".
        'port': int(os.environ.get('webport', 7860)),
        'host': os.environ.get('webhost', '0.0.0.0'),
        'debug': str(os.environ.get('webdebug', False)).lower() in ('1', 'true', 'yes'),
    },
}
def send_telegram_request(method, params=None):
    """Call a Telegram Bot API *method* and return the decoded JSON reply.

    params: optional dict sent as the JSON request body.
    The explicit timeout matters: requests has no default timeout, so a
    stalled Telegram call would otherwise hang the request handler that
    triggered the notification.
    """
    url = f'https://api.telegram.org/bot{settings["tg"]["token"]}/{method}'
    response = requests.post(url, json=params, timeout=10)
    data = response.json()
    print(data)  # rich-printed into the container log for debugging
    return data
def send_message(text):
    """Post *text* to the configured Telegram chat/topic; return the API reply."""
    payload = {
        'chat_id': settings["tg"]["chat"],
        'text': text,
        'message_thread_id': settings["tg"]["topic"],
    }
    return send_telegram_request('sendMessage', payload)
app = Flask(__name__)
closeai.api_base = settings["api"]["host"]
closeai.api_key = settings["api"]["key"]
@app.route("/")
def index():
    """Landing page: identifies the node and warns that the proxy is unmaintained."""
    node = settings["node"]
    # Adjacent string literals concatenate into the exact original response.
    return (
        f'Hi, its a node {node["id"]} with {node["models"]}.<br><br>'
        'Its just api proxy for openai if your ip banned by openai or other reverse proxy.<br>'
        'This space is not maintained, pls dont use it<br><br>'
        'It was a good time...'
    )
@app.route("/chat/completions", methods=['POST'])
def chat_completions():
    """Proxy a chat-completion request to the upstream API.

    Accepts an OpenAI-style JSON body, honours the optional 'stream' flag,
    and requires 'Authorization: Bearer <apipassw>'.  Upstream errors are
    reported to the Telegram channel and returned as an SSE error stream.
    """
    streaming = request.json.get('stream', False)
    auth = request.headers.get('Authorization', 'Bearer anonim')
    if auth != f'Bearer {settings["security"]["passw"]}':
        if streaming:
            er = 'Not authorized'

            def errorStream(er):
                # fixed: separators must be the 2-tuple (',', ':'); the original
                # (',' ':') was a missing comma that only worked because
                # json.dumps happens to unpack the 2-char string ',:'
                yield 'data: %s\n\n' % json.dumps({"status": "!=200", "error": str(er)}, separators=(',', ':'))
            return app.response_class(errorStream(er), mimetype='text/event-stream')
        else:
            return 'Not authorized'
    model = request.json.get('model', 'gpt-4')
    messages = request.json.get('messages')
    response = ''
    try:
        response = closeai.ChatCompletion.create(model=model, stream=streaming, messages=messages, allow_fallback=False)
    except Exception as er:
        print(er)
        send_message(str(er))  # notify the Telegram channel about upstream failures
        if '429' in str(er):
            sleep(45)  # back off when rate-limited so the upstream is not hammered

        def errorStream(er):
            yield 'data: %s\n\n' % json.dumps({"status": "!=200", "error": str(er)}, separators=(',', ':'))
        # NOTE(review): errors are reported as an SSE stream even for
        # non-streaming requests — preserved as-is; confirm clients expect this.
        return app.response_class(errorStream(er), mimetype='text/event-stream')
    if not streaming:
        # Non-streaming: echo the full upstream payload plus a convenience
        # 'result' field with just the assistant text.
        return {
            'model': model,
            'result': response["choices"][0]["message"]["content"],
            **response
        }

    def stream():
        # Streaming: forward each upstream chunk, tagged with model and status.
        for token in response:
            completion_data = {
                'model': model,
                'token': token,
                'status': 200,
                **token
            }
            yield 'data: %s\n\n' % json.dumps(completion_data, separators=(',', ':'))
    return app.response_class(stream(), mimetype='text/event-stream')
@app.route("/v1/chat/completions", methods=['POST'])
def v_chat_completions():
    """OpenAI-compatible /v1 endpoint: proxies the request body unchanged.

    Unlike /chat/completions, this route returns auth and upstream errors
    as JSON strings and performs no rate-limit back-off.
    """
    streaming = request.json.get('stream', False)
    auth = request.headers.get('Authorization', 'Bearer anonim')
    if auth != f'Bearer {settings["security"]["passw"]}':
        return json.dumps({'error': 'Not authorized'})
    model = request.json.get('model', 'gpt-3.5-turbo')
    messages = request.json.get('messages')
    response = ''
    try:
        response = closeai.ChatCompletion.create(model=model, stream=streaming, messages=messages)
    except Exception as er:
        send_message(str(er))  # notify the Telegram channel about upstream failures
        return json.dumps({"error": str(er)})
    if not streaming:
        return json.dumps(response)

    def stream():
        # fixed: separators must be the 2-tuple (',', ':'); the original
        # (',' ':') was a missing comma that only worked because json.dumps
        # happens to unpack the 2-char string ',:'
        for token in response:
            yield 'data: %s\n\n' % json.dumps(token, separators=(',', ':'))
    return app.response_class(stream(), mimetype='text/event-stream')
if __name__ == '__main__':
    # Launch the development server using the env-derived web settings
    # (host, port, debug).
    web_config = settings['web']
    app.run(**web_config)