# GetMerlin2Api — Flask proxy exposing an OpenAI-compatible chat API backed by Merlin.
from flask import Flask, request, Response, stream_with_context
import requests
import json
import uuid
import time
import random
import os
# WSGI application instance; all routes below register against it.
app = Flask(__name__)
def get_token():
    """Return a randomly chosen upstream API token from token.txt.

    The file is re-read on every call so tokens can be rotated without
    restarting the service. Blank lines and surrounding whitespace are
    ignored, so a trailing newline can no longer yield an empty token.

    Returns:
        A non-empty token string, or None when the file holds no tokens.

    Raises:
        OSError: if token.txt is missing or unreadable (propagates to the
            caller's error handling, as before).
    """
    with open("token.txt", encoding="utf-8") as f:
        tokens = [line.strip() for line in f if line.strip()]
    # random.choice raises on an empty sequence; callers test for a falsy
    # return instead, so map "no tokens" to None explicitly.
    return random.choice(tokens) if tokens else None
def create_merlin_request(openai_req):
    """Translate an OpenAI-style chat request into Merlin's wire format.

    The final message becomes the new prompt; every earlier message is
    flattened into a single "role: content" context string.

    Args:
        openai_req: Parsed OpenAI /chat/completions JSON body; must
            contain a non-empty 'messages' list.

    Returns:
        dict ready to POST to Merlin's unified-thread endpoint.
    """
    messages = openai_req['messages']
    # All but the last message provide conversational context.
    context = "\n".join(
        f"{msg['role']}: {msg['content']}" for msg in messages[:-1]
    )
    return {
        "attachments": [],
        # uuid4 throughout: uuid1 embeds the host MAC address and a
        # timestamp, needlessly leaking server details upstream, and was
        # inconsistent with the uuid4 IDs used for the message fields.
        "chatId": str(uuid.uuid4()),
        "language": "AUTO",
        "message": {
            "content": messages[-1]['content'],
            "context": context,
            "childId": str(uuid.uuid4()),
            "id": str(uuid.uuid4()),
            "parentId": "root"
        },
        "mode": "UNIFIED_CHAT",
        "model": openai_req.get('model', 'claude-3-haiku'),
        "metadata": {
            "largeContext": False,
            "merlinMagic": False,
            "proFinderMode": False,
            "webAccess": False
        }
    }
def process_merlin_response(response_text):
    """Extract the content string from one SSE "data: ..." line.

    Merlin sometimes serves UTF-8 bytes mis-decoded as latin-1
    (mojibake); re-encoding as latin-1 and decoding as UTF-8 repairs
    that. Content that cannot round-trip (e.g. contains characters
    outside latin-1, such as CJK text) is returned untouched — the
    previous version let the UnicodeEncodeError escape to the broad
    except and silently discarded valid content as "".

    Args:
        response_text: Raw SSE line beginning with "data: ".

    Returns:
        The content value (str after repair, or as-is for non-str),
        or "" when the line cannot be parsed at all.
    """
    try:
        payload = json.loads(response_text[6:])  # strip the "data: " prefix
        content = payload['data']['content']
        if isinstance(content, str):
            try:
                # Repair mojibake; fall back if the text is genuine UTF-8.
                return content.encode('latin1').decode('utf-8')
            except (UnicodeEncodeError, UnicodeDecodeError):
                return content
        return content
    except Exception:
        # Malformed JSON or missing keys: treat as an empty chunk.
        return ""
def process_non_stream_response(response, model='claude-3-haiku'):
    """Collapse Merlin's SSE stream into a single OpenAI chat completion.

    Args:
        response: requests.Response whose body is a complete SSE event
            stream (only its .text attribute is read).
        model: Model name to echo in the completion; the upstream stream
            does not repeat it. Previously hard-coded to
            'claude-3-haiku' even when the client asked for another
            model — callers may now pass the requested model through.

    Returns:
        OpenAI-style chat.completion dict, or a 500 flask.Response when
        the stream yields no content or cannot be processed.
    """
    try:
        parts = []
        for line in response.text.split('\n'):
            if line.startswith('data: '):
                content = process_merlin_response(line)
                # Merlin emits lone-space keep-alive chunks; skip them.
                if content and content != " ":
                    parts.append(content)
        full_content = "".join(parts)
        if not full_content:
            return Response("Empty response from server", status=500)
        return {
            "id": str(uuid.uuid4()),
            "object": "chat.completion",
            "created": int(time.time()),
            "model": model,
            "choices": [{
                "message": {
                    "role": "assistant",
                    "content": full_content
                },
                "finish_reason": "stop",
                "index": 0
            }]
        }
    except Exception as e:
        print(f"Error processing response: {str(e)}")
        return Response("Failed to process response", status=500)
@app.route('/', methods=['GET'])
def home():
    """Health-check endpoint: report that the service is up."""
    status_payload = {
        "status": "GetMerlin2Api Service Running...",
        "message": "MoLoveSze...",
    }
    return status_payload
@app.route('/v1/chat/completions', methods=['POST'])
def chat():
    """OpenAI-compatible chat endpoint proxying to Merlin.

    Optionally enforces a bearer token (AUTH_TOKEN env var), rewrites
    the request into Merlin's format, and returns either a single
    completion or an OpenAI-style SSE stream depending on the request
    body's 'stream' flag.
    """
    try:
        # Auth is only enforced when AUTH_TOKEN is configured.
        auth_token = os.getenv('AUTH_TOKEN')
        if auth_token:
            request_token = request.headers.get('Authorization', '')
            if request_token != f"Bearer {auth_token}":
                return Response("Unauthorized", status=401)
        # Upstream credential, picked per-request from token.txt.
        token = get_token()
        if not token:
            return Response("Failed to get token", status=500)
        openai_req = request.json
        merlin_req = create_merlin_request(openai_req)
        headers = {
            "Content-Type": "application/json",
            "Accept": "text/event-stream",
            "Authorization": f"Bearer {token}",
            "x-merlin-version": "web-merlin",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
            "host": "arcane.getmerlin.in"
        }
        # stream=True so Merlin's SSE chunks can be relayed as they arrive.
        response = requests.post(
            "https://arcane.getmerlin.in/v1/thread/unified",
            json=merlin_req,
            headers=headers,
            stream=True
        )
        if response.status_code != 200:
            return Response(f"Merlin API error: {response.text}", status=response.status_code)
        # Non-streaming clients get the whole completion in one JSON body.
        if not openai_req.get('stream', False):
            return process_non_stream_response(response)

        def generate():
            """Re-emit Merlin's SSE stream as OpenAI chat.completion.chunk events."""
            try:
                buffer = ""
                for chunk in response.iter_content(chunk_size=1024, decode_unicode=True):
                    if chunk:
                        buffer += chunk
                        # Process only complete lines; the partial tail
                        # stays buffered for the next chunk.
                        while '\n' in buffer:
                            line, buffer = buffer.split('\n', 1)
                            if line.startswith('data: '):
                                try:
                                    content = process_merlin_response(line)
                                    if content:
                                        openai_resp = {
                                            "id": str(uuid.uuid4()),
                                            "object": "chat.completion.chunk",
                                            "created": int(time.time()),
                                            "model": openai_req.get('model', 'claude-3-haiku'),
                                            "choices": [{
                                                "delta": {
                                                    "content": content
                                                },
                                                "index": 0,
                                                "finish_reason": None
                                            }]
                                        }
                                        yield f"data: {json.dumps(openai_resp, ensure_ascii=False)}\n\n"
                                except Exception as e:
                                    # One bad chunk must not kill the stream;
                                    # log it and keep draining the buffer.
                                    print(f"Error processing chunk: {str(e)}")
                                    continue
                # Terminal chunk with finish_reason, then the OpenAI
                # "[DONE]" sentinel clients use to stop reading.
                final_resp = {
                    "choices": [{
                        "delta": {"content": ""},
                        "index": 0,
                        "finish_reason": "stop"
                    }]
                }
                yield f"data: {json.dumps(final_resp)}\n\n"
                yield "data: [DONE]\n\n"
            except Exception as e:
                print(f"Error in generate: {str(e)}")
                return

        # stream_with_context keeps the Flask request context alive for
        # the lifetime of the generator.
        return Response(
            stream_with_context(generate()),
            content_type='text/event-stream'
        )
    except Exception as e:
        print(f"Error in chat endpoint: {str(e)}")
        return Response(f"Internal server error: {str(e)}", status=500)
# Script entry point: dropped the stray trailing "|" (a syntax error as
# extracted) and translated the port comment to English.
if __name__ == '__main__':
    # HF Spaces default port; override with the PORT env var.
    port = int(os.getenv('PORT', 7860))
    # 0.0.0.0 so the container/host can reach the service externally.
    app.run(host='0.0.0.0', port=port)