from fastapi import FastAPI, Form, Request
from fastapi.responses import HTMLResponse, StreamingResponse
import os
import json
import requests
import random
app = FastAPI()
# Load environment variables with defaults
MODEL = "gpt-4o-mini"
API_URL = os.getenv("API_URL", "https://api.openai.com/v1/chat/completions") # Default to OpenAI API
DISABLED = os.getenv("DISABLED") == 'True'
OPENAI_API_KEYS = os.getenv("OPENAI_API_KEYS", "").split(",")
NUM_THREADS = int(os.getenv("NUM_THREADS", 1))
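# Example environment for local testing (illustrative values only, not real keys;
# adjust for your deployment). Note that NUM_THREADS is read above but is not
# referenced anywhere else in this file.
#   export OPENAI_API_KEYS="sk-example-key-1,sk-example-key-2"
#   export API_URL="https://api.openai.com/v1/chat/completions"
#   export DISABLED="False"
#   export NUM_THREADS="1"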
# HTML with embedded CSS and JS
HTML_CONTENT = """
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>GPT-4o Mini Chat</title>
    <style>
        body { font-family: Arial, sans-serif; background-color: #f4f4f4; margin: 0; padding: 20px; }
        .container { max-width: 800px; margin: auto; background: white; padding: 20px; border-radius: 8px; box-shadow: 0 0 10px rgba(0, 0, 0, 0.1); }
        h1 { color: #333; text-align: center; }
        .chatbox { height: 400px; overflow-y: auto; border: 1px solid #ccc; padding: 10px; margin-bottom: 20px; }
        .message { margin: 10px 0; padding: 8px; border-radius: 5px; }
        .user { background: #e6f3ff; }
        .assistant { background: #f0f0f0; }
        form { display: flex; flex-direction: column; gap: 10px; }
        input, select, button { padding: 8px; font-size: 16px; }
        button { background-color: #007bff; color: white; border: none; border-radius: 5px; cursor: pointer; }
        button:hover { background-color: #0056b3; }
        button:disabled { background-color: #ccc; cursor: not-allowed; }
    </style>
</head>
<body>
    <div class="container">
        <h1>GPT-4o Mini: Research Preview</h1>
        <div id="chatbox" class="chatbox"></div>
        <form id="chat-form" action="/chat" method="post">
            <input type="text" id="input" name="input" placeholder="Type your message..." required>
            <select name="top_p">
                <option value="1.0">Top P: 1.0</option>
                <option value="0.9">Top P: 0.9</option>
                <option value="0.8">Top P: 0.8</option>
            </select>
            <select name="temperature">
                <option value="1.0">Temperature: 1.0</option>
                <option value="0.7">Temperature: 0.7</option>
                <option value="0.3">Temperature: 0.3</option>
            </select>
            <button type="submit" id="submit-btn">Send</button>
        </form>
    </div>
    <script>
        const chatbox = document.getElementById("chatbox");
        const form = document.getElementById("chat-form");
        const input = document.getElementById("input");
        const submitBtn = document.getElementById("submit-btn");
        let history = JSON.parse(localStorage.getItem("chatHistory")) || [];
        history.forEach(msg => addMessage(msg.role, msg.content));

        form.addEventListener("submit", async (e) => {
            e.preventDefault();
            const userInput = input.value;
            const topP = form.top_p.value;
            const temperature = form.temperature.value;
            addMessage("user", userInput);
            input.value = "";
            submitBtn.disabled = true;
            const response = await fetch("/chat", {
                method: "POST",
                headers: { "Content-Type": "application/x-www-form-urlencoded" },
                body: `input=${encodeURIComponent(userInput)}&top_p=${topP}&temperature=${temperature}`
            });
            const reader = response.body.getReader();
            let assistantMessage = "";
            const decoder = new TextDecoder();
            while (true) {
                const { done, value } = await reader.read();
                if (done) break;
                assistantMessage += decoder.decode(value);
                updateLastMessage("assistant", assistantMessage);
            }
            history.push({ role: "user", content: userInput }, { role: "assistant", content: assistantMessage });
            localStorage.setItem("chatHistory", JSON.stringify(history));
            submitBtn.disabled = false;
        });

        function addMessage(role, content) {
            const div = document.createElement("div");
            div.className = `message ${role}`;
            div.textContent = content;
            chatbox.appendChild(div);
            chatbox.scrollTop = chatbox.scrollHeight;
        }

        function updateLastMessage(role, content) {
            const lastMsg = chatbox.lastElementChild;
            if (lastMsg && lastMsg.className.includes(role)) {
                lastMsg.textContent = content;
            } else {
                addMessage(role, content);
            }
            chatbox.scrollTop = chatbox.scrollHeight;
        }
    </script>
</body>
</html>
"""

@app.get("/", response_class=HTMLResponse)
async def home():
    if DISABLED:
        return "<h1 style='color:red;text-align:center'>This app has reached OpenAI's usage limit. Please check back tomorrow.</h1>"
    return HTML_CONTENT

@app.post("/chat")
async def chat(input: str = Form(...), top_p: float = Form(1.0), temperature: float = Form(1.0)):
    if DISABLED:
        return StreamingResponse(iter(["Usage limit reached."]), media_type="text/plain")
    if not API_URL:
        return StreamingResponse(iter(["Error: API_URL is not set in the environment."]), media_type="text/plain")
    if not OPENAI_API_KEYS or OPENAI_API_KEYS == [""]:
        return StreamingResponse(iter(["Error: No valid OPENAI_API_KEYS provided."]), media_type="text/plain")

    payload = {
        "model": MODEL,
        "messages": [{"role": "user", "content": input}],
        "temperature": temperature,
        "top_p": top_p,
        "n": 1,
        "stream": True,
        "presence_penalty": 0,
        "frequency_penalty": 0,
    }

    # Pick one key at random to spread requests across the configured keys.
    OPENAI_API_KEY = random.choice(OPENAI_API_KEYS)
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {OPENAI_API_KEY}",
    }

    def stream_response():
        try:
            response = requests.post(API_URL, headers=headers, json=payload, stream=True)
            response.raise_for_status()
            # The upstream API streams Server-Sent Events: each non-empty line
            # looks like "data: {...json...}", and the stream ends with "data: [DONE]".
            for chunk in response.iter_lines():
                if not chunk:
                    continue
                chunk_data = chunk.decode("utf-8")
                if not chunk_data.startswith("data: "):
                    continue
                chunk_data = chunk_data[6:]
                if chunk_data.strip() == "[DONE]":
                    break
                chunk_json = json.loads(chunk_data)
                choices = chunk_json.get("choices") or []
                if choices and "content" in choices[0].get("delta", {}):
                    yield choices[0]["delta"]["content"]
        except requests.exceptions.MissingSchema:
            yield "Error: Invalid API_URL. Please provide a valid URL (e.g., https://api.openai.com/v1/chat/completions)."
        except Exception as e:
            yield f"Error: {str(e)}"

    return StreamingResponse(stream_response(), media_type="text/plain")
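
# Example client call against the streaming /chat endpoint, assuming the app is
# served on port 7860 as in the __main__ block below (adjust host/port to match
# your setup); curl's -N flag disables output buffering so tokens appear as they
# arrive:
#   curl -N -X POST http://localhost:7860/chat \
#        --data-urlencode "input=Hello there" -d "top_p=1.0" -d "temperature=0.7"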

if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=7860)
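
# Alternatively, the app can be served via the uvicorn CLI (the module name
# "app" below is an assumption; substitute the actual filename without .py):
#   uvicorn app:app --host 0.0.0.0 --port 7860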