from fastapi import FastAPI, Form
from fastapi.responses import HTMLResponse, StreamingResponse
import os
import json
import requests
import random

app = FastAPI()
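
# Runtime configuration, read from environment variables:
#   API_URL          - chat-completions endpoint (defaults to the OpenAI API)
#   DISABLED         - set to "True" to take the app offline when the usage limit is hit
#   OPENAI_API_KEYS  - comma-separated API keys; one is chosen at random per request
#   NUM_THREADS      - read from the environment but not used directly in this module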
MODEL = "gpt-4o-mini"
API_URL = os.getenv("API_URL", "https://api.openai.com/v1/chat/completions")
DISABLED = os.getenv("DISABLED") == "True"
OPENAI_API_KEYS = os.getenv("OPENAI_API_KEYS", "").split(",")
NUM_THREADS = int(os.getenv("NUM_THREADS", 1))
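
# Single-page chat UI served at "/". The page posts the form to /chat, streams
# the plain-text reply into the chat box, and keeps the conversation history in
# the browser's localStorage.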
HTML_CONTENT = """
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>GPT-4o Mini Chat</title>
    <style>
        body { font-family: Arial, sans-serif; background-color: #f4f4f4; margin: 0; padding: 20px; }
        .container { max-width: 800px; margin: auto; background: white; padding: 20px; border-radius: 8px; box-shadow: 0 0 10px rgba(0, 0, 0, 0.1); }
        h1 { color: #333; text-align: center; }
        .chatbox { height: 400px; overflow-y: auto; border: 1px solid #ccc; padding: 10px; margin-bottom: 20px; }
        .message { margin: 10px 0; padding: 8px; border-radius: 5px; }
        .user { background: #e6f3ff; }
        .assistant { background: #f0f0f0; }
        form { display: flex; flex-direction: column; gap: 10px; }
        input, select, button { padding: 8px; font-size: 16px; }
        button { background-color: #007bff; color: white; border: none; border-radius: 5px; cursor: pointer; }
        button:hover { background-color: #0056b3; }
        button:disabled { background-color: #ccc; cursor: not-allowed; }
    </style>
</head>
<body>
    <div class="container">
        <h1>GPT-4o Mini: Research Preview</h1>
        <div id="chatbox" class="chatbox"></div>
        <form id="chat-form" action="/chat" method="post">
            <input type="text" id="input" name="input" placeholder="Type your message..." required>
            <select name="top_p">
                <option value="1.0">Top P: 1.0</option>
                <option value="0.9">Top P: 0.9</option>
                <option value="0.8">Top P: 0.8</option>
            </select>
            <select name="temperature">
                <option value="1.0">Temperature: 1.0</option>
                <option value="0.7">Temperature: 0.7</option>
                <option value="0.3">Temperature: 0.3</option>
            </select>
            <button type="submit" id="submit-btn">Send</button>
        </form>
    </div>
    <script>
        const chatbox = document.getElementById("chatbox");
        const form = document.getElementById("chat-form");
        const input = document.getElementById("input");
        const submitBtn = document.getElementById("submit-btn");
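
        // Restore any previous conversation from localStorage.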
        let history = JSON.parse(localStorage.getItem("chatHistory")) || [];
        history.forEach(msg => addMessage(msg.role, msg.content));
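
        // Send the user's message to /chat and stream the reply into the chat box.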
        form.addEventListener("submit", async (e) => {
            e.preventDefault();
            const userInput = input.value;
            const topP = form.top_p.value;
            const temperature = form.temperature.value;

            addMessage("user", userInput);
            input.value = "";
            submitBtn.disabled = true;

            const response = await fetch("/chat", {
                method: "POST",
                headers: { "Content-Type": "application/x-www-form-urlencoded" },
                body: `input=${encodeURIComponent(userInput)}&top_p=${topP}&temperature=${temperature}`
            });

            const reader = response.body.getReader();
            let assistantMessage = "";
            const decoder = new TextDecoder();
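
            // Read the streamed body chunk by chunk, updating the assistant bubble as text arrives.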
            while (true) {
                const { done, value } = await reader.read();
                if (done) break;
                // { stream: true } keeps multi-byte characters intact across chunk boundaries.
                assistantMessage += decoder.decode(value, { stream: true });
                updateLastMessage("assistant", assistantMessage);
            }

            history.push({ role: "user", content: userInput }, { role: "assistant", content: assistantMessage });
            localStorage.setItem("chatHistory", JSON.stringify(history));
            submitBtn.disabled = false;
        });
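
        // Append a new message bubble and keep the chat scrolled to the bottom.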
        function addMessage(role, content) {
            const div = document.createElement("div");
            div.className = `message ${role}`;
            div.textContent = content;
            chatbox.appendChild(div);
            chatbox.scrollTop = chatbox.scrollHeight;
        }
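
        // Replace the text of the most recent bubble if it belongs to the same role;
        // otherwise start a new bubble (first chunk of an assistant reply).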
        function updateLastMessage(role, content) {
            const lastMsg = chatbox.lastElementChild;
            if (lastMsg && lastMsg.className.includes(role)) {
                lastMsg.textContent = content;
            } else {
                addMessage(role, content);
            }
            chatbox.scrollTop = chatbox.scrollHeight;
        }
    </script>
</body>
</html>
"""
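

# Serve the chat UI, or a short notice when the app has been disabled.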
@app.get("/", response_class=HTMLResponse)
async def home():
    if DISABLED:
        return "<h1 style='color:red;text-align:center'>This app has reached OpenAI's usage limit. Please check back tomorrow.</h1>"
    return HTML_CONTENT
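

# Relay a single user message to the chat-completions API and stream the reply
# back to the browser as plain text.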
@app.post("/chat")
async def chat(input: str = Form(...), top_p: float = Form(1.0), temperature: float = Form(1.0)):
    if DISABLED:
        return StreamingResponse(iter(["Usage limit reached."]), media_type="text/plain")

    if not API_URL:
        return StreamingResponse(iter(["Error: API_URL is not set in the environment."]), media_type="text/plain")

    if not OPENAI_API_KEYS or OPENAI_API_KEYS == [""]:
        return StreamingResponse(iter(["Error: No valid OPENAI_API_KEYS provided."]), media_type="text/plain")
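
    # OpenAI-compatible chat-completions request; "stream": True lets us relay
    # tokens to the client as they are generated.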
    payload = {
        "model": MODEL,
        "messages": [{"role": "user", "content": input}],
        "temperature": temperature,
        "top_p": top_p,
        "n": 1,
        "stream": True,
        "presence_penalty": 0,
        "frequency_penalty": 0,
    }
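
    # Pick one of the configured keys at random to spread usage across them.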
    OPENAI_API_KEY = random.choice(OPENAI_API_KEYS)
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {OPENAI_API_KEY}",
    }
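
    # Generator that parses the upstream server-sent-event stream and forwards the
    # text deltas; errors are yielded as text so the browser still shows them.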
    def stream_response():
        try:
            response = requests.post(API_URL, headers=headers, json=payload, stream=True)
            response.raise_for_status()
            for chunk in response.iter_lines():
                if not chunk:
                    continue
                chunk_data = chunk.decode("utf-8")
                if not chunk_data.startswith("data: "):
                    continue
                data = chunk_data[6:]
                if data.strip() == "[DONE]":  # end-of-stream sentinel, not JSON
                    break
                chunk_json = json.loads(data)
                choices = chunk_json.get("choices")
                if choices and "content" in choices[0].get("delta", {}):
                    yield choices[0]["delta"]["content"]
        except requests.exceptions.MissingSchema:
            yield "Error: Invalid API_URL. Please provide a valid URL (e.g., https://api.openai.com/v1/chat/completions)."
        except Exception as e:
            yield f"Error: {str(e)}"

    return StreamingResponse(stream_response(), media_type="text/plain")
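

# Run the app directly with uvicorn (port 7860 is the default used by Hugging Face Spaces).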
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)