luck210 committed on
Commit d2d3314 · verified · 1 Parent(s): ad5ef88

Update app.py

Files changed (1)
  1. app.py +14 -5
app.py CHANGED
@@ -1,3 +1,4 @@
+
 from fastapi import FastAPI, Form, Request
 from fastapi.responses import HTMLResponse, StreamingResponse
 import os
@@ -12,7 +13,7 @@ MODEL = "gpt-4o-mini"
 API_URL = os.getenv("API_URL")
 DISABLED = os.getenv("DISABLED") == 'True'
 OPENAI_API_KEYS = os.getenv("OPENAI_API_KEYS", "").split(",")
-NUM_THREADS = int(os.getenv("NUM_THREADS", 1)) # Default to 1 if not set
+NUM_THREADS = int(os.getenv("NUM_THREADS", 1))
 
 # HTML with embedded CSS and JS
 HTML_CONTENT = """
@@ -63,7 +64,6 @@ HTML_CONTENT = """
 const submitBtn = document.getElementById("submit-btn");
 let history = JSON.parse(localStorage.getItem("chatHistory")) || [];
 
-// Load existing history
 history.forEach(msg => addMessage(msg.role, msg.content));
 
 form.addEventListener("submit", async (e) => {
@@ -120,14 +120,12 @@ HTML_CONTENT = """
 </html>
 """
 
-# Serve the chat interface
 @app.get("/", response_class=HTMLResponse)
 async def home():
     if DISABLED:
         return "<h1 style='color:red;text-align:center'>This app has reached OpenAI's usage limit. Please check back tomorrow.</h1>"
     return HTML_CONTENT
 
-# Handle chat input and stream response
 @app.post("/chat")
 async def chat(input: str = Form(...), top_p: float = Form(1.0), temperature: float = Form(1.0)):
     if DISABLED:
@@ -157,4 +155,15 @@ async def chat(input: str = Form(...), top_p: float = Form(1.0), temperature: fl
             for chunk in response.iter_lines():
                 if chunk:
                     chunk_data = chunk.decode('utf-8')
-                    if chunk_data.startswith(
+                    if chunk_data.startswith("data: "):
+                        chunk_json = json.loads(chunk_data[6:])
+                        if "choices" in chunk_json and "delta" in chunk_json["choices"][0] and "content" in chunk_json["choices"][0]["delta"]:
+                            yield chunk_json["choices"][0]["delta"]["content"]
+        except Exception as e:
+            yield f"Error: {str(e)}"
+
+    return StreamingResponse(stream_response(), media_type="text/plain")
+
+if __name__ == "__main__":
+    import uvicorn
+    uvicorn.run(app, host="0.0.0.0", port=7860)
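
For reference, a minimal sketch of how the lines added in this commit presumably sit inside the full /chat handler. The diff only shows the tail of the handler, so the payload construction, the Authorization header, the random API-key selection, the requests.post call, and the default API_URL value are assumptions, as is the guard that skips the SSE "data: [DONE]" terminator (the committed code parses every "data:" line). The parsing logic, the StreamingResponse return, and the uvicorn entry point mirror the diff.

    import json
    import os
    import random

    import requests
    from fastapi import FastAPI, Form
    from fastapi.responses import StreamingResponse

    app = FastAPI()

    MODEL = "gpt-4o-mini"
    # Assumed default endpoint; the app reads API_URL from the environment.
    API_URL = os.getenv("API_URL", "https://api.openai.com/v1/chat/completions")
    OPENAI_API_KEYS = os.getenv("OPENAI_API_KEYS", "").split(",")

    @app.post("/chat")
    async def chat(input: str = Form(...), top_p: float = Form(1.0), temperature: float = Form(1.0)):
        # Assumed request construction: one user message, streaming enabled.
        payload = {
            "model": MODEL,
            "messages": [{"role": "user", "content": input}],
            "top_p": top_p,
            "temperature": temperature,
            "stream": True,
        }
        # Assumed: pick one of the configured keys per request.
        headers = {"Authorization": f"Bearer {random.choice(OPENAI_API_KEYS)}"}

        def stream_response():
            try:
                response = requests.post(API_URL, headers=headers, json=payload, stream=True)
                for chunk in response.iter_lines():
                    if chunk:
                        chunk_data = chunk.decode('utf-8')
                        # SSE lines look like 'data: {...}'; chunk_data[6:] strips that prefix.
                        # The '[DONE]' check is an addition in this sketch so the stream
                        # terminator is not passed to json.loads.
                        if chunk_data.startswith("data: ") and chunk_data[6:].strip() != "[DONE]":
                            chunk_json = json.loads(chunk_data[6:])
                            if "choices" in chunk_json and "delta" in chunk_json["choices"][0] and "content" in chunk_json["choices"][0]["delta"]:
                                yield chunk_json["choices"][0]["delta"]["content"]
            except Exception as e:
                yield f"Error: {str(e)}"

        return StreamingResponse(stream_response(), media_type="text/plain")

    if __name__ == "__main__":
        import uvicorn
        uvicorn.run(app, host="0.0.0.0", port=7860)

Returning a StreamingResponse with media_type "text/plain" lets the embedded page JavaScript read the body incrementally (for example via response.body.getReader()) and append each chunk to the chat window as it arrives.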