snackshell committed
Commit 726ab04 · verified · 1 parent: efe67f8

Update app.py

Files changed (1): app.py (+34, -1)
app.py CHANGED
@@ -1,8 +1,13 @@
+import g4f.api
+import g4f.Provider
 from fastapi import FastAPI, HTTPException, Depends, Header, Request
 from fastapi.responses import StreamingResponse
 from pydantic import BaseModel
 from typing import List
+from __future__ import annotations
+from g4f.typing import Messages, AsyncResult
 from g4f import ChatCompletion
+from g4f.Provider import BackendApi
 
 app = FastAPI()
 
@@ -11,9 +16,37 @@ models = [
     "gpt-4o", "gpt-4o-mini", "gpt-4",
     "gpt-4-turbo", "gpt-3.5-turbo",
     "claude-3.7-sonnet", "o3-mini", "o1", "grok-3", "gemini-2.5-pro-exp-03-25", "claude-3.5",
-    "llama-3.1-405b"
+    "llama-3.1-405b", "deepseek-r1", *g4f.Provider.OpenaiAccount.get_models(), "flux", "flux-pro"
 ]
 
+url = "https://ahe.hopto.org"
+headers = {"Authorization": "Basic Z2dnOmc0Zl8="}
+
+BackendApi.working = True
+BackendApi.ssl = False
+BackendApi.url = url
+BackendApi.headers = headers
+
+class BackendApi(BackendApi):
+    working = True
+    ssl = False
+    url = url
+    headers = headers
+    image_models = ["flux", "flux-pro"]
+    models = ["deepseek-r1", *g4f.Provider.OpenaiAccount.get_models(), "flux", "flux-pro"]
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        **kwargs
+    ) -> AsyncResult:
+        if model in g4f.Provider.OpenaiAccount.get_models():
+            kwargs["provider"] = "OpenaiAccount"
+        async for chunk in super().create_async_generator(model, messages, **kwargs):
+            yield chunk
+
 # Request model
 class Message(BaseModel):
     role: str
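
For orientation only, here is a minimal sketch (not part of the commit) of how the patched provider could be driven outside FastAPI. It assumes the file above is saved as app.py, that g4f is installed, and that plain role/content dicts satisfy g4f.typing.Messages; the import path and the model name are illustrative.

import asyncio

from app import BackendApi  # the subclass defined in this commit


async def main() -> None:
    # Plain role/content dicts are assumed to satisfy g4f.typing.Messages.
    messages = [{"role": "user", "content": "Hello!"}]
    # If the model is in g4f.Provider.OpenaiAccount.get_models(), the override adds
    # kwargs["provider"] = "OpenaiAccount"; otherwise the call is forwarded unchanged.
    async for chunk in BackendApi.create_async_generator("deepseek-r1", messages):
        print(chunk, end="", flush=True)


if __name__ == "__main__":
    asyncio.run(main())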