awacke1 committed · Commit e40373e · Parent(s): b7926b3

Update main.py
Files changed (1):
  1. main.py +5 -7
main.py CHANGED
@@ -9,17 +9,16 @@ from pydantic import BaseModel
 from PIL import Image
 from io import BytesIO
 
-
+# Create FastAPI service stack with a Python class to generalize model interactions with React
 app = FastAPI()
-
 token = os.environ.get("HF_TOKEN")
-
 class Item(BaseModel):
     prompt: str
     steps: int
     guidance: float
     modelID: str
 
+# FastAPI endpoint with api action
 @app.post("/api")
 async def inference(item: Item):
     print("check")
@@ -35,22 +34,21 @@ async def inference(item: Item):
     prompt = "mdjrny-v4 style, " + item.prompt
     data = {"inputs":prompt, "options":{"wait_for_model": True, "use_cache": False}}
     API_URL = "https://api-inference.huggingface.co/models/" + item.modelID
-
     headers = {"Authorization": f"Bearer " + token}
     api_data = json.dumps(data)
     response = requests.request("POST", API_URL, headers=headers, data=api_data)
-
     image_stream = BytesIO(response.content)
     image = Image.open(image_stream)
     image.save("response.png")
     with open('response.png', 'rb') as f:
         base64image = base64.b64encode(f.read())
-
     return {"output": base64image}
 
+# URL top level - render doc out of web-build directory to kick it off
 app.mount("/", StaticFiles(directory="web-build", html=True), name="build")
-
+# Run that gauntlet
 @app.get('/')
+# Python function to get the web page as a FileResponse
 def homepage() -> FileResponse:
     return FileResponse(path="/app/build/index.html", media_type="text/html")
 
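For orientation, a minimal client-side sketch of how the /api endpoint above could be exercised, for example from a script standing in for the React front end this commit mentions. The host/port, prompt text, model id, and output filename are illustrative assumptions; only the endpoint path, the Item fields (prompt, steps, guidance, modelID), and the base64-encoded "output" field come from the code in this diff. Note that in this version only prompt and modelID influence the Inference API request; steps and guidance are accepted by Item but not forwarded.

# Hypothetical client for the /api endpoint in main.py. Assumes the app is
# served locally, e.g. with `uvicorn main:app --port 8000` (not part of this commit).
import base64
import requests

payload = {
    "prompt": "an astronaut riding a horse",   # server prepends "mdjrny-v4 style, "
    "steps": 25,                                # accepted by Item but unused in this version
    "guidance": 7.5,                            # accepted by Item but unused in this version
    "modelID": "prompthero/openjourney",        # example Hugging Face model id (assumption)
}

resp = requests.post("http://localhost:8000/api", json=payload)
resp.raise_for_status()

# The endpoint returns the generated PNG as a base64 string under "output".
with open("client_output.png", "wb") as f:
    f.write(base64.b64decode(resp.json()["output"]))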