from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
import json
import requests
import base64
import os
from pydantic import BaseModel
from PIL import Image
from io import BytesIO
# Create FastAPI service stack with a Python class to generalize model interactions with React
app = FastAPI()
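# Read the Hugging Face API token from the environment (e.g. a Space secret named HF_TOKEN)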
token = os.environ.get("HF_TOKEN")
class Item(BaseModel):
    prompt: str
    steps: int
    guidance: float
    modelID: str
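
# Hedged example of the JSON body the React client is assumed to send to /api
# (the modelID value below is only an illustrative Hub repo id, not taken from this file):
#   {"prompt": "a lighthouse at dusk", "steps": 25, "guidance": 7.5, "modelID": "prompthero/openjourney"}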
# FastAPI endpoint with api action
@app.post("/api")
async def inference(item: Item):
print("check")
if "dallinmackay" in item.modelID:
prompt = "lvngvncnt, " + item.prompt
if "nousr" in item.modelID:
prompt = "nousr robot, " + item.prompt
if "nitrosocke" in item.modelID:
prompt = "arcane, " + item.prompt
if "dreamlike" in item.modelID:
prompt = "photo, " + item.prompt
if "prompthero" in item.modelID:
prompt = "mdjrny-v4 style, " + item.prompt
data = {"inputs":prompt, "options":{"wait_for_model": True, "use_cache": False}}
API_URL = "https://api-inference.huggingface.co/models/" + item.modelID
headers = {"Authorization": f"Bearer " + token}
api_data = json.dumps(data)
response = requests.request("POST", API_URL, headers=headers, data=api_data)
image_stream = BytesIO(response.content)
image = Image.open(image_stream)
image.save("response.png")
with open('response.png', 'rb') as f:
base64image = base64.b64encode(f.read())
return {"output": base64image}
# URL top level - render doc out of web-build directory to kick it off
app.mount("/", StaticFiles(directory="web-build", html=True), name="build")
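# Note: the StaticFiles mount above is registered before the route below, so "/" is served
# from web-build by StaticFiles and the homepage() handler is effectively shadowed.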
# Run that gauntlet
@app.get('/')
# Python function to get web page as File Response.
def homepage() -> FileResponse:
    return FileResponse(path="/app/build/index.html", media_type="text/html")
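
# Typical launch command (an assumption; the actual start command depends on the Space's Dockerfile):
#   uvicorn main:app --host 0.0.0.0 --port 7860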