lisa-on-cuda / app.py
alessandro trinca tornidor
feat: zeroGPU spaces support (drop docker, uses gradio sdk)
import logging
import os
import sys

import gradio as gr
import uvicorn
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from spaces import GPU as SPACES_GPU

from lisa_on_cuda import routes
from lisa_on_cuda.utils import app_helpers, session_logger, utils
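
# read the desired log level from the environment (defaults to INFO) and configure logging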
LOGLEVEL = os.getenv('LOGLEVEL', 'INFO').upper()
session_logger.change_logging(LOGLEVEL)
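
# the Gradio UI is mounted at the FastAPI web root; create the FastAPI
# application and register the project's API routes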
CUSTOM_GRADIO_PATH = "/"
app = FastAPI(title="lisa_app", version="1.0")
app.include_router(routes.router)
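
# expose the static assets folder and the Jinja2 templates directory
# (presumably consumed by the routes registered above)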
os.makedirs(utils.FASTAPI_STATIC, exist_ok=True)
app.mount("/static", StaticFiles(directory=utils.FASTAPI_STATIC), name="static")
templates = Jinja2Templates(directory="templates")
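
# build the default LISA arguments: no CLI arguments are forwarded here, hence the empty list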
app_helpers.app_logger.info(f"sys.argv:{sys.argv}.")
args = app_helpers.parse_args([])
app_helpers.app_logger.info(f"prepared default arguments:{args}.")
inference_fn = app_helpers.get_inference_model_by_args(args, inference_decorator=SPACES_GPU)
app_helpers.app_logger.info(f"prepared inference_fn function:{inference_fn.__name__}, creating gradio interface...")
io = app_helpers.get_gradio_interface(inference_fn)
app_helpers.app_logger.info("created gradio interface")
app = gr.mount_gradio_app(app, io, path=CUSTOM_GRADIO_PATH)
app_helpers.app_logger.info("mounted gradio app within fastapi")
if __name__ == '__main__':
    try:
        uvicorn.run(app, host="0.0.0.0", port=7860)
    except Exception as ex:
        logging.error(f"ex_:{ex}.")
        raise ex