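"""Application entry point: builds the lisa_on_cuda inference function, wraps it
in a Gradio interface and mounts it on a FastAPI app served by uvicorn."""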
import logging
import os
import sys

import gradio as gr
import uvicorn
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from spaces import GPU as SPACES_GPU

from lisa_on_cuda import routes
from lisa_on_cuda.utils import app_helpers, session_logger, utils

# Configure application-wide logging from the LOGLEVEL environment variable (default: INFO).
LOGLEVEL = os.getenv('LOGLEVEL', 'INFO').upper()
session_logger.change_logging(LOGLEVEL)

# Create the FastAPI application and register the project's API routes.
CUSTOM_GRADIO_PATH = "/"
app = FastAPI(title="lisa_app", version="1.0")
app.include_router(routes.router)


# Expose static assets under /static and point Jinja2 at the templates directory.
os.makedirs(utils.FASTAPI_STATIC, exist_ok=True)
app.mount("/static", StaticFiles(directory=utils.FASTAPI_STATIC), name="static")
templates = Jinja2Templates(directory="templates")


# Build the inference function from default CLI arguments, wrapping it with the
# Hugging Face Spaces GPU decorator, then expose it through a Gradio interface
# mounted on the FastAPI app at the root path.
app_helpers.app_logger.info(f"sys.argv:{sys.argv}.")
args = app_helpers.parse_args([])
app_helpers.app_logger.info(f"prepared default arguments:{args}.")
inference_fn = app_helpers.get_inference_model_by_args(args, inference_decorator=SPACES_GPU)
app_helpers.app_logger.info(f"prepared inference_fn function:{inference_fn.__name__}, creating gradio interface...")
io = app_helpers.get_gradio_interface(inference_fn)
app_helpers.app_logger.info("created gradio interface")
app = gr.mount_gradio_app(app, io, path=CUSTOM_GRADIO_PATH)
app_helpers.app_logger.info("mounted gradio app within fastapi")


if __name__ == '__main__':
    try:
        # Serve the combined FastAPI + Gradio app on port 7860 (the Hugging Face Spaces default).
        uvicorn.run(app, host="0.0.0.0", port=7860)
    except Exception as ex:
        logging.error(f"failed to start uvicorn: {ex}.")
        raise
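
# A possible way to run this module locally, assuming it is saved as `app.py`:
#   python app.py
# or, equivalently, via the uvicorn CLI:
#   uvicorn app:app --host 0.0.0.0 --port 7860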