Update app.py
app.py
CHANGED
@@ -13,32 +13,11 @@ import subprocess
 import shutil
 import base64
 import logging
-from fastapi import FastAPI
-from fastapi.responses import JSONResponse
-from fastapi.middleware.wsgi import WSGIMiddleware
-import uvicorn
-
-# Job status dictionary
-generation_status = {}
-
 
 # Set up logging
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
 logger = logging.getLogger(__name__)
 
-# FastAPI app to wrap both Gradio and API routes
-api_app = FastAPI()
-
-@api_app.get("/generation_status/{job_id}")
-async def check_generation_status(job_id: str):
-    if job_id in generation_status:
-        return JSONResponse(content=generation_status[job_id])
-    else:
-        return JSONResponse(content={"status": "not_found"}, status_code=404)
-
-
-
-
 # Install additional dependencies
 try:
     subprocess.run("pip install spandrel==0.4.1 --no-deps", shell=True, check=True)
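For context, the code removed in this hunk exposed a small polling API alongside the Gradio UI: image_to_3d recorded each finished job in the module-level generation_status dict keyed by the Gradio session hash (see the next hunk), and GET /generation_status/{job_id} returned that entry as JSON, or a 404 with {"status": "not_found"} for unknown ids. A minimal client-side sketch of how such an endpoint could have been polled, assuming a hypothetical BASE_URL for the deployed Space and using the requests library (not part of this app):

import time
import requests

BASE_URL = "http://localhost:7860"  # hypothetical; would be the deployed Space URL

def poll_generation_status(job_id: str, interval: float = 2.0, timeout: float = 300.0) -> dict:
    """Poll the (now removed) /generation_status/{job_id} endpoint until the job is ready."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        resp = requests.get(f"{BASE_URL}/generation_status/{job_id}", timeout=10)
        if resp.status_code == 404:
            # The endpoint answered {"status": "not_found"} for unknown job ids
            time.sleep(interval)
            continue
        payload = resp.json()
        if payload.get("status") == "ready":
            return payload  # per the removed tracking code, this includes "mesh_path"
        time.sleep(interval)
    raise TimeoutError(f"Job {job_id} did not become ready within {timeout} seconds")

With the endpoint gone, a caller would presumably rely on the mesh path returned by the Gradio call itself rather than polling a side channel.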
@@ -405,14 +384,6 @@ def image_to_3d(
         mesh_path = os.path.join(save_dir, f"polygenixai_{get_random_hex()}.glb")
         mesh.export(mesh_path)
         logger.info(f"Saved mesh to {mesh_path}")
-
-        # Track job status
-        job_id = req.session_hash
-        generation_status[job_id] = {
-            "status": "ready",
-            "mesh_path": mesh_path
-        }
-
 
         torch.cuda.empty_cache()
         return mesh_path
@@ -420,8 +391,6 @@ def image_to_3d(
         logger.error(f"Error in image_to_3d: {str(e)}")
         raise
 
-
-
 @spaces.GPU(duration=5)
 @torch.no_grad()
 def run_texture(image: Image, mesh_path: str, seed: int, req: gr.Request):
@@ -756,9 +725,6 @@ except Exception as e:
     logger.error(f"Failed to initialize Gradio Blocks interface: {str(e)}")
     raise
 
-# Mount the Gradio Blocks demo interface as WSGI
-api_app.mount("/", WSGIMiddleware(demo))
-
 if __name__ == "__main__":
     try:
         logger.info("Launching Gradio application")
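One note on the mount removed in the last hunk: gr.Blocks is an ASGI application (Gradio itself is built on FastAPI/Starlette), so wrapping demo in fastapi.middleware.wsgi.WSGIMiddleware is not the documented way to serve it from a FastAPI app. Had the wrapper and the status route been kept, Gradio's gr.mount_gradio_app helper would be the usual approach. A minimal sketch under that assumption, reusing the api_app, demo, and generation_status names this commit removes (the empty Blocks here is a placeholder for the real interface built elsewhere in app.py):

import gradio as gr
import uvicorn
from fastapi import FastAPI
from fastapi.responses import JSONResponse

api_app = FastAPI()
generation_status: dict[str, dict] = {}  # job_id -> {"status": ..., "mesh_path": ...}

@api_app.get("/generation_status/{job_id}")
async def check_generation_status(job_id: str):
    if job_id in generation_status:
        return JSONResponse(content=generation_status[job_id])
    return JSONResponse(content={"status": "not_found"}, status_code=404)

# Placeholder for the gr.Blocks interface that app.py actually constructs
demo = gr.Blocks()

# Mount the Blocks app onto FastAPI instead of wrapping it as WSGI;
# routes registered above (like /generation_status) still take precedence.
app = gr.mount_gradio_app(api_app, demo, path="/")

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=7860)  # 7860 is Gradio's default port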