jitubutwal1441 committed
Commit 45652a5 · verified · Parent: 4bbdf20

Update app.py

Files changed (1)
  1. app.py +9 -24
app.py CHANGED
@@ -1,28 +1,13 @@
-from fastapi import FastAPI, File, UploadFile, Form
-from fastapi.responses import StreamingResponse
+import gradio as gr
+import torch
 from diffusers import StableDiffusionPipeline
-import torch, io
-from PIL import Image
 
-app = FastAPI()
-pipe = StableDiffusionPipeline.from_pretrained(
-    "nitrosocke/Ghibli-Diffusion",
-    torch_dtype=torch.float16,
-    safety_checker=None  # Optional: disable safety checker
-).to("cuda")
+model_id = "runwayml/stable-diffusion-v1-5"
+pipe = StableDiffusionPipeline.from_pretrained(model_id, torch_dtype=torch.float16)
+pipe = pipe.to("cuda")
 
-# Health check endpoint
-@app.get("/health")
-async def health_check():
-    return {"status": "ok", "message": "FastAPI server is running!"}
+def generate(prompt):
+    image = pipe(prompt).images[0]
+    return image
 
-@app.post("/ghibli")
-async def generate(
-    file: UploadFile = File(...), prompt: str = Form(...)
-):
-    image = Image.open(io.BytesIO(await file.read())).convert("RGB")
-    result = pipe(prompt=prompt, image=image, guidance_scale=7.5).images[0]
-    buf = io.BytesIO()
-    result.save(buf, format="PNG")
-    buf.seek(0)
-    return StreamingResponse(buf, media_type="image/png")
+gr.Interface(fn=generate, inputs="text", outputs="image").launch()
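
For reference, the updated app.py exposes a single text-to-image function through gr.Interface instead of the previous FastAPI endpoints. Below is a minimal sketch of how a client could call the resulting Gradio app with gradio_client; the Space id is hypothetical and the default "/predict" endpoint name of gr.Interface is assumed.

# Minimal client sketch (assumptions: hypothetical Space id, default gr.Interface endpoint "/predict")
from gradio_client import Client

client = Client("jitubutwal1441/ghibli-diffusion")  # hypothetical Space id, replace with the actual one
result = client.predict(
    "a castle in the sky, studio ghibli style",  # prompt passed to generate()
    api_name="/predict",                         # default endpoint name registered by gr.Interface
)
print(result)  # local file path of the generated image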