import os
import io
import torch
from fastapi import FastAPI, HTTPException
from fastapi.responses import StreamingResponse
from diffusers import FluxPipeline  # type: ignore
from huggingface_hub import login

app = FastAPI()

# Retrieve the Hugging Face token and authenticate
token = os.getenv("HF_TOKEN")
if token:
    login(token=token)

# Load the FLUX.1-dev pipeline in bfloat16 and offload weights to CPU to limit GPU memory usage
pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16)
pipe.enable_model_cpu_offload()

def generate_image(prompt: str):
    # Run the FLUX pipeline with a fixed CPU seed so outputs are reproducible
    image = pipe(
        prompt,
        height=1024,
        width=1024,
        guidance_scale=3.5,
        num_inference_steps=50,
        max_sequence_length=512,
        generator=torch.Generator("cpu").manual_seed(0)
    ).images[0]
    return image

@app.get("/generate")
def generate(prompt: str):
    try:
        image = generate_image(prompt)
        # Save the image to an in-memory buffer and stream it back as PNG
        buf = io.BytesIO()
        image.save(buf, format="PNG")
        buf.seek(0)
        return StreamingResponse(buf, media_type="image/png")
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

if __name__ == "__main__":
    import uvicorn # type: ignore
    uvicorn.run(app, host="0.0.0.0", port=7860)
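
# Example request (a sketch, assuming the server is running locally on port 7860):
#   curl "http://localhost:7860/generate?prompt=a%20red%20fox" --output image.png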