Spaces:
Running
Running
File size: 1,932 Bytes
5b38336 d90d6a6 12cb4be 5b38336 d90d6a6 2011e87 d90d6a6 51214b8 d90d6a6 51214b8 d90d6a6 51214b8 d90d6a6 51214b8 d90d6a6 12cb4be d90d6a6 51214b8 d90d6a6 51214b8 d90d6a6 5b38336 d90d6a6 2011e87 d90d6a6 2011e87 5b38336 d90d6a6 51214b8 5b38336 51214b8 5b38336 d90d6a6 5b38336 567cfc7 51214b8 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 |
from huggingface_hub import InferenceClient
import gradio_client
import io
import globales
import herramientas
def genera_platillo_gpu(platillo):
    """Generate a dish image on the GPU-backed Gradio Space.

    Builds the final prompt from the configured prefix plus ``platillo``,
    calls the Space's ``/infer`` endpoint, and — only on success — deducts
    the GPU time cost from the remaining Hugging Face seconds budget.

    Args:
        platillo: Dish name/description appended to the prompt prefix.

    Returns:
        The first element of the Gradio prediction result (the generated
        image path), or the string ``"default.png"`` if anything fails.
    """
    prompt = globales.previo + platillo
    print("Eso es el prompt final:", prompt)

    # Only prompt and endpoint are passed; all other generation settings
    # (seed, size, guidance, steps) use the Space's defaults.
    kwargs = {
        "prompt": prompt,
        "api_name": "/infer",
    }

    try:
        client = gradio_client.Client(globales.espacio, hf_token=globales.llave)
        result = client.predict(**kwargs)

        # GPU runs consume the HF seconds budget: deduct only after a
        # successful prediction so failed calls are not charged.
        herramientas.restaSegundosGPU(globales.work_cost)

        # Gradio returns a tuple; the first element is the image.
        return result[0]
    except Exception as e:
        # Best-effort fallback: callers receive a placeholder image path.
        print("Excepción es: ", e)
        return "default.png"
def genera_platillo_inference(platillo):
    """Generate a dish image through the Hugging Face Inference API.

    Builds the final prompt from the configured prefix plus ``platillo``
    and requests a text-to-image generation from the configured provider
    and model.

    Args:
        platillo: Dish name/description appended to the prompt prefix.

    Returns:
        An ``io.BytesIO`` holding the PNG-encoded image, rewound to
        position 0. On any failure, the bundled ``default.png`` is
        returned as a ``BytesIO`` instead.
    """
    client = InferenceClient(
        provider=globales.proveedor,
        api_key=globales.llave,
    )

    prompt = globales.previo + platillo

    try:
        # Generation settings (seed, guidance, steps, size) use the model
        # defaults — typically 1024x1024; some providers cap dimensions
        # at 1024, so they are deliberately left unset.
        image = client.text_to_image(
            prompt,
            model=globales.inferencia,
        )

        # Serialize the returned PIL image into an in-memory PNG and
        # rewind so the caller can read it immediately.
        img_io = io.BytesIO()
        image.save(img_io, "PNG")
        img_io.seek(0)
        return img_io
    except Exception as e:
        # Fall back to the bundled placeholder image on any failure.
        print("Excepción es: ", e)
        with open("default.png", "rb") as default_image_file:
            img_io_default = io.BytesIO(default_image_file.read())
        img_io_default.seek(0)
        return img_io_default