Update app.py
Browse files
app.py
CHANGED
@@ -1,33 +1,31 @@
|
|
1 |
-
import gradio as gr
|
2 |
-
import os
|
3 |
from huggingface_hub import InferenceClient
|
4 |
from PIL import Image
|
|
|
|
|
5 |
|
6 |
# Load token from environment
|
7 |
token = os.environ["HF_TOKEN"]
|
8 |
|
9 |
-
#
|
10 |
client = InferenceClient(
|
11 |
model="artificialguybr/TshirtDesignRedmond-V2",
|
12 |
provider="fal-ai",
|
13 |
token=token,
|
14 |
)
|
15 |
|
16 |
-
# Trigger
|
17 |
trigger_word = "T shirt design, TshirtDesignAF, "
|
18 |
|
19 |
def generate_image(prompt):
|
20 |
full_prompt = f"{prompt} {trigger_word}"
|
21 |
-
print("Generating image with
|
22 |
-
|
23 |
-
#
|
24 |
image = client.text_to_image(
|
25 |
-
full_prompt,
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
-
"scheduler": "DPMSolverMultistepScheduler",
|
30 |
-
}
|
31 |
)
|
32 |
return image
|
33 |
|
|
|
|
|
|
|
"""Gradio Space: t-shirt design image generation via the HF Inference API."""

# Stdlib imports first, then third-party, per PEP 8 grouping.
import os

import gradio as gr
from huggingface_hub import InferenceClient
from PIL import Image

# Load token from environment. A missing HF_TOKEN raises KeyError at import
# time, failing the Space immediately instead of on the first user request.
token = os.environ["HF_TOKEN"]

# Shared Inference API client; requests are routed through the fal-ai provider.
client = InferenceClient(
    model="artificialguybr/TshirtDesignRedmond-V2",
    provider="fal-ai",
    token=token,
)

# LoRA trigger phrase for this model — appended to every user prompt so the
# fine-tuned style is activated (trailing ", " is intentional).
trigger_word = "T shirt design, TshirtDesignAF, "
|
18 |
|
19 |
def generate_image(prompt):
    """Generate a t-shirt design image for *prompt*.

    The module-level LoRA trigger phrase is appended to the user prompt,
    then the shared InferenceClient is called; returns the resulting
    PIL image produced by the remote text-to-image endpoint.
    """
    styled_prompt = f"{prompt} {trigger_word}"
    print("Generating image with:", styled_prompt)

    # Collect the diffusion parameters in one place and hand them to the
    # HF inference client as keyword arguments.
    generation_kwargs = {
        "prompt": styled_prompt,
        "negative_prompt": "(worst quality, low quality, lowres, bad photo, ...)",
        "num_inference_steps": 30,
        "guidance_scale": 7.5,
    }
    return client.text_to_image(**generation_kwargs)
|
31 |
|