gaur3009 commited on
Commit
ebc700f
·
verified ·
1 Parent(s): 0d5240c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -13
app.py CHANGED
@@ -1,33 +1,31 @@
1
- import gradio as gr
2
- import os
3
  from huggingface_hub import InferenceClient
4
  from PIL import Image
 
 
5
 
6
  # Load token from environment
7
  token = os.environ["HF_TOKEN"]
8
 
9
- # Setup inference client
10
  client = InferenceClient(
11
  model="artificialguybr/TshirtDesignRedmond-V2",
12
  provider="fal-ai",
13
  token=token,
14
  )
15
 
16
- # Trigger words as per model instructions
17
  trigger_word = "T shirt design, TshirtDesignAF, "
18
 
19
  def generate_image(prompt):
20
  full_prompt = f"{prompt} {trigger_word}"
21
- print("Generating image with prompt:", full_prompt)
22
-
23
- # Generate image
24
  image = client.text_to_image(
25
- full_prompt,
26
- params={
27
- "negative_prompt": "(worst quality, low quality, normal quality, lowres, low details...)",
28
- "num_inference_steps": 30,
29
- "scheduler": "DPMSolverMultistepScheduler",
30
- }
31
  )
32
  return image
33
 
 
 
 
1
# --- Module-level application setup ---

# Standard library.
import os

# Third-party.
import gradio as gr
from huggingface_hub import InferenceClient
from PIL import Image  # NOTE(review): not referenced in the visible code; kept in case another part of the file uses it.

# Read the Hugging Face API token from the environment.
# Raises KeyError at startup if HF_TOKEN is unset, which fails fast.
token = os.environ["HF_TOKEN"]

# Shared inference client for the T-shirt design model, routed through
# the fal-ai provider.
client = InferenceClient(
    model="artificialguybr/TshirtDesignRedmond-V2",
    provider="fal-ai",
    token=token,
)

# Trigger word for model — appended to every user prompt so the LoRA
# activates (per the model card's instructions).
trigger_word = "T shirt design, TshirtDesignAF, "
19
def generate_image(prompt):
    """Generate a T-shirt design image for *prompt*.

    Appends the module-level ``trigger_word`` to the user prompt, then
    calls the shared ``client`` (Hugging Face Inference API) once.
    Returns whatever ``client.text_to_image`` yields — presumably a PIL
    image for this provider (TODO confirm against the client docs).
    """
    decorated_prompt = f"{prompt} {trigger_word}"
    print("Generating image with:", decorated_prompt)

    # Single direct call; keyword arguments mirror the provider's
    # text-to-image parameters.
    return client.text_to_image(
        prompt=decorated_prompt,
        negative_prompt="(worst quality, low quality, lowres, bad photo, ...)",
        num_inference_steps=30,
        guidance_scale=7.5,
    )