lionelgarnier committed on
Commit
b0c8c02
·
1 Parent(s): 98c7793

bugfix cursor

Browse files
Files changed (1) hide show
  1. app.py +13 -7
app.py CHANGED
@@ -18,21 +18,27 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
18
  MAX_SEED = np.iinfo(np.int32).max
19
  MAX_IMAGE_SIZE = 2048
20
 
21
- try:
22
- text_gen_pipeline = pipeline("text-generation", model="mistralai/Mistral-7B-Instruct-v0.3", max_new_tokens=2048, device=device)
23
- except Exception as e:
24
- text_gen_pipeline = None
25
- print(f"Error loading text generation model: {e}")
 
 
 
 
 
26
 
27
  def refine_prompt(prompt):
28
- if text_gen_pipeline is None:
 
29
  return "Text generation model is unavailable."
30
  try:
31
  messages = [
32
  {"role": "system", "content": "You are a product designer. You will get a basic prompt of product request and you need to imagine a new product design to satisfy that need. Produce an extended description of product front view that will be used by Flux to generate a visual"},
33
  {"role": "user", "content": prompt},
34
  ]
35
- refined_prompt = text_gen_pipeline(messages)
36
  return refined_prompt
37
  except Exception as e:
38
  return f"Error refining prompt: {str(e)}"
 
18
  MAX_SEED = np.iinfo(np.int32).max
19
  MAX_IMAGE_SIZE = 2048
20
 
21
+ _text_gen_pipeline = None
22
+ def get_text_gen_pipeline():
23
+ global _text_gen_pipeline
24
+ if _text_gen_pipeline is None:
25
+ try:
26
+ _text_gen_pipeline = pipeline("text-generation", model="mistralai/Mistral-7B-Instruct-v0.3", max_new_tokens=2048, device=device)
27
+ except Exception as e:
28
+ print(f"Error loading text generation model: {e}")
29
+ return None
30
+ return _text_gen_pipeline
31
 
32
  def refine_prompt(prompt):
33
+ text_gen = get_text_gen_pipeline()
34
+ if text_gen is None:
35
  return "Text generation model is unavailable."
36
  try:
37
  messages = [
38
  {"role": "system", "content": "You are a product designer. You will get a basic prompt of product request and you need to imagine a new product design to satisfy that need. Produce an extended description of product front view that will be used by Flux to generate a visual"},
39
  {"role": "user", "content": prompt},
40
  ]
41
+ refined_prompt = text_gen(messages)
42
  return refined_prompt
43
  except Exception as e:
44
  return f"Error refining prompt: {str(e)}"