yaron123 committed on
Commit
efb73d5
·
1 Parent(s): df91595
Files changed (1) hide show
  1. app.py +5 -5
app.py CHANGED
@@ -411,7 +411,6 @@ CHECKPOINTS = ESRGANUpscalerCheckpoints(
411
  }
412
  )
413
 
414
- # initialize the enhancer, on the cpu
415
  device = DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
416
  DTYPE = torch.bfloat16 if torch.cuda.is_bf16_supported() else torch.float32
417
  enhancer = ESRGANUpscaler(checkpoints=CHECKPOINTS, device=DEVICE, dtype=DTYPE)
@@ -505,6 +504,9 @@ function custom(){
505
  image_pipe = FluxPipeline.from_pretrained(base, torch_dtype=torch.bfloat16).to(device)
506
  #image_pipe.enable_model_cpu_offload()
507
 
 
 
 
508
  # functionality
509
 
510
  @spaces.GPU(duration=180)
@@ -513,7 +515,7 @@ def upscaler(
513
  prompt: str = "masterpiece, best quality, highres",
514
  negative_prompt: str = "worst quality, low quality, normal quality",
515
  seed: int = int(str(random.random()).split(".")[1]),
516
- upscale_factor: int = 8,
517
  controlnet_scale: float = 0.6,
518
  controlnet_decay: float = 1.0,
519
  condition_scale: int = 6,
@@ -553,9 +555,7 @@ def upscaler(
553
 
554
  @spaces.GPU(duration=180)
555
  def summarize_text(
556
- text, max_length=30, num_beams=16, early_stopping=True,
557
- pegasus_tokenizer = PegasusTokenizerFast.from_pretrained("google/pegasus-xsum"),
558
- pegasus_model = PegasusForConditionalGeneration.from_pretrained("google/pegasus-xsum")
559
  ):
560
  log(f'CALL summarize_text')
561
  summary = pegasus_tokenizer.decode( pegasus_model.generate(
 
411
  }
412
  )
413
 
 
414
  device = DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
415
  DTYPE = torch.bfloat16 if torch.cuda.is_bf16_supported() else torch.float32
416
  enhancer = ESRGANUpscaler(checkpoints=CHECKPOINTS, device=DEVICE, dtype=DTYPE)
 
504
  image_pipe = FluxPipeline.from_pretrained(base, torch_dtype=torch.bfloat16).to(device)
505
  #image_pipe.enable_model_cpu_offload()
506
 
507
+ pegasus_tokenizer = PegasusTokenizerFast.from_pretrained("google/pegasus-xsum"),
508
+ pegasus_model = PegasusForConditionalGeneration.from_pretrained("google/pegasus-xsum")
509
+
510
  # functionality
511
 
512
  @spaces.GPU(duration=180)
 
515
  prompt: str = "masterpiece, best quality, highres",
516
  negative_prompt: str = "worst quality, low quality, normal quality",
517
  seed: int = int(str(random.random()).split(".")[1]),
518
+ upscale_factor: int = 4,
519
  controlnet_scale: float = 0.6,
520
  controlnet_decay: float = 1.0,
521
  condition_scale: int = 6,
 
555
 
556
  @spaces.GPU(duration=180)
557
  def summarize_text(
558
+ text, max_length=30, num_beams=16, early_stopping=True
 
 
559
  ):
560
  log(f'CALL summarize_text')
561
  summary = pegasus_tokenizer.decode( pegasus_model.generate(