manbeast3b committed
Commit 3432750 · verified
1 Parent(s): 13cdb16

Update src/pipeline.py

Files changed (1)
  1. src/pipeline.py +5 -5
src/pipeline.py CHANGED
@@ -32,17 +32,17 @@ def load_pipeline(pipeline=None) -> StableDiffusionXLPipeline:
     pipeline = compile_pipe(pipeline)
     load_pipe(pipeline, dir="/home/sandbox/.cache/huggingface/hub/models--RobertML--cached-pipe-02/snapshots/58d70deae87034cce351b780b48841f9746d4ad7")
 
-    instance = get_instance(device)
+    # instance = get_instance(device)
     # mul = torch.nn.Parameter(torch.tensor(0.3038, requires_grad=False, device=device))
     # sub = torch.nn.Parameter(torch.tensor(-0.3141, requires_grad=False, device=device))
     # scaling_factor = torch.nn.Parameter(torch.tensor(0.5439, requires_grad=False, device=device))
     # mul = torch.nn.Parameter(torch.tensor(0.2940097749233246, requires_grad=False, device=device))
     # sub = torch.nn.Parameter(torch.tensor(-0.31909096240997314, requires_grad=False, device=device))
     # scaling_factor = torch.nn.Parameter(torch.tensor(0.554410457611084, requires_grad=False, device=device))
-    mul = torch.nn.Parameter(torch.tensor(1.2, requires_grad=False, device=device))
-    sub = torch.nn.Parameter(torch.tensor(0.75, requires_grad=False, device=device))
-    scaling_factor = torch.nn.Parameter(torch.tensor(pipeline.vae.config.scaling_factor, requires_grad=False, device=device))
-    hook_pipe(pipeline, instance, mul, sub, scaling_factor)
+    # mul = torch.nn.Parameter(torch.tensor(1.2, requires_grad=False, device=device))
+    # sub = torch.nn.Parameter(torch.tensor(0.75, requires_grad=False, device=device))
+    # scaling_factor = torch.nn.Parameter(torch.tensor(pipeline.vae.config.scaling_factor, requires_grad=False, device=device))
+    # hook_pipe(pipeline, instance, mul, sub, scaling_factor)
 
     for _ in range(1):
         deepcache_output = pipeline(prompt="telestereography, unstrengthen, preadministrator, copatroness, hyperpersonal, paramountness, paranoid, guaniferous", output_type="pil", num_inference_steps=20)
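
Net effect of the change: the custom latent post-processing is switched off. `get_instance(device)` and the live `mul` / `sub` / `scaling_factor` parameters (1.2, 0.75, and `pipeline.vae.config.scaling_factor`) are commented out along with the `hook_pipe(...)` call, so the warm-up generation now runs with the pipeline's default VAE decoding. The implementations of `hook_pipe` and `get_instance` are not part of this diff; the sketch below is only a guess at what such a hook might look like, assuming it rescales latents with an affine transform before VAE decode. The monkey-patched `patched_decode` helper and the exact use of `scaling_factor` are assumptions for illustration, not the repository's actual code.

def hook_pipe(pipeline, instance, mul, sub, scaling_factor):
    # Hypothetical sketch only: the real hook_pipe is not shown in this diff,
    # and the role of `instance` is unknown, so it is left unused here.
    # Idea: monkey-patch vae.decode so each latent batch gets an affine
    # correction (mul/sub) and a custom normalization before decoding.
    original_decode = pipeline.vae.decode

    def patched_decode(latents, *args, **kwargs):
        latents = latents * mul + sub        # assumed affine latent correction
        latents = latents / scaling_factor   # assumed custom VAE normalization
        return original_decode(latents, *args, **kwargs)

    pipeline.vae.decode = patched_decode

Under that reading, re-enabling the behavior would only require uncommenting the four lines above the warm-up loop.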