Yaron Koresh committed on
Commit
239a40a
·
verified ·
1 Parent(s): 49bbe75

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -5
app.py CHANGED
@@ -44,10 +44,6 @@ formatter = logging.Formatter('\n >>> [%(levelname)s] %(asctime)s %(name)s: %(me
44
  handler2.setFormatter(formatter)
45
  root.addHandler(handler2)
46
 
47
- # storage data
48
-
49
- last_motion=""
50
-
51
  # constant data
52
 
53
  dtype = torch.float16
@@ -60,6 +56,11 @@ vae = AutoencoderKL.from_pretrained("stabilityai/sd-vae-ft-mse").to(device, dtyp
60
  #unet = UNet2DConditionModel.from_config("emilianJR/epiCRealism",subfolder="unet").to(device, dtype).load_state_dict(load_file(hf_hub_download("emilianJR/epiCRealism", "unet/diffusion_pytorch_model.safetensors"), device=device), strict=False)
61
  adapter = MotionAdapter.from_pretrained("guoyww/animatediff-motion-adapter-v1-5-3", torch_dtype=dtype, device=device)
62
 
 
 
 
 
 
63
  # precision data
64
 
65
  fast=True
@@ -376,6 +377,7 @@ def handle(*inp):
376
  return out_pipe
377
 
378
  def ui():
 
379
  with gr.Blocks(theme=gr.themes.Soft(),css=css,js=js) as demo:
380
  with gr.Column(elem_id="col-container"):
381
  gr.Markdown(f"""
@@ -419,7 +421,6 @@ def ui():
419
  with gr.Row():
420
  run_button = gr.Button("START",elem_classes="btn",scale=0)
421
  with gr.Row():
422
- result = []
423
  result.append(gr.Image(interactive=False,elem_classes="image-container", label="Result", show_label=False, type='filepath', show_share_button=False))
424
  result.append(gr.Image(interactive=False,elem_classes="image-container", label="Result", show_label=False, type='filepath', show_share_button=False))
425
  gr.on(
 
44
  handler2.setFormatter(formatter)
45
  root.addHandler(handler2)
46
 
 
 
 
 
47
  # constant data
48
 
49
  dtype = torch.float16
 
56
  #unet = UNet2DConditionModel.from_config("emilianJR/epiCRealism",subfolder="unet").to(device, dtype).load_state_dict(load_file(hf_hub_download("emilianJR/epiCRealism", "unet/diffusion_pytorch_model.safetensors"), device=device), strict=False)
57
  adapter = MotionAdapter.from_pretrained("guoyww/animatediff-motion-adapter-v1-5-3", torch_dtype=dtype, device=device)
58
 
59
+ # variable data
60
+
61
+ last_motion=""
62
+ result = []
63
+
64
  # precision data
65
 
66
  fast=True
 
377
  return out_pipe
378
 
379
  def ui():
380
+ global result
381
  with gr.Blocks(theme=gr.themes.Soft(),css=css,js=js) as demo:
382
  with gr.Column(elem_id="col-container"):
383
  gr.Markdown(f"""
 
421
  with gr.Row():
422
  run_button = gr.Button("START",elem_classes="btn",scale=0)
423
  with gr.Row():
 
424
  result.append(gr.Image(interactive=False,elem_classes="image-container", label="Result", show_label=False, type='filepath', show_share_button=False))
425
  result.append(gr.Image(interactive=False,elem_classes="image-container", label="Result", show_label=False, type='filepath', show_share_button=False))
426
  gr.on(