chenlei
committed on
Commit
·
74d6568
1
Parent(s):
544c892
update
Browse files
ootd/inference_ootd_hd.py
CHANGED
@@ -25,7 +25,7 @@ from transformers import AutoProcessor, CLIPVisionModelWithProjection
|
|
25 |
from transformers import CLIPTextModel, CLIPTokenizer
|
26 |
import requests
|
27 |
|
28 |
-
VIT_PATH = "
|
29 |
VAE_PATH = "levihsu/OOTDiffusion"
|
30 |
UNET_PATH = "levihsu/OOTDiffusion"
|
31 |
MODEL_PATH ="/home/user/app/checkpoints/ootd"
|
@@ -59,7 +59,7 @@ class OOTDiffusionHD:
|
|
59 |
#下载该文件
|
60 |
with open("/home/user/app/checkpoints/ootd/text_encoder/pytorch_model.bin", "wb") as f:
|
61 |
f.write(response.content)
|
62 |
-
|
63 |
self.pipe = OotdPipeline.from_pretrained(
|
64 |
MODEL_PATH,
|
65 |
unet_garm=unet_garm,
|
|
|
25 |
from transformers import CLIPTextModel, CLIPTokenizer
|
26 |
import requests
|
27 |
|
28 |
+
VIT_PATH = "openai/clip-vit-large-patch14"
|
29 |
VAE_PATH = "levihsu/OOTDiffusion"
|
30 |
UNET_PATH = "levihsu/OOTDiffusion"
|
31 |
MODEL_PATH ="/home/user/app/checkpoints/ootd"
|
|
|
59 |
#下载该文件
|
60 |
with open("/home/user/app/checkpoints/ootd/text_encoder/pytorch_model.bin", "wb") as f:
|
61 |
f.write(response.content)
|
62 |
+
|
63 |
self.pipe = OotdPipeline.from_pretrained(
|
64 |
MODEL_PATH,
|
65 |
unet_garm=unet_garm,
|