# Pretrained model identifiers and checkpoint paths.
# Hub-style IDs ('org/repo') are resolved by huggingface_hub / transformers
# `from_pretrained`; the SDXL CLIP2 entry points at a specific weight file
# inside its repo (open_clip checkpoint, loaded directly as a state dict).

# CLIP vision tower used by LLaVA (336px variant required by llava-v1.5).
LLAVA_CLIP_PATH = 'openai/clip-vit-large-patch14-336'
# LLaVA v1.5 13B multimodal language model.
LLAVA_MODEL_PATH = 'liuhaotian/llava-v1.5-13b'
# SDXL text encoder 1 (OpenAI CLIP ViT-L/14).
SDXL_CLIP1_PATH = 'openai/clip-vit-large-patch14'
# SDXL text encoder 2 weights (LAION OpenCLIP ViT-bigG/14 checkpoint file).
SDXL_CLIP2_CKPT_PTH = 'laion/CLIP-ViT-bigG-14-laion2B-39B-b160k/open_clip_pytorch_model.bin'