PyTorch
clip
custom_code
Gengzigang committed on
Commit 834f0b9
1 Parent(s): 78f99e8
Files changed (1)
  1. README.md +1 -1
README.md CHANGED
@@ -39,7 +39,7 @@ image_path = "CLIP.png"
 model_name_or_path = "LLM2CLIP-EVA02-L-14-336" # or /path/to/local/LLM2CLIP-EVA02-L-14-336
 image_size = 336
 
-processor = CLIPImageProcessor.from_pretrained("openai/clip-vit-large-patch14")
+processor = CLIPImageProcessor.from_pretrained("openai/clip-vit-large-patch14-336")
 model = AutoModel.from_pretrained(
     model_name_or_path,
     torch_dtype=torch.float16,
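
For context, the change matters because openai/clip-vit-large-patch14 preprocesses images to 224×224, while this model expects 336×336 inputs (image_size = 336), so the image processor must come from the matching openai/clip-vit-large-patch14-336 checkpoint. Below is a minimal sketch of how the corrected line fits into the README's loading code; it assumes the repo's custom modeling code is loaded with trust_remote_code=True and exposes a CLIP-style get_image_features, which may differ from the exact call shown in the full README.

```python
import torch
from PIL import Image
from transformers import AutoModel, CLIPImageProcessor

image_path = "CLIP.png"
model_name_or_path = "LLM2CLIP-EVA02-L-14-336"  # or /path/to/local/LLM2CLIP-EVA02-L-14-336
image_size = 336

# The processor must match the model's 336x336 input resolution, hence the -336 checkpoint.
processor = CLIPImageProcessor.from_pretrained("openai/clip-vit-large-patch14-336")

model = AutoModel.from_pretrained(
    model_name_or_path,
    torch_dtype=torch.float16,
    trust_remote_code=True,  # assumed: the repo ships custom modeling code (custom_code tag)
).to("cuda").eval()

image = Image.open(image_path)
pixel_values = processor(images=image, return_tensors="pt").pixel_values.to("cuda", torch.float16)

with torch.no_grad():
    # get_image_features is assumed here based on the standard CLIP-style interface;
    # refer to the full README for the exact encoding call used by this repo.
    image_features = model.get_image_features(pixel_values)
```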