Awiny committed
Commit d756d59 • 1 Parent(s): ae3cb55

add large file local

Files changed (3)
  1. app.py +3 -3
  2. models/blip2_model.py +6 -6
  3. models/controlnet_model.py +2 -2
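All three diffs below switch the loaders from Hugging Face Hub IDs to local `pretrained_models/` paths, matching the commit message. The commit itself does not show how those directories were populated; a minimal sketch of one way to do it, assuming a reasonably recent `huggingface_hub` (the `local_dir` argument) and that plain repo mirrors are sufficient:

# download_models.py -- hypothetical one-time setup, not part of this commit.
# Mirrors each Hub repo into the local directory the modified code loads from.
from huggingface_hub import snapshot_download

for repo_id, local_dir in [
    ("Salesforce/blip2-opt-2.7b", "pretrained_models/blip2-opt-2.7b"),
    ("runwayml/stable-diffusion-v1-5", "pretrained_models/stable-diffusion-v1-5"),
]:
    snapshot_download(repo_id=repo_id, local_dir=local_dir)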
app.py CHANGED
@@ -20,14 +20,14 @@ parser.add_argument('--contolnet_device', choices=['cuda', 'cpu'], default='cpu'
 
 args = parser.parse_args()
 
-# device = "cuda" if torch.cuda.is_available() else "cpu"
-device = "cpu"
+device = "cuda" if torch.cuda.is_available() else "cpu"
+# device = "cpu"
 
 if device == "cuda":
     args.image_caption_device = "cuda"
     args.dense_caption_device = "cuda"
     args.semantic_segment_device = "cuda"
-    args.contolnet_device = "cuda"
+    args.contolnet_device = "cpu"
 else:
     args.image_caption_device = "cpu"
     args.dense_caption_device = "cpu"
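The new default enables CUDA when it is available, but now pins the ControlNet stage to CPU either way (the `contolnet` spelling is kept as-is because it matches the existing argparse flag). A standalone sketch of the same selection rule, handy for checking what a given machine will pick; only `torch` is assumed:

import torch

# Same rule the diff introduces: every stage follows CUDA availability,
# except ControlNet, which is forced to CPU regardless.
device = "cuda" if torch.cuda.is_available() else "cpu"
contolnet_device = "cpu"
print(f"main device: {device}, contolnet_device: {contolnet_device}")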
models/blip2_model.py CHANGED
@@ -14,14 +14,14 @@ class ImageCaptioning:
             self.data_type = torch.float32
         else:
             self.data_type = torch.float16
-        # processor = Blip2Processor.from_pretrained("pretrained_models/blip2-opt-2.7b")
-        # model = Blip2ForConditionalGeneration.from_pretrained(
-        #     "pretrained_models/blip2-opt-2.7b", torch_dtype=self.data_type
-        # )
-        processor = Blip2Processor.from_pretrained("Salesforce/blip2-opt-2.7b")
+        processor = Blip2Processor.from_pretrained("pretrained_models/blip2-opt-2.7b")
         model = Blip2ForConditionalGeneration.from_pretrained(
-            "Salesforce/blip2-opt-2.7b", torch_dtype=self.data_type
+            "pretrained_models/blip2-opt-2.7b", torch_dtype=self.data_type
         )
+        # processor = Blip2Processor.from_pretrained("Salesforce/blip2-opt-2.7b")
+        # model = Blip2ForConditionalGeneration.from_pretrained(
+        #     "Salesforce/blip2-opt-2.7b", torch_dtype=self.data_type
+        # )
         model.to(self.device)
         return processor, model
 
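If mirroring the raw repo (as in the snapshot sketch above) is not desirable, another way to produce `pretrained_models/blip2-opt-2.7b` is to load once from the Hub and serialize with `save_pretrained`; a sketch, since this step is an assumption and not shown in the commit:

from transformers import Blip2Processor, Blip2ForConditionalGeneration

# Fetch from the Hub once, then write processor files and weights to the
# directory the modified loader expects.
processor = Blip2Processor.from_pretrained("Salesforce/blip2-opt-2.7b")
model = Blip2ForConditionalGeneration.from_pretrained("Salesforce/blip2-opt-2.7b")
processor.save_pretrained("pretrained_models/blip2-opt-2.7b")
model.save_pretrained("pretrained_models/blip2-opt-2.7b")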
models/controlnet_model.py CHANGED
@@ -25,8 +25,8 @@ class TextToImage:
             map_location=self.device,  # Add this line
         ).to(self.device)
         pipeline = StableDiffusionControlNetPipeline.from_pretrained(
-            # "pretrained_models/stable-diffusion-v1-5",
-            "runwayml/stable-diffusion-v1-5",
+            "pretrained_models/stable-diffusion-v1-5",
+            # "runwayml/stable-diffusion-v1-5",
             controlnet=controlnet,
             safety_checker=None,
             torch_dtype=self.data_type,
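Once all three loaders resolve to local paths, it is worth confirming that nothing still reaches the Hub at runtime. `HF_HUB_OFFLINE` and `TRANSFORMERS_OFFLINE` are the standard switches for that; using them here is a suggestion, not part of the commit:

import os

# Must be set before transformers/diffusers are imported; any remaining
# Hub lookup then fails fast instead of silently downloading.
os.environ["HF_HUB_OFFLINE"] = "1"
os.environ["TRANSFORMERS_OFFLINE"] = "1"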