altayavci committed on
Commit
eb756d1
1 Parent(s): 66dc5c5

Update ip_adapter_openpose.py

Files changed (1)
  1. ip_adapter_openpose.py +2 -8
ip_adapter_openpose.py CHANGED
@@ -3,25 +3,19 @@ import torch
 
 from openpose import get_openpose, init as init_openpose
 from adapter_model import MODEL
-from segmentation import get_cropped, init_body as init_body_seg
-from utils import alpha_composite_manuel
 
 init_openpose()
-init_body_seg()
 ip_model = MODEL("pose")
 
 
 def generate(img_human: Image, img_clothes: Image, segment_id:int, width: int=512, height: int=768):
 
     img_human = img_human.resize((width, height))
-    img_clothes = img_clothes.resize((width, height))
-    cropped_clothes = get_cropped(img_clothes, segment_id, False, False)
-    cropped_clothes = alpha_composite_manuel(cropped_clothes)
-
+    img_clothes = img_clothes.resize((width, height))
     img_openpose = get_openpose(img_human)
 
     img_openpose_gen = ip_model.model.generate(
-        pil_image=cropped_clothes,
+        pil_image=img_clothes,
         image=img_openpose,
         width=width,
         height=height,
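For context, a minimal usage sketch of the updated entry point. This assumes the module is importable as ip_adapter_openpose (so init_openpose() and MODEL("pose") run at import time) and that the input file names below are hypothetical placeholders, not part of the repository:

from PIL import Image
from ip_adapter_openpose import generate

# Hypothetical inputs: any RGB person photo and garment image will do.
img_human = Image.open("person.png").convert("RGB")
img_clothes = Image.open("clothes.png").convert("RGB")

# segment_id remains in the signature, but the segmentation crop that consumed it
# was removed in this commit; the resized clothes image is now passed to the
# IP-Adapter directly via pil_image.
result = generate(img_human, img_clothes, segment_id=0, width=512, height=768)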