from PIL import Image
import torch

from openpose import get_openpose, init as init_openpose
from adapter_model import MODEL

# Initialize the OpenPose detector and the pose-conditioned adapter model once at import time.
init_openpose()
ip_model = MODEL("pose")


def generate(img_human: Image.Image, img_clothes: Image.Image) -> Image.Image:
    # Normalize input sizes: the person photo feeds the pose detector,
    # the clothing photo is used as the adapter's reference image.
    img_human = img_human.resize((512, 512))
    img_clothes = img_clothes.resize((512, 768))

    # Extract an OpenPose skeleton image from the person photo.
    img_openpose = get_openpose(img_human)

    # Generate a single 512x768 sample conditioned on the pose map and the clothing reference.
    img_openpose_gen = ip_model.model.generate(
        pil_image=img_clothes,
        image=img_openpose,
        width=512,
        height=768,
        num_samples=1,
        num_inference_steps=30,
        seed=42,
    )[0]

    # Release cached GPU memory between calls.
    torch.cuda.empty_cache()
    return img_openpose_gen.convert("RGB")
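

# Minimal usage sketch (not part of the original file): the file paths below are
# hypothetical placeholders; any RGB photo of a person and of a garment should work.
if __name__ == "__main__":
    person = Image.open("person.jpg").convert("RGB")
    clothes = Image.open("clothes.jpg").convert("RGB")
    result = generate(person, clothes)
    result.save("result.png")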