""" | |
api.py๋ fastapi๋ฅผ ์ฌ์ฉํ๊ณ ์์ง๋ง, | |
gradio๋ฅผ ์ฌ์ฉํ ์น ๋ฐ๋ชจ๋ฅผ ํ์ธํ๊ณ ์ถ๋ค๋ฉด gradio ํด๋ ์์ ์๋ app-final.py๋ฅผ ์ฌ์ฉํ๋ฉด ๋๋ค | |
api.py์ ๋ง์คํน ๋ถ๋ถ์ ์ ๋๋ก ์์ ํ๊ฒ, | |
api2.py์ | |
""" | |
import os
os.environ['CUDA_HOME'] = '/usr/local/cuda'
os.environ['PATH'] = os.environ['PATH'] + ':/usr/local/cuda/bin'
from datetime import datetime
from pydantic import BaseModel
import spaces
import numpy as np
import torch
from diffusers.image_processor import VaeImageProcessor
from huggingface_hub import snapshot_download
from PIL import Image

# Workaround: neutralize torch.jit.script with a no-op, since some dependencies
# apply it at import time and it breaks in this environment.
torch.jit.script = lambda f: f

from model.cloth_masker import AutoMasker, vis_mask
from model.pipeline import CatVTONPipeline
from utils import init_weight_dtype, resize_and_crop, resize_and_padding
from test import morph_close, morph_open, morph_open2, extend_mask_downward, extend_mask_downward2, image_equal
import cv2
from fastapi import FastAPI, File, Form, UploadFile, WebSocket, WebSocketDisconnect
from typing import List
from typing import Optional
import shutil
from fastapi.responses import JSONResponse
import uuid
import base64
from io import BytesIO
from fastapi.middleware.cors import CORSMiddleware
import asyncio
app = FastAPI()

origins = [
    "http://localhost",
    "http://localhost:8080",
    "http://localhost:3000",
    "http://127.0.0.1:8080",
    "http://127.0.0.1:3000",
]

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # You can set this to specific origins instead of '*' for security
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
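# Note: browsers reject a literal `Access-Control-Allow-Origin: *` on
# credentialed requests, so for cookie/auth-based clients the commented-out
# variant below, pinned to the `origins` list, is the safer configuration.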
# app.add_middleware(
#     CORSMiddleware,
#     allow_origins=origins,
#     allow_credentials=True,
#     allow_methods=["*"],
#     allow_headers=["*"],
# )

print('starting app')
# Helpers added while wiring up the API.
def pil_to_base64(img: Image.Image) -> str:
    buffer = BytesIO()
    img.save(buffer, format="PNG")  # serialize as PNG
    return base64.b64encode(buffer.getvalue()).decode("utf-8")
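# For reference, the inverse transform a client would apply to the Base64
# strings returned by this API (illustrative sketch; nothing here calls it):
def base64_to_pil(b64: str) -> Image.Image:
    return Image.open(BytesIO(base64.b64decode(b64)))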
# Check currently allocated GPU memory (GPU 0):
# allocated_memory = torch.cuda.memory_allocated(0)
# print(f"Memory allocated on GPU 0: {allocated_memory / (1024 ** 2)} MB")

# Settings, overridable via environment variables.
BASE_MODEL_PATH = os.getenv("BASE_MODEL_PATH", "booksforcharlie/stable-diffusion-inpainting")
RESUME_PATH = os.getenv("RESUME_PATH", "zhengchong/CatVTON")
OUTPUT_DIR = os.getenv("OUTPUT_DIR", "resource/demo/output")
WIDTH = int(os.getenv("WIDTH", 768))
HEIGHT = int(os.getenv("HEIGHT", 1024))
def image_grid(imgs, rows, cols):
    assert len(imgs) == rows * cols
    w, h = imgs[0].size
    grid = Image.new("RGB", size=(cols * w, rows * h))
    for i, img in enumerate(imgs):
        grid.paste(img, box=(i % cols * w, i // cols * h))
    return grid
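# Example: image_grid([person, masked, cloth, result], rows=1, cols=4) tiles the
# four images left to right; all inputs are assumed to share the same size.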
repo_path = snapshot_download(repo_id=RESUME_PATH)
print('repo_path:', repo_path)

# Pipeline
pipeline = CatVTONPipeline(
    base_ckpt=BASE_MODEL_PATH,
    attn_ckpt=repo_path,
    attn_ckpt_version="mix",
    weight_dtype=init_weight_dtype("no"),
    use_tf32=True,
    device='cuda'
)
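# "mix" should select the CatVTON attention weights trained on both VITON-HD
# and DressCode; init_weight_dtype("no") should resolve to full fp32 (no mixed
# precision), and use_tf32 enables TF32 matmuls on Ampere-or-newer GPUs.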
# AutoMasker
mask_processor = VaeImageProcessor(vae_scale_factor=8, do_normalize=False, do_binarize=True, do_convert_grayscale=True)
automasker = AutoMasker(
    densepose_ckpt=os.path.join(repo_path, "DensePose"),
    schp_ckpt=os.path.join(repo_path, "SCHP"),
    device='cuda',
)
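# AutoMasker combines DensePose body-part estimation with SCHP human parsing to
# produce the agnostic (inpainting) mask for the requested cloth_type
# (e.g. "upper" or "lower").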
# fitting_type was added as a parameter, alongside cloth_type.
async def submit_function(
    person_image,
    cloth_image,
    cloth_type,
    fitting_type,
    num_inference_steps,
    guidance_scale,
    seed,
    show_type
):
    # Originally the Gradio editor supplied a user-drawn mask layer:
    # person_image, mask = person_image["background"], person_image["layers"][0]
    # mask = Image.open(mask).convert("L")
    # if len(np.unique(np.array(mask))) == 1:
    #     mask = None  # the user did not draw a mask
    # else:
    #     mask = np.array(mask)
    #     mask[mask > 0] = 255  # background is black
    #     mask = Image.fromarray(mask)
    mask = None
    tmp_folder = "resource/demo/output"
    date_str = datetime.now().strftime("%Y%m%d%H%M%S")
    result_save_path = os.path.join(tmp_folder, date_str[:8], date_str[8:] + ".png")
    if not os.path.exists(os.path.join(tmp_folder, date_str[:8])):
        os.makedirs(os.path.join(tmp_folder, date_str[:8]))

    generator = None
    if seed != -1:  # seed == -1 means non-deterministic sampling
        generator = torch.Generator(device='cuda').manual_seed(seed)

    person_image = Image.open(person_image).convert("RGB")
    cloth_image = Image.open(cloth_image).convert("RGB")
    # (WIDTH, HEIGHT) defaults to 768x1024.
    person_image = resize_and_crop(person_image, (WIDTH, HEIGHT))
    cloth_image = resize_and_padding(cloth_image, (WIDTH, HEIGHT))
    # Special-case handling: preset avatar images (men/women, BMI levels 0-3).
    # If the uploaded person image matches one of these, pre-computed SAM masks
    # are used to correct the automatically generated mask below.
    compare_images = {}
    for gender, prefix in (("men", "m"), ("women", "w")):
        for lvl in range(4):
            img = Image.open(f"./resource/demo/example/person/{gender}/{prefix}_lvl{lvl}.png").convert("RGB")
            compare_images[(prefix, lvl)] = resize_and_crop(img, (WIDTH, HEIGHT))
    # Identify which preset avatar, if any, the person image matches.
    preset = None  # (gender prefix, BMI level) of the matching preset
    for key, preset_img in compare_images.items():
        if image_equal(person_image, preset_img):
            preset = key
            break

    # Process mask
    if mask is not None:
        mask = resize_and_crop(mask, (WIDTH, HEIGHT))
    elif preset is not None:
        prefix, lvl = preset
        # Run AutoMasker on the level-0 body of the same gender: it yields a
        # cleaner seed mask than the higher-BMI presets themselves.
        mask = automasker(compare_images[(prefix, 0)], cloth_type)['mask']
        # Load the pre-computed SAM masks for this preset (levels 2-3 use the
        # fixed "_v2" lower mask).
        suffix = "_v2" if lvl >= 2 else ""
        sam_dir = "./resource/demo/example/person/sam"
        sam_mask_lower = Image.open(f"{sam_dir}/{prefix}_lvl{lvl}_lower_sam{suffix}.png").convert("L")
        sam_mask_lower = resize_and_crop(sam_mask_lower, (WIDTH, HEIGHT))
        sam_mask_upper = Image.open(f"{sam_dir}/{prefix}_lvl{lvl}_upper_sam.png").convert("L")
        sam_mask_upper = resize_and_crop(sam_mask_upper, (WIDTH, HEIGHT))
        mask_np = np.array(mask)
        sam_mask_upper_np = np.array(sam_mask_upper)
        sam_mask_lower_np = np.array(sam_mask_lower)
        kernel = np.ones((10, 10), np.uint8)
        if cloth_type == "upper":
            sam_mask_upper_np = cv2.dilate(sam_mask_upper_np, kernel, iterations=1)
            result_np = np.where(sam_mask_lower_np == 255, 0, mask_np)
            result_np = np.where(sam_mask_upper_np == 255, 255, result_np)
            mask = Image.fromarray(result_np)
        elif cloth_type == "lower":
            sam_mask_lower_np = cv2.dilate(sam_mask_lower_np, kernel, iterations=1)
            result_np = np.where(sam_mask_upper_np == 255, 0, mask_np)
            result_np = np.where(sam_mask_lower_np == 255, 255, result_np)
            mask = Image.fromarray(result_np)
        else:
            mask = Image.fromarray(mask_np)
    else:
        mask = automasker(
            person_image,
            cloth_type
        )['mask']
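    # The two np.where layers in the preset branch above implement, for "upper":
    #   result = automasker_seed
    #   result[sam_lower == 255] = 0     # erase the other garment's region
    #   result[sam_upper == 255] = 255   # force the target garment's region on
    # and symmetrically for "lower".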
    # mask.save("./app_mask_created.png")
    # Occasionally, for high-BMI avatars, the upper mask is not generated
    # accurately, so it is post-processed once more below. An earlier attempt
    # simply cut off the protruding bottom of the mask (it must not run on
    # user-drawn masks, hence it was kept inside the else branch):
    # if cloth_type == "upper":
    #     height = (np.array(mask)).shape[0]
    #     y_threshold = int(height * 0.7)  # a fixed fraction of the image height
    #     mask = remove_bottom_part(np.array(mask), y_threshold)
    # That approach could not fix the protruding parts: the generated mask
    # region differs in size per input/target image, even though the mask files
    # themselves are the same size. Instead, the mask is refined further
    # according to Fitting Type below.
    # Mask refinement by fitting_type. Every preset uses the same recipe, with
    # only the downward extension differing (standard: 100 px, loose: 200 px),
    # plus one exception for w_lvl2 upper in loose mode, so a single helper
    # handles all preset/cloth_type combinations.
    def refine_preset_mask(mask_in, prefix, lvl, pixels, close_after=True):
        # Union the opened AutoMasker mask with a dilated, downward-extended
        # pre-computed SAM mask for this preset (see notes in test.py).
        opened_mask = morph_open(mask_in)
        sam_path = f"./resource/demo/example/person/sam/{prefix}_lvl{lvl}_{cloth_type}_sam.png"
        sam_mask = Image.open(sam_path).convert("L")
        sam_mask = resize_and_crop(sam_mask, (WIDTH, HEIGHT))
        sam_np = np.array(sam_mask)
        kernel = np.ones((40, 40), np.uint8)
        sam_np = cv2.dilate(sam_np, kernel, iterations=1)
        extended_mask = extend_mask_downward(sam_np, pixels=pixels)
        final_mask = Image.fromarray(np.array(opened_mask) | np.array(extended_mask))
        if close_after:
            final_mask = morph_close(morph_open(final_mask))
        return final_mask

    if fitting_type == "standard":
        if preset is not None and cloth_type in ("upper", "lower"):
            prefix, lvl = preset
            mask = refine_preset_mask(mask, prefix, lvl, pixels=100)
        # Default: not a preset avatar (or another cloth type).
        elif cloth_type == "upper":
            mask = extend_mask_downward2(np.array(mask), pixels=50)
        else:
            opened_mask = morph_open(mask)
            extended_mask = extend_mask_downward(np.array(mask), pixels=70)
            final_mask = Image.fromarray(np.array(opened_mask) | np.array(extended_mask))
            final_mask = morph_open(final_mask)
            mask = final_mask
    elif fitting_type == "loose":
        if preset is not None and cloth_type in ("upper", "lower"):
            prefix, lvl = preset
            # Exception: w_lvl2 upper skips the final open+close pass (its
            # morph_close(morph_open(...)) line was commented out on 11/10).
            close_after = not (prefix == "w" and lvl == 2 and cloth_type == "upper")
            mask = refine_preset_mask(mask, prefix, lvl, pixels=200, close_after=close_after)
        # Default: not a preset avatar (or another cloth type).
        elif cloth_type == "upper":
            mask = extend_mask_downward2(np.array(mask), pixels=70)
        else:
            opened_mask = morph_open(mask)
            extended_mask = extend_mask_downward(np.array(mask), pixels=100)
            final_mask = Image.fromarray(np.array(opened_mask) | np.array(extended_mask))
            final_mask = morph_open(final_mask)  # open only; no morph_close here
            mask = final_mask
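    # morph_open removes small speckles from the mask and morph_close fills
    # small holes (standard morphological open/close; see their definitions in
    # test.py). Opening the union first keeps stray islands out of the mask.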
    # Blur
    mask = mask_processor.blur(mask, blur_factor=9)
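    # Feathering the binary mask softens the inpainting boundary so the
    # generated garment blends into the surrounding pixels without a hard seam.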
    # Inference. Earlier synchronous (Gradio-era) version, kept for reference:
    # try:
    #     result_image = pipeline(
    #         image=person_image,
    #         condition_image=cloth_image,
    #         mask=mask,
    #         num_inference_steps=num_inference_steps,
    #         guidance_scale=guidance_scale,
    #         generator=generator
    #     )[0]
    #     print(pipeline.progress)
    # except Exception as e:
    #     raise gr.Error(
    #         "An error occurred. Please try again later: {}".format(e)
    #     )

    # Run the blocking diffusion loop in a worker thread; wrapping the
    # to_thread() coroutine in a task schedules it immediately, so progress
    # starts advancing during the sleep below.
    pipeline.progress = 0.02
    pipeline_future = asyncio.create_task(asyncio.to_thread(
        pipeline,
        image=person_image,
        condition_image=cloth_image,
        mask=mask,
        num_inference_steps=num_inference_steps,
        guidance_scale=guidance_scale,
        generator=generator
    ))
    await asyncio.sleep(1)

    async def print_progress():
        print('starting printing progress: ', pipeline_future)
        while pipeline.progress < 0.9:
            print(f"Progress: {pipeline.progress}")
            await asyncio.sleep(1)
        # Print the final progress after completion.
        print(f"Final Progress: {pipeline.progress}")

    progress_task = asyncio.create_task(print_progress())
    result = await pipeline_future
    result_image = result[0]
    await progress_task
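    # Note: asyncio.to_thread() keeps the event loop free while the diffusion
    # loop runs, which is what lets the websocket endpoint below keep streaming
    # pipeline.progress to clients during inference.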
    # Post-process
    masked_person = vis_mask(person_image, mask)
    save_result_image = image_grid([person_image, masked_person, cloth_image, result_image], 1, 4)
    save_result_image.save(result_save_path)
    if show_type == "result only":
        return {"result_image": result_image, "masked_person": masked_person}
    else:
        width, height = person_image.size
        if show_type == "input & result":
            condition_width = width // 2
            conditions = image_grid([person_image, cloth_image], 2, 1)
        else:
            condition_width = width // 3
            conditions = image_grid([person_image, masked_person, cloth_image], 3, 1)
        conditions = conditions.resize((condition_width, height), Image.NEAREST)
        new_result_image = Image.new("RGB", (width + condition_width + 5, height))
        new_result_image.paste(conditions, (0, 0))
        new_result_image.paste(result_image, (condition_width + 5, 0))
        return new_result_image
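# Minimal local smoke test (hypothetical file paths; not executed on import):
# result = asyncio.run(submit_function(
#     "person.jpg", "cloth.jpg", cloth_type="upper", fitting_type="standard",
#     num_inference_steps=25, guidance_scale=2.5, seed=42,
#     show_type="result only"))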
# TODO: add a websocket call inside process_image or submit_function so that it
# sends the progress every 1 second (see websocket_endpoint below).

# FastAPI endpoint definitions.
@app.post("/process")  # route path assumed; no decorator appears in the source
async def process_image(
    cloth_type: str = Form(...),
    fitting_type: str = Form(...),
    person_image: UploadFile = File(...),
    cloth_image: UploadFile = File(...)
):
    try:
        # Generate unique filenames.
        person_filename = f"received_{uuid.uuid4().hex}_{person_image.filename}"
        cloth_filename = f"received_{uuid.uuid4().hex}_{cloth_image.filename}"
        print('person_filename:', person_filename)
        print('cloth_filename:', cloth_filename)
        # Create the upload directory and save the uploaded images.
        os.makedirs("uploads", exist_ok=True)
        person_path = os.path.join("uploads", person_filename)
        cloth_path = os.path.join("uploads", cloth_filename)
        with open(person_path, "wb") as buffer:
            shutil.copyfileobj(person_image.file, buffer)
        with open(cloth_path, "wb") as buffer:
            shutil.copyfileobj(cloth_image.file, buffer)
        pipeline.progress = 0.02
        # Run the try-on. num_inference_steps: 25 appears to work better than 50.
        result = await submit_function(
            person_image=person_path,
            cloth_image=cloth_path,
            cloth_type=cloth_type,
            fitting_type=fitting_type,
            num_inference_steps=25,
            guidance_scale=2.5,
            seed=42,
            show_type='result only'
        )
        print('processing done')
        # Extract the returned images.
        result_image = result['result_image']
        masked_person = result['masked_person']
        os.makedirs('results', exist_ok=True)  # ensure the results directory exists
        result_image.save('results/result.png')
        # Encode the images as Base64.
        result_image_b64 = pil_to_base64(result_image)
        masked_person_b64 = pil_to_base64(masked_person)
        # Remove the temporary files.
        os.remove(person_path)
        os.remove(cloth_path)
        return {
            "message": "Image processed successfully",
            "result_image": result_image_b64,
            "masked_person": masked_person_b64
        }
    except Exception as e:
        return JSONResponse(status_code=500, content={"message": f"Error: {str(e)}"})
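# Example request against the endpoint above (path assumed, see decorator note):
#   curl -X POST http://localhost:8000/process \
#     -F cloth_type=upper -F fitting_type=standard \
#     -F person_image=@person.jpg -F cloth_image=@cloth.jpg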
# Progress reporting over a websocket. pipeline() itself is blocking and
# synchronous, so it runs in a thread (see submit_function) while this
# coroutine polls pipeline.progress.
@app.websocket("/ws")  # route path assumed; no decorator appears in the source
async def websocket_endpoint(websocket: WebSocket):
    print('awaiting websocket connection')
    await websocket.accept()
    print('websocket accepted')
    try:
        while True:
            if pipeline.progress >= 0.94:
                await asyncio.sleep(0.1)
                if pipeline.progress >= 0.94:
                    break
            if pipeline.progress < 0.02:
                pipeline.progress = 0.02
            await websocket.send_text(f"{pipeline.progress}")
            await asyncio.sleep(0.5)
        await websocket.send_text("Processing complete")
    except WebSocketDisconnect:
        print("Client disconnected")
    finally:
        await websocket.close()
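# Client-side sketch (needs the third-party `websockets` package; the "/ws"
# path matches the assumed decorator above):
# import websockets
# async def watch_progress(url="ws://localhost:8000/ws"):
#     async with websockets.connect(url) as ws:
#         while True:
#             msg = await ws.recv()
#             print("progress:", msg)
#             if msg == "Processing complete":
#                 break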
@app.post("/send_to_ssh")  # route path assumed; no decorator appears in the source
async def send_to_ssh(
    cloth_type: str = Form(...),
    fitting_type: str = Form(...),
    person_image: UploadFile = File(...),  # handled as an image file upload
    cloth_image: UploadFile = File(...)
):
    # Logic to process or store the received data goes here.
    return {"message": "Data processed successfully."}

@app.get("/test")  # route path assumed; no decorator appears in the source
async def test():
    return JSONResponse(status_code=200, content={"message": "hello"})

def person_example_fn(image_path):
    # Example-selection callback left over from the Gradio demo.
    return image_path
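# To serve the API locally (assuming this file is named api.py):
#   uvicorn api:app --host 0.0.0.0 --port 8000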