import os
import app_configs as configs
import service
import gradio as gr
import numpy as np
import cv2
from PIL import Image
import logging
from huggingface_hub import hf_hub_download

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()

# SAM is created lazily on the first run; see load_sam_instance() below.
sam = None  # service.get_sam(configs.model_type, configs.model_ckpt_path, configs.device)

# Marker colours (RGB): red for background points (label 0), blue for foreground points (label 1).
red = (255, 0, 0)
blue = (0, 0, 255)

def load_sam_instance():
    """Lazily build the SAM instance, downloading the checkpoint from the Hub if it is missing locally."""
    global sam
    if sam is None:
        gr.Info('Initialising SAM, hang in there...')
        if not os.path.exists(configs.model_ckpt_path):
            gr.Info('Downloading weights from the Hugging Face Hub')
            chkpt_path = hf_hub_download("ybelkada/segment-anything", configs.model_ckpt_path)
        else:
            chkpt_path = configs.model_ckpt_path
        sam = service.get_sam(configs.model_type, chkpt_path, configs.device)
    return sam
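
# `service.get_sam` lives in a separate service.py module that is not shown in
# this file. The helper below is only an illustrative sketch of what it
# plausibly does, based on the official `segment_anything` API; the Space's
# actual implementation may differ, and `_sketch_get_sam` is not used by the
# app above.
def _sketch_get_sam(model_type, ckpt_path, device):
    # Build a SAM model from the official registry and move it to the target device.
    from segment_anything import sam_model_registry
    sam_model = sam_model_registry[model_type](checkpoint=ckpt_path)
    sam_model.to(device=device)
    return sam_model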

block = gr.Blocks()
with block:
    # states
    def point_coords_empty():
        return []

    def point_labels_empty():
        return []

    point_coords = gr.State(point_coords_empty)
    point_labels = gr.State(point_labels_empty)
    raw_image = gr.Image(type='pil', visible=False)

    # UI
    with gr.Row():
        with gr.Column():
            input_image = gr.Image(label='Input', height=512, type='pil')
            with gr.Row():
                point_label_radio = gr.Radio(label='Point Label', choices=[1, 0], value=1)
                reset_btn = gr.Button('Reset')
                run_btn = gr.Button('Run', variant='primary')
        with gr.Column():
            with gr.Tab('Cutout'):
                cutout_gallery = gr.Gallery()
            with gr.Tab('Annotation'):
                masks_annotated_image = gr.AnnotatedImage(label='Segments')
    gr.Examples(examples=[['examples/cat-256.png', 'examples/cat-256.png']], inputs=[input_image, raw_image])

    # components - passing a set to run_btn.click makes the handler receive a single dict keyed by component
    components = {point_coords, point_labels, raw_image, input_image, point_label_radio, reset_btn, run_btn, cutout_gallery, masks_annotated_image}

    # event - init coords
    def on_reset_btn_click(raw_image):
        # Restore the untouched image, clear the stored points and the annotation panel.
        return raw_image, point_coords_empty(), point_labels_empty(), None
    reset_btn.click(on_reset_btn_click, [raw_image], [input_image, point_coords, point_labels, masks_annotated_image], queue=False)

    def on_input_image_upload(input_image):
        # Keep a pristine copy of the upload, clear the stored points and the annotation panel.
        return input_image, point_coords_empty(), point_labels_empty(), None
    input_image.upload(on_input_image_upload, [input_image], [raw_image, point_coords, point_labels, masks_annotated_image], queue=False)

    # event - set coords
    def on_input_image_select(input_image, point_coords, point_labels, point_label_radio, evt: gr.SelectData):
        x, y = evt.index
        color = red if point_label_radio == 0 else blue
        # Draw the clicked point on the displayed image and record it as a prompt.
        img = np.array(input_image)
        cv2.circle(img, (x, y), 10, color, -1)
        img = Image.fromarray(img)
        point_coords.append([x, y])
        point_labels.append(point_label_radio)
        return img, point_coords, point_labels
    input_image.select(on_input_image_select, [input_image, point_coords, point_labels, point_label_radio], [input_image, point_coords, point_labels], queue=False)

    # event - inference
    def on_run_btn_click(data):
        sam = load_sam_instance()
        image = data[raw_image]
        if len(data[point_coords]) == 0:
            # No prompt points: segment everything automatically.
            masks, _ = service.predict_all(sam, image)
        else:
            # Prompted segmentation from the clicked foreground/background points.
            masks, _ = service.predict_conditioned(sam,
                                                   image,
                                                   point_coords=np.array(data[point_coords]),
                                                   point_labels=np.array(data[point_labels]))
        annotated = (image, [(masks[i], f'Mask {i}') for i in range(len(masks))])
        cutouts = [service.cutout(image, mask) for mask in masks]
        return cutouts, annotated
    run_btn.click(on_run_btn_click, components, [cutout_gallery, masks_annotated_image], queue=True)

if __name__ == '__main__':
    block.queue()
    block.launch()
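
# ---------------------------------------------------------------------------
# Reference sketch (not part of the Space's published code). The app above
# also imports `app_configs` and calls `service.predict_all`,
# `service.predict_conditioned` and `service.cutout`, none of which are shown
# in this file. The helpers below only illustrate, using the official
# `segment_anything` package, what those calls plausibly do; names, fields and
# return shapes are assumptions inferred from the call sites, not the actual
# implementation, and nothing here is used by the app.
#
# app_configs.py presumably exposes something like:
#     model_type = 'vit_b'                      # key into sam_model_registry
#     model_ckpt_path = 'sam_vit_b_01ec64.pth'  # local path / Hub filename
#     device = 'cpu'                            # or 'cuda'
# ---------------------------------------------------------------------------

def _sketch_predict_all(sam_model, image):
    # Automatic mask generation over the whole image (no prompts).
    from segment_anything import SamAutomaticMaskGenerator
    generator = SamAutomaticMaskGenerator(sam_model)
    results = generator.generate(np.array(image))
    masks = [r['segmentation'] for r in results]
    scores = [r['predicted_iou'] for r in results]
    return masks, scores

def _sketch_predict_conditioned(sam_model, image, point_coords, point_labels):
    # Point-prompted prediction from the clicked foreground/background points.
    from segment_anything import SamPredictor
    predictor = SamPredictor(sam_model)
    predictor.set_image(np.array(image))
    masks, scores, _ = predictor.predict(point_coords=point_coords,
                                         point_labels=point_labels,
                                         multimask_output=True)
    return masks, scores

def _sketch_cutout(image, mask):
    # RGBA cutout: keep the image inside the mask, make everything else transparent.
    rgba = np.array(image.convert('RGBA'))
    rgba[..., 3] = np.where(mask, 255, 0)
    return Image.fromarray(rgba)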