import os
import numpy as np
import torch
from PIL import Image
import time
from segment_anything import sam_model_registry, SamPredictor


def sam_init(device_id=0):
    # Load the ViT-H SAM checkpoint and wrap it in a SamPredictor.
    sam_checkpoint = os.path.join(os.path.dirname(__file__), "ckpt/sam_vit_h_4b8939.pth")
    model_type = "vit_h"
    device = "cuda:{}".format(device_id) if torch.cuda.is_available() else "cpu"
    sam = sam_model_registry[model_type](checkpoint=sam_checkpoint).to(device=device)
    predictor = SamPredictor(sam)
    return predictor

def sam_out_nosave(predictor, input_image, bbox):
    # Run SAM twice on the same image: once with a center-point prompt and
    # once with a box prompt. Only the box-prompted mask is returned.
    bbox = np.array(bbox)
    image = np.asarray(input_image)

    start_time = time.time()
    predictor.set_image(image)

    # predict() expects point_coords in (x, y) pixel order, so the image
    # center is (w // 2, h // 2), not (h // 2, w // 2) as originally written.
    h, w, _ = image.shape
    input_point = np.array([[w // 2, h // 2]])
    input_label = np.array([1])
    masks, scores, logits = predictor.predict(
        point_coords=input_point,
        point_labels=input_label,
        multimask_output=True,
    )
    masks_bbox, scores_bbox, logits_bbox = predictor.predict(
        box=bbox,
        multimask_output=True,
    )
    print(f"SAM Time: {time.time() - start_time:.3f}s")

    # Best point-prompted mask (computed but unused in the returned image).
    opt_idx = np.argmax(scores)
    mask = masks[opt_idx]

    # Compose RGBA outputs: RGB channels from the input, alpha from the mask.
    out_image = np.zeros((image.shape[0], image.shape[1], 4), dtype=np.uint8)
    out_image[:, :, :3] = image
    out_image_bbox = out_image.copy()
    out_image[:, :, 3] = mask.astype(np.uint8) * 255
    out_image_bbox[:, :, 3] = masks_bbox[-1].astype(np.uint8) * 255  # or masks_bbox[np.argmax(scores_bbox)]

    torch.cuda.empty_cache()
    return Image.fromarray(out_image_bbox, mode='RGBA')
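

# A minimal usage sketch, not part of the original file: it assumes GPU
# device 0 (falling back to CPU), a local "demo.png", and a hand-picked
# XYXY box prompt; the file names and box coordinates are placeholders.
if __name__ == "__main__":
    predictor = sam_init(device_id=0)
    img = Image.open("demo.png").convert("RGB")  # hypothetical input image
    box = [100, 100, 400, 400]                   # hypothetical XYXY box prompt
    rgba = sam_out_nosave(predictor, img, box)
    rgba.save("demo_masked.png")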