Spaces:
Runtime error
Runtime error
File size: 483 Bytes
11c68ac |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 |
import streamlit as st
from transformers import pipeline
from PIL import Image
import torch
## BLIP image-captioning demo (Salesforce/blip-image-captioning-large).

# Cache the pipeline so Streamlit does not re-download / reload the model
# on every script rerun (Streamlit re-executes the whole file per interaction).
@st.cache_resource
def _load_caption_pipe():
    """Create the BLIP image-to-text pipeline once per process."""
    return pipeline('image-to-text', model="Salesforce/blip-image-captioning-large")

initial_caption_pipe = _load_caption_pipe()

# Let the user upload an image and display it.
uploaded_image = st.file_uploader("Upload an image", type=["png", "jpg", "jpeg"])
if uploaded_image is not None:
    image = Image.open(uploaded_image)
    st.image(image, caption="Uploaded Image", use_column_width=True)
    # Fix: the pipeline was previously created but never called, so the app
    # showed the image and produced no caption. Run it and show the result.
    results = initial_caption_pipe(image)
    # image-to-text pipelines return a list of dicts: [{"generated_text": "..."}]
    if results:
        st.write(results[0]["generated_text"])