import gradio as gr
import torch
from PIL import Image
from transformers import AutoModel, AutoTokenizer

# Load the model and tokenizer from the local path
model = AutoModel.from_pretrained('minicpm/models', trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained('minicpm/models', trust_remote_code=True)

# Set the model to evaluation mode
model.eval()

def predict(image, question):
    # Ensure the image is in RGB mode before passing it to the model
    image = image.convert('RGB')

    # Build the chat message list from the user's question
    msgs = [{'role': 'user', 'content': question}]

    # Generate a response with the model's chat interface
    res = model.chat(
        image=image,
        msgs=msgs,
        tokenizer=tokenizer,
        sampling=True,
        temperature=0.1
    )
    return res

# Create the Gradio interface
iface = gr.Interface(
    fn=predict,
    inputs=[
        gr.Image(type="pil", label="Upload an Image"),
        gr.Textbox(label="Ask a Question")
    ],
    outputs="text",
    title="Image Question Answering",
    description="Upload an image and ask a question about it."
)

# Launch the app
iface.launch()