import gradio as gr
from transformers import AutoProcessor, BlipForConditionalGeneration

import torch

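# Fetch the example images for the demo, saving them locally under the filenames referenced in `examples` below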
torch.hub.download_url_to_file('https://static01.nytimes.com/newsgraphics/2023-06-08-disinfo-ai-detector/7343d4ca746b7965141e230d94dd4f5f564f2bfb/_assets/elon.jpg', 'elon.jpg')
torch.hub.download_url_to_file('https://static01.nytimes.com/newsgraphics/2023-06-08-disinfo-ai-detector/7343d4ca746b7965141e230d94dd4f5f564f2bfb/_assets/pentagon.jpg', 'pentagon.jpg')
torch.hub.download_url_to_file('https://static01.nytimes.com/newsgraphics/2023-06-08-disinfo-ai-detector/7343d4ca746b7965141e230d94dd4f5f564f2bfb/_assets/horns.jpg', 'horns.jpg')
torch.hub.download_url_to_file('https://static01.nytimes.com/newsgraphics/2023-06-08-disinfo-ai-detector/7343d4ca746b7965141e230d94dd4f5f564f2bfb/_assets/waves.png', 'waves.jpg')
torch.hub.download_url_to_file('https://static01.nytimes.com/newsgraphics/2023-06-08-disinfo-ai-detector/7343d4ca746b7965141e230d94dd4f5f564f2bfb/_assets/radcliffe.jpg', 'radcliffe.jpg')
torch.hub.download_url_to_file('https://static01.nytimes.com/newsgraphics/2023-06-08-disinfo-ai-detector/7343d4ca746b7965141e230d94dd4f5f564f2bfb/_assets/australia.jpg', 'australia.jpg')
torch.hub.download_url_to_file('https://static01.nytimes.com/newsgraphics/2023-06-08-disinfo-ai-detector/7343d4ca746b7965141e230d94dd4f5f564f2bfb/_assets/yeti.jpeg', 'yeti.jpg')
torch.hub.download_url_to_file('https://static01.nytimes.com/newsgraphics/2023-06-08-disinfo-ai-detector/7343d4ca746b7965141e230d94dd4f5f564f2bfb/_assets/pollock.jpg', 'pollock.jpg')
torch.hub.download_url_to_file('https://static01.nytimes.com/newsgraphics/2023-06-08-disinfo-ai-detector/7343d4ca746b7965141e230d94dd4f5f564f2bfb/_assets/man.png', 'man.png')

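# Load the processor and BLIP captioning model fine-tuned to identify the source of synthetic images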
blip_processor_large = AutoProcessor.from_pretrained("umm-maybe/image-generator-identifier")
blip_model_large = BlipForConditionalGeneration.from_pretrained("umm-maybe/image-generator-identifier")

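# Run inference on GPU when available, otherwise fall back to CPU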
device = "cuda" if torch.cuda.is_available() else "cpu"

blip_model_large.to(device)

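# Run the captioning model on a single image and decode the generated text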
def generate_caption(processor, model, image):
    inputs = processor(images=image, return_tensors="pt").to(device)
    
    generated_ids = model.generate(pixel_values=inputs.pixel_values, max_length=50)

    generated_caption = processor.batch_decode(generated_ids, skip_special_tokens=True)[0]
   
    return generated_caption


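# Gradio callback: caption the uploaded image with the fine-tuned BLIP model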
def generate_captions(image):

    caption_blip_large = generate_caption(blip_processor_large, blip_model_large, image)

    return caption_blip_large


examples = [["elon.jpg"], ["pentagon.jpg"], ["horns.jpg"], ["waves.jpg"], ["radcliffe.jpg"], ["australia.jpg"], ["yeti.jpg"], ["pollock.jpg"], ["man.png"]]

title = "Generator Identification via Image Captioning"
description = "Gradio demo illustrating the use of a fine-tuned BLIP image captioning model to identify synthetic images. To use it, upload an image and click 'Submit', or click one of the examples to load it."

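# Build and launch the Gradio interface (image in, text label out)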
interface = gr.Interface(fn=generate_captions, 
                         inputs="image",
                         outputs="textbox",
                         examples=examples, 
                         title=title,
                         description=description)
interface.launch()