tangjicheng123 committed • Commit 0f497f0 • Parent(s): 7725d56
feat: clean code
app.py CHANGED
@@ -45,11 +45,12 @@ def inference_image_caption(raw_image):
     caption = model.generate(image, sample=True, top_p=0.9, max_length=20, min_length=5)
     return caption[0]
 
-inputs = gr.Image(type='pil')
+inputs = gr.Image(type='pil', label="Input")
 outputs = gr.outputs.Textbox(label="Output")
 
 title = "BLIP"
 
 description = "Gradio demo for BLIP: Bootstrapping Language-Image Pre-training for Unified Vision-Language Understanding and Generation"
 
-gr.Interface(inference_image_caption, inputs, outputs, title=title, description=description, examples=[['starrynight.jpeg',]])
+app = gr.Interface(inference_image_caption, inputs, outputs, title=title, description=description, examples=[['starrynight.jpeg',]])
+app.launch(enable_queue=True, share=True)
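
For context, the hunk only touches the Gradio wiring at the bottom of app.py; the inference_image_caption function named in the hunk header is defined earlier in the file and is not shown in this commit. The sketch below is an assumed reconstruction of that surrounding setup, following the pattern of the official BLIP captioning demo: the blip_decoder factory from the BLIP repo's models.blip module, a 384x384 preprocessing transform, and a CHECKPOINT path that is purely a placeholder here. None of it is confirmed by this diff.

# Hypothetical reconstruction of the code above the hunk; only the lines
# inside the hunk are confirmed by this commit.
import gradio as gr
import torch
from PIL import Image
from torchvision import transforms
from torchvision.transforms.functional import InterpolationMode

from models.blip import blip_decoder  # assumes the BLIP repo is vendored in the Space

IMAGE_SIZE = 384
CHECKPOINT = "checkpoints/model_base_capfilt_large.pth"  # placeholder path, not from the commit

# Typical BLIP preprocessing: resize, convert to tensor, normalize with CLIP statistics.
transform = transforms.Compose([
    transforms.Resize((IMAGE_SIZE, IMAGE_SIZE), interpolation=InterpolationMode.BICUBIC),
    transforms.ToTensor(),
    transforms.Normalize((0.48145466, 0.4578275, 0.40821073),
                         (0.26862954, 0.26130258, 0.27577711)),
])

model = blip_decoder(pretrained=CHECKPOINT, image_size=IMAGE_SIZE, vit="base")
model.eval()

def inference_image_caption(raw_image: Image.Image) -> str:
    # Convert the PIL image delivered by gr.Image(type='pil') into a batched tensor.
    image = transform(raw_image).unsqueeze(0)
    with torch.no_grad():
        # Nucleus sampling, matching the generate() call shown in the hunk.
        caption = model.generate(image, sample=True, top_p=0.9,
                                 max_length=20, min_length=5)
    return caption[0]

After this commit, the Interface is bound to a variable and launched explicitly (app.launch(enable_queue=True, share=True)), and the input image component gets an "Input" label; the rest of the module is unchanged.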