Update for ZeroGPU
- README.md +1 -1
- app.py +3 -0
- requirements.txt +2 -2
README.md
CHANGED
@@ -5,7 +5,7 @@ colorFrom: blue
 colorTo: pink
 sdk: gradio
 sdk_version: 3.44.4
-python_version: 3.10.
+python_version: 3.10.11
 app_file: app.py
 pinned: false
 license: bsd-3-clause
app.py
CHANGED
@@ -7,6 +7,7 @@ import string
 
 import gradio as gr
 import PIL.Image
+import spaces
 import torch
 from transformers import AutoProcessor, Blip2ForConditionalGeneration
 
@@ -29,6 +30,7 @@ else:
     model = None
 
 
+@spaces.GPU
 def generate_caption(
     image: PIL.Image.Image,
     decoding_method: str,
@@ -52,6 +54,7 @@ def generate_caption(
     return result
 
 
+@spaces.GPU
 def answer_question(
     image: PIL.Image.Image,
     text: str,
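
For reference, the pattern this commit introduces is the usual ZeroGPU setup: import the spaces package and decorate each GPU-bound function with @spaces.GPU, so a GPU is attached only while that call runs. The sketch below shows the rough shape of the decorated generate_caption after this change; the checkpoint name, generation arguments, and function body are illustrative assumptions, not the Space's exact code.

import PIL.Image
import spaces
import torch
from transformers import AutoProcessor, Blip2ForConditionalGeneration

# Assumption: a BLIP-2 checkpoint; the Space loads its processor/model at import time.
MODEL_ID = "Salesforce/blip2-opt-2.7b"

processor = AutoProcessor.from_pretrained(MODEL_ID)
model = Blip2ForConditionalGeneration.from_pretrained(MODEL_ID, torch_dtype=torch.float16)
# On ZeroGPU the spaces package intercepts CUDA initialization, so moving the model
# to "cuda" at import time is allowed; the device is only attached during @spaces.GPU calls.
model.to("cuda")


@spaces.GPU  # request a GPU for the duration of this call
def generate_caption(image: PIL.Image.Image, decoding_method: str) -> str:
    # Assumption: "Nucleus sampling" is one of the decoding choices exposed in the UI.
    inputs = processor(images=image, return_tensors="pt").to("cuda", torch.float16)
    generated_ids = model.generate(
        **inputs,
        do_sample=decoding_method == "Nucleus sampling",
        max_new_tokens=50,
    )
    return processor.batch_decode(generated_ids, skip_special_tokens=True)[0].strip()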
requirements.txt
CHANGED
@@ -4,6 +4,6 @@ gradio==3.44.4
 huggingface-hub==0.17.2
 Pillow==10.0.1
 scipy==1.11.2
-torch==2.0.
-torchvision==0.15.
+torch==2.0.0
+torchvision==0.15.1
 transformers==4.33.2