Update app.py
app.py CHANGED
@@ -4,12 +4,14 @@ from PIL import Image
 import base64
 from io import BytesIO
 import json
+import os
 
-from huggingface_hub import InferenceClient
+from huggingface_hub import InferenceClient, login
 from transformers import AutoProcessor
 
 idefics_processor = AutoProcessor.from_pretrained("HuggingFaceM4/idefics2-8b")
 
+login(token=os.getenv("hf_token"))
 
 def process_images_and_text(image_path, query, client):
     messages = [
@@ -70,7 +72,7 @@ def resize_image(img):
     return img
 
 
-idefics_client = InferenceClient("
+idefics_client = InferenceClient("meta-llama/Llama-3.2-11B-Vision-Instruct")
 rating_client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
 
 
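
For context, a minimal sketch of how the updated pieces could be wired together after this change: authenticate with the hf_token secret as the diff does, create the vision InferenceClient, and send one image-plus-text turn through chat_completion. The helper names (image_to_data_url, describe_image) and the max_tokens value are illustrative assumptions, not code from app.py.

import base64
import os
from io import BytesIO

from huggingface_hub import InferenceClient, login
from PIL import Image

# Mirrors the diff: the token is read from an environment variable / Space
# secret named "hf_token".
login(token=os.getenv("hf_token"))

client = InferenceClient("meta-llama/Llama-3.2-11B-Vision-Instruct")


def image_to_data_url(image_path):
    # Hypothetical helper: encode the image as a base64 data URL so it can be
    # passed inline in an OpenAI-style chat message.
    img = Image.open(image_path).convert("RGB")
    buffer = BytesIO()
    img.save(buffer, format="JPEG")
    b64 = base64.b64encode(buffer.getvalue()).decode("utf-8")
    return f"data:image/jpeg;base64,{b64}"


def describe_image(image_path, query):
    # One user turn containing the image plus the text query.
    messages = [
        {
            "role": "user",
            "content": [
                {"type": "image_url", "image_url": {"url": image_to_data_url(image_path)}},
                {"type": "text", "text": query},
            ],
        }
    ]
    response = client.chat_completion(messages=messages, max_tokens=512)
    return response.choices[0].message.content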