Update app.py
app.py CHANGED
@@ -112,15 +112,11 @@ model = CLIPTextModelWithProjection.from_pretrained("Diangle/clip4clip-webvid")
 tokenizer = CLIPTokenizer.from_pretrained("Diangle/clip4clip-webvid")
 
 def search(search_sentence):
-    inputs = tokenizer(text=search_sentence , return_tensors="pt"
-
-
-    #
-
-    # final_output = text_embeds[torch.arange(text_embeds.shape[0]), inputs["input_ids"].argmax(dim=-1)]
-
-    # Normalization
-    final_output = outputs[1] / outputs[1].norm(dim=-1, keepdim=True)
+    inputs = tokenizer(text=search_sentence , return_tensors="pt")
+    outputs = model(input_ids=inputs["input_ids"], attention_mask=inputs["attention_mask"])
+
+    # Normalizing the embeddings:
+    final_output = outputs[0] / outputs[0].norm(dim=-1, keepdim=True)
     sequence_output = final_output.cpu().detach().numpy()
 
     nn_search = NearestNeighbors(n_neighbors=5, metric='binary', rerank_from=100)
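
For context, a minimal, self-contained sketch of what the updated search() path computes, assuming only the model and tokenizer names shown in the diff. The embed_text helper, the video_embeddings matrix, and its 512-dimensional size are hypothetical, and the plain cosine-similarity lookup at the end is a stand-in for the Space's binary NearestNeighbors index, not the Space's actual retrieval code.

import numpy as np
from transformers import CLIPTextModelWithProjection, CLIPTokenizer

model = CLIPTextModelWithProjection.from_pretrained("Diangle/clip4clip-webvid")
tokenizer = CLIPTokenizer.from_pretrained("Diangle/clip4clip-webvid")

def embed_text(search_sentence):
    inputs = tokenizer(text=search_sentence, return_tensors="pt")
    outputs = model(input_ids=inputs["input_ids"], attention_mask=inputs["attention_mask"])
    # outputs[0] is the projected text embedding (text_embeds); L2-normalize it
    # so that the dot products below are cosine similarities.
    final_output = outputs[0] / outputs[0].norm(dim=-1, keepdim=True)
    return final_output.cpu().detach().numpy()

# Hypothetical placeholder for precomputed, L2-normalized per-video embeddings.
video_embeddings = np.random.randn(1000, 512).astype(np.float32)
video_embeddings /= np.linalg.norm(video_embeddings, axis=1, keepdims=True)

query = embed_text("a dog catching a frisbee")   # shape (1, 512)
scores = video_embeddings @ query.T              # cosine similarity per video
top5 = np.argsort(-scores.squeeze())[:5]         # indices of the 5 closest videos

Normalizing both sides is what makes a plain dot product equivalent to cosine similarity, which is presumably why the commit normalizes outputs[0] before handing the vector to the nearest-neighbour search.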