Update app.py
app.py CHANGED
@@ -13,6 +13,11 @@ For more information on `huggingface_hub` Inference API support, please check th
 """
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
 model = SentenceTransformer('intfloat/multilingual-e5-large-instruct')
+examples=[
+    ["Why is men created?"],
+    ["Please tell me about superstition!"],
+    ["How moses defeat pharaoh?"],
+]
 
 def get_detailed_instruct(task_description: str, query: str) -> str:
     return f'Instruct: {task_description}\nQuery: {query}'
@@ -40,13 +45,14 @@ def respond(
     queries = [
         get_detailed_instruct(task, message)
     ]
+    examples.append(message)
 
     query_embeddings = model.encode(queries, convert_to_tensor=True, normalize_embeddings=True)
     scores = (query_embeddings @ encoded_questions.T) * 100
     selected_references['similarity'] = scores.tolist()[0]
     sorted_references = selected_references.sort_values(by='similarity', ascending=False)
     print(sorted_references.shape[0])
-    sorted_references = sorted_references.iloc[:
+    sorted_references = sorted_references.iloc[:1]
     sorted_references = sorted_references.sort_values(by='similarity', ascending=True)
     print(sorted_references.shape[0])
     print(sorted_references['similarity'].tolist())
@@ -139,11 +145,7 @@ demo = gr.ChatInterface(
         ),
     ],
     cache_examples="lazy",
-    examples=[
-        ["Why is men created?"],
-        ["Please tell me about superstition!"],
-        ["How moses defeat pharaoh?"],
-    ],
+    examples=examples,
 )
 
 if __name__ == "__main__":
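For context on the second hunk: `intfloat/multilingual-e5-large-instruct` expects queries to carry an instruct prefix while passages are embedded as-is, and with `normalize_embeddings=True` the matrix product of query and passage embeddings is cosine similarity, scaled by 100 here. A minimal, self-contained sketch of that retrieval step; the `task` string and the two-row corpus are hypothetical stand-ins for the app's precomputed `encoded_questions` and `selected_references`:

import pandas as pd
from sentence_transformers import SentenceTransformer

model = SentenceTransformer('intfloat/multilingual-e5-large-instruct')

def get_detailed_instruct(task_description: str, query: str) -> str:
    return f'Instruct: {task_description}\nQuery: {query}'

# Hypothetical stand-ins: the app precomputes these at startup.
task = 'Given a question, retrieve passages that answer the question'
passages = ['Superstition is belief without rational basis.',
            'Moses confronted Pharaoh to free the Israelites.']
selected_references = pd.DataFrame({'text': passages})
encoded_questions = model.encode(passages, convert_to_tensor=True,
                                 normalize_embeddings=True)

# Query side: instruct-prefixed, then embedded with the same normalization.
queries = [get_detailed_instruct(task, 'How did Moses defeat Pharaoh?')]
query_embeddings = model.encode(queries, convert_to_tensor=True,
                                normalize_embeddings=True)

# Normalized dot product = cosine similarity; the app scales it by 100.
scores = (query_embeddings @ encoded_questions.T) * 100
selected_references['similarity'] = scores.tolist()[0]

# Sort best-first and keep only the top hit, matching the new iloc[:1].
top = selected_references.sort_values(by='similarity', ascending=False).iloc[:1]
print(top['text'].tolist(), top['similarity'].tolist())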
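On the Gradio side, the commit moves the inline example list into the module-level `examples` variable and appends each incoming message to it inside `respond`. A trimmed sketch of that wiring, with a placeholder `respond` body rather than the app's real logic; note that appending to the list at runtime mutates the Python object but does not by itself re-render the example buttons Gradio drew at startup:

import gradio as gr

examples = [
    ["Why is men created?"],
    ["Please tell me about superstition!"],
    ["How moses defeat pharaoh?"],
]

def respond(message, history):
    # Appends a bare string into a list of single-item lists, as the
    # commit does; Gradio example rows are usually kept one shape.
    examples.append(message)
    return f"You asked: {message}"

demo = gr.ChatInterface(
    respond,
    cache_examples="lazy",
    examples=examples,
)

if __name__ == "__main__":
    demo.launch()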