services/qa_service/qna.py

import json
from services.qa_service.utils import format_prompt


class QAService:
    """Retrieve question context from Pinecone and run model inference on it."""

    def __init__(self, conf, pinecone, model_pipeline, question, goals):
        self.conf = conf
        self.pc = pinecone['connection']
        self.pc_index = self.pc.Index(self.conf['embeddings']['index_name'])
        self.embedder = pinecone['embedder']
        self.model_pipeline = model_pipeline
        self.question = question
        self.goals = goals

    def __enter__(self):
        print("Start Q&A Service")
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        print("Exiting Q&A Service")

    def parse_results(self, result):
        """Unpack Pinecone matches into a list of {"speakers", "text"} dicts."""
        parsed = []
        for i in result['matches']:
            # Each match stores its node payload as a JSON string in metadata.
            collect = i['metadata']['_node_content']
            content = json.loads(collect)
            parsed.append({
                "speakers": content["speakers"],
                "text": content["text"]
            })
        return parsed

    def retrieve_context(self):
        """Embed the question and query Pinecone for the closest matches."""
        embedded_query = self.embedder.get_text_embedding(self.question)
        result = self.pc_index.query(
            vector=embedded_query,
            top_k=5,
            include_values=False,
            include_metadata=True
        )
        # Unpack the raw response so callers receive {"speakers", "text"} dicts,
        # which is the shape run() expects.
        output = self.parse_results(result)
        return output

    def run(self):
        """Query Pinecone for context and infer a result from it."""
        full_context = self.retrieve_context()
        context = [i["text"] for i in full_context]
        prompt = format_prompt(self.question, context)
        output = self.model_pipeline.infer(prompt)
        return self.question, full_context, output, context
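

# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original service): it exercises
# QAService end to end with stand-in objects that mimic the interfaces the
# class actually calls (Index().query(), get_text_embedding(), infer()). The
# stub classes, the sample config, and the example question below are
# assumptions for demonstration only.
if __name__ == "__main__":
    class _StubIndex:
        def query(self, vector, top_k, include_values, include_metadata):
            # Return one match shaped like the payload parse_results expects.
            node = json.dumps({"speakers": ["alice"], "text": "Example snippet."})
            return {"matches": [{"metadata": {"_node_content": node}}]}

    class _StubPinecone:
        def Index(self, name):
            return _StubIndex()

    class _StubEmbedder:
        def get_text_embedding(self, text):
            return [0.0] * 8  # fixed-size dummy vector

    class _StubPipeline:
        def infer(self, prompt):
            return "Stubbed answer."

    conf = {"embeddings": {"index_name": "example-index"}}
    resources = {"connection": _StubPinecone(), "embedder": _StubEmbedder()}

    with QAService(conf, resources, _StubPipeline(),
                   question="What did the speakers discuss?", goals=None) as qa:
        question, matches, answer, context = qa.run()
        print(answer)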