File size: 2,672 Bytes
c1d7a66
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
90bb9b0
 
 
 
 
c1d7a66
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
cf22d4f
c1d7a66
 
 
cf22d4f
95445f2
c1d7a66
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
import json
import os
import pathlib
import sys
import time
from typing import Any, Dict, List, Optional

import pinecone  # cloud-hosted vector database for context retrieval
from dotenv import load_dotenv
# for vector search
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import Pinecone
from PIL import Image
from transformers import (
    AutoModelForSequenceClassification,
    AutoTokenizer,
    GPT2Tokenizer,
    OPTForCausalLM,
    T5ForConditionalGeneration,
)

PINECONE_API_KEY = os.environ.get("PINECONE_API")
#
# from huggingface_hub import HfApi, SpaceHardware

#api = HfApi(token=PINECONE_API_KEY)

class Retrieval:
    def __init__(self,
               device='cuda',
               use_clip=True):
        
        self.user_question = ''
        self.max_text_length = None
        self.pinecone_index_name = 'uiuc-chatbot'  # uiuc-chatbot-v2
        self.use_clip = use_clip

        # init parameters
        self.device = device
        self.num_answers_generated = 3

        self.vectorstore = None
    
    def _load_pinecone_vectorstore(self,):
        model_name = "intfloat/e5-large"  # best text embedding model. 1024 dims.
        
        embeddings = HuggingFaceEmbeddings(model_name=model_name)
        #pinecone.init(api_key=os.environ['PINECONE_API_KEY'], environment="us-west1-gcp")
        pinecone.init(api_key=PINECONE_API_KEY, environment="us-west1-gcp")
        pincecone_index = pinecone.Index("uiuc-chatbot")
        
        self.vectorstore = Pinecone(index=pincecone_index, embedding_function=embeddings.embed_query, text_key="text")


    def retrieve_contexts_from_pinecone(self, user_question: str, topk: int = None) -> List[Any]:
        ''' 
        Invoke Pinecone for vector search. These vector databases are created in the notebook `data_formatting_patel.ipynb` and `data_formatting_student_notes.ipynb`.
        Returns a list of LangChain Documents. They have properties: `doc.page_content`: str, doc.metadata['page_number']: int, doc.metadata['textbook_name']: str.
        '''
        print("USER QUESTION: ", user_question)
        print("TOPK: ", topk)


        if topk is None:
            topk = self.num_answers_generated

        # similarity search
        top_context_list = self.vectorstore.similarity_search(user_question, k=topk)

        # add the source info to the bottom of the context.
        top_context_metadata = [f"Source: page {doc.metadata['page_number']} in {doc.metadata['textbook_name']}" for doc in top_context_list]
        relevant_context_list = [f"{text.page_content}. {meta}" for text, meta in zip(top_context_list, top_context_metadata)]
        return relevant_context_list