import os

import streamlit as st
from dotenv import load_dotenv
from langchain_community.vectorstores import FAISS
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_openai import ChatOpenAI, OpenAIEmbeddings

# Load the OpenAI API key from a local .env file and fail fast if it is missing.
load_dotenv()
if not os.environ.get("OPENAI_API_KEY"):
    raise Exception("No OpenAI API key detected")

# Embedding model used to encode queries against the pre-built FAISS index.
# NOTE: "textembedding" looks like an Azure-style deployment name; with the
# standard OpenAI backend the embedding model is normally chosen via `model`.
embeddings = OpenAIEmbeddings(deployment="textembedding", chunk_size=16, api_key=os.environ["OPENAI_API_KEY"])

# Load the pre-built local FAISS index named "SCLC". Recent LangChain releases
# require opting in to pickle deserialization when loading a trusted local index.
index_name = "SCLC"
store = FAISS.load_local(index_name, embeddings, allow_dangerous_deserialization=True)

llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0)

# Prompt that fills in the retrieved context and the user's question.
TEMPLATE = """You are a chatbot.
Here is the context:
{context}
----------------------------------------------------------------
Answer the following question with reference to the above context.
Question:
{question}
----------------------------------------------------------------
Your reply:
"""

prompt = PromptTemplate(
    input_variables = ["question", "context"],
    template = TEMPLATE
)
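# Similarity-search retriever over the FAISS index; the top 2 matching chunks
# are joined into a single context block for the prompt.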
retriever = store.as_retriever(search_type="similarity", search_kwargs={"k":2})
def format_docs(docs):
    return "\n--------------------\n".join(doc.page_content for doc in docs)

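# LCEL pipeline: retrieve and format the context, fill the prompt template,
# call the chat model, and parse the response into a plain string.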
chain = ({"context": retriever | format_docs, "question": RunnablePassthrough()} | 
    prompt | 
    llm | 
    StrOutputParser()
)


st.title("test")

t = st.text_input("Input")
st.write(chain.invoke(t))
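# Launch the app locally with Streamlit, e.g.:
#   streamlit run <this_file>.py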