import gradio as gr
from langchain.chat_models import ChatOpenAI
from langchain.llms import OpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.memory import ConversationSummaryMemory
from langchain.chains import ConversationalRetrievalChain
from dotenv import load_dotenv, find_dotenv

_ = load_dotenv(find_dotenv())  # read local .env file (expects OPENAI_API_KEY)

# Load the persisted Chroma vector store from disk
vectorstore = Chroma(
    embedding_function=OpenAIEmbeddings(),
    persist_directory="chroma_db"
)

# Conversation memory: summarizes the chat history with an LLM
llm_memory = OpenAI(temperature=0.0)
memory = ConversationSummaryMemory(
    llm=llm_memory, memory_key="chat_history", return_messages=True
)

# Chatbot QA chain: retrieval-augmented answers with conversational memory
llm_qa = ChatOpenAI(temperature=0.0)
retriever = vectorstore.as_retriever()
qa = ConversationalRetrievalChain.from_llm(
    llm_qa,
    retriever=retriever,
    memory=memory
)


def chatbot(message, history):
    # Gradio supplies `history`, but the chain tracks the conversation itself via `memory`
    response = qa({"question": message})
    return response["answer"]


iface = gr.ChatInterface(
    fn=chatbot,
    title="🤖 AI Bot for Help Center in CCC.uno 📞",
    examples=[
        "Quién eres?",
        "Qué es Press 3?",
        "Cómo puedo usar Secure Screen?",
        "Cómo puedo obtener reportes de IVR?",
        "Por qué se queda procesando en Take Calls?",
    ],
    description="A chatbot for CCC.uno Support <br> Ask whatever you want related to CCC.uno <br> Knowledge base from: https://help.ccc.uno/es/"
)
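

# --- Hypothetical helper: building the persisted index -----------------------
# The app only *loads* a Chroma index from "chroma_db"; the sketch below shows
# one plausible way such an index could be built. It is an assumption, not this
# Space's actual ingestion pipeline: the docs/ folder, glob pattern, and chunk
# sizes are illustrative placeholders. The function is defined for reference
# only and is never called by the app.
def build_index(docs_path="docs", persist_directory="chroma_db"):
    """Sketch: split local help-center text files and persist a Chroma index."""
    # Local imports keep the sketch self-contained and out of app startup.
    from langchain.document_loaders import DirectoryLoader, TextLoader
    from langchain.text_splitter import RecursiveCharacterTextSplitter

    loader = DirectoryLoader(docs_path, glob="**/*.txt", loader_cls=TextLoader)
    documents = loader.load()
    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    splits = splitter.split_documents(documents)
    db = Chroma.from_documents(
        documents=splits,
        embedding=OpenAIEmbeddings(),
        persist_directory=persist_directory,
    )
    db.persist()  # write the index to disk so the app can load it later
    return db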


if __name__ == "__main__":
    iface.launch()