"""Gradio chatbot for the CCC.uno help center.

Retrieval-augmented QA over a persisted Chroma vector store, with a
summarized conversation memory, served through gr.ChatInterface.
"""

import gradio as gr
from dotenv import load_dotenv, find_dotenv
from langchain.chat_models import ChatOpenAI
from langchain.llms import OpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.memory import ConversationSummaryMemory
from langchain.chains import ConversationalRetrievalChain

# Load OPENAI_API_KEY (and any other secrets) from a local .env file.
_ = load_dotenv(find_dotenv())

# Vector store: reuse the embeddings already persisted under ./chroma_db.
vectorstore = Chroma(
    embedding_function=OpenAIEmbeddings(),
    persist_directory="chroma_db",
)

# Conversation memory: a completion model keeps a running summary of the
# dialogue, exposed to the QA chain under the "chat_history" key.
llm_memory = OpenAI(temperature=0.0)
memory = ConversationSummaryMemory(
    llm=llm_memory,
    memory_key="chat_history",
    return_messages=True,
)

# Retrieval QA chain: chat model + vector-store retriever + summary memory.
llm_qa = ChatOpenAI(temperature=0.0)
retriever = vectorstore.as_retriever()
qa = ConversationalRetrievalChain.from_llm(
    llm_qa,
    retriever=retriever,
    memory=memory,
)


def chatbot(message, history):
    """Gradio callback: answer *message* via the retrieval QA chain.

    *history* is supplied by gr.ChatInterface but intentionally unused:
    the chain maintains its own summarized memory across turns.
    """
    response = qa(message)
    return response["answer"]


iface = gr.ChatInterface(
    fn=chatbot,
    title="🤖 AI Bot for Help Center in CCC.uno 📞",
    examples=[
        "Quién eres?",
        "Qué es Press 3?",
        "Cómo puedo usar Secure Screen?",
        "Cómo puedo obtener reportes de IVR?",
        "Por que se queda procesando en Take Calls?",
    ],
    # NOTE: a plain "..." literal cannot span physical lines (SyntaxError);
    # implicit concatenation preserves the original text and line breaks.
    description=(
        "A chatbot for CCC.uno Support\n"
        "Ask whatever you want related to CCC.uno\n"
        "Knowledge base from: https://help.ccc.uno/es/"
    ),
)

if __name__ == "__main__":
    iface.launch()