import gradio as gr
from langchain.chat_models import ChatOpenAI
from langchain.llms import OpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.memory import ConversationSummaryMemory
from langchain.chains import ConversationalRetrievalChain
from dotenv import load_dotenv, find_dotenv
_ = load_dotenv(find_dotenv()) # read local .env file
# Load the persisted vector store
vectorstore = Chroma(
    embedding_function=OpenAIEmbeddings(),
    persist_directory="chroma_db",
)
# Conversation memory, summarized by a separate LLM
llm_memory = OpenAI(temperature=0.0)
memory = ConversationSummaryMemory(
    llm=llm_memory, memory_key="chat_history", return_messages=True
)
# Chatbot QA chain: retrieval over the vector store plus conversational memory
llm_qa = ChatOpenAI(temperature=0.0)
retriever = vectorstore.as_retriever()
qa = ConversationalRetrievalChain.from_llm(
    llm_qa,
    retriever=retriever,
    memory=memory,
)
def chatbot(message, history):
    # Gradio passes `history`, but the chain's own memory tracks the conversation
    response = qa(message)
    return response["answer"]
iface = gr.ChatInterface(
    fn=chatbot,
    title="🤖 AI Bot for Help Center in CCC.uno 📞",
    examples=[
        "Quién eres?",
        "Qué es Press 3?",
        "Cómo puedo usar Secure Screen?",
        "Cómo puedo obtener reportes de IVR?",
        "Por que se queda procesando en Take Calls?",
    ],
    description="A chatbot for CCC.uno Support <br> Ask whatever you want related to CCC.uno <br> Knowledge base from: https://help.ccc.uno/es/",
)
if __name__ == "__main__":
    iface.launch()
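
# --- Hypothetical companion script (e.g. ingest.py), not part of the original Space:
# --- a minimal sketch of how the "chroma_db" directory loaded above could be built.
# --- The URL list and chunking parameters below are illustrative assumptions.
from langchain.document_loaders import WebBaseLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter

# Fetch pages from the knowledge base (the real Space may index many more URLs)
loader = WebBaseLoader(["https://help.ccc.uno/es/"])
documents = loader.load()

# Split pages into overlapping chunks so retrieval returns focused passages
splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
chunks = splitter.split_documents(documents)

# Embed the chunks and persist them to the directory the chatbot reads from
db = Chroma.from_documents(
    chunks,
    embedding=OpenAIEmbeddings(),
    persist_directory="chroma_db",
)
db.persist()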