"""Gradio app that answers questions about an uploaded .txt file using a
LangChain RetrievalQA chain over a Chroma vector store."""

import os

import gradio as gr
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import TextLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import Chroma

# Set the OpenAI API key (avoid hard-coding a real key in source code).
os.environ["OPENAI_API_KEY"] = "sk-..."


def question_document(Document, Question):
    # Only plain-text (.txt) files are supported.
    if not Document.name.endswith(".txt"):
        return "The document must be a text file (.txt)"

    # Load the uploaded file and split it into chunks.
    loader = TextLoader(Document.name, encoding="ISO-8859-1")
    txt_docs = loader.load_and_split()

    # Embed the chunks and index them in a Chroma vector store.
    embeddings = OpenAIEmbeddings()
    txt_docsearch = Chroma.from_documents(txt_docs, embeddings)

    # Chat model used to answer the question.
    llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0.3)

    # RetrievalQA chain: retrieves relevant chunks and combines
    # per-chunk answers with the map_reduce strategy.
    qa_txt = RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="map_reduce",
        retriever=txt_docsearch.as_retriever(),
    )

    answer = qa_txt.run(Question)
    return answer


# Gradio UI: a file upload and a question text box in, an answer text box out.
iface = gr.Interface(
    fn=question_document,
    inputs=["file", "text"],
    outputs=gr.Textbox(label="Answer"),
    title="Long Text Questioner",
    description="by Nicolas\nLets you query a text document",
    allow_flagging="never",
)

iface.launch()
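
# Optional: launching with share=True additionally creates a temporary public
# link (standard Gradio option), e.g.:
#   iface.launch(share=True)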