NicolasGaudemet committed
Commit 9ff205d • 1 parent: 2ca7ea7
Update summarizer_app.py
Files changed: summarizer_app.py (+5 -3)
summarizer_app.py CHANGED
@@ -8,15 +8,17 @@ from langchain.docstore.document import Document
 from langchain.chains.summarize import load_summarize_chain
 import gradio as gr
 
-#définition du LLM
-llm = OpenAI(model = "text-davinci-003", max_token = 1000, temperature=0, openai_api_key = os.environ['OpenaiKey'])
-
 #chargement des paramètres
 with open("parametres.json", "r") as p:
     params = json.load(p)
 taille_max = params["taille_max"]
+modele = params["modele"]
+summary_length = params["summary_length"]
 chunks_max = taille_max//4000
 
+#définition du LLM
+llm = OpenAI(model_name = modele, max_tokens = summary_length, temperature=0, openai_api_key = os.environ['OpenaiKey'])
+
 #résumé d'un texte
 
 def summarize_text(text_to_summarize, llm):
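For context on what the change does: the model name and summary length are no longer hardcoded but read from parametres.json, which the script already uses for taille_max. The hunk does not show the body of summarize_text, but given the imports visible above (Document, load_summarize_chain), a typical LangChain map_reduce summarization flow would look roughly like the sketch below. This is an illustration under those assumptions, not the file's actual implementation: the parametres.json values shown are guesses that simply mirror the previously hardcoded text-davinci-003 model and 1000-token limit, and CharacterTextSplitter is an assumed helper not present in the diff.

# Assumed contents of parametres.json (illustrative values only):
# {
#     "taille_max": 40000,
#     "modele": "text-davinci-003",
#     "summary_length": 1000
# }

import os
import json
from langchain.llms import OpenAI
from langchain.docstore.document import Document
from langchain.chains.summarize import load_summarize_chain
from langchain.text_splitter import CharacterTextSplitter

# Load the configuration the updated script expects
with open("parametres.json", "r") as p:
    params = json.load(p)

# LLM defined from the config, as in the added lines of the diff
llm = OpenAI(model_name=params["modele"], max_tokens=params["summary_length"],
             temperature=0, openai_api_key=os.environ["OpenaiKey"])

def summarize_text(text_to_summarize, llm):
    # Cut the input into ~4000-character chunks, the same sizing used for chunks_max
    splitter = CharacterTextSplitter(chunk_size=4000, chunk_overlap=0)
    docs = [Document(page_content=c) for c in splitter.split_text(text_to_summarize)]
    # map_reduce: summarize each chunk, then combine the partial summaries
    chain = load_summarize_chain(llm, chain_type="map_reduce")
    return chain.run(docs)

Reading modele and summary_length from the config lets the Space switch models or summary sizes without editing code; the added line also uses max_tokens, which is the keyword the LangChain OpenAI wrapper actually accepts, rather than the misspelled max_token in the removed line.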