replace streamlit experimental functions
app.py
CHANGED
@@ -29,7 +29,7 @@ st.markdown(f"#### Have a conversation with {BOOK_NAME} by {AUTHOR_NAME} π")
 
 
 ##### functionss ####
-@st.
+@st.cache_resource(show_spinner=False)
 def load_vectorstore():
     # download from hugging face
     cache_dir=f"{BOOK_NAME}_cache"
@@ -62,7 +62,7 @@ def load_vectorstore():
     return docsearch
 
 
-@st.
+@st.cache_data(show_spinner=False)
 def load_prompt(book_name, author_name):
     prompt_template = f"""You're an AI version of {AUTHOR_NAME}'s book '{BOOK_NAME}' and are supposed to answer quesions people have for the book. Thanks to advancements in AI people can now talk directly to books.
     People have a lot of questions after reading {BOOK_NAME}, you are here to answer them as you think the author {AUTHOR_NAME} would, using context from the book.
@@ -83,7 +83,7 @@ def load_prompt(book_name, author_name):
     return PROMPT
 
 
-@st.
+@st.cache_resource(show_spinner=False)
 def load_chain():
     llm = OpenAI(temperature=0.2)
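For context, here is a minimal sketch of the caching pattern this commit adopts. The removed decorators are truncated to "@st." in the diff above; given the commit title, they were presumably Streamlit's experimental caching decorators (st.experimental_singleton / st.experimental_memo), which Streamlit superseded with st.cache_resource and st.cache_data. The function bodies below are illustrative placeholders, not the app's real loading logic.

import streamlit as st

# st.cache_resource keeps one shared instance of an unserializable object
# (e.g. a vector store or an LLM chain) across reruns and sessions.
@st.cache_resource(show_spinner=False)
def load_vectorstore():
    return object()  # placeholder for the real vector store

# st.cache_data caches serializable return values, keyed by the arguments.
@st.cache_data(show_spinner=False)
def load_prompt(book_name, author_name):
    return f"You're an AI version of {author_name}'s book '{book_name}'."

The split matters: cache_resource suits global resources that should not be copied or re-created per session (the vector store and the chain), while cache_data suits plain serializable data such as the prompt string, which is why load_prompt gets cache_data and the other two functions get cache_resource.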