Spaces:
Sleeping
Sleeping
LVKinyanjui
committed on
Commit
•
c336e96
1
Parent(s):
6dcc394
Updated requirements and hf model
Browse files- app.py +7 -4
- examples/llama3.py +1 -1
- requirements.txt +1 -0
app.py
CHANGED
@@ -43,8 +43,11 @@ if file is not None:
|
|
43 |
include=["documents", ]
|
44 |
)
|
45 |
|
46 |
-
query_text = [" ".join([str(element) for element in inner_list])
|
47 |
-
for inner_list in query_results["documents"]][0]
|
48 |
-
|
49 |
st.write("Database Query Matches")
|
50 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
43 |
include=["documents", ]
|
44 |
)
|
45 |
|
|
|
|
|
|
|
46 |
st.write("Database Query Matches")
|
47 |
+
query_results
|
48 |
+
|
49 |
+
# query_text = [" ".join([str(element) for element in inner_list])
|
50 |
+
# for inner_list in query_results["documents"]][0]
|
51 |
+
|
52 |
+
# st.write("Database Query Matches")
|
53 |
+
# st.markdown(query_text)
|
examples/llama3.py
CHANGED
@@ -1,7 +1,7 @@
|
|
1 |
import transformers
|
2 |
import torch
|
3 |
|
4 |
-
model_id = "meta-llama/Meta-Llama-3-8B"
|
5 |
|
6 |
pipeline = transformers.pipeline(
|
7 |
"text-generation", model=model_id, model_kwargs={"torch_dtype": torch.bfloat16}, device_map="auto"
|
|
|
1 |
import transformers
|
2 |
import torch
|
3 |
|
4 |
+
model_id = "meta-llama/Meta-Llama-3.1-8B"
|
5 |
|
6 |
pipeline = transformers.pipeline(
|
7 |
"text-generation", model=model_id, model_kwargs={"torch_dtype": torch.bfloat16}, device_map="auto"
|
requirements.txt
CHANGED
@@ -1,3 +1,4 @@
|
|
1 |
chromadb==0.5.5
|
2 |
pymupdf==1.24.9
|
3 |
streamlit==1.38.0
|
|
|
|
1 |
chromadb==0.5.5
|
2 |
pymupdf==1.24.9
|
3 |
streamlit==1.38.0
|
4 |
+
transformers==4.44.2
|