LVKinyanjui committed on
Commit
67138c2
β€’
1 Parent(s): ec455f6

Attempted to add hf hub login for gated repo; cleaned up house

Browse files
app_inference.py CHANGED
@@ -3,6 +3,8 @@ import streamlit as st
3
  import transformers, torch
4
  import json, os
5
 
 
 
6
  # CONSTANTS
7
  MAX_NEW_TOKENS = 256
8
  SYSTEM_MESSAGE = "You are a helpful, knowledgeable assistant"
@@ -15,6 +17,9 @@ text_input = st.text_input("Query", value="Why is the sky Blue")
15
  submit = st.button("Submit")
16
 
17
  # MODEL AREA
 
 
 
18
  model_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"
19
 
20
  @st.cache_resource
 
3
  import transformers, torch
4
  import json, os
5
 
6
+ from huggingface_hub import HfApi
7
+
8
  # CONSTANTS
9
  MAX_NEW_TOKENS = 256
10
  SYSTEM_MESSAGE = "You are a helpful, knowledgeable assistant"
 
17
  submit = st.button("Submit")
18
 
19
  # MODEL AREA
20
+ # Use the token to authenticate
21
+ token = os.environ.get("HF_TOKEN_READ")
22
+ api = HfApi(token=token)
23
  model_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"
24
 
25
  @st.cache_resource
examples/{Contextual_RAG.ipynb β†’ techniques/Contextual_RAG.ipynb} RENAMED
File without changes
examples/{OLMoE_1B_7B.ipynb β†’ techniques/OLMoE_1B_7B.ipynb} RENAMED
File without changes
examples/{RAPTOR.ipynb β†’ techniques/RAPTOR.ipynb} RENAMED
File without changes
examples/{RAPTOR_llama_index.ipynb β†’ techniques/RAPTOR_llama_index.ipynb} RENAMED
File without changes
examples/{Zamba_2_1_2B.ipynb β†’ techniques/Zamba_2_1_2B.ipynb} RENAMED
File without changes
examples/{chromadb_.py β†’ techniques/chromadb_.py} RENAMED
File without changes
examples/{llama3.py β†’ techniques/llama3.py} RENAMED
File without changes
examples/{phi3.py β†’ techniques/phi3.py} RENAMED
File without changes
examples/{pymupdf_loader.py β†’ techniques/pymupdf_loader.py} RENAMED
File without changes
examples/{upsert_RAPTOR.ipynb β†’ techniques/upsert_RAPTOR.ipynb} RENAMED
File without changes
examples/tests/hf_login.py ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # from huggingface_hub import login
2
+ # login()
3
+
4
+ import transformers, torch
5
+
6
+ import os
7
+ from huggingface_hub import HfApi
8
+
9
+ # Get the token from the environment variable
10
+ token = os.environ.get("HF_TOKEN_READ")
11
+
12
+ print(token)
13
+
14
+ # Use the token to authenticate
15
+ api = HfApi(token=token)
16
+ model_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"
17
+
18
+ pipeline = transformers.pipeline(
19
+ "text-generation",
20
+ model=model_id,
21
+ model_kwargs={"torch_dtype": torch.bfloat16},
22
+ device_map="auto",
23
+ )
requirements.txt CHANGED
@@ -9,4 +9,5 @@ langchain-google-genai==2.0.0
9
  langchain-community==0.3.0
10
  python-dotenv==1.0.1
11
  tiktoken==0.7.0
12
- torch
 
 
9
  langchain-community==0.3.0
10
  python-dotenv==1.0.1
11
  tiktoken==0.7.0
12
+ torch
13
+ huggingface-hub==0.25.1