LVKinyanjui committed on
Commit
8cb18c2
1 Parent(s): 67138c2

Updated login procedure to hopefully make it work

Files changed (2):
  1. app_inference.py +3 -3
  2. examples/tests/hf_login.py +2 -2
app_inference.py CHANGED
@@ -3,7 +3,7 @@ import streamlit as st
 import transformers, torch
 import json, os
 
-from huggingface_hub import HfApi
+from huggingface_hub import login
 
 # CONSTANTS
 MAX_NEW_TOKENS = 256
@@ -18,8 +18,8 @@ submit = st.button("Submit")
 
 # MODEL AREA
 # Use the token to authenticate
-token = os.environ.get("HF_TOKEN_READ")
-api = HfApi(token=token)
+token = os.getenv("HF_TOKEN_READ")
+api = login(token=token)
 model_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"
 
 @st.cache_resource
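
For context, a minimal sketch of how the updated login flow in app_inference.py is likely meant to fit together. Only the login import, the HF_TOKEN_READ lookup, the login(token=token) call, model_id, and @st.cache_resource appear in the diff; the load_pipeline name, the if-token guard, and the pipeline arguments are assumptions based on the usual transformers text-generation setup.

    import os

    import streamlit as st
    import transformers, torch
    from huggingface_hub import login

    # Authenticate once at import time so transformers can download the gated
    # Llama weights. HF_TOKEN_READ is assumed to be a read-only token exposed
    # as an environment variable / Space secret.
    token = os.getenv("HF_TOKEN_READ")
    if token:
        login(token=token)

    MAX_NEW_TOKENS = 256
    model_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"

    @st.cache_resource
    def load_pipeline():
        # Cached so Streamlit reruns reuse the same pipeline instead of reloading.
        return transformers.pipeline(
            "text-generation",
            model=model_id,
            model_kwargs={"torch_dtype": torch.bfloat16},
            device_map="auto",
        )
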
examples/tests/hf_login.py CHANGED
@@ -1,4 +1,4 @@
-# from huggingface_hub import login
+from huggingface_hub import login
 # login()
 
 import transformers, torch
@@ -12,7 +12,7 @@ token = os.environ.get("HF_TOKEN_READ")
 print(token)
 
 # Use the token to authenticate
-api = HfApi(token=token)
+api = login(token=token)
 model_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"
 
 pipeline = transformers.pipeline(
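
One detail worth noting about the change in both files: huggingface_hub.login() validates and stores the token but returns None, so api = login(token=token) leaves api bound to None. A short sketch of the distinction; the whoami() check is an illustration, not part of the commit.

    import os

    from huggingface_hub import HfApi, login

    token = os.getenv("HF_TOKEN_READ")

    # login() registers the token for subsequent Hub downloads and returns None,
    # so there is nothing useful to assign from it.
    login(token=token)

    # For an actual client handle (e.g. to verify the token), HfApi is still needed.
    api = HfApi(token=token)
    print(api.whoami()["name"])  # raises if the token is missing or invalid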