Zimix committed
Commit: a0a2e08
Parent(s): 419cc4a

Update interaction.py

Files changed (1): interaction.py (+5 −5)
interaction.py CHANGED
@@ -8,19 +8,19 @@ import gradio as gr
 from transformers import AutoTokenizer, LlamaForCausalLM
 from utils import SteamGenerationMixin
 
-auth_token = os.getenv("AUTH_TOKEN")
-print('^_^ auth_token:',os.getenv("AUTH_TOKEN"),'!!!!!!!!!!')
+auth_token = os.getenv("Zimix")
+print('^_^ auth_token:',os.getenv("Zimix"),'!!!!!!!!!!')
 print('^_^:secret_token',os.getenv("SECRET_TOKEN"),'!!!!!!!!!!')
 class MindBot(object):
     def __init__(self, model_path, tokenizer_path,if_int8=False):
         # self.device = torch.device("cuda")
         # device_ids = [1, 2]
         if if_int8:
-            self.model = SteamGenerationMixin.from_pretrained(model_path, device_map='auto', load_in_8bit=True,use_auth_token='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').eval()
+            self.model = SteamGenerationMixin.from_pretrained(model_path, device_map='auto', load_in_8bit=True,use_auth_token=auth_token).eval()
         else:
-            self.model = SteamGenerationMixin.from_pretrained(model_path, device_map='auto',use_auth_token='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').half().eval()
+            self.model = SteamGenerationMixin.from_pretrained(model_path, device_map='auto',use_auth_token=auth_token).half().eval()
 
-        self.tokenizer = AutoTokenizer.from_pretrained(tokenizer_path,use_auth_token='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')
+        self.tokenizer = AutoTokenizer.from_pretrained(tokenizer_path,use_auth_token=auth_token)
         # sp_tokens = {'additional_special_tokens': ['<human>', '<bot>']}
         # self.tokenizer.add_special_tokens(sp_tokens)
         self.history = []
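The substance of the commit: the Hugging Face access token, previously hardcoded in all three from_pretrained calls, is now read once from the environment variable "Zimix" and passed through as use_auth_token. A minimal sketch of the resulting pattern, assuming the Space defines a secret named Zimix holding a valid token; the guard clause and the placeholder repo id are illustrative additions, not part of this repo:

import os
from transformers import AutoTokenizer

# os.getenv returns None when the "Zimix" secret is unset, so fail fast
# with a clear message instead of an opaque authentication error later.
auth_token = os.getenv("Zimix")
if auth_token is None:
    raise RuntimeError("Set the 'Zimix' secret to a Hugging Face access token")

# Hypothetical private repo id; the real code passes tokenizer_path here.
tokenizer = AutoTokenizer.from_pretrained("org/private-model", use_auth_token=auth_token)

Reading the token from a secret keeps it out of the source tree; note that the previously hardcoded token is still visible in earlier commits of the git history.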