hh2017 committed on
Commit
2f2269a
1 Parent(s): 6765249

Correct model name, and add use_safetensors=False in from_quantized

Files changed (1)
  1. web_quant.py +2 -2
web_quant.py CHANGED
@@ -9,7 +9,7 @@ import gradio as gr
 from threading import Thread
 
 def load_model(model_name):
-    model = AutoGPTQForCausalLM.from_quantized(model_name, device_map="auto")
+    model = AutoGPTQForCausalLM.from_quantized(model_name, device_map="auto", use_safetensors=False)
     tokenizer = AutoTokenizer.from_pretrained(model_name, padding_side="right", use_fast=False)
     return model, tokenizer
 
@@ -108,6 +108,6 @@ def main(args):
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
-    parser.add_argument("--model-name", type=str, default="FreedomIntelligence/AceGPT-7B-chat-GPTQ")
+    parser.add_argument("--model-name", type=str, default="FreedomIntelligence/AceGPT-7b-chat-GPTQ")
     args = parser.parse_args()
     main(args)
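
For reference, a minimal runnable sketch of the patched load_model path after this commit, assuming the auto_gptq and transformers packages are installed. The standalone __main__ block here only loads the model and prints a confirmation; it is illustrative and does not reproduce the gradio app wired up by main() in web_quant.py.

import argparse

from auto_gptq import AutoGPTQForCausalLM
from transformers import AutoTokenizer


def load_model(model_name):
    # use_safetensors=False tells auto_gptq not to look for a .safetensors checkpoint
    # (assumption: this repo stores the quantized weights as a .bin file).
    model = AutoGPTQForCausalLM.from_quantized(model_name, device_map="auto", use_safetensors=False)
    tokenizer = AutoTokenizer.from_pretrained(model_name, padding_side="right", use_fast=False)
    return model, tokenizer


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # Corrected repo id from the commit: lowercase "7b" in AceGPT-7b-chat-GPTQ.
    parser.add_argument("--model-name", type=str, default="FreedomIntelligence/AceGPT-7b-chat-GPTQ")
    args = parser.parse_args()
    model, tokenizer = load_model(args.model_name)
    print(f"Loaded {args.model_name}: {type(model).__name__}, {type(tokenizer).__name__}")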