loubnabnl HF staff committed on
Commit
371a716
1 Parent(s): 3746488

use 1B checkpoint instead of 6B

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -9,8 +9,8 @@ example = [
     ["def count_words(filename):", 40, 0.6, 42],
     ["def print_hello_world():", 8, 0.6, 42],
     ["def get_file_size(filepath):", 22, 0.6, 42]]
-tokenizer = AutoTokenizer.from_pretrained("facebook/incoder-6B")
-model = AutoModelForCausalLM.from_pretrained("facebook/incoder-6B", low_cpu_mem_usage=True)
+tokenizer = AutoTokenizer.from_pretrained("facebook/incoder-1B")
+model = AutoModelForCausalLM.from_pretrained("facebook/incoder-1B", low_cpu_mem_usage=True)
 
 
 MAX_LENGTH = 2048
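
For context, a minimal sketch (not part of the commit) of how the swapped-in facebook/incoder-1B checkpoint is loaded and exercised, mirroring the two changed lines in app.py; the prompt and generation settings below are illustrative assumptions taken from one of the example rows, not code from the repository.

# Sketch: load the 1B checkpoint as in app.py and sample a short completion.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("facebook/incoder-1B")
model = AutoModelForCausalLM.from_pretrained("facebook/incoder-1B", low_cpu_mem_usage=True)

# Prompt, token budget, and temperature follow the example row
# ["def print_hello_world():", 8, 0.6, 42]; these values are assumptions here.
prompt = "def print_hello_world():"
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, do_sample=True, temperature=0.6, max_new_tokens=8)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))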