valhalla committed
Commit d2fde4c
1 Parent(s): a7a3d34

add flax model

Files changed (3)
  1. .gitattributes +1 -0
  2. config.json +6 -0
  3. flax_model.msgpack +3 -0
.gitattributes CHANGED
@@ -6,3 +6,4 @@
  *.tar.gz filter=lfs diff=lfs merge=lfs -text
  *.ot filter=lfs diff=lfs merge=lfs -text
  *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
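
The new rule routes *.msgpack files through Git LFS, which the ~460 MB flax_model.msgpack added in this commit needs. A minimal sketch of how such a file is produced, assuming flax.serialization as the serializer; the parameter tree below is illustrative, not the real checkpoint:

import jax.numpy as jnp
from flax import serialization

# Illustrative parameter tree; a real checkpoint holds the full model weights.
params = {"dense": {"kernel": jnp.ones((1024, 1024)), "bias": jnp.zeros((1024,))}}

# flax serializes the tree into a single binary msgpack blob, which is why
# the pattern is tracked with git-lfs rather than stored as plain text.
with open("flax_model.msgpack", "wb") as f:
    f.write(serialization.to_bytes(params))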
config.json CHANGED
@@ -10,6 +10,7 @@
  "attention_dropout": 0.0,
  "bos_token_id": 0,
  "classif_dropout": 0.0,
+ "classifier_dropout": 0.0,
  "d_model": 1024,
  "decoder_attention_heads": 16,
  "decoder_ffn_dim": 4096,
@@ -25,6 +26,9 @@
  "eos_token_id": 2,
  "extra_pos_embeddings": 2,
  "force_bos_token_to_be_generated": true,
+ "forced_bos_token_id": 0,
+ "forced_eos_token_id": 2,
+ "gradient_checkpointing": false,
  "id2label": {
  "0": "LABEL_0",
  "1": "LABEL_1",
@@ -65,5 +69,7 @@
  "num_beams": 4
  }
  },
+ "transformers_version": "4.7.0.dev0",
+ "use_cache": true,
  "vocab_size": 50264
  }
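
The added keys are standard generation and runtime settings. A minimal sketch of a config carrying them, assuming the surrounding fields describe a BART-style model (d_model 1024, vocab_size 50264); only values visible in the diff are reproduced:

from transformers import BartConfig

# Assumption: the config above is BART-shaped; only a subset of fields is shown.
config = BartConfig(
    d_model=1024,
    decoder_attention_heads=16,
    decoder_ffn_dim=4096,
    vocab_size=50264,
    forced_bos_token_id=0,    # token forced as the first generated token
    forced_eos_token_id=2,    # token forced when max_length is reached
    use_cache=True,           # reuse past key/values during decoding
)
print(config.forced_bos_token_id, config.forced_eos_token_id)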
flax_model.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e21ed77f98ac394a17a832a9da673334febf3c1acf0d8eee887be8eb250c30d5
+ size 459975830
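
A minimal sketch of consuming the new checkpoint, assuming a transformers install with Flax support and a BART-style seq2seq model (suggested by the config fields above). "<repo-id>" is a placeholder, since the commit page does not name the repository:

from transformers import FlaxBartForConditionalGeneration

# "<repo-id>" is a hypothetical placeholder for this model repository.
model = FlaxBartForConditionalGeneration.from_pretrained("<repo-id>")

# from_pretrained fetches flax_model.msgpack and deserializes the
# msgpack-encoded parameter tree into model.params.
print(type(model.params))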