ybelkada committed on
Commit
6aa5e38
1 Parent(s): 2d879a0

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +4 -4
config.json CHANGED
@@ -25,7 +25,7 @@
25
  "dense_act_fn": "gelu_new",
26
  "diversity_penalty": 0.0,
27
  "do_sample": false,
28
- "dropout_rate": 0.1,
29
  "early_stopping": false,
30
  "encoder_hidden_size": 768,
31
  "encoder_no_repeat_ngram_size": 0,
@@ -95,7 +95,7 @@
95
  "_name_or_path": "",
96
  "add_cross_attention": false,
97
  "architectures": null,
98
- "attention_dropout": 0.0,
99
  "bad_words_ids": null,
100
  "begin_suppress_tokens": null,
101
  "bos_token_id": null,
@@ -107,7 +107,7 @@
107
  "dense_act_fn": "gelu_new",
108
  "diversity_penalty": 0.0,
109
  "do_sample": false,
110
- "dropout_rate": 0.0,
111
  "early_stopping": false,
112
  "encoder_no_repeat_ngram_size": 0,
113
  "eos_token_id": null,
@@ -115,7 +115,7 @@
115
  "finetuning_task": null,
116
  "forced_bos_token_id": null,
117
  "forced_eos_token_id": null,
118
- "hidden_dropout_prob": 0.0,
119
  "hidden_size": 768,
120
  "id2label": {
121
  "0": "LABEL_0",
 
25
  "dense_act_fn": "gelu_new",
26
  "diversity_penalty": 0.0,
27
  "do_sample": false,
28
+ "dropout_rate": 0.2,
29
  "early_stopping": false,
30
  "encoder_hidden_size": 768,
31
  "encoder_no_repeat_ngram_size": 0,
 
95
  "_name_or_path": "",
96
  "add_cross_attention": false,
97
  "architectures": null,
98
+ "attention_dropout": 0.2,
99
  "bad_words_ids": null,
100
  "begin_suppress_tokens": null,
101
  "bos_token_id": null,
 
107
  "dense_act_fn": "gelu_new",
108
  "diversity_penalty": 0.0,
109
  "do_sample": false,
110
+ "dropout_rate": 0.2,
111
  "early_stopping": false,
112
  "encoder_no_repeat_ngram_size": 0,
113
  "eos_token_id": null,
 
115
  "finetuning_task": null,
116
  "forced_bos_token_id": null,
117
  "forced_eos_token_id": null,
118
+ "hidden_dropout_prob": 0.2,
119
  "hidden_size": 768,
120
  "id2label": {
121
  "0": "LABEL_0",