{
  "attn_implementation": "flash_attention_2",
  "bos_token_id": 151643,
  "cache_config": null,
  "cache_implementation": null,
  "do_sample": true,
  "dola_layers": null,
  "eos_token_id": [
    151645,
    151643
  ],
  "max_matching_ngram_size": null,
  "min_p": null,
  "output_logits": null,
  "pad_token_id": 151643,
  "repetition_penalty": 1.05,
  "return_legacy_cache": true,
  "stop_strings": null,
  "temperature": 0.7,
  "token_healing": false,
  "top_k": 20,
  "top_p": 0.8,
  "transformers_version": "4.37.2",
  "watermarking_config": null
}