{ "attn_implementation": "flash_attention_2", "bos_token_id": 151643, "cache_config": null, "cache_implementation": null, "do_sample": true, "dola_layers": null, "eos_token_id": [ 151645, 151643 ], "max_matching_ngram_size": null, "min_p": null, "output_logits": null, "pad_token_id": 151643, "repetition_penalty": 1.05, "return_legacy_cache": true, "stop_strings": null, "temperature": 0.7, "token_healing": false, "top_k": 20, "top_p": 0.8, "transformers_version": "4.37.2", "watermarking_config": null }