{ "_name_or_path": "/root/prot-nla/data/experiments/hf_output/checkpoint-3500", "architectures": [ "EncoderDecoderModel" ], "decoder": { "_name_or_path": "", "add_cross_attention": true, "architectures": null, "attention_probs_dropout_prob": 0.1, "bad_words_ids": null, "begin_suppress_tokens": null, "bos_token_id": 0, "chunk_size_feed_forward": 0, "cross_attention_hidden_size": null, "decoder_start_token_id": 0, "diversity_penalty": 0.0, "do_sample": false, "early_stopping": false, "embedding_size": 512, "encoder_no_repeat_ngram_size": 0, "eos_token_id": null, "exponential_decay_length_penalty": null, "finetuning_task": null, "forced_bos_token_id": null, "forced_eos_token_id": null, "hidden_act": "silu", "hidden_dropout_prob": 0.1, "hidden_size": 512, "id2label": { "0": "LABEL_0", "1": "LABEL_1" }, "initializer_range": 0.02, "intermediate_size": 1024, "is_decoder": true, "is_encoder_decoder": false, "label2id": { "LABEL_0": 0, "LABEL_1": 1 }, "layer_norm_eps": 1e-12, "length_penalty": 1.0, "max_length": 20, "max_position_embeddings": 1536, "min_length": 0, "model_type": "roformer", "no_repeat_ngram_size": 0, "num_attention_heads": 8, "num_beam_groups": 1, "num_beams": 1, "num_hidden_layers": 12, "num_return_sequences": 1, "output_attentions": false, "output_hidden_states": false, "output_scores": false, "pad_token_id": 2, "prefix": null, "problem_type": null, "pruned_heads": {}, "remove_invalid_values": false, "repetition_penalty": 1.0, "return_dict": true, "return_dict_in_generate": false, "rotary_value": false, "sep_token_id": null, "suppress_tokens": null, "task_specific_params": null, "temperature": 1.0, "tf_legacy_loss": false, "tie_encoder_decoder": false, "tie_word_embeddings": true, "tokenizer_class": null, "top_k": 50, "top_p": 1.0, "torch_dtype": null, "torchscript": false, "type_vocab_size": 2, "typical_p": 1.0, "use_bfloat16": false, "use_cache": true, "vocab_size": 42316 }, "decoder_start_token_id": 0, "encoder": { "_name_or_path": "khairi/esm2_t12_35M_UR50D", "add_cross_attention": false, "architectures": [ "EsmModel" ], "attention_probs_dropout_prob": 0.0, "bad_words_ids": null, "begin_suppress_tokens": null, "bos_token_id": null, "chunk_size_feed_forward": 0, "classifier_dropout": null, "cross_attention_hidden_size": null, "decoder_start_token_id": null, "diversity_penalty": 0.0, "do_sample": false, "early_stopping": false, "emb_layer_norm_before": false, "encoder_no_repeat_ngram_size": 0, "eos_token_id": null, "esmfold_config": null, "exponential_decay_length_penalty": null, "finetuning_task": null, "forced_bos_token_id": null, "forced_eos_token_id": null, "hidden_act": "gelu", "hidden_dropout_prob": 0.0, "hidden_size": 480, "id2label": { "0": "LABEL_0", "1": "LABEL_1" }, "initializer_range": 0.02, "intermediate_size": 1920, "is_decoder": false, "is_encoder_decoder": false, "is_folding_model": false, "label2id": { "LABEL_0": 0, "LABEL_1": 1 }, "layer_norm_eps": 1e-05, "length_penalty": 1.0, "mask_token_id": 32, "max_length": 20, "max_position_embeddings": 1026, "min_length": 0, "model_type": "esm", "no_repeat_ngram_size": 0, "num_attention_heads": 20, "num_beam_groups": 1, "num_beams": 1, "num_hidden_layers": 12, "num_return_sequences": 1, "output_attentions": false, "output_hidden_states": false, "output_scores": false, "pad_token_id": 1, "position_embedding_type": "rotary", "prefix": null, "problem_type": null, "pruned_heads": {}, "remove_invalid_values": false, "repetition_penalty": 1.0, "return_dict": true, "return_dict_in_generate": false, "sep_token_id": null, 
"suppress_tokens": null, "task_specific_params": null, "temperature": 1.0, "tf_legacy_loss": false, "tie_encoder_decoder": false, "tie_word_embeddings": true, "token_dropout": true, "tokenizer_class": null, "top_k": 50, "top_p": 1.0, "torch_dtype": "float32", "torchscript": false, "typical_p": 1.0, "use_bfloat16": false, "use_cache": true, "vocab_list": null, "vocab_size": 33 }, "eos_token_id": 1, "is_encoder_decoder": true, "model_type": "encoder-decoder", "pad_token_id": 2, "torch_dtype": "float32", "transformers_version": "4.40.1", "vocab_size": 42316 }