gliner-multitask-large-v0.5 / gliner_config.json
{
"model_name": "microsoft/deberta-v3-large",
"name": "token level gliner large",
"max_width": 100,
"hidden_size": 512,
"dropout": 0.1,
"fine_tune": true,
"subtoken_pooling": "first",
"span_mode": "token_level",
"num_steps": 6000,
"train_batch_size": 8,
"eval_every": 1000,
"warmup_ratio": 0.1,
"scheduler_type": "linear",
"loss_alpha": 0.75,
"loss_gamma": 0,
"loss_reduction": "sum",
"lr_encoder": "5e-6",
"lr_others": "7e-6",
"weight_decay_encoder": 0.01,
"weight_decay_other": 0.01,
"root_dir": "gliner_logs",
"train_data": "../../data/unie_ner1.json",
"val_data_dir": "none",
"prev_path": "logs_large/model_120000",
"save_total_limit": 10,
"size_sup": -1,
"max_types": 30,
"shuffle_types": true,
"random_drop": true,
"max_neg_type_ratio": 1,
"max_len": 768,
"freeze_token_rep": false,
"log_dir": "logs_final"
}
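
For reference, below is a minimal sketch of how a checkpoint trained with this configuration is typically used through the gliner Python package for zero-shot NER. The repository id, example text, and label set are illustrative assumptions, not values taken from this config.

# pip install gliner
from gliner import GLiNER

# Hypothetical repository id -- substitute the actual Hugging Face repo
# that hosts this gliner_config.json and its weights.
model = GLiNER.from_pretrained("knowledgator/gliner-multitask-large-v0.5")

# Example input and entity types are illustrative only; GLiNER accepts
# an arbitrary label set at inference time.
text = "Ada Lovelace wrote the first algorithm for the Analytical Engine in London."
labels = ["person", "location", "machine"]

# predict_entities performs zero-shot extraction for the given labels;
# threshold filters out low-confidence spans.
entities = model.predict_entities(text, labels, threshold=0.5)
for ent in entities:
    print(ent["text"], "=>", ent["label"])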