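# Training config for a bi-encoder ("biidefics2") retrieval model built on
# HuggingFaceM4/idefics2-8b-chatty, fine-tuned on DocVQA with 4-bit
# quantization and LoRA adapters. In the colpali-engine config loader,
# `()` names the class to instantiate with the sibling keys as kwargs,
# `!path` resolves a filesystem path, and `!ext` resolves a Python object.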
config:
  (): colpali_engine.utils.train_colpali_engine_models.ColModelTrainingConfig
  output_dir: !path ../../../models/biidefics2-8b-chatty
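  # Processor: wraps AutoProcessor for the base idefics2-8b checkpoint.
  # Image splitting is disabled, so each page stays a single image
  # (fewer image tokens per document than with sub-image splitting).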
  processor:
    (): colpali_engine.utils.wrapper.AutoProcessorWrapper
    pretrained_model_name_or_path: "HuggingFaceM4/idefics2-8b"
    do_image_splitting: false
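  # Model: idefics2-8b-chatty loaded through AutoColModelWrapper in
  # bfloat16, with a bi-encoder training objective (pooled embeddings,
  # in contrast to a ColBERT-style late-interaction objective).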
  model:
    (): colpali_engine.utils.wrapper.AutoColModelWrapper
    pretrained_model_name_or_path: "HuggingFaceM4/idefics2-8b-chatty"
    training_objective: "biencoder"
    # attn_implementation: "flash_attention_2"
    torch_dtype: !ext torch.bfloat16
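    # QLoRA-style setup: the frozen base weights are stored in 4-bit NF4
    # with double quantization, while the LoRA adapters below train in
    # higher precision. bnb_4bit_compute_dtype is the dtype used for the
    # dequantized matmuls (float16 here, independent of torch_dtype above).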
    quantization_config:
      (): transformers.BitsAndBytesConfig
      load_in_4bit: true
      bnb_4bit_quant_type: "nf4"
      bnb_4bit_compute_dtype: "float16"
      bnb_4bit_use_double_quant: true
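  # Data and objective: DocVQA is loaded through a colpali-engine helper;
  # BiEncoderLoss is an in-batch contrastive loss over the pooled
  # query/document embeddings.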
  dataset_loading_func: !ext colpali_engine.utils.dataset_transformation.load_docvqa_dataset
  max_length: 256
  run_eval: true
  loss_func:
    (): colpali_engine.loss.colbert_loss.BiEncoderLoss
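  # Trainer arguments. Effective batch size is 4 (per device) x 8
  # (gradient accumulation) = 32 examples per optimizer step, per device.
  # output_dir is null here, presumably superseded by the top-level
  # output_dir above.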
  tr_args:
    (): transformers.training_args.TrainingArguments
    output_dir: null
    overwrite_output_dir: true
    num_train_epochs: 3
    per_device_train_batch_size: 4
    gradient_accumulation_steps: 8
    per_device_eval_batch_size: 4
    eval_strategy: "steps"
    dataloader_num_workers: 8
    # bf16: true
    save_steps: 500
    logging_steps: 10
    eval_steps: 50
    warmup_steps: 100
    learning_rate: 5e-5
    save_total_limit: 1
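  # LoRA adapters (r=8, alpha=8, gaussian init) on the attention and MLP
  # projection layers of the text model, the modality projection, and the
  # perceiver resampler; the regex leaves the vision encoder untouched.
  # FEATURE_EXTRACTION matches the embedding-style output of the model.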
  peft_config:
    (): peft.LoraConfig
    r: 8
    lora_alpha: 8
    lora_dropout: 0.1
    init_lora_weights: "gaussian"
    bias: "none"
    task_type: "FEATURE_EXTRACTION"
    target_modules: '.*(text_model|modality_projection|perceiver_resampler).*(down_proj|gate_proj|up_proj|k_proj|q_proj|v_proj|o_proj).*$'
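# A config like this is consumed by colpali-engine's training entry point,
# e.g. (the script path is an assumption; check your checkout):
#   python scripts/train/train_colbert.py path/to/this_config.yaml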