```yaml
config:
  (): colpali_engine.utils.train_colpali_engine_models.ColModelTrainingConfig
  model:
    (): colpali_engine.utils.wrapper.AutoColModelWrapper
    pretrained_model_name_or_path: "HuggingFaceM4/tiny-random-LlamaForCausalLM"
    # attn_implementation: "eager"
    quantization_config:
      (): transformers.BitsAndBytesConfig
      load_in_4bit: true
      bnb_4bit_quant_type: "nf4"
      bnb_4bit_compute_dtype: "float16"
      bnb_4bit_use_double_quant: true

  dataset_loading_func: !ext colpali_engine.utils.dataset_transformation.load_manu_embeddings
  loss_func:
    (): colpali_engine.loss.colbert_loss.ColbertLoss
  max_length: 256
  run_eval: true
  add_suffix: true

  tr_args:
    (): transformers.training_args.TrainingArguments
    output_dir: null
    overwrite_output_dir: true
    num_train_epochs: 1
    per_device_train_batch_size: 64
    gradient_accumulation_steps: 2
    per_device_eval_batch_size: 8
    dataloader_num_workers: 8
    bf16: false
    save_steps: 500
    logging_steps: 10
    warmup_steps: 500
    learning_rate: 5e-5
    save_total_limit: 1
    optim: "paged_adamw_8bit"

  peft_config:
    (): peft.LoraConfig
    r: 16
    lora_alpha: 32
    lora_dropout: 0.05
    bias: "none"
    task_type: "FEATURE_EXTRACTION"
    target_modules:
      - 'up_proj'
      - 'down_proj'
      - 'gate_proj'
      - 'k_proj'
      - 'q_proj'
      - 'v_proj'
      - 'o_proj'
```
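
The `()` keys appear to follow colpali_engine's YAML-loading convention, where each value names the Python class to instantiate with the sibling keys as keyword arguments. As a minimal sketch of what the quantization, training, and LoRA sections above resolve to, the following constructs the three library objects directly; the parameter values are copied verbatim from the config, while the loader wiring (and the `output_dir` placeholder, since the YAML leaves it `null` for the training wrapper to fill in) is an assumption:

```python
# Minimal sketch: the library objects the `()` keys above point to.
from transformers import BitsAndBytesConfig, TrainingArguments
from peft import LoraConfig

quantization_config = BitsAndBytesConfig(
    load_in_4bit=True,                 # 4-bit NF4 base weights (QLoRA-style)
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype="float16",  # matmuls in fp16; bf16 is off below
    bnb_4bit_use_double_quant=True,    # also quantize the quantization constants
)

tr_args = TrainingArguments(
    output_dir="./out",                # hypothetical; the YAML sets this to null
    overwrite_output_dir=True,
    num_train_epochs=1,
    per_device_train_batch_size=64,
    gradient_accumulation_steps=2,     # effective batch of 128 per device
    per_device_eval_batch_size=8,
    dataloader_num_workers=8,
    bf16=False,
    save_steps=500,
    logging_steps=10,
    warmup_steps=500,
    learning_rate=5e-5,
    save_total_limit=1,                # keep only the most recent checkpoint
    optim="paged_adamw_8bit",          # paged 8-bit AdamW from bitsandbytes
)

peft_config = LoraConfig(
    r=16,
    lora_alpha=32,
    lora_dropout=0.05,
    bias="none",
    task_type="FEATURE_EXTRACTION",
    target_modules=[
        "up_proj", "down_proj", "gate_proj",      # MLP projections
        "k_proj", "q_proj", "v_proj", "o_proj",   # attention projections
    ],
)
```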
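For context, these objects combine in the standard QLoRA recipe: load the base model in 4-bit and attach LoRA adapters to the listed projection layers so only the adapter weights train. The sketch below uses only stock transformers/peft calls; colpali_engine's `AutoColModelWrapper` adds its own retrieval head on top of the base model, which is not reproduced here:

```python
from transformers import AutoModelForCausalLM
from peft import get_peft_model, prepare_model_for_kbit_training

base = AutoModelForCausalLM.from_pretrained(
    "HuggingFaceM4/tiny-random-LlamaForCausalLM",
    quantization_config=quantization_config,  # from the sketch above
    device_map="auto",
)
base = prepare_model_for_kbit_training(base)  # cast norms, enable input grads
model = get_peft_model(base, peft_config)     # attach LoRA to target_modules
model.print_trainable_parameters()            # only the LoRA weights are trainable
```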