from ola_vlm.train.sherlock_dsg_train import train
import torch.multiprocessing as mp

if __name__ == "__main__":
    # Use the "spawn" start method if worker processes need CUDA (disabled for now).
    # mp.set_start_method("spawn")

    # flash_attention_2 requires the flash-attn package to be installed;
    # fall back to the default eager attention implementation instead.
    # train(attn_implementation="flash_attention_2")
    train(attn_implementation="eager")