sotediffusion-wuerstchen3 / dataset / kohya-command-base.sh
#!/bin/sh
# Positional arguments: run tag and dataset subset for this run,
# plus the run tag and subset of the previous run to resume from.
CURRENT=$1
CURRENT_SUB=$2
PAST=$3
PAST_SUB=$4

# Output, dataset, sample-prompt, and pretrained-model paths.
OUT_PATH="/home/ubuntu/out/sotediffusion-wr3_3b"
DATASET_PATH="/home/ubuntu/dataset/combined"
PROMPT_PATH="/home/ubuntu/sotediffusion-prompt.txt"
WUER_PATH="/home/ubuntu/models"
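
# Example invocation (illustrative values only; the actual run tags and subset
# names used for SoteDiffusion are not documented in this file):
#   ./kohya-command-base.sh 4 0 3 7
# trains on /home/ubuntu/dataset/combined-0 with captions from combined-0.json,
# writes checkpoints and logs under /home/ubuntu/out/sotediffusion-wr3_3b-4/0,
# and resumes from the state and checkpoints saved under
# /home/ubuntu/out/sotediffusion-wr3_3b-3/7.

# Launch multi-GPU bf16 training of the Stage C prior and its text encoder
# with kohya-ss sd-scripts' stable_cascade_train_stage_c.py.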
accelerate launch --mixed_precision bf16 --multi_gpu stable_cascade_train_stage_c.py \
--mixed_precision bf16 \
--save_precision fp16 \
--sdpa \
--ddp_static_graph \
--train_text_encoder \
--resolution "1024,1024" \
--train_batch_size 16 \
--gradient_checkpointing \
--learning_rate 1e-5 \
--learning_rate_te1 4e-6 \
--lr_scheduler constant_with_warmup \
--lr_warmup_steps 100 \
--optimizer_type adafactor \
--optimizer_args "scale_parameter=False" "relative_step=False" "warmup_init=False" \
--token_warmup_min 1 \
--token_warmup_step 0 \
--shuffle_caption \
--caption_separator ", " \
--caption_dropout_rate 0 \
--caption_tag_dropout_rate 0 \
--caption_dropout_every_n_epochs 0 \
--dataset_repeats 1 \
--save_state \
--save_every_n_steps 1024 \
--sample_every_n_steps 512 \
--max_token_length 225 \
--max_train_epochs 1 \
--caption_extension ".txt" \
--max_data_loader_n_workers 16 \
--persistent_data_loader_workers \
--enable_bucket \
--min_bucket_reso 256 \
--max_bucket_reso 4096 \
--bucket_reso_steps 64 \
--bucket_no_upscale \
--log_with tensorboard \
--output_name sotediffusion-wr3_3b \
--train_data_dir "${DATASET_PATH}-${CURRENT_SUB}" \
--in_json "${DATASET_PATH}-${CURRENT_SUB}.json" \
--output_dir "${OUT_PATH}-${CURRENT}/${CURRENT_SUB}" \
--logging_dir "${OUT_PATH}-${CURRENT}/${CURRENT_SUB}/logs" \
--resume "${OUT_PATH}-${PAST}/${PAST_SUB}/sotediffusion-wr3_3b-state" \
--stage_c_checkpoint_path "${OUT_PATH}-${PAST}/${PAST_SUB}/sotediffusion-wr3_3b.safetensors" \
--text_model_checkpoint_path "${OUT_PATH}-${PAST}/${PAST_SUB}/sotediffusion-wr3_3b_text_model.safetensors" \
--effnet_checkpoint_path "${WUER_PATH}/effnet_encoder.safetensors" \
--previewer_checkpoint_path "${WUER_PATH}/previewer.safetensors" \
--sample_prompts "${PROMPT_PATH}"