Upload batch_lora_quickpod.sh
batch_lora_quickpod.sh +48 -0
ADDED
@@ -0,0 +1,48 @@
+#!/bin/bash
+
+input_file="/root/kohya_ss/remaining.txt"
+
+while IFS=' ' read -r word1 word2 word3 word4 word5; do
+    echo "test"
+    if [[ -z $word1 || -z $word2 || -z $word3 || -z $word4 || -z $word5 ]]; then
+        break
+    fi
+    echo "Processing $word3"
+
+    # Change directory to the specific path, use quotes to handle spaces in filenames
+    cd /root/kohya_ss/dataset/instance/* || exit
+    rm -f * || exit  # Use -f to force removal and avoid prompts
+
+    wget "$word1" || exit
+    unzip -o *.zip || exit  # Use -o to overwrite files without prompting
+    rm -f *.zip || exit
+
+    # Loop through each text file in the directory
+    for file in ./*.txt; do
+        # Check if the file exists
+        if [[ -f "$file" ]]; then
+            # Use sed to remove the string and save the changes back to the file
+            sed -i "s/$word2//g" "$file"
+            echo "Processed file: $file"
+
+            # Remove leading spaces from each line in the file
+            sed -i 's/^ *//' "$file"
+            echo "Removed leading spaces from file: $file"
+        fi
+    done
+    file_count_png=$(ls -l /root/kohya_ss/dataset/instance/*/*.png | wc -l)
+    file_count_jpg=$(ls -l /root/kohya_ss/dataset/instance/*/*.jpg | wc -l)
+    file_count_jpeg=$(ls -l /root/kohya_ss/dataset/instance/*/*.jpeg | wc -l)
+    file_count=$((file_count_png + file_count_jpg + file_count_jpeg))
+    echo ${file_count}
+    # Run the Python script with debug output
+    cd /root/kohya_ss
+    echo "Starting Python script..."
+    accelerate launch --num_cpu_threads_per_process=8 "./sdxl_train_network.py" --enable_bucket --min_bucket_reso=256 --max_bucket_reso=2048 --pretrained_model_name_or_path="/root/kohya_ss/models/animagine-xl-3.1.safetensors" --train_data_dir="/root/kohya_ss/dataset/instance" --resolution="1280,1280" --output_dir="/root/kohya_ss/outputs" --logging_dir="/root/kohya_ss/logs" --network_alpha="48" --save_model_as=safetensors --network_module=networks.lora --network_args rank_dropout="0.1" module_dropout="0.1" --network_dim=96 --output_name=${word3} --lr_scheduler_num_cycles=$((word4)) --scale_weight_norms="5" --network_dropout="0.1" --no_half_vae --full_bf16 --learning_rate="8e-05" --lr_scheduler="constant" --train_batch_size="2" --max_train_steps=$((word4 * file_count)) --save_every_n_epochs=$((word5)) --mixed_precision="bf16" --save_precision="bf16" --seed="6969" --caption_extension=".txt" --cache_latents --optimizer_type="AdamW8bit" --max_grad_norm="1" --max_data_loader_n_workers="0" --max_token_length=225 --caption_dropout_rate="0.08" --bucket_reso_steps=64 --gradient_checkpointing --xformers --bucket_no_upscale --noise_offset=0.01 --adaptive_noise_scale=0.001
+    echo "Done Python script..."
+
+    cd /notebooks/kohya_ss || exit
+
+done < "$input_file"
+
+
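For context, the loop reads five whitespace-separated fields per line from /root/kohya_ss/remaining.txt. The field meanings below are inferred from how the script uses them: word1 is fetched with wget as the dataset zip, word2 is stripped from every .txt caption with sed, word3 becomes --output_name, word4 feeds --lr_scheduler_num_cycles and scales --max_train_steps (word4 * file_count), and word5 sets --save_every_n_epochs. The sketch is a hypothetical example; the URLs, trigger strings, names, and numbers are placeholders, not part of the upload.

# Hypothetical remaining.txt; each line drives one LoRA training run.
# Fields: <dataset_zip_url> <caption_string_to_strip> <output_name> <cycles/steps_multiplier> <save_every_n_epochs>
cat > /root/kohya_ss/remaining.txt <<'EOF'
https://example.com/datasets/character_a.zip trigger_a character_a_lora 10 2
https://example.com/datasets/character_b.zip trigger_b character_b_lora 10 2
EOF

With the file in place, the batch runs end to end via bash batch_lora_quickpod.sh; the loop stops early (break) at the first line that does not provide all five fields.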