Commit 704fa4a
Parent(s): 40c686d

Training in progress, step 1000

This view is limited to 50 files because it contains too many changes. See raw diff for the full change set.
- .gitattributes +1 -0
- added_tokens.json +1611 -0
- config.json +51 -0
- merges.txt +0 -0
- model.safetensors +3 -0
- normalizer.json +1742 -0
- preprocessor_config.json +14 -0
- run.sh +33 -0
- run_speech_recognition_seq2seq.py +627 -0
- runs/Mar27_12-33-46_hf-dgx-01/events.out.tfevents.1711539342.hf-dgx-01.1366875.0 +3 -0
- runs/Mar27_12-56-36_hf-dgx-01/events.out.tfevents.1711540605.hf-dgx-01.1386946.0 +3 -0
- runs/Mar27_12-58-17_hf-dgx-01/events.out.tfevents.1711540707.hf-dgx-01.1389604.0 +3 -0
- runs/Mar27_14-10-21_hf-dgx-01/events.out.tfevents.1711545030.hf-dgx-01.1482719.0 +3 -0
- special_tokens_map.json +139 -0
- tokenizer.json +0 -0
- tokenizer_config.json +0 -0
- training_args.bin +3 -0
- vocab.json +0 -0
- wandb/debug-cli.sanchit.log +0 -0
- wandb/debug-internal.log +0 -0
- wandb/debug.log +28 -0
- wandb/run-20240327_123544-5vwxt2ut/files/config.yaml +731 -0
- wandb/run-20240327_123544-5vwxt2ut/files/output.log +164 -0
- wandb/run-20240327_123544-5vwxt2ut/files/requirements.txt +247 -0
- wandb/run-20240327_123544-5vwxt2ut/files/wandb-metadata.json +737 -0
- wandb/run-20240327_123544-5vwxt2ut/files/wandb-summary.json +1 -0
- wandb/run-20240327_123544-5vwxt2ut/logs/debug-internal.log +0 -0
- wandb/run-20240327_123544-5vwxt2ut/logs/debug.log +28 -0
- wandb/run-20240327_123544-5vwxt2ut/run-5vwxt2ut.wandb +0 -0
- wandb/run-20240327_125651-wqmi98ok/files/config.yaml +715 -0
- wandb/run-20240327_125651-wqmi98ok/files/output.log +25 -0
- wandb/run-20240327_125651-wqmi98ok/files/requirements.txt +247 -0
- wandb/run-20240327_125651-wqmi98ok/files/wandb-metadata.json +738 -0
- wandb/run-20240327_125651-wqmi98ok/files/wandb-summary.json +1 -0
- wandb/run-20240327_125651-wqmi98ok/logs/debug-internal.log +188 -0
- wandb/run-20240327_125651-wqmi98ok/logs/debug.log +28 -0
- wandb/run-20240327_125651-wqmi98ok/run-wqmi98ok.wandb +0 -0
- wandb/run-20240327_125828-wlmqyk6v/files/config.yaml +731 -0
- wandb/run-20240327_125828-wlmqyk6v/files/output.log +1071 -0
- wandb/run-20240327_125828-wlmqyk6v/files/requirements.txt +247 -0
- wandb/run-20240327_125828-wlmqyk6v/files/wandb-metadata.json +737 -0
- wandb/run-20240327_125828-wlmqyk6v/files/wandb-summary.json +1 -0
- wandb/run-20240327_125828-wlmqyk6v/logs/debug-internal.log +0 -0
- wandb/run-20240327_125828-wlmqyk6v/logs/debug.log +29 -0
- wandb/run-20240327_125828-wlmqyk6v/run-wlmqyk6v.wandb +0 -0
- wandb/run-20240327_141031-aoxf8fxn/files/config.yaml +751 -0
- wandb/run-20240327_141031-aoxf8fxn/files/output.log +1170 -0
- wandb/run-20240327_141031-aoxf8fxn/files/requirements.txt +247 -0
- wandb/run-20240327_141031-aoxf8fxn/files/wandb-metadata.json +737 -0
- wandb/run-20240327_141031-aoxf8fxn/files/wandb-summary.json +1 -0
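The listing above shows an intermediate checkpoint: model weights, tokenizer assets, the training script, and wandb logs all committed at step 1000. To work against exactly this state, the commit hash from this page can be pinned at download time. A minimal sketch using huggingface_hub; the repository id is not shown on this page, so the one below is a hypothetical placeholder:

from huggingface_hub import snapshot_download

# Download the repository exactly as of this commit (704fa4a).
# repo_id is a hypothetical placeholder; substitute the actual repo.
local_dir = snapshot_download(
    repo_id="sanchit/distil-whisper-finetune",  # hypothetical
    revision="704fa4a",  # commit hash from this page
    allow_patterns=["*.json", "*.safetensors", "*.txt"],  # skip wandb logs
)
print(local_dir)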
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+wandb/run-20240327_141031-aoxf8fxn/run-aoxf8fxn.wandb filter=lfs diff=lfs merge=lfs -text
added_tokens.json
ADDED
@@ -0,0 +1,1611 @@
+{
+  "<|0.00|>": 50365,
+  "<|0.02|>": 50366,
+  "<|0.04|>": 50367,
+  "<|0.06|>": 50368,
+  "<|0.08|>": 50369,
+  "<|0.10|>": 50370,
+  ... [1,495 further timestamp entries omitted here for length: "<|T|>" for every T from 0.12 to 30.00 in 0.02 s steps, each mapped to ID 50365 + T/0.02 (so "<|29.98|>": 51864 and "<|30.00|>": 51865); keys are sorted lexicographically, which is why "<|10.00|>": 50865 precedes "<|2.00|>": 50465] ...
+  "<|af|>": 50327,
+  "<|am|>": 50334,
+  "<|ar|>": 50272,
+  "<|as|>": 50350,
+  "<|az|>": 50304,
+  "<|ba|>": 50355,
+  "<|be|>": 50330,
+  "<|bg|>": 50292,
+  "<|bn|>": 50302,
+  "<|bo|>": 50347,
+  "<|br|>": 50309,
+  "<|bs|>": 50315,
+  "<|ca|>": 50270,
+  "<|cs|>": 50283,
+  "<|cy|>": 50297,
+  "<|da|>": 50285,
+  "<|de|>": 50261,
+  "<|el|>": 50281,
+  "<|endoftext|>": 50257,
+  "<|en|>": 50259,
+  "<|es|>": 50262,
+  "<|et|>": 50307,
+  "<|eu|>": 50310,
+  "<|fa|>": 50300,
+  "<|fi|>": 50277,
+  "<|fo|>": 50338,
+  "<|fr|>": 50265,
+  "<|gl|>": 50319,
+  "<|gu|>": 50333,
+  "<|haw|>": 50352,
+  "<|ha|>": 50354,
+  "<|he|>": 50279,
+  "<|hi|>": 50276,
+  "<|hr|>": 50291,
+  "<|ht|>": 50339,
+  "<|hu|>": 50286,
+  "<|hy|>": 50312,
+  "<|id|>": 50275,
+  "<|is|>": 50311,
+  "<|it|>": 50274,
+  "<|ja|>": 50266,
+  "<|jw|>": 50356,
+  "<|ka|>": 50329,
+  "<|kk|>": 50316,
+  "<|km|>": 50323,
+  "<|kn|>": 50306,
+  "<|ko|>": 50264,
+  "<|la|>": 50294,
+  "<|lb|>": 50345,
+  "<|ln|>": 50353,
+  "<|lo|>": 50336,
+  "<|lt|>": 50293,
+  "<|lv|>": 50301,
+  "<|mg|>": 50349,
+  "<|mi|>": 50295,
+  "<|mk|>": 50308,
+  "<|ml|>": 50296,
+  "<|mn|>": 50314,
+  "<|mr|>": 50320,
+  "<|ms|>": 50282,
+  "<|mt|>": 50343,
+  "<|my|>": 50346,
+  "<|ne|>": 50313,
+  "<|nl|>": 50271,
+  "<|nn|>": 50342,
+  "<|nospeech|>": 50363,
+  "<|notimestamps|>": 50364,
+  "<|no|>": 50288,
+  "<|oc|>": 50328,
+  "<|pa|>": 50321,
+  "<|pl|>": 50269,
+  "<|ps|>": 50340,
+  "<|pt|>": 50267,
+  "<|ro|>": 50284,
+  "<|ru|>": 50263,
+  "<|sa|>": 50344,
+  "<|sd|>": 50332,
+  "<|si|>": 50322,
+  "<|sk|>": 50298,
+  "<|sl|>": 50305,
+  "<|sn|>": 50324,
+  "<|so|>": 50326,
+  "<|sq|>": 50317,
+  "<|sr|>": 50303,
+  "<|startoflm|>": 50361,
+  "<|startofprev|>": 50362,
+  "<|startoftranscript|>": 50258,
+  "<|su|>": 50357,
+  "<|sv|>": 50273,
+  "<|sw|>": 50318,
+  "<|ta|>": 50287,
+  "<|te|>": 50299,
+  "<|tg|>": 50331,
+  "<|th|>": 50289,
+  "<|tk|>": 50341,
+  "<|tl|>": 50348,
+  "<|transcribe|>": 50360,
+  "<|translate|>": 50359,
+  "<|tr|>": 50268,
+  "<|tt|>": 50351,
+  "<|uk|>": 50280,
+  "<|ur|>": 50290,
+  "<|uz|>": 50337,
+  "<|vi|>": 50278,
+  "<|yi|>": 50335,
+  "<|yo|>": 50325,
+  "<|yue|>": 50358,
+  "<|zh|>": 50260
+}
config.json
ADDED
@@ -0,0 +1,51 @@
{
  "_name_or_path": "distil-whisper/distil-large-v3",
  "activation_dropout": 0.0,
  "activation_function": "gelu",
  "apply_spec_augment": false,
  "architectures": [
    "WhisperForConditionalGeneration"
  ],
  "attention_dropout": 0.0,
  "begin_suppress_tokens": [
    220,
    50257
  ],
  "bos_token_id": 50257,
  "classifier_proj_size": 256,
  "d_model": 1280,
  "decoder_attention_heads": 20,
  "decoder_ffn_dim": 5120,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 2,
  "decoder_start_token_id": 50258,
  "dropout": 0.0,
  "encoder_attention_heads": 20,
  "encoder_ffn_dim": 5120,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 32,
  "eos_token_id": 50257,
  "forced_decoder_ids": null,
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "mask_feature_length": 10,
  "mask_feature_min_masks": 0,
  "mask_feature_prob": 0.0,
  "mask_time_length": 10,
  "mask_time_min_masks": 2,
  "mask_time_prob": 0.05,
  "max_length": 448,
  "max_source_positions": 1500,
  "max_target_positions": 448,
  "median_filter_width": 7,
  "model_type": "whisper",
  "num_hidden_layers": 32,
  "num_mel_bins": 128,
  "pad_token_id": 50256,
  "scale_embedding": false,
  "torch_dtype": "float32",
  "transformers_version": "4.40.0.dev0",
  "use_cache": true,
  "use_weighted_layer_sum": false,
  "vocab_size": 51866
}
merges.txt
ADDED
The diff for this file is too large to render.
See raw diff
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:aa171c05efba7d14b34ef63f5bdfa65b4bd12f881fb079b9f674f2a4bd087996
size 3025686376
normalizer.json
ADDED
@@ -0,0 +1,1742 @@
{
  "accessorise": "accessorize",
  "accessorised": "accessorized",
  "accessorises": "accessorizes",
  "accessorising": "accessorizing",
  "acclimatisation": "acclimatization",
  "acclimatise": "acclimatize",
  "acclimatised": "acclimatized",
  "acclimatises": "acclimatizes",
  "acclimatising": "acclimatizing",
  "accoutrements": "accouterments",
  "aeon": "eon",
  "aeons": "eons",
  "aerogramme": "aerogram",
  "aerogrammes": "aerograms",
  "aeroplane": "airplane",
  "aeroplanes": "airplanes",
  "aesthete": "esthete",
  "aesthetes": "esthetes",
  "aesthetic": "esthetic",
  "aesthetically": "esthetically",
  "aesthetics": "esthetics",
  "aetiology": "etiology",
  "ageing": "aging",
  "aggrandisement": "aggrandizement",
  "agonise": "agonize",
  "agonised": "agonized",
  "agonises": "agonizes",
  "agonising": "agonizing",
  "agonisingly": "agonizingly",
  "almanack": "almanac",
  "almanacks": "almanacs",
  "aluminium": "aluminum",
  "amortisable": "amortizable",
  "amortisation": "amortization",
  "amortisations": "amortizations",
  "amortise": "amortize",
  "amortised": "amortized",
  "amortises": "amortizes",
  "amortising": "amortizing",
  "amphitheatre": "amphitheater",
  "amphitheatres": "amphitheaters",
  "anaemia": "anemia",
  "anaemic": "anemic",
  "anaesthesia": "anesthesia",
  "anaesthetic": "anesthetic",
  "anaesthetics": "anesthetics",
  "anaesthetise": "anesthetize",
  "anaesthetised": "anesthetized",
  "anaesthetises": "anesthetizes",
  "anaesthetising": "anesthetizing",
  "anaesthetist": "anesthetist",
  "anaesthetists": "anesthetists",
  "anaesthetize": "anesthetize",
  "anaesthetized": "anesthetized",
  "anaesthetizes": "anesthetizes",
  "anaesthetizing": "anesthetizing",
  "analogue": "analog",
  "analogues": "analogs",
  "analyse": "analyze",
  "analysed": "analyzed",
  "analyses": "analyzes",
  "analysing": "analyzing",
  "anglicise": "anglicize",
  "anglicised": "anglicized",
  "anglicises": "anglicizes",
  "anglicising": "anglicizing",
  "annualised": "annualized",
  "antagonise": "antagonize",
  "antagonised": "antagonized",
  "antagonises": "antagonizes",
  "antagonising": "antagonizing",
  "apologise": "apologize",
  "apologised": "apologized",
  "apologises": "apologizes",
  "apologising": "apologizing",
  "appal": "appall",
  "appals": "appalls",
  "appetiser": "appetizer",
  "appetisers": "appetizers",
  "appetising": "appetizing",
  "appetisingly": "appetizingly",
  "arbour": "arbor",
  "arbours": "arbors",
  "archaeologically": "archeologically",
  "archaeologist": "archeologist",
  "archaeologists": "archeologists",
  "archaeology": "archeology",
  "archeological": "archaeological",
  "ardour": "ardor",
  "armour": "armor",
  "armoured": "armored",
  "armourer": "armorer",
  "armourers": "armorers",
  "armouries": "armories",
  "armoury": "armory",
  "artefact": "artifact",
  "artefacts": "artifacts",
  "authorise": "authorize",
  "authorised": "authorized",
  "authorises": "authorizes",
  "authorising": "authorizing",
  "axe": "ax",
  "backpedalled": "backpedaled",
  "backpedalling": "backpedaling",
  "bannister": "banister",
  "bannisters": "banisters",
  "baptise": "baptize",
  "baptised": "baptized",
  "baptises": "baptizes",
  "baptising": "baptizing",
  "bastardise": "bastardize",
  "bastardised": "bastardized",
  "bastardises": "bastardizes",
  "bastardising": "bastardizing",
  "battleax": "battleaxe",
  "baulk": "balk",
  "baulked": "balked",
  "baulking": "balking",
  "baulks": "balks",
  "bedevilled": "bedeviled",
  "bedevilling": "bedeviling",
  "behaviour": "behavior",
  "behavioural": "behavioral",
  "behaviourism": "behaviorism",
  "behaviourist": "behaviorist",
  "behaviourists": "behaviorists",
  "behaviours": "behaviors",
  "behove": "behoove",
  "behoved": "behooved",
  "behoves": "behooves",
  "bejewelled": "bejeweled",
  "belabour": "belabor",
  "belaboured": "belabored",
  "belabouring": "belaboring",
  "belabours": "belabors",
  "bevelled": "beveled",
  "bevvies": "bevies",
  "bevvy": "bevy",
  "biassed": "biased",
  "biassing": "biasing",
  "bingeing": "binging",
  "bougainvillaea": "bougainvillea",
  "bougainvillaeas": "bougainvilleas",
  "bowdlerise": "bowdlerize",
  "bowdlerised": "bowdlerized",
  "bowdlerises": "bowdlerizes",
  "bowdlerising": "bowdlerizing",
  "breathalyse": "breathalyze",
  "breathalysed": "breathalyzed",
  "breathalyser": "breathalyzer",
  "breathalysers": "breathalyzers",
  "breathalyses": "breathalyzes",
  "breathalysing": "breathalyzing",
  "brutalise": "brutalize",
  "brutalised": "brutalized",
  "brutalises": "brutalizes",
  "brutalising": "brutalizing",
  "busses": "buses",
  "bussing": "busing",
  "caesarean": "cesarean",
  "caesareans": "cesareans",
  "calibre": "caliber",
  "calibres": "calibers",
  "calliper": "caliper",
  "callipers": "calipers",
  "callisthenics": "calisthenics",
  "canalise": "canalize",
  "canalised": "canalized",
  "canalises": "canalizes",
  "canalising": "canalizing",
  "cancelation": "cancellation",
  "cancelations": "cancellations",
  "cancelled": "canceled",
  "cancelling": "canceling",
  "candour": "candor",
  "cannibalise": "cannibalize",
  "cannibalised": "cannibalized",
  "cannibalises": "cannibalizes",
  "cannibalising": "cannibalizing",
  "canonise": "canonize",
  "canonised": "canonized",
  "canonises": "canonizes",
  "canonising": "canonizing",
  "capitalise": "capitalize",
  "capitalised": "capitalized",
  "capitalises": "capitalizes",
  "capitalising": "capitalizing",
  "caramelise": "caramelize",
  "caramelised": "caramelized",
  "caramelises": "caramelizes",
  "caramelising": "caramelizing",
  "carbonise": "carbonize",
  "carbonised": "carbonized",
  "carbonises": "carbonizes",
  "carbonising": "carbonizing",
  "carolled": "caroled",
  "carolling": "caroling",
  "catalogue": "catalog",
  "catalogued": "cataloged",
  "catalogues": "catalogs",
  "cataloguing": "cataloging",
  "catalyse": "catalyze",
  "catalysed": "catalyzed",
  "catalyses": "catalyzes",
  "catalysing": "catalyzing",
  "categorise": "categorize",
  "categorised": "categorized",
  "categorises": "categorizes",
  "categorising": "categorizing",
  "cauterise": "cauterize",
  "cauterised": "cauterized",
  "cauterises": "cauterizes",
  "cauterising": "cauterizing",
  "cavilled": "caviled",
  "cavilling": "caviling",
  "centigramme": "centigram",
  "centigrammes": "centigrams",
  "centilitre": "centiliter",
  "centilitres": "centiliters",
  "centimetre": "centimeter",
  "centimetres": "centimeters",
  "centralise": "centralize",
  "centralised": "centralized",
  "centralises": "centralizes",
  "centralising": "centralizing",
  "centre": "center",
  "centred": "centered",
  "centrefold": "centerfold",
  "centrefolds": "centerfolds",
  "centrepiece": "centerpiece",
  "centrepieces": "centerpieces",
  "centres": "centers",
  "channelled": "channeled",
  "channelling": "channeling",
  "characterise": "characterize",
  "characterised": "characterized",
  "characterises": "characterizes",
  "characterising": "characterizing",
  "cheque": "check",
  "chequebook": "checkbook",
  "chequebooks": "checkbooks",
  "chequered": "checkered",
  "cheques": "checks",
  "chilli": "chili",
  "chimaera": "chimera",
  "chimaeras": "chimeras",
  "chiselled": "chiseled",
  "chiselling": "chiseling",
  "circularise": "circularize",
  "circularised": "circularized",
  "circularises": "circularizes",
  "circularising": "circularizing",
  "civilise": "civilize",
  "civilised": "civilized",
  "civilises": "civilizes",
  "civilising": "civilizing",
  "clamour": "clamor",
  "clamoured": "clamored",
  "clamouring": "clamoring",
  "clamours": "clamors",
  "clangour": "clangor",
  "clarinettist": "clarinetist",
  "clarinettists": "clarinetists",
  "collectivise": "collectivize",
  "collectivised": "collectivized",
  "collectivises": "collectivizes",
  "collectivising": "collectivizing",
  "colonisation": "colonization",
  "colonise": "colonize",
  "colonised": "colonized",
  "coloniser": "colonizer",
  "colonisers": "colonizers",
  "colonises": "colonizes",
  "colonising": "colonizing",
  "colour": "color",
  "colourant": "colorant",
  "colourants": "colorants",
  "coloured": "colored",
  "coloureds": "coloreds",
  "colourful": "colorful",
  "colourfully": "colorfully",
  "colouring": "coloring",
  "colourize": "colorize",
  "colourized": "colorized",
  "colourizes": "colorizes",
  "colourizing": "colorizing",
  "colourless": "colorless",
  "colours": "colors",
  "commercialise": "commercialize",
  "commercialised": "commercialized",
  "commercialises": "commercializes",
  "commercialising": "commercializing",
  "compartmentalise": "compartmentalize",
  "compartmentalised": "compartmentalized",
  "compartmentalises": "compartmentalizes",
  "compartmentalising": "compartmentalizing",
  "computerise": "computerize",
  "computerised": "computerized",
  "computerises": "computerizes",
  "computerising": "computerizing",
  "conceptualise": "conceptualize",
  "conceptualised": "conceptualized",
  "conceptualises": "conceptualizes",
  "conceptualising": "conceptualizing",
  "connexion": "connection",
  "connexions": "connections",
  "contextualise": "contextualize",
  "contextualised": "contextualized",
  "contextualises": "contextualizes",
  "contextualising": "contextualizing",
  "cosier": "cozier",
  "cosies": "cozies",
  "cosiest": "coziest",
  "cosily": "cozily",
  "cosiness": "coziness",
  "cosy": "cozy",
  "councillor": "councilor",
  "councillors": "councilors",
  "counselled": "counseled",
  "counselling": "counseling",
  "counsellor": "counselor",
  "counsellors": "counselors",
  "crenelated": "crenellated",
  "criminalise": "criminalize",
  "criminalised": "criminalized",
  "criminalises": "criminalizes",
  "criminalising": "criminalizing",
  "criticise": "criticize",
  "criticised": "criticized",
  "criticises": "criticizes",
  "criticising": "criticizing",
  "crueller": "crueler",
  "cruellest": "cruelest",
  "crystallisation": "crystallization",
  "crystallise": "crystallize",
  "crystallised": "crystallized",
  "crystallises": "crystallizes",
  "crystallising": "crystallizing",
  "cudgelled": "cudgeled",
  "cudgelling": "cudgeling",
  "customise": "customize",
  "customised": "customized",
  "customises": "customizes",
  "customising": "customizing",
  "cypher": "cipher",
  "cyphers": "ciphers",
  "decentralisation": "decentralization",
  "decentralise": "decentralize",
  "decentralised": "decentralized",
  "decentralises": "decentralizes",
  "decentralising": "decentralizing",
  "decriminalisation": "decriminalization",
  "decriminalise": "decriminalize",
  "decriminalised": "decriminalized",
  "decriminalises": "decriminalizes",
  "decriminalising": "decriminalizing",
  "defence": "defense",
  "defenceless": "defenseless",
  "defences": "defenses",
  "dehumanisation": "dehumanization",
  "dehumanise": "dehumanize",
  "dehumanised": "dehumanized",
  "dehumanises": "dehumanizes",
  "dehumanising": "dehumanizing",
  "demeanour": "demeanor",
  "demilitarisation": "demilitarization",
  "demilitarise": "demilitarize",
  "demilitarised": "demilitarized",
  "demilitarises": "demilitarizes",
  "demilitarising": "demilitarizing",
  "demobilisation": "demobilization",
  "demobilise": "demobilize",
  "demobilised": "demobilized",
  "demobilises": "demobilizes",
  "demobilising": "demobilizing",
  "democratisation": "democratization",
  "democratise": "democratize",
  "democratised": "democratized",
  "democratises": "democratizes",
  "democratising": "democratizing",
  "demonise": "demonize",
  "demonised": "demonized",
  "demonises": "demonizes",
  "demonising": "demonizing",
  "demoralisation": "demoralization",
  "demoralise": "demoralize",
  "demoralised": "demoralized",
  "demoralises": "demoralizes",
  "demoralising": "demoralizing",
  "denationalisation": "denationalization",
  "denationalise": "denationalize",
  "denationalised": "denationalized",
  "denationalises": "denationalizes",
  "denationalising": "denationalizing",
  "deodorise": "deodorize",
  "deodorised": "deodorized",
  "deodorises": "deodorizes",
  "deodorising": "deodorizing",
  "depersonalise": "depersonalize",
  "depersonalised": "depersonalized",
  "depersonalises": "depersonalizes",
  "depersonalising": "depersonalizing",
  "deputise": "deputize",
  "deputised": "deputized",
  "deputises": "deputizes",
  "deputising": "deputizing",
  "desensitisation": "desensitization",
  "desensitise": "desensitize",
  "desensitised": "desensitized",
  "desensitises": "desensitizes",
  "desensitising": "desensitizing",
  "destabilisation": "destabilization",
  "destabilise": "destabilize",
  "destabilised": "destabilized",
  "destabilises": "destabilizes",
  "destabilising": "destabilizing",
  "dialled": "dialed",
  "dialling": "dialing",
  "dialogue": "dialog",
  "dialogues": "dialogs",
  "diarrhoea": "diarrhea",
  "digitise": "digitize",
  "digitised": "digitized",
  "digitises": "digitizes",
  "digitising": "digitizing",
  "disc": "disk",
  "discolour": "discolor",
  "discoloured": "discolored",
  "discolouring": "discoloring",
  "discolours": "discolors",
  "discs": "disks",
  "disembowelled": "disemboweled",
  "disembowelling": "disemboweling",
  "disfavour": "disfavor",
  "dishevelled": "disheveled",
  "dishonour": "dishonor",
  "dishonourable": "dishonorable",
  "dishonourably": "dishonorably",
  "dishonoured": "dishonored",
  "dishonouring": "dishonoring",
  "dishonours": "dishonors",
  "disorganisation": "disorganization",
  "disorganised": "disorganized",
  "distil": "distill",
  "distils": "distills",
  "dramatisation": "dramatization",
  "dramatisations": "dramatizations",
  "dramatise": "dramatize",
  "dramatised": "dramatized",
  "dramatises": "dramatizes",
  "dramatising": "dramatizing",
  "draught": "draft",
  "draughtboard": "draftboard",
  "draughtboards": "draftboards",
  "draughtier": "draftier",
  "draughtiest": "draftiest",
  "draughts": "drafts",
  "draughtsman": "draftsman",
  "draughtsmanship": "draftsmanship",
  "draughtsmen": "draftsmen",
  "draughtswoman": "draftswoman",
  "draughtswomen": "draftswomen",
  "draughty": "drafty",
  "drivelled": "driveled",
  "drivelling": "driveling",
  "duelled": "dueled",
  "duelling": "dueling",
  "economise": "economize",
  "economised": "economized",
  "economises": "economizes",
  "economising": "economizing",
  "editorialise": "editorialize",
  "editorialised": "editorialized",
  "editorialises": "editorializes",
  "editorialising": "editorializing",
  "edoema": "edema",
  "empathise": "empathize",
  "empathised": "empathized",
  "empathises": "empathizes",
  "empathising": "empathizing",
  "emphasise": "emphasize",
  "emphasised": "emphasized",
  "emphasises": "emphasizes",
  "emphasising": "emphasizing",
  "enamelled": "enameled",
  "enamelling": "enameling",
  "enamoured": "enamored",
  "encyclopaedia": "encyclopedia",
  "encyclopaedias": "encyclopedias",
  "encyclopaedic": "encyclopedic",
  "endeavour": "endeavor",
  "endeavoured": "endeavored",
  "endeavouring": "endeavoring",
  "endeavours": "endeavors",
  "energise": "energize",
  "energised": "energized",
  "energises": "energizes",
  "energising": "energizing",
  "enrol": "enroll",
  "enrols": "enrolls",
  "enthral": "enthrall",
  "enthrals": "enthralls",
  "epaulette": "epaulet",
  "epaulettes": "epaulets",
  "epicentre": "epicenter",
  "epicentres": "epicenters",
  "epilogue": "epilog",
  "epilogues": "epilogs",
  "epitomise": "epitomize",
  "epitomised": "epitomized",
  "epitomises": "epitomizes",
  "epitomising": "epitomizing",
  "equalisation": "equalization",
  "equalise": "equalize",
  "equalised": "equalized",
  "equaliser": "equalizer",
  "equalisers": "equalizers",
  "equalises": "equalizes",
  "equalising": "equalizing",
  "eulogise": "eulogize",
  "eulogised": "eulogized",
  "eulogises": "eulogizes",
  "eulogising": "eulogizing",
  "evangelise": "evangelize",
  "evangelised": "evangelized",
  "evangelises": "evangelizes",
  "evangelising": "evangelizing",
  "exorcise": "exorcize",
  "exorcised": "exorcized",
  "exorcises": "exorcizes",
  "exorcising": "exorcizing",
  "extemporisation": "extemporization",
  "extemporise": "extemporize",
  "extemporised": "extemporized",
  "extemporises": "extemporizes",
  "extemporising": "extemporizing",
  "externalisation": "externalization",
  "externalisations": "externalizations",
  "externalise": "externalize",
  "externalised": "externalized",
  "externalises": "externalizes",
  "externalising": "externalizing",
  "factorise": "factorize",
  "factorised": "factorized",
  "factorises": "factorizes",
  "factorising": "factorizing",
  "faecal": "fecal",
  "faeces": "feces",
  "familiarisation": "familiarization",
  "familiarise": "familiarize",
  "familiarised": "familiarized",
  "familiarises": "familiarizes",
  "familiarising": "familiarizing",
  "fantasise": "fantasize",
  "fantasised": "fantasized",
  "fantasises": "fantasizes",
  "fantasising": "fantasizing",
  "favour": "favor",
  "favourable": "favorable",
  "favourably": "favorably",
  "favoured": "favored",
  "favouring": "favoring",
  "favourite": "favorite",
  "favourites": "favorites",
  "favouritism": "favoritism",
  "favours": "favors",
  "feminise": "feminize",
  "feminised": "feminized",
  "feminises": "feminizes",
  "feminising": "feminizing",
  "fertilisation": "fertilization",
  "fertilise": "fertilize",
  "fertilised": "fertilized",
  "fertiliser": "fertilizer",
  "fertilisers": "fertilizers",
  "fertilises": "fertilizes",
  "fertilising": "fertilizing",
  "fervour": "fervor",
  "fibre": "fiber",
  "fibreglass": "fiberglass",
  "fibres": "fibers",
  "fictionalisation": "fictionalization",
  "fictionalisations": "fictionalizations",
  "fictionalise": "fictionalize",
  "fictionalised": "fictionalized",
  "fictionalises": "fictionalizes",
  "fictionalising": "fictionalizing",
  "fillet": "filet",
  "filleted": "fileted",
  "filleting": "fileting",
  "fillets": "filets",
  "finalisation": "finalization",
  "finalise": "finalize",
  "finalised": "finalized",
  "finalises": "finalizes",
  "finalising": "finalizing",
  "flautist": "flutist",
  "flautists": "flutists",
  "flavour": "flavor",
  "flavoured": "flavored",
  "flavouring": "flavoring",
  "flavourings": "flavorings",
  "flavourless": "flavorless",
  "flavours": "flavors",
  "flavoursome": "flavorsome",
  "flyer / flier": "flier / flyer",
  "foetal": "fetal",
  "foetid": "fetid",
  "foetus": "fetus",
  "foetuses": "fetuses",
  "formalisation": "formalization",
  "formalise": "formalize",
  "formalised": "formalized",
  "formalises": "formalizes",
  "formalising": "formalizing",
  "fossilisation": "fossilization",
  "fossilise": "fossilize",
  "fossilised": "fossilized",
  "fossilises": "fossilizes",
  "fossilising": "fossilizing",
  "fraternisation": "fraternization",
  "fraternise": "fraternize",
  "fraternised": "fraternized",
  "fraternises": "fraternizes",
  "fraternising": "fraternizing",
  "fulfil": "fulfill",
  "fulfilment": "fulfillment",
  "fulfils": "fulfills",
  "funnelled": "funneled",
  "funnelling": "funneling",
  "gage": "gauge",
  "gaged": "gauged",
  "gages": "gauges",
  "gaging": "gauging",
  "galvanise": "galvanize",
  "galvanised": "galvanized",
  "galvanises": "galvanizes",
  "galvanising": "galvanizing",
  "gambolled": "gamboled",
  "gambolling": "gamboling",
  "gaol": "jail",
  "gaolbird": "jailbird",
  "gaolbirds": "jailbirds",
  "gaolbreak": "jailbreak",
  "gaolbreaks": "jailbreaks",
  "gaoled": "jailed",
  "gaoler": "jailer",
  "gaolers": "jailers",
  "gaoling": "jailing",
  "gaols": "jails",
  "gasses": "gases",
  "generalisation": "generalization",
  "generalisations": "generalizations",
  "generalise": "generalize",
  "generalised": "generalized",
  "generalises": "generalizes",
  "generalising": "generalizing",
  "ghettoise": "ghettoize",
  "ghettoised": "ghettoized",
  "ghettoises": "ghettoizes",
  "ghettoising": "ghettoizing",
  "gipsies": "gypsies",
  "glamor": "glamour",
  "glamorise": "glamorize",
  "glamorised": "glamorized",
  "glamorises": "glamorizes",
  "glamorising": "glamorizing",
  "globalisation": "globalization",
  "globalise": "globalize",
  "globalised": "globalized",
  "globalises": "globalizes",
  "globalising": "globalizing",
  "glueing": "gluing",
  "goitre": "goiter",
  "goitres": "goiters",
  "gonorrhoea": "gonorrhea",
  "gramme": "gram",
  "grammes": "grams",
  "gravelled": "graveled",
  "grey": "gray",
  "greyed": "grayed",
  "greying": "graying",
  "greyish": "grayish",
  "greyness": "grayness",
  "greys": "grays",
  "grovelled": "groveled",
  "grovelling": "groveling",
  "groyne": "groin",
  "groynes": "groins",
  "gruelling": "grueling",
  "gruellingly": "gruelingly",
  "gryphon": "griffin",
  "gryphons": "griffins",
  "gynaecological": "gynecological",
  "gynaecologist": "gynecologist",
  "gynaecologists": "gynecologists",
  "gynaecology": "gynecology",
  "haematological": "hematological",
  "haematologist": "hematologist",
  "haematologists": "hematologists",
  "haematology": "hematology",
  "haemoglobin": "hemoglobin",
  "haemophilia": "hemophilia",
  "haemophiliac": "hemophiliac",
  "haemophiliacs": "hemophiliacs",
  "haemorrhage": "hemorrhage",
  "haemorrhaged": "hemorrhaged",
  "haemorrhages": "hemorrhages",
  "haemorrhaging": "hemorrhaging",
  "haemorrhoids": "hemorrhoids",
  "harbour": "harbor",
  "harboured": "harbored",
  "harbouring": "harboring",
  "harbours": "harbors",
  "harmonisation": "harmonization",
  "harmonise": "harmonize",
  "harmonised": "harmonized",
  "harmonises": "harmonizes",
  "harmonising": "harmonizing",
  "homoeopath": "homeopath",
  "homoeopathic": "homeopathic",
  "homoeopaths": "homeopaths",
  "homoeopathy": "homeopathy",
  "homogenise": "homogenize",
  "homogenised": "homogenized",
  "homogenises": "homogenizes",
  "homogenising": "homogenizing",
  "honour": "honor",
  "honourable": "honorable",
  "honourably": "honorably",
  "honoured": "honored",
  "honouring": "honoring",
  "honours": "honors",
  "hospitalisation": "hospitalization",
  "hospitalise": "hospitalize",
  "hospitalised": "hospitalized",
  "hospitalises": "hospitalizes",
  "hospitalising": "hospitalizing",
  "humanise": "humanize",
  "humanised": "humanized",
  "humanises": "humanizes",
  "humanising": "humanizing",
  "humour": "humor",
  "humoured": "humored",
  "humouring": "humoring",
  "humourless": "humorless",
  "humours": "humors",
  "hybridise": "hybridize",
  "hybridised": "hybridized",
  "hybridises": "hybridizes",
  "hybridising": "hybridizing",
  "hypnotise": "hypnotize",
  "hypnotised": "hypnotized",
  "hypnotises": "hypnotizes",
  "hypnotising": "hypnotizing",
  "hypothesise": "hypothesize",
  "hypothesised": "hypothesized",
  "hypothesises": "hypothesizes",
  "hypothesising": "hypothesizing",
  "idealisation": "idealization",
  "idealise": "idealize",
  "idealised": "idealized",
  "idealises": "idealizes",
  "idealising": "idealizing",
  "idolise": "idolize",
  "idolised": "idolized",
  "idolises": "idolizes",
  "idolising": "idolizing",
  "immobilisation": "immobilization",
  "immobilise": "immobilize",
  "immobilised": "immobilized",
  "immobiliser": "immobilizer",
  "immobilisers": "immobilizers",
  "immobilises": "immobilizes",
  "immobilising": "immobilizing",
  "immortalise": "immortalize",
  "immortalised": "immortalized",
  "immortalises": "immortalizes",
  "immortalising": "immortalizing",
  "immunisation": "immunization",
  "immunise": "immunize",
  "immunised": "immunized",
  "immunises": "immunizes",
  "immunising": "immunizing",
  "impanelled": "impaneled",
  "impanelling": "impaneling",
  "imperilled": "imperiled",
  "imperilling": "imperiling",
  "individualise": "individualize",
  "individualised": "individualized",
  "individualises": "individualizes",
  "individualising": "individualizing",
  "industrialise": "industrialize",
  "industrialised": "industrialized",
  "industrialises": "industrializes",
  "industrialising": "industrializing",
  "inflexion": "inflection",
  "inflexions": "inflections",
  "initialise": "initialize",
  "initialised": "initialized",
  "initialises": "initializes",
  "initialising": "initializing",
  "initialled": "initialed",
  "initialling": "initialing",
  "instal": "install",
  "instalment": "installment",
  "instalments": "installments",
  "instals": "installs",
  "instil": "instill",
  "instils": "instills",
  "institutionalisation": "institutionalization",
  "institutionalise": "institutionalize",
  "institutionalised": "institutionalized",
  "institutionalises": "institutionalizes",
  "institutionalising": "institutionalizing",
  "intellectualise": "intellectualize",
  "intellectualised": "intellectualized",
  "intellectualises": "intellectualizes",
  "intellectualising": "intellectualizing",
  "internalisation": "internalization",
  "internalise": "internalize",
  "internalised": "internalized",
  "internalises": "internalizes",
  "internalising": "internalizing",
  "internationalisation": "internationalization",
  "internationalise": "internationalize",
  "internationalised": "internationalized",
  "internationalises": "internationalizes",
  "internationalising": "internationalizing",
  "ionisation": "ionization",
  "ionise": "ionize",
  "ionised": "ionized",
  "ioniser": "ionizer",
  "ionisers": "ionizers",
  "ionises": "ionizes",
  "ionising": "ionizing",
  "italicise": "italicize",
  "italicised": "italicized",
  "italicises": "italicizes",
  "italicising": "italicizing",
  "itemise": "itemize",
  "itemised": "itemized",
  "itemises": "itemizes",
  "itemising": "itemizing",
  "jeopardise": "jeopardize",
  "jeopardised": "jeopardized",
  "jeopardises": "jeopardizes",
  "jeopardising": "jeopardizing",
  "jewelled": "jeweled",
  "jeweller": "jeweler",
  "jewellers": "jewelers",
  "jewellery": "jewelry",
  "judgement": "judgment",
  "kilogramme": "kilogram",
  "kilogrammes": "kilograms",
  "kilometre": "kilometer",
  "kilometres": "kilometers",
  "labelled": "labeled",
  "labelling": "labeling",
  "labour": "labor",
  "laboured": "labored",
  "labourer": "laborer",
  "labourers": "laborers",
  "labouring": "laboring",
  "labours": "labors",
  "lacklustre": "lackluster",
  "legalisation": "legalization",
  "legalise": "legalize",
  "legalised": "legalized",
  "legalises": "legalizes",
  "legalising": "legalizing",
  "legitimise": "legitimize",
  "legitimised": "legitimized",
  "legitimises": "legitimizes",
  "legitimising": "legitimizing",
  "leukaemia": "leukemia",
  "levelled": "leveled",
  "leveller": "leveler",
  "levellers": "levelers",
  "levelling": "leveling",
  "libelled": "libeled",
  "libelling": "libeling",
  "libellous": "libelous",
  "liberalisation": "liberalization",
  "liberalise": "liberalize",
  "liberalised": "liberalized",
  "liberalises": "liberalizes",
  "liberalising": "liberalizing",
  "licence": "license",
  "licenced": "licensed",
  "licences": "licenses",
  "licencing": "licensing",
  "likeable": "likable",
  "lionisation": "lionization",
  "lionise": "lionize",
  "lionised": "lionized",
  "lionises": "lionizes",
  "lionising": "lionizing",
  "liquidise": "liquidize",
  "liquidised": "liquidized",
  "liquidiser": "liquidizer",
  "liquidisers": "liquidizers",
  "liquidises": "liquidizes",
  "liquidising": "liquidizing",
  "litre": "liter",
  "litres": "liters",
  "localise": "localize",
  "localised": "localized",
  "localises": "localizes",
  "localising": "localizing",
  "louvre": "louver",
  "louvred": "louvered",
  "louvres": "louvers",
  "lustre": "luster",
  "magnetise": "magnetize",
  "magnetised": "magnetized",
  "magnetises": "magnetizes",
  "magnetising": "magnetizing",
  "manoeuvrability": "maneuverability",
  "manoeuvrable": "maneuverable",
  "manoeuvre": "maneuver",
  "manoeuvred": "maneuvered",
  "manoeuvres": "maneuvers",
  "manoeuvring": "maneuvering",
  "manoeuvrings": "maneuverings",
  "marginalisation": "marginalization",
  "marginalise": "marginalize",
  "marginalised": "marginalized",
  "marginalises": "marginalizes",
  "marginalising": "marginalizing",
  "marshalled": "marshaled",
  "marshalling": "marshaling",
  "marvelled": "marveled",
  "marvelling": "marveling",
  "marvellous": "marvelous",
  "marvellously": "marvelously",
  "materialisation": "materialization",
  "materialise": "materialize",
  "materialised": "materialized",
  "materialises": "materializes",
  "materialising": "materializing",
  "maximisation": "maximization",
  "maximise": "maximize",
  "maximised": "maximized",
  "maximises": "maximizes",
  "maximising": "maximizing",
  "meagre": "meager",
  "mechanisation": "mechanization",
  "mechanise": "mechanize",
  "mechanised": "mechanized",
  "mechanises": "mechanizes",
  "mechanising": "mechanizing",
  "mediaeval": "medieval",
  "memorialise": "memorialize",
  "memorialised": "memorialized",
  "memorialises": "memorializes",
  "memorialising": "memorializing",
  "memorise": "memorize",
  "memorised": "memorized",
  "memorises": "memorizes",
  "memorising": "memorizing",
  "mesmerise": "mesmerize",
  "mesmerised": "mesmerized",
  "mesmerises": "mesmerizes",
  "mesmerising": "mesmerizing",
  "metabolise": "metabolize",
  "metabolised": "metabolized",
  "metabolises": "metabolizes",
  "metabolising": "metabolizing",
  "metre": "meter",
  "metres": "meters",
  "mhm": "hmm",
  "micrometre": "micrometer",
  "micrometres": "micrometers",
  "militarise": "militarize",
  "militarised": "militarized",
  "militarises": "militarizes",
  "militarising": "militarizing",
  "milligramme": "milligram",
  "milligrammes": "milligrams",
  "millilitre": "milliliter",
  "millilitres": "milliliters",
  "millimetre": "millimeter",
  "millimetres": "millimeters",
  "miniaturisation": "miniaturization",
  "miniaturise": "miniaturize",
  "miniaturised": "miniaturized",
  "miniaturises": "miniaturizes",
  "miniaturising": "miniaturizing",
  "minibusses": "minibuses",
  "minimise": "minimize",
  "minimised": "minimized",
  "minimises": "minimizes",
  "minimising": "minimizing",
  "misbehaviour": "misbehavior",
  "misdemeanour": "misdemeanor",
  "misdemeanours": "misdemeanors",
  "misspelt": "misspelled",
  "mitre": "miter",
  "mitres": "miters",
  "mm": "hmm",
  "mmm": "hmm",
  "mobilisation": "mobilization",
  "mobilise": "mobilize",
  "mobilised": "mobilized",
  "mobilises": "mobilizes",
  "mobilising": "mobilizing",
  "modelled": "modeled",
  "modeller": "modeler",
  "modellers": "modelers",
  "modelling": "modeling",
  "modernise": "modernize",
  "modernised": "modernized",
  "modernises": "modernizes",
  "modernising": "modernizing",
  "moisturise": "moisturize",
  "moisturised": "moisturized",
  "moisturiser": "moisturizer",
  "moisturisers": "moisturizers",
  "moisturises": "moisturizes",
  "moisturising": "moisturizing",
  "monologue": "monolog",
  "monologues": "monologs",
  "monopolisation": "monopolization",
  "monopolise": "monopolize",
  "monopolised": "monopolized",
  "monopolises": "monopolizes",
  "monopolising": "monopolizing",
  "moralise": "moralize",
  "moralised": "moralized",
  "moralises": "moralizes",
  "moralising": "moralizing",
  "motorised": "motorized",
  "mould": "mold",
  "moulded": "molded",
  "moulder": "molder",
  "mouldered": "moldered",
  "mouldering": "moldering",
  "moulders": "molders",
  "mouldier": "moldier",
  "mouldiest": "moldiest",
  "moulding": "molding",
  "mouldings": "moldings",
  "moulds": "molds",
  "mouldy": "moldy",
  "moult": "molt",
  "moulted": "molted",
  "moulting": "molting",
  "moults": "molts",
  "moustache": "mustache",
  "moustached": "mustached",
  "moustaches": "mustaches",
  "moustachioed": "mustachioed",
  "multicoloured": "multicolored",
  "nationalisation": "nationalization",
  "nationalisations": "nationalizations",
  "nationalise": "nationalize",
  "nationalised": "nationalized",
  "nationalises": "nationalizes",
  "nationalising": "nationalizing",
  "naturalisation": "naturalization",
  "naturalise": "naturalize",
  "naturalised": "naturalized",
  "naturalises": "naturalizes",
  "naturalising": "naturalizing",
  "neighbour": "neighbor",
  "neighbourhood": "neighborhood",
  "neighbourhoods": "neighborhoods",
  "neighbouring": "neighboring",
  "neighbourliness": "neighborliness",
  "neighbourly": "neighborly",
  "neighbours": "neighbors",
  "neutralisation": "neutralization",
  "neutralise": "neutralize",
  "neutralised": "neutralized",
  "neutralises": "neutralizes",
  "neutralising": "neutralizing",
  "normalisation": "normalization",
  "normalise": "normalize",
  "normalised": "normalized",
  "normalises": "normalizes",
  "normalising": "normalizing",
  "odour": "odor",
  "odourless": "odorless",
  "odours": "odors",
  "oesophagus": "esophagus",
  "oesophaguses": "esophaguses",
  "oestrogen": "estrogen",
  "offence": "offense",
  "offences": "offenses",
  "omelette": "omelet",
  "omelettes": "omelets",
  "optimise": "optimize",
  "optimised": "optimized",
  "optimises": "optimizes",
  "optimising": "optimizing",
  "organisation": "organization",
  "organisational": "organizational",
  "organisations": "organizations",
  "organise": "organize",
  "organised": "organized",
  "organiser": "organizer",
  "organisers": "organizers",
  "organises": "organizes",
  "organising": "organizing",
  "orthopaedic": "orthopedic",
  "orthopaedics": "orthopedics",
  "ostracise": "ostracize",
  "ostracised": "ostracized",
  "ostracises": "ostracizes",
  "ostracising": "ostracizing",
  "outmanoeuvre": "outmaneuver",
  "outmanoeuvred": "outmaneuvered",
  "outmanoeuvres": "outmaneuvers",
  "outmanoeuvring": "outmaneuvering",
  "overemphasise": "overemphasize",
  "overemphasised": "overemphasized",
  "overemphasises": "overemphasizes",
  "overemphasising": "overemphasizing",
  "oxidisation": "oxidization",
  "oxidise": "oxidize",
  "oxidised": "oxidized",
  "oxidises": "oxidizes",
  "oxidising": "oxidizing",
  "paederast": "pederast",
  "paederasts": "pederasts",
  "paediatric": "pediatric",
  "paediatrician": "pediatrician",
  "paediatricians": "pediatricians",
  "paediatrics": "pediatrics",
  "paedophile": "pedophile",
  "paedophiles": "pedophiles",
  "paedophilia": "pedophilia",
  "palaeolithic": "paleolithic",
  "palaeontologist": "paleontologist",
  "palaeontologists": "paleontologists",
  "palaeontology": "paleontology",
  "panelled": "paneled",
  "panelling": "paneling",
  "panellist": "panelist",
  "panellists": "panelists",
  "paralyse": "paralyze",
  "paralysed": "paralyzed",
  "paralyses": "paralyzes",
  "paralysing": "paralyzing",
  "parcelled": "parceled",
  "parcelling": "parceling",
  "parlour": "parlor",
  "parlours": "parlors",
  "particularise": "particularize",
  "particularised": "particularized",
  "particularises": "particularizes",
  "particularising": "particularizing",
  "passivisation": "passivization",
  "passivise": "passivize",
  "passivised": "passivized",
  "passivises": "passivizes",
  "passivising": "passivizing",
  "pasteurisation": "pasteurization",
  "pasteurise": "pasteurize",
  "pasteurised": "pasteurized",
  "pasteurises": "pasteurizes",
  "pasteurising": "pasteurizing",
  "patronise": "patronize",
  "patronised": "patronized",
  "patronises": "patronizes",
  "patronising": "patronizing",
  "patronisingly": "patronizingly",
  "pedalled": "pedaled",
  "pedalling": "pedaling",
  "pedestrianisation": "pedestrianization",
  "pedestrianise": "pedestrianize",
  "pedestrianised": "pedestrianized",
  "pedestrianises": "pedestrianizes",
  "pedestrianising": "pedestrianizing",
  "penalise": "penalize",
  "penalised": "penalized",
  "penalises": "penalizes",
  "penalising": "penalizing",
  "pencilled": "penciled",
  "pencilling": "penciling",
  "personalise": "personalize",
  "personalised": "personalized",
  "personalises": "personalizes",
  "personalising": "personalizing",
  "pharmacopoeia": "pharmacopeia",
  "pharmacopoeias": "pharmacopeias",
  "philosophise": "philosophize",
  "philosophised": "philosophized",
  "philosophises": "philosophizes",
  "philosophising": "philosophizing",
  "philtre": "filter",
  "philtres": "filters",
  "phoney": "phony",
  "plagiarise": "plagiarize",
  "plagiarised": "plagiarized",
  "plagiarises": "plagiarizes",
  "plagiarising": "plagiarizing",
  "plough": "plow",
  "ploughed": "plowed",
  "ploughing": "plowing",
  "ploughman": "plowman",
  "ploughmen": "plowmen",
  "ploughs": "plows",
  "ploughshare": "plowshare",
  "ploughshares": "plowshares",
  "polarisation": "polarization",
  "polarise": "polarize",
  "polarised": "polarized",
  "polarises": "polarizes",
  "polarising": "polarizing",
  "politicisation": "politicization",
  "politicise": "politicize",
  "politicised": "politicized",
  "politicises": "politicizes",
  "politicising": "politicizing",
  "popularisation": "popularization",
  "popularise": "popularize",
  "popularised": "popularized",
  "popularises": "popularizes",
  "popularising": "popularizing",
  "pouffe": "pouf",
  "pouffes": "poufs",
  "practise": "practice",
  "practised": "practiced",
  "practises": "practices",
  "practising": "practicing",
  "praesidium": "presidium",
  "praesidiums": "presidiums",
  "pressurisation": "pressurization",
  "pressurise": "pressurize",
  "pressurised": "pressurized",
  "pressurises": "pressurizes",
  "pressurising": "pressurizing",
  "pretence": "pretense",
  "pretences": "pretenses",
  "primaeval": "primeval",
  "prioritisation": "prioritization",
  "prioritise": "prioritize",
  "prioritised": "prioritized",
  "prioritises": "prioritizes",
  "prioritising": "prioritizing",
  "privatisation": "privatization",
  "privatisations": "privatizations",
  "privatise": "privatize",
  "privatised": "privatized",
  "privatises": "privatizes",
  "privatising": "privatizing",
  "professionalisation": "professionalization",
  "professionalise": "professionalize",
  "professionalised": "professionalized",
  "professionalises": "professionalizes",
  "professionalising": "professionalizing",
  "programme": "program",
  "programmes": "programs",
  "prologue": "prolog",
  "prologues": "prologs",
  "propagandise": "propagandize",
  "propagandised": "propagandized",
  "propagandises": "propagandizes",
  "propagandising": "propagandizing",
  "proselytise": "proselytize",
  "proselytised": "proselytized",
  "proselytiser": "proselytizer",
  "proselytisers": "proselytizers",
  "proselytises": "proselytizes",
  "proselytising": "proselytizing",
  "psychoanalyse": "psychoanalyze",
  "psychoanalysed": "psychoanalyzed",
  "psychoanalyses": "psychoanalyzes",
  "psychoanalysing": "psychoanalyzing",
  "publicise": "publicize",
  "publicised": "publicized",
  "publicises": "publicizes",
  "publicising": "publicizing",
  "pulverisation": "pulverization",
  "pulverise": "pulverize",
  "pulverised": "pulverized",
  "pulverises": "pulverizes",
  "pulverising": "pulverizing",
  "pummelled": "pummel",
  "pummelling": "pummeled",
  "pyjama": "pajama",
  "pyjamas": "pajamas",
  "pzazz": "pizzazz",
  "quarrelled": "quarreled",
  "quarrelling": "quarreling",
  "radicalise": "radicalize",
  "radicalised": "radicalized",
  "radicalises": "radicalizes",
  "radicalising": "radicalizing",
  "rancour": "rancor",
  "randomise": "randomize",
  "randomised": "randomized",
  "randomises": "randomizes",
  "randomising": "randomizing",
  "rationalisation": "rationalization",
  "rationalisations": "rationalizations",
  "rationalise": "rationalize",
  "rationalised": "rationalized",
  "rationalises": "rationalizes",
  "rationalising": "rationalizing",
  "ravelled": "raveled",
  "ravelling": "raveling",
+
"realisable": "realizable",
|
1307 |
+
"realisation": "realization",
|
1308 |
+
"realisations": "realizations",
|
1309 |
+
"realise": "realize",
|
1310 |
+
"realised": "realized",
|
1311 |
+
"realises": "realizes",
|
1312 |
+
"realising": "realizing",
|
1313 |
+
"recognisable": "recognizable",
|
1314 |
+
"recognisably": "recognizably",
|
1315 |
+
"recognisance": "recognizance",
|
1316 |
+
"recognise": "recognize",
|
1317 |
+
"recognised": "recognized",
|
1318 |
+
"recognises": "recognizes",
|
1319 |
+
"recognising": "recognizing",
|
1320 |
+
"reconnoitre": "reconnoiter",
|
1321 |
+
"reconnoitred": "reconnoitered",
|
1322 |
+
"reconnoitres": "reconnoiters",
|
1323 |
+
"reconnoitring": "reconnoitering",
|
1324 |
+
"refuelled": "refueled",
|
1325 |
+
"refuelling": "refueling",
|
1326 |
+
"regularisation": "regularization",
|
1327 |
+
"regularise": "regularize",
|
1328 |
+
"regularised": "regularized",
|
1329 |
+
"regularises": "regularizes",
|
1330 |
+
"regularising": "regularizing",
|
1331 |
+
"remodelled": "remodeled",
|
1332 |
+
"remodelling": "remodeling",
|
1333 |
+
"remould": "remold",
|
1334 |
+
"remoulded": "remolded",
|
1335 |
+
"remoulding": "remolding",
|
1336 |
+
"remoulds": "remolds",
|
1337 |
+
"reorganisation": "reorganization",
|
1338 |
+
"reorganisations": "reorganizations",
|
1339 |
+
"reorganise": "reorganize",
|
1340 |
+
"reorganised": "reorganized",
|
1341 |
+
"reorganises": "reorganizes",
|
1342 |
+
"reorganising": "reorganizing",
|
1343 |
+
"revelled": "reveled",
|
1344 |
+
"reveller": "reveler",
|
1345 |
+
"revellers": "revelers",
|
1346 |
+
"revelling": "reveling",
|
1347 |
+
"revitalise": "revitalize",
|
1348 |
+
"revitalised": "revitalized",
|
1349 |
+
"revitalises": "revitalizes",
|
1350 |
+
"revitalising": "revitalizing",
|
1351 |
+
"revolutionise": "revolutionize",
|
1352 |
+
"revolutionised": "revolutionized",
|
1353 |
+
"revolutionises": "revolutionizes",
|
1354 |
+
"revolutionising": "revolutionizing",
|
1355 |
+
"rhapsodise": "rhapsodize",
|
1356 |
+
"rhapsodised": "rhapsodized",
|
1357 |
+
"rhapsodises": "rhapsodizes",
|
1358 |
+
"rhapsodising": "rhapsodizing",
|
1359 |
+
"rigour": "rigor",
|
1360 |
+
"rigours": "rigors",
|
1361 |
+
"ritualised": "ritualized",
|
1362 |
+
"rivalled": "rivaled",
|
1363 |
+
"rivalling": "rivaling",
|
1364 |
+
"romanticise": "romanticize",
|
1365 |
+
"romanticised": "romanticized",
|
1366 |
+
"romanticises": "romanticizes",
|
1367 |
+
"romanticising": "romanticizing",
|
1368 |
+
"rumour": "rumor",
|
1369 |
+
"rumoured": "rumored",
|
1370 |
+
"rumours": "rumors",
|
1371 |
+
"sabre": "saber",
|
1372 |
+
"sabres": "sabers",
|
1373 |
+
"saltpetre": "saltpeter",
|
1374 |
+
"sanitise": "sanitize",
|
1375 |
+
"sanitised": "sanitized",
|
1376 |
+
"sanitises": "sanitizes",
|
1377 |
+
"sanitising": "sanitizing",
|
1378 |
+
"satirise": "satirize",
|
1379 |
+
"satirised": "satirized",
|
1380 |
+
"satirises": "satirizes",
|
1381 |
+
"satirising": "satirizing",
|
1382 |
+
"saviour": "savior",
|
1383 |
+
"saviours": "saviors",
|
1384 |
+
"savour": "savor",
|
1385 |
+
"savoured": "savored",
|
1386 |
+
"savouries": "savories",
|
1387 |
+
"savouring": "savoring",
|
1388 |
+
"savours": "savors",
|
1389 |
+
"savoury": "savory",
|
1390 |
+
"scandalise": "scandalize",
|
1391 |
+
"scandalised": "scandalized",
|
1392 |
+
"scandalises": "scandalizes",
|
1393 |
+
"scandalising": "scandalizing",
|
1394 |
+
"sceptic": "skeptic",
|
1395 |
+
"sceptical": "skeptical",
|
1396 |
+
"sceptically": "skeptically",
|
1397 |
+
"scepticism": "skepticism",
|
1398 |
+
"sceptics": "skeptics",
|
1399 |
+
"sceptre": "scepter",
|
1400 |
+
"sceptres": "scepters",
|
1401 |
+
"scrutinise": "scrutinize",
|
1402 |
+
"scrutinised": "scrutinized",
|
1403 |
+
"scrutinises": "scrutinizes",
|
1404 |
+
"scrutinising": "scrutinizing",
|
1405 |
+
"secularisation": "secularization",
|
1406 |
+
"secularise": "secularize",
|
1407 |
+
"secularised": "secularized",
|
1408 |
+
"secularises": "secularizes",
|
1409 |
+
"secularising": "secularizing",
|
1410 |
+
"sensationalise": "sensationalize",
|
1411 |
+
"sensationalised": "sensationalized",
|
1412 |
+
"sensationalises": "sensationalizes",
|
1413 |
+
"sensationalising": "sensationalizing",
|
1414 |
+
"sensitise": "sensitize",
|
1415 |
+
"sensitised": "sensitized",
|
1416 |
+
"sensitises": "sensitizes",
|
1417 |
+
"sensitising": "sensitizing",
|
1418 |
+
"sentimentalise": "sentimentalize",
|
1419 |
+
"sentimentalised": "sentimentalized",
|
1420 |
+
"sentimentalises": "sentimentalizes",
|
1421 |
+
"sentimentalising": "sentimentalizing",
|
1422 |
+
"sepulchre": "sepulcher",
|
1423 |
+
"sepulchres": "sepulchers",
|
1424 |
+
"serialisation": "serialization",
|
1425 |
+
"serialisations": "serializations",
|
1426 |
+
"serialise": "serialize",
|
1427 |
+
"serialised": "serialized",
|
1428 |
+
"serialises": "serializes",
|
1429 |
+
"serialising": "serializing",
|
1430 |
+
"sermonise": "sermonize",
|
1431 |
+
"sermonised": "sermonized",
|
1432 |
+
"sermonises": "sermonizes",
|
1433 |
+
"sermonising": "sermonizing",
|
1434 |
+
"sheikh": "sheik",
|
1435 |
+
"shovelled": "shoveled",
|
1436 |
+
"shovelling": "shoveling",
|
1437 |
+
"shrivelled": "shriveled",
|
1438 |
+
"shrivelling": "shriveling",
|
1439 |
+
"signalise": "signalize",
|
1440 |
+
"signalised": "signalized",
|
1441 |
+
"signalises": "signalizes",
|
1442 |
+
"signalising": "signalizing",
|
1443 |
+
"signalled": "signaled",
|
1444 |
+
"signalling": "signaling",
|
1445 |
+
"smoulder": "smolder",
|
1446 |
+
"smouldered": "smoldered",
|
1447 |
+
"smouldering": "smoldering",
|
1448 |
+
"smoulders": "smolders",
|
1449 |
+
"snivelled": "sniveled",
|
1450 |
+
"snivelling": "sniveling",
|
1451 |
+
"snorkelled": "snorkeled",
|
1452 |
+
"snorkelling": "snorkeling",
|
1453 |
+
"snowplough": "snowplow",
|
1454 |
+
"snowploughs": "snowplow",
|
1455 |
+
"socialisation": "socialization",
|
1456 |
+
"socialise": "socialize",
|
1457 |
+
"socialised": "socialized",
|
1458 |
+
"socialises": "socializes",
|
1459 |
+
"socialising": "socializing",
|
1460 |
+
"sodomise": "sodomize",
|
1461 |
+
"sodomised": "sodomized",
|
1462 |
+
"sodomises": "sodomizes",
|
1463 |
+
"sodomising": "sodomizing",
|
1464 |
+
"solemnise": "solemnize",
|
1465 |
+
"solemnised": "solemnized",
|
1466 |
+
"solemnises": "solemnizes",
|
1467 |
+
"solemnising": "solemnizing",
|
1468 |
+
"sombre": "somber",
|
1469 |
+
"specialisation": "specialization",
|
1470 |
+
"specialisations": "specializations",
|
1471 |
+
"specialise": "specialize",
|
1472 |
+
"specialised": "specialized",
|
1473 |
+
"specialises": "specializes",
|
1474 |
+
"specialising": "specializing",
|
1475 |
+
"spectre": "specter",
|
1476 |
+
"spectres": "specters",
|
1477 |
+
"spiralled": "spiraled",
|
1478 |
+
"spiralling": "spiraling",
|
1479 |
+
"splendour": "splendor",
|
1480 |
+
"splendours": "splendors",
|
1481 |
+
"squirrelled": "squirreled",
|
1482 |
+
"squirrelling": "squirreling",
|
1483 |
+
"stabilisation": "stabilization",
|
1484 |
+
"stabilise": "stabilize",
|
1485 |
+
"stabilised": "stabilized",
|
1486 |
+
"stabiliser": "stabilizer",
|
1487 |
+
"stabilisers": "stabilizers",
|
1488 |
+
"stabilises": "stabilizes",
|
1489 |
+
"stabilising": "stabilizing",
|
1490 |
+
"standardisation": "standardization",
|
1491 |
+
"standardise": "standardize",
|
1492 |
+
"standardised": "standardized",
|
1493 |
+
"standardises": "standardizes",
|
1494 |
+
"standardising": "standardizing",
|
1495 |
+
"stencilled": "stenciled",
|
1496 |
+
"stencilling": "stenciling",
|
1497 |
+
"sterilisation": "sterilization",
|
1498 |
+
"sterilisations": "sterilizations",
|
1499 |
+
"sterilise": "sterilize",
|
1500 |
+
"sterilised": "sterilized",
|
1501 |
+
"steriliser": "sterilizer",
|
1502 |
+
"sterilisers": "sterilizers",
|
1503 |
+
"sterilises": "sterilizes",
|
1504 |
+
"sterilising": "sterilizing",
|
1505 |
+
"stigmatisation": "stigmatization",
|
1506 |
+
"stigmatise": "stigmatize",
|
1507 |
+
"stigmatised": "stigmatized",
|
1508 |
+
"stigmatises": "stigmatizes",
|
1509 |
+
"stigmatising": "stigmatizing",
|
1510 |
+
"storey": "story",
|
1511 |
+
"storeys": "stories",
|
1512 |
+
"subsidisation": "subsidization",
|
1513 |
+
"subsidise": "subsidize",
|
1514 |
+
"subsidised": "subsidized",
|
1515 |
+
"subsidiser": "subsidizer",
|
1516 |
+
"subsidisers": "subsidizers",
|
1517 |
+
"subsidises": "subsidizes",
|
1518 |
+
"subsidising": "subsidizing",
|
1519 |
+
"succour": "succor",
|
1520 |
+
"succoured": "succored",
|
1521 |
+
"succouring": "succoring",
|
1522 |
+
"succours": "succors",
|
1523 |
+
"sulphate": "sulfate",
|
1524 |
+
"sulphates": "sulfates",
|
1525 |
+
"sulphide": "sulfide",
|
1526 |
+
"sulphides": "sulfides",
|
1527 |
+
"sulphur": "sulfur",
|
1528 |
+
"sulphurous": "sulfurous",
|
1529 |
+
"summarise": "summarize",
|
1530 |
+
"summarised": "summarized",
|
1531 |
+
"summarises": "summarizes",
|
1532 |
+
"summarising": "summarizing",
|
1533 |
+
"swivelled": "swiveled",
|
1534 |
+
"swivelling": "swiveling",
|
1535 |
+
"symbolise": "symbolize",
|
1536 |
+
"symbolised": "symbolized",
|
1537 |
+
"symbolises": "symbolizes",
|
1538 |
+
"symbolising": "symbolizing",
|
1539 |
+
"sympathise": "sympathize",
|
1540 |
+
"sympathised": "sympathized",
|
1541 |
+
"sympathiser": "sympathizer",
|
1542 |
+
"sympathisers": "sympathizers",
|
1543 |
+
"sympathises": "sympathizes",
|
1544 |
+
"sympathising": "sympathizing",
|
1545 |
+
"synchronisation": "synchronization",
|
1546 |
+
"synchronise": "synchronize",
|
1547 |
+
"synchronised": "synchronized",
|
1548 |
+
"synchronises": "synchronizes",
|
1549 |
+
"synchronising": "synchronizing",
|
1550 |
+
"synthesise": "synthesize",
|
1551 |
+
"synthesised": "synthesized",
|
1552 |
+
"synthesiser": "synthesizer",
|
1553 |
+
"synthesisers": "synthesizers",
|
1554 |
+
"synthesises": "synthesizes",
|
1555 |
+
"synthesising": "synthesizing",
|
1556 |
+
"syphon": "siphon",
|
1557 |
+
"syphoned": "siphoned",
|
1558 |
+
"syphoning": "siphoning",
|
1559 |
+
"syphons": "siphons",
|
1560 |
+
"systematisation": "systematization",
|
1561 |
+
"systematise": "systematize",
|
1562 |
+
"systematised": "systematized",
|
1563 |
+
"systematises": "systematizes",
|
1564 |
+
"systematising": "systematizing",
|
1565 |
+
"tantalise": "tantalize",
|
1566 |
+
"tantalised": "tantalized",
|
1567 |
+
"tantalises": "tantalizes",
|
1568 |
+
"tantalising": "tantalizing",
|
1569 |
+
"tantalisingly": "tantalizingly",
|
1570 |
+
"tasselled": "tasseled",
|
1571 |
+
"technicolour": "technicolor",
|
1572 |
+
"temporise": "temporize",
|
1573 |
+
"temporised": "temporized",
|
1574 |
+
"temporises": "temporizes",
|
1575 |
+
"temporising": "temporizing",
|
1576 |
+
"tenderise": "tenderize",
|
1577 |
+
"tenderised": "tenderized",
|
1578 |
+
"tenderises": "tenderizes",
|
1579 |
+
"tenderising": "tenderizing",
|
1580 |
+
"terrorise": "terrorize",
|
1581 |
+
"terrorised": "terrorized",
|
1582 |
+
"terrorises": "terrorizes",
|
1583 |
+
"terrorising": "terrorizing",
|
1584 |
+
"theatre": "theater",
|
1585 |
+
"theatregoer": "theatergoer",
|
1586 |
+
"theatregoers": "theatergoers",
|
1587 |
+
"theatres": "theaters",
|
1588 |
+
"theorise": "theorize",
|
1589 |
+
"theorised": "theorized",
|
1590 |
+
"theorises": "theorizes",
|
1591 |
+
"theorising": "theorizing",
|
1592 |
+
"tonne": "ton",
|
1593 |
+
"tonnes": "tons",
|
1594 |
+
"towelled": "toweled",
|
1595 |
+
"towelling": "toweling",
|
1596 |
+
"toxaemia": "toxemia",
|
1597 |
+
"tranquillise": "tranquilize",
|
1598 |
+
"tranquillised": "tranquilized",
|
1599 |
+
"tranquilliser": "tranquilizer",
|
1600 |
+
"tranquillisers": "tranquilizers",
|
1601 |
+
"tranquillises": "tranquilizes",
|
1602 |
+
"tranquillising": "tranquilizing",
|
1603 |
+
"tranquillity": "tranquility",
|
1604 |
+
"tranquillize": "tranquilize",
|
1605 |
+
"tranquillized": "tranquilized",
|
1606 |
+
"tranquillizer": "tranquilizer",
|
1607 |
+
"tranquillizers": "tranquilizers",
|
1608 |
+
"tranquillizes": "tranquilizes",
|
1609 |
+
"tranquillizing": "tranquilizing",
|
1610 |
+
"tranquilly": "tranquility",
|
1611 |
+
"transistorised": "transistorized",
|
1612 |
+
"traumatise": "traumatize",
|
1613 |
+
"traumatised": "traumatized",
|
1614 |
+
"traumatises": "traumatizes",
|
1615 |
+
"traumatising": "traumatizing",
|
1616 |
+
"travelled": "traveled",
|
1617 |
+
"traveller": "traveler",
|
1618 |
+
"travellers": "travelers",
|
1619 |
+
"travelling": "traveling",
|
1620 |
+
"travelog": "travelogue",
|
1621 |
+
"travelogs": "travelogues",
|
1622 |
+
"trialled": "trialed",
|
1623 |
+
"trialling": "trialing",
|
1624 |
+
"tricolour": "tricolor",
|
1625 |
+
"tricolours": "tricolors",
|
1626 |
+
"trivialise": "trivialize",
|
1627 |
+
"trivialised": "trivialized",
|
1628 |
+
"trivialises": "trivializes",
|
1629 |
+
"trivialising": "trivializing",
|
1630 |
+
"tumour": "tumor",
|
1631 |
+
"tumours": "tumors",
|
1632 |
+
"tunnelled": "tunneled",
|
1633 |
+
"tunnelling": "tunneling",
|
1634 |
+
"tyrannise": "tyrannize",
|
1635 |
+
"tyrannised": "tyrannized",
|
1636 |
+
"tyrannises": "tyrannizes",
|
1637 |
+
"tyrannising": "tyrannizing",
|
1638 |
+
"tyre": "tire",
|
1639 |
+
"tyres": "tires",
|
1640 |
+
"unauthorised": "unauthorized",
|
1641 |
+
"uncivilised": "uncivilized",
|
1642 |
+
"underutilised": "underutilized",
|
1643 |
+
"unequalled": "unequaled",
|
1644 |
+
"unfavourable": "unfavorable",
|
1645 |
+
"unfavourably": "unfavorably",
|
1646 |
+
"unionisation": "unionization",
|
1647 |
+
"unionise": "unionize",
|
1648 |
+
"unionised": "unionized",
|
1649 |
+
"unionises": "unionizes",
|
1650 |
+
"unionising": "unionizing",
|
1651 |
+
"unorganised": "unorganized",
|
1652 |
+
"unravelled": "unraveled",
|
1653 |
+
"unravelling": "unraveling",
|
1654 |
+
"unrecognisable": "unrecognizable",
|
1655 |
+
"unrecognised": "unrecognized",
|
1656 |
+
"unrivalled": "unrivaled",
|
1657 |
+
"unsavoury": "unsavory",
|
1658 |
+
"untrammelled": "untrammeled",
|
1659 |
+
"urbanisation": "urbanization",
|
1660 |
+
"urbanise": "urbanize",
|
1661 |
+
"urbanised": "urbanized",
|
1662 |
+
"urbanises": "urbanizes",
|
1663 |
+
"urbanising": "urbanizing",
|
1664 |
+
"utilisable": "utilizable",
|
1665 |
+
"utilisation": "utilization",
|
1666 |
+
"utilise": "utilize",
|
1667 |
+
"utilised": "utilized",
|
1668 |
+
"utilises": "utilizes",
|
1669 |
+
"utilising": "utilizing",
|
1670 |
+
"valour": "valor",
|
1671 |
+
"vandalise": "vandalize",
|
1672 |
+
"vandalised": "vandalized",
|
1673 |
+
"vandalises": "vandalizes",
|
1674 |
+
"vandalising": "vandalizing",
|
1675 |
+
"vaporisation": "vaporization",
|
1676 |
+
"vaporise": "vaporize",
|
1677 |
+
"vaporised": "vaporized",
|
1678 |
+
"vaporises": "vaporizes",
|
1679 |
+
"vaporising": "vaporizing",
|
1680 |
+
"vapour": "vapor",
|
1681 |
+
"vapours": "vapors",
|
1682 |
+
"verbalise": "verbalize",
|
1683 |
+
"verbalised": "verbalized",
|
1684 |
+
"verbalises": "verbalizes",
|
1685 |
+
"verbalising": "verbalizing",
|
1686 |
+
"victimisation": "victimization",
|
1687 |
+
"victimise": "victimize",
|
1688 |
+
"victimised": "victimized",
|
1689 |
+
"victimises": "victimizes",
|
1690 |
+
"victimising": "victimizing",
|
1691 |
+
"videodisc": "videodisk",
|
1692 |
+
"videodiscs": "videodisks",
|
1693 |
+
"vigour": "vigor",
|
1694 |
+
"visualisation": "visualization",
|
1695 |
+
"visualisations": "visualizations",
|
1696 |
+
"visualise": "visualize",
|
1697 |
+
"visualised": "visualized",
|
1698 |
+
"visualises": "visualizes",
|
1699 |
+
"visualising": "visualizing",
|
1700 |
+
"vocalisation": "vocalization",
|
1701 |
+
"vocalisations": "vocalizations",
|
1702 |
+
"vocalise": "vocalize",
|
1703 |
+
"vocalised": "vocalized",
|
1704 |
+
"vocalises": "vocalizes",
|
1705 |
+
"vocalising": "vocalizing",
|
1706 |
+
"vulcanised": "vulcanized",
|
1707 |
+
"vulgarisation": "vulgarization",
|
1708 |
+
"vulgarise": "vulgarize",
|
1709 |
+
"vulgarised": "vulgarized",
|
1710 |
+
"vulgarises": "vulgarizes",
|
1711 |
+
"vulgarising": "vulgarizing",
|
1712 |
+
"waggon": "wagon",
|
1713 |
+
"waggons": "wagons",
|
1714 |
+
"watercolour": "watercolor",
|
1715 |
+
"watercolours": "watercolors",
|
1716 |
+
"weaselled": "weaseled",
|
1717 |
+
"weaselling": "weaseling",
|
1718 |
+
"westernisation": "westernization",
|
1719 |
+
"westernise": "westernize",
|
1720 |
+
"westernised": "westernized",
|
1721 |
+
"westernises": "westernizes",
|
1722 |
+
"westernising": "westernizing",
|
1723 |
+
"womanise": "womanize",
|
1724 |
+
"womanised": "womanized",
|
1725 |
+
"womaniser": "womanizer",
|
1726 |
+
"womanisers": "womanizers",
|
1727 |
+
"womanises": "womanizes",
|
1728 |
+
"womanising": "womanizing",
|
1729 |
+
"woollen": "woolen",
|
1730 |
+
"woollens": "woolens",
|
1731 |
+
"woollies": "woolies",
|
1732 |
+
"woolly": "wooly",
|
1733 |
+
"worshipped": "worshiped",
|
1734 |
+
"worshipper": "worshiper",
|
1735 |
+
"worshipping": "worshiping",
|
1736 |
+
"yodelled": "yodeled",
|
1737 |
+
"yodelling": "yodeling",
|
1738 |
+
"yoghourt": "yogurt",
|
1739 |
+
"yoghourts": "yogurts",
|
1740 |
+
"yoghurt": "yogurt",
|
1741 |
+
"yoghurts": "yogurts"
|
1742 |
+
}
|
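The mapping above closes out the English spelling normalizer shipped with the checkpoint: British spellings on the left are rewritten to their American forms before word error rate is computed, so spelling variants are not penalized. A minimal sketch of how such a whole-word mapping is typically applied (assuming the repo has been cloned so `normalizer.json` is on disk; the helper name is illustrative, not from this commit):

```python
import json

# Load the British -> American spelling mapping added in this commit.
with open("normalizer.json") as f:
    mapping = json.load(f)

def normalize_spelling(text: str) -> str:
    # Whole-word lookup: words with a British spelling in the mapping are
    # replaced by the American form; everything else passes through unchanged.
    return " ".join(mapping.get(word, word) for word in text.split())

print(normalize_spelling("they practise at the theatre"))
# -> "they practice at the theater"
```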
preprocessor_config.json
ADDED
@@ -0,0 +1,14 @@
+{
+  "chunk_length": 30,
+  "feature_extractor_type": "WhisperFeatureExtractor",
+  "feature_size": 128,
+  "hop_length": 160,
+  "n_fft": 400,
+  "n_samples": 480000,
+  "nb_max_frames": 3000,
+  "padding_side": "right",
+  "padding_value": 0.0,
+  "processor_class": "WhisperProcessor",
+  "return_attention_mask": false,
+  "sampling_rate": 16000
+}
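These values configure the `WhisperFeatureExtractor`: 30-second chunks at 16 kHz (480,000 samples) are converted to 128-bin log-mel spectrograms of 3,000 frames. A quick sanity check, assuming the checkpoint directory has been cloned locally:

```python
import numpy as np
from transformers import WhisperFeatureExtractor

# "." assumes we run from inside the cloned checkpoint directory, so the
# preprocessor_config.json above is picked up.
feature_extractor = WhisperFeatureExtractor.from_pretrained(".")

# 30 s of silence at 16 kHz = 480_000 samples; with hop_length=160 this gives
# nb_max_frames = 3000 mel frames, each with feature_size = 128 bins.
audio = np.zeros(480_000, dtype=np.float32)
features = feature_extractor(audio, sampling_rate=16_000, return_tensors="np")
print(features.input_features.shape)  # (1, 128, 3000)
```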
run.sh
ADDED
@@ -0,0 +1,33 @@
+python run_speech_recognition_seq2seq.py \
+    --model_name_or_path="distil-whisper/distil-large-v3" \
+    --dataset_name="mozilla-foundation/common_voice_16_1" \
+    --dataset_config_name="hi" \
+    --language="hindi" \
+    --train_split_name="train+validation" \
+    --eval_split_name="test" \
+    --max_steps="5000" \
+    --output_dir="./" \
+    --per_device_train_batch_size="32" \
+    --per_device_eval_batch_size="32" \
+    --logging_steps="25" \
+    --learning_rate="1e-4" \
+    --warmup_steps="500" \
+    --evaluation_strategy="steps" \
+    --eval_steps="1000" \
+    --save_strategy="steps" \
+    --save_steps="1000" \
+    --generation_max_length="225" \
+    --preprocessing_num_workers="1" \
+    --dataloader_num_workers="4" \
+    --length_column_name="input_length" \
+    --max_duration_in_seconds="30" \
+    --text_column_name="sentence" \
+    --freeze_feature_encoder="False" \
+    --gradient_checkpointing \
+    --fp16 \
+    --overwrite_output_dir \
+    --do_train \
+    --do_eval \
+    --predict_with_generate \
+    --use_auth_token \
+    --push_to_hub
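This launcher fine-tunes distil-large-v3 on the Hindi split of Common Voice 16.1 for 5,000 steps, evaluating and checkpointing every 1,000 steps, and pushes the result to the Hub. Once a checkpoint is up, it can be used with the standard ASR pipeline; a sketch, where the repo id and audio file are placeholders not confirmed by this commit:

```python
from transformers import pipeline

asr = pipeline(
    "automatic-speech-recognition",
    model="sanchit-gandhi/distil-large-v3-hi-ft",  # placeholder repo id
)
# Force Hindi transcription, matching the fine-tuning setup above.
result = asr(
    "sample_hi.wav",  # placeholder audio file
    generate_kwargs={"language": "hindi", "task": "transcribe"},
)
print(result["text"])
```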
run_speech_recognition_seq2seq.py
ADDED
@@ -0,0 +1,627 @@
+#!/usr/bin/env python
+# coding=utf-8
+# Copyright 2021 The HuggingFace Team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Fine-tuning the library models for sequence to sequence speech recognition.
+"""
+# You can also adapt this script on your own sequence to sequence speech
+# recognition task. Pointers for this are left as comments.
+
+import logging
+import os
+import sys
+import warnings
+from dataclasses import dataclass, field
+from typing import Any, Dict, List, Optional, Union
+
+import datasets
+import evaluate
+import torch
+from datasets import DatasetDict, load_dataset
+
+import transformers
+from transformers import (
+    AutoConfig,
+    AutoFeatureExtractor,
+    AutoModelForSpeechSeq2Seq,
+    AutoProcessor,
+    AutoTokenizer,
+    HfArgumentParser,
+    Seq2SeqTrainer,
+    Seq2SeqTrainingArguments,
+    set_seed,
+)
+from transformers.trainer_utils import get_last_checkpoint, is_main_process
+from transformers.utils import check_min_version, send_example_telemetry
+from transformers.utils.versions import require_version
+
+
+# Will error if the minimal version of Transformers is not installed. Remove at your own risks.
+check_min_version("4.40.0.dev0")
+
+require_version("datasets>=1.18.0", "To fix: pip install -r examples/pytorch/speech-recognition/requirements.txt")
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class ModelArguments:
+    """
+    Arguments pertaining to which model/config/tokenizer we are going to fine-tune from.
+    """
+
+    model_name_or_path: str = field(
+        metadata={"help": "Path to pretrained model or model identifier from huggingface.co/models"}
+    )
+    config_name: Optional[str] = field(
+        default=None, metadata={"help": "Pretrained config name or path if not the same as model_name"}
+    )
+    tokenizer_name: Optional[str] = field(
+        default=None, metadata={"help": "Pretrained tokenizer name or path if not the same as model_name"}
+    )
+    feature_extractor_name: Optional[str] = field(
+        default=None, metadata={"help": "feature extractor name or path if not the same as model_name"}
+    )
+    cache_dir: Optional[str] = field(
+        default=None,
+        metadata={"help": "Where to store the pretrained models downloaded from huggingface.co"},
+    )
+    use_fast_tokenizer: bool = field(
+        default=True,
+        metadata={"help": "Whether to use one of the fast tokenizer (backed by the tokenizers library) or not."},
+    )
+    model_revision: str = field(
+        default="main",
+        metadata={"help": "The specific model version to use (can be a branch name, tag name or commit id)."},
+    )
+    token: str = field(
+        default=None,
+        metadata={
+            "help": (
+                "The token to use as HTTP bearer authorization for remote files. If not specified, will use the token "
+                "generated when running `huggingface-cli login` (stored in `~/.huggingface`)."
+            )
+        },
+    )
+    use_auth_token: bool = field(
+        default=None,
+        metadata={
+            "help": "The `use_auth_token` argument is deprecated and will be removed in v4.34. Please use `token` instead."
+        },
+    )
+    trust_remote_code: bool = field(
+        default=False,
+        metadata={
+            "help": (
+                "Whether or not to allow for custom models defined on the Hub in their own modeling files. This option "
+                "should only be set to `True` for repositories you trust and in which you have read the code, as it will "
+                "execute code present on the Hub on your local machine."
+            )
+        },
+    )
+    freeze_feature_encoder: bool = field(
+        default=True, metadata={"help": "Whether to freeze the feature encoder layers of the model."}
+    )
+    freeze_encoder: bool = field(
+        default=False, metadata={"help": "Whether to freeze the entire encoder of the seq2seq model."}
+    )
+    forced_decoder_ids: List[List[int]] = field(
+        default=None,
+        metadata={
+            "help": (
+                "A list of pairs of integers which indicates a mapping from generation indices to token indices "
+                "that will be forced before sampling. For example, [[0, 123]] means the first generated token "
+                "will always be a token of index 123."
+            )
+        },
+    )
+    suppress_tokens: List[int] = field(
+        default=None, metadata={"help": "A list of tokens that will be suppressed at generation."}
+    )
+    apply_spec_augment: bool = field(
+        default=False,
+        metadata={
+            "help": "Whether to apply *SpecAugment* data augmentation to the input features. This is currently only relevant for Wav2Vec2, HuBERT, WavLM and Whisper models."
+        },
+    )
+
+
+@dataclass
+class DataTrainingArguments:
+    """
+    Arguments pertaining to what data we are going to input our model for training and eval.
+    """
+
+    dataset_name: str = field(
+        default=None, metadata={"help": "The name of the dataset to use (via the datasets library)."}
+    )
+    dataset_config_name: Optional[str] = field(
+        default=None, metadata={"help": "The configuration name of the dataset to use (via the datasets library)."}
+    )
+    overwrite_cache: bool = field(
+        default=False, metadata={"help": "Overwrite the cached training and evaluation sets"}
+    )
+    preprocessing_num_workers: Optional[int] = field(
+        default=None,
+        metadata={"help": "The number of processes to use for the preprocessing."},
+    )
+    max_train_samples: Optional[int] = field(
+        default=None,
+        metadata={
+            "help": (
+                "For debugging purposes or quicker training, truncate the number of training examples to this "
+                "value if set."
+            )
+        },
+    )
+    max_eval_samples: Optional[int] = field(
+        default=None,
+        metadata={
+            "help": (
+                "For debugging purposes or quicker training, truncate the number of evaluation examples to this "
+                "value if set."
+            )
+        },
+    )
+    audio_column_name: str = field(
+        default="audio",
+        metadata={"help": "The name of the dataset column containing the audio data. Defaults to 'audio'"},
+    )
+    text_column_name: str = field(
+        default="text",
+        metadata={"help": "The name of the dataset column containing the text data. Defaults to 'text'"},
+    )
+    max_duration_in_seconds: float = field(
+        default=20.0,
+        metadata={
+            "help": (
+                "Truncate audio files that are longer than `max_duration_in_seconds` seconds to"
+                " 'max_duration_in_seconds`"
+            )
+        },
+    )
+    min_duration_in_seconds: float = field(
+        default=0.0, metadata={"help": "Filter audio files that are shorter than `min_duration_in_seconds` seconds"}
+    )
+    preprocessing_only: bool = field(
+        default=False,
+        metadata={
+            "help": (
+                "Whether to only do data preprocessing and skip training. This is especially useful when data"
+                " preprocessing errors out in distributed training due to timeout. In this case, one should run the"
+                " preprocessing in a non-distributed setup with `preprocessing_only=True` so that the cached datasets"
+                " can consequently be loaded in distributed training"
+            )
+        },
+    )
+    train_split_name: str = field(
+        default="train",
+        metadata={
+            "help": "The name of the training data set split to use (via the datasets library). Defaults to 'train'"
+        },
+    )
+    eval_split_name: str = field(
+        default="test",
+        metadata={
+            "help": "The name of the training data set split to use (via the datasets library). Defaults to 'train'"
+        },
+    )
+    do_lower_case: bool = field(
+        default=True,
+        metadata={"help": "Whether the target text should be lower cased."},
+    )
+    language: str = field(
+        default=None,
+        metadata={
+            "help": (
+                "Language for multilingual fine-tuning. This argument should be set for multilingual fine-tuning "
+                "only. For English speech recognition, it should be set to `None`."
+            )
+        },
+    )
+    task: str = field(
+        default="transcribe",
+        metadata={"help": "Task, either `transcribe` for speech recognition or `translate` for speech translation."},
+    )
+
+
+@dataclass
+class DataCollatorSpeechSeq2SeqWithPadding:
+    """
+    Data collator that will dynamically pad the inputs received.
+    Args:
+        processor ([`WhisperProcessor`])
+            The processor used for processing the data.
+        decoder_start_token_id (`int`)
+            The begin-of-sentence of the decoder.
+        forward_attention_mask (`bool`)
+            Whether to return attention_mask.
+    """
+
+    processor: Any
+    decoder_start_token_id: int
+    forward_attention_mask: bool
+
+    def __call__(self, features: List[Dict[str, Union[List[int], torch.Tensor]]]) -> Dict[str, torch.Tensor]:
+        # split inputs and labels since they have to be of different lengths and need
+        # different padding methods
+        model_input_name = self.processor.model_input_names[0]
+        input_features = [{model_input_name: feature[model_input_name]} for feature in features]
+        label_features = [{"input_ids": feature["labels"]} for feature in features]
+
+        batch = self.processor.feature_extractor.pad(input_features, return_tensors="pt")
+
+        if self.forward_attention_mask:
+            batch["attention_mask"] = torch.LongTensor([feature["attention_mask"] for feature in features])
+
+        labels_batch = self.processor.tokenizer.pad(label_features, return_tensors="pt")
+
+        # replace padding with -100 to ignore loss correctly
+        labels = labels_batch["input_ids"].masked_fill(labels_batch.attention_mask.ne(1), -100)
+
+        # if bos token is appended in previous tokenization step,
+        # cut bos token here as it's append later anyways
+        if (labels[:, 0] == self.decoder_start_token_id).all().cpu().item():
+            labels = labels[:, 1:]
+
+        batch["labels"] = labels
+
+        return batch
+
+
+def main():
+    # 1. Parse input arguments
+    # See all possible arguments in src/transformers/training_args.py
+    # or by passing the --help flag to this script.
+    # We now keep distinct sets of args, for a cleaner separation of concerns.
+    parser = HfArgumentParser((ModelArguments, DataTrainingArguments, Seq2SeqTrainingArguments))
+
+    if len(sys.argv) == 2 and sys.argv[1].endswith(".json"):
+        # If we pass only one argument to the script and it's the path to a json file,
+        # let's parse it to get our arguments.
+        model_args, data_args, training_args = parser.parse_json_file(json_file=os.path.abspath(sys.argv[1]))
+    else:
+        model_args, data_args, training_args = parser.parse_args_into_dataclasses()
+
+    if model_args.use_auth_token is not None:
+        warnings.warn(
+            "The `use_auth_token` argument is deprecated and will be removed in v4.34. Please use `token` instead.",
+            FutureWarning,
+        )
+        if model_args.token is not None:
+            raise ValueError("`token` and `use_auth_token` are both specified. Please set only the argument `token`.")
+        model_args.token = model_args.use_auth_token
+
+    # Sending telemetry. Tracking the example usage helps us better allocate resources to maintain them. The
+    # information sent is the one passed as arguments along with your Python/PyTorch versions.
+    send_example_telemetry("run_speech_recognition_seq2seq", model_args, data_args)
+
+    # 2. Setup logging
+    logging.basicConfig(
+        format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
+        datefmt="%m/%d/%Y %H:%M:%S",
+        handlers=[logging.StreamHandler(sys.stdout)],
+    )
+    log_level = training_args.get_process_log_level()
+    logger.setLevel(log_level)
+    datasets.utils.logging.set_verbosity(log_level)
+    transformers.utils.logging.set_verbosity(log_level)
+    transformers.utils.logging.enable_default_handler()
+    transformers.utils.logging.enable_explicit_format()
+
+    logger.setLevel(logging.INFO if is_main_process(training_args.local_rank) else logging.WARN)
+
+    # Log on each process the small summary:
+    logger.warning(
+        f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}, "
+        f"distributed training: {training_args.parallel_mode.value == 'distributed'}, 16-bits training: {training_args.fp16}"
+    )
+    logger.info(f"Training/evaluation parameters {training_args}")
+
+    # Set the verbosity to info of the Transformers logger (on main process only):
+    if is_main_process(training_args.local_rank):
+        transformers.utils.logging.set_verbosity_info()
+    logger.info("Training/evaluation parameters %s", training_args)
+
+    # 3. Detecting last checkpoint and eventually continue from last checkpoint
+    last_checkpoint = None
+    if os.path.isdir(training_args.output_dir) and training_args.do_train and not training_args.overwrite_output_dir:
+        last_checkpoint = get_last_checkpoint(training_args.output_dir)
+        if last_checkpoint is None and len(os.listdir(training_args.output_dir)) > 0:
+            raise ValueError(
+                f"Output directory ({training_args.output_dir}) already exists and is not empty. "
+                "Use --overwrite_output_dir to overcome."
+            )
+        elif last_checkpoint is not None and training_args.resume_from_checkpoint is None:
+            logger.info(
+                f"Checkpoint detected, resuming training at {last_checkpoint}. To avoid this behavior, change "
+                "the `--output_dir` or add `--overwrite_output_dir` to train from scratch."
+            )
+
+    # Set seed before initializing model.
+    set_seed(training_args.seed)
+
+    # 4. Load dataset
+    raw_datasets = DatasetDict()
+
+    if training_args.do_train:
+        raw_datasets["train"] = load_dataset(
+            data_args.dataset_name,
+            data_args.dataset_config_name,
+            split=data_args.train_split_name,
+            cache_dir=model_args.cache_dir,
+            token=model_args.token,
+        )
+
+    if training_args.do_eval:
+        raw_datasets["eval"] = load_dataset(
+            data_args.dataset_name,
+            data_args.dataset_config_name,
+            split=data_args.eval_split_name,
+            cache_dir=model_args.cache_dir,
+            token=model_args.token,
+        )
+
+    if data_args.audio_column_name not in next(iter(raw_datasets.values())).column_names:
+        raise ValueError(
+            f"--audio_column_name '{data_args.audio_column_name}' not found in dataset '{data_args.dataset_name}'. "
+            "Make sure to set `--audio_column_name` to the correct audio column - one of "
+            f"{', '.join(next(iter(raw_datasets.values())).column_names)}."
+        )
+
+    if data_args.text_column_name not in next(iter(raw_datasets.values())).column_names:
+        raise ValueError(
+            f"--text_column_name {data_args.text_column_name} not found in dataset '{data_args.dataset_name}'. "
+            "Make sure to set `--text_column_name` to the correct text column - one of "
+            f"{', '.join(next(iter(raw_datasets.values())).column_names)}."
+        )
+
+    # 5. Load pretrained model, tokenizer, and feature extractor
+    #
+    # Distributed training:
+    # The .from_pretrained methods guarantee that only one local process can concurrently
+    config = AutoConfig.from_pretrained(
+        model_args.config_name if model_args.config_name else model_args.model_name_or_path,
+        cache_dir=model_args.cache_dir,
+        revision=model_args.model_revision,
+        token=model_args.token,
+        trust_remote_code=model_args.trust_remote_code,
+    )
+
+    config.update({"forced_decoder_ids": model_args.forced_decoder_ids, "suppress_tokens": model_args.suppress_tokens})
+
+    # SpecAugment for whisper models
+    if getattr(config, "model_type", None) == "whisper":
+        config.update({"apply_spec_augment": model_args.apply_spec_augment})
+
+    feature_extractor = AutoFeatureExtractor.from_pretrained(
+        model_args.feature_extractor_name if model_args.feature_extractor_name else model_args.model_name_or_path,
+        cache_dir=model_args.cache_dir,
+        revision=model_args.model_revision,
+        token=model_args.token,
+        trust_remote_code=model_args.trust_remote_code,
+    )
+    tokenizer = AutoTokenizer.from_pretrained(
+        model_args.tokenizer_name if model_args.tokenizer_name else model_args.model_name_or_path,
+        cache_dir=model_args.cache_dir,
+        use_fast=model_args.use_fast_tokenizer,
+        revision=model_args.model_revision,
+        token=model_args.token,
+        trust_remote_code=model_args.trust_remote_code,
+    )
+    model = AutoModelForSpeechSeq2Seq.from_pretrained(
+        model_args.model_name_or_path,
+        config=config,
+        cache_dir=model_args.cache_dir,
+        revision=model_args.model_revision,
+        token=model_args.token,
+        trust_remote_code=model_args.trust_remote_code,
+    )
+
+    if model.config.decoder_start_token_id is None:
+        raise ValueError("Make sure that `config.decoder_start_token_id` is correctly defined")
+
+    if model_args.freeze_feature_encoder:
+        model.freeze_feature_encoder()
+
+    if model_args.freeze_encoder:
+        model.freeze_encoder()
+        model.model.encoder.gradient_checkpointing = False
+
+    if data_args.language is not None:
+        # We only need to set the task id when the language is specified (i.e. in a multilingual setting)
+        tokenizer.set_prefix_tokens(language=data_args.language, task=data_args.task)
+        model.generation_config.language = data_args.language
+        model.generation_config.task = data_args.task
+
+    # 6. Resample speech dataset if necessary
+    dataset_sampling_rate = next(iter(raw_datasets.values())).features[data_args.audio_column_name].sampling_rate
+    if dataset_sampling_rate != feature_extractor.sampling_rate:
+        raw_datasets = raw_datasets.cast_column(
+            data_args.audio_column_name, datasets.features.Audio(sampling_rate=feature_extractor.sampling_rate)
+        )
+
+    # 7. Preprocessing the datasets.
+    # We need to read the audio files as arrays and tokenize the targets.
+    max_input_length = data_args.max_duration_in_seconds * feature_extractor.sampling_rate
+    min_input_length = data_args.min_duration_in_seconds * feature_extractor.sampling_rate
+    audio_column_name = data_args.audio_column_name
+    num_workers = data_args.preprocessing_num_workers
+    text_column_name = data_args.text_column_name
+    model_input_name = feature_extractor.model_input_names[0]
+    do_lower_case = data_args.do_lower_case
+    # if SpecAugment is used for whisper models, return attention_mask to guide the mask along time axis
+    forward_attention_mask = (
+        getattr(config, "model_type", None) == "whisper"
+        and getattr(config, "apply_spec_augment", False)
+        and getattr(config, "mask_time_prob", 0) > 0
+    )
+
+    if data_args.max_train_samples is not None:
+        raw_datasets["train"] = raw_datasets["train"].select(range(data_args.max_train_samples))
+
+    if data_args.max_eval_samples is not None:
+        raw_datasets["eval"] = raw_datasets["eval"].select(range(data_args.max_eval_samples))
+
+    def prepare_dataset(batch):
+        # process audio
+        sample = batch[audio_column_name]
+        inputs = feature_extractor(
+            sample["array"], sampling_rate=sample["sampling_rate"], return_attention_mask=forward_attention_mask
+        )
+        # process audio length
+        batch[model_input_name] = inputs.get(model_input_name)[0]
+        batch["input_length"] = len(sample["array"])
+        if forward_attention_mask:
+            batch["attention_mask"] = inputs.get("attention_mask")[0]
+
+        # process targets
+        input_str = batch[text_column_name].lower() if do_lower_case else batch[text_column_name]
+        batch["labels"] = tokenizer(input_str).input_ids
+        return batch
+
+    with training_args.main_process_first(desc="dataset map pre-processing"):
+        vectorized_datasets = raw_datasets.map(
+            prepare_dataset,
+            remove_columns=next(iter(raw_datasets.values())).column_names,
+            num_proc=data_args.preprocessing_num_workers,
+            desc="preprocess train dataset",
+        )
+
+    # filter data that is shorter than min_input_length or longer than
+    # max_input_length
+    def is_audio_in_length_range(length):
+        return length > min_input_length and length < max_input_length
+
+    vectorized_datasets = vectorized_datasets.filter(
+        is_audio_in_length_range,
+        num_proc=num_workers,
+        input_columns=["input_length"],
+    )
+
+    # for large datasets it is advised to run the preprocessing on a
+    # single machine first with `args.preprocessing_only` since there will mostly likely
+    # be a timeout when running the script in distributed mode.
+    # In a second step `args.preprocessing_only` can then be set to `False` to load the
+    # cached dataset
+    if data_args.preprocessing_only:
+        cache = {k: v.cache_files for k, v in vectorized_datasets.items()}
+        logger.info(f"Data preprocessing finished. Files cached at {cache}.")
+        return
+
+    # 8. Load Metric
+    metric = evaluate.load("wer", cache_dir=model_args.cache_dir)
+
+    def compute_metrics(pred):
+        pred_ids = pred.predictions
+
+        pred.label_ids[pred.label_ids == -100] = tokenizer.pad_token_id
+
+        pred_str = tokenizer.batch_decode(pred_ids, skip_special_tokens=True)
+        # we do not want to group tokens when computing the metrics
+        label_str = tokenizer.batch_decode(pred.label_ids, skip_special_tokens=True)
+
+        wer = metric.compute(predictions=pred_str, references=label_str)
+
+        return {"wer": wer}
+
+    # 9. Create a single speech processor
+    # make sure all processes wait until data is saved
+    with training_args.main_process_first():
+        # only the main process saves them
+        if is_main_process(training_args.local_rank):
+            # save feature extractor, tokenizer and config
+            feature_extractor.save_pretrained(training_args.output_dir)
+            tokenizer.save_pretrained(training_args.output_dir)
+            config.save_pretrained(training_args.output_dir)
+
+    processor = AutoProcessor.from_pretrained(training_args.output_dir)
+
+    # 10. Define data collator
+    data_collator = DataCollatorSpeechSeq2SeqWithPadding(
+        processor=processor,
+        decoder_start_token_id=model.config.decoder_start_token_id,
+        forward_attention_mask=forward_attention_mask,
+    )
+
+    # 11. Initialize Trainer
+    trainer = Seq2SeqTrainer(
+        model=model,
+        args=training_args,
+        train_dataset=vectorized_datasets["train"] if training_args.do_train else None,
+        eval_dataset=vectorized_datasets["eval"] if training_args.do_eval else None,
+        tokenizer=feature_extractor,
+        data_collator=data_collator,
+        compute_metrics=compute_metrics if training_args.predict_with_generate else None,
+    )
+
+    # 12. Training
+    if training_args.do_train:
+        checkpoint = None
+        if training_args.resume_from_checkpoint is not None:
+            checkpoint = training_args.resume_from_checkpoint
+        elif last_checkpoint is not None:
+            checkpoint = last_checkpoint
+        train_result = trainer.train(resume_from_checkpoint=checkpoint)
+        trainer.save_model()  # Saves the feature extractor too for easy upload
+
+        metrics = train_result.metrics
+        max_train_samples = (
+            data_args.max_train_samples
+            if data_args.max_train_samples is not None
+            else len(vectorized_datasets["train"])
+        )
+        metrics["train_samples"] = min(max_train_samples, len(vectorized_datasets["train"]))
+        trainer.log_metrics("train", metrics)
+        trainer.save_metrics("train", metrics)
+        trainer.save_state()
+
+    # 13. Evaluation
+    results = {}
+    if training_args.do_eval:
+        logger.info("*** Evaluate ***")
+        metrics = trainer.evaluate(
+            metric_key_prefix="eval",
+            max_length=training_args.generation_max_length,
+            num_beams=training_args.generation_num_beams,
+        )
+        max_eval_samples = (
+            data_args.max_eval_samples if data_args.max_eval_samples is not None else len(vectorized_datasets["eval"])
+        )
+        metrics["eval_samples"] = min(max_eval_samples, len(vectorized_datasets["eval"]))
+
+        trainer.log_metrics("eval", metrics)
+        trainer.save_metrics("eval", metrics)
+
+    # 14. Write Training Stats
+    kwargs = {"finetuned_from": model_args.model_name_or_path, "tasks": "automatic-speech-recognition"}
+    if data_args.dataset_name is not None:
+        kwargs["dataset_tags"] = data_args.dataset_name
+        if data_args.dataset_config_name is not None:
+            kwargs["dataset_args"] = data_args.dataset_config_name
+            kwargs["dataset"] = f"{data_args.dataset_name} {data_args.dataset_config_name}"
+        else:
+            kwargs["dataset"] = data_args.dataset_name
+
+    if training_args.push_to_hub:
+        trainer.push_to_hub(**kwargs)
+    else:
+        trainer.create_model_card(**kwargs)
+
+    return results
+
+
+if __name__ == "__main__":
+    main()
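To see what `DataCollatorSpeechSeq2SeqWithPadding` produces, here is a minimal sketch that mirrors its padding logic on two dummy examples (the examples and the base checkpoint id are illustrative; the checkpoint is assumed reachable on the Hub):

```python
import numpy as np
from transformers import WhisperProcessor

processor = WhisperProcessor.from_pretrained("distil-whisper/distil-large-v3")

# Two examples: identical fixed-size log-mel features, labels of different lengths.
input_features = [
    {"input_features": np.zeros((128, 3000), dtype=np.float32)},
    {"input_features": np.zeros((128, 3000), dtype=np.float32)},
]
label_features = [
    {"input_ids": processor.tokenizer("नमस्ते").input_ids},
    {"input_ids": processor.tokenizer("नमस्ते दुनिया").input_ids},
]

# Pad features and labels separately, exactly as the collator above does.
batch = processor.feature_extractor.pad(input_features, return_tensors="pt")
labels_batch = processor.tokenizer.pad(label_features, return_tensors="pt")

# Replace label padding with -100 so it is ignored by the cross-entropy loss.
batch["labels"] = labels_batch["input_ids"].masked_fill(labels_batch.attention_mask.ne(1), -100)
print(batch["input_features"].shape, batch["labels"][0])  # shorter row padded with -100
```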
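`compute_metrics` above decodes the generated ids and the references, then scores them with the `wer` metric from `evaluate`. The metric in isolation:

```python
import evaluate

# WER = (substitutions + insertions + deletions) / number of reference words.
wer = evaluate.load("wer")
score = wer.compute(
    predictions=["the cat sat on mat"],
    references=["the cat sat on the mat"],
)
print(score)  # one deletion over six reference words ≈ 0.167
```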
runs/Mar27_12-33-46_hf-dgx-01/events.out.tfevents.1711539342.hf-dgx-01.1366875.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:30a33333b4095ce7ec76a87086a837393125adf84c477447f2ea6acea837038d
+size 6421
runs/Mar27_12-56-36_hf-dgx-01/events.out.tfevents.1711540605.hf-dgx-01.1386946.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:683a9616a1b7e25f19264466bd653d1245cd902adc45dbcb56d12b6386ea2958
+size 4184
runs/Mar27_12-58-17_hf-dgx-01/events.out.tfevents.1711540707.hf-dgx-01.1389604.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:344dcffec2b9fcd6ffbd33eb8d27d7c88d5ba692e6e072559b6f5999489d7962
+size 13808
runs/Mar27_14-10-21_hf-dgx-01/events.out.tfevents.1711545030.hf-dgx-01.1482719.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:27284ca4a5239a0b455ea689b42785c3f6a2327eefc1c6fdd5348ac1feea3c31
+size 14126
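The four `events.out.tfevents.*` files above are the TensorBoard logs for the runs launched on `hf-dgx-01`, stored as Git LFS pointers. After cloning the repo and pulling the LFS files, the loss and WER curves can be browsed locally with `tensorboard --logdir runs`.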
special_tokens_map.json
ADDED
@@ -0,0 +1,139 @@
{
  "additional_special_tokens": [
    "<|startoftranscript|>",
    "<|en|>", "<|zh|>", "<|de|>", "<|es|>", "<|ru|>", "<|ko|>", "<|fr|>", "<|ja|>", "<|pt|>", "<|tr|>",
    "<|pl|>", "<|ca|>", "<|nl|>", "<|ar|>", "<|sv|>", "<|it|>", "<|id|>", "<|hi|>", "<|fi|>", "<|vi|>",
    "<|he|>", "<|uk|>", "<|el|>", "<|ms|>", "<|cs|>", "<|ro|>", "<|da|>", "<|hu|>", "<|ta|>", "<|no|>",
    "<|th|>", "<|ur|>", "<|hr|>", "<|bg|>", "<|lt|>", "<|la|>", "<|mi|>", "<|ml|>", "<|cy|>", "<|sk|>",
    "<|te|>", "<|fa|>", "<|lv|>", "<|bn|>", "<|sr|>", "<|az|>", "<|sl|>", "<|kn|>", "<|et|>", "<|mk|>",
    "<|br|>", "<|eu|>", "<|is|>", "<|hy|>", "<|ne|>", "<|mn|>", "<|bs|>", "<|kk|>", "<|sq|>", "<|sw|>",
    "<|gl|>", "<|mr|>", "<|pa|>", "<|si|>", "<|km|>", "<|sn|>", "<|yo|>", "<|so|>", "<|af|>", "<|oc|>",
    "<|ka|>", "<|be|>", "<|tg|>", "<|sd|>", "<|gu|>", "<|am|>", "<|yi|>", "<|lo|>", "<|uz|>", "<|fo|>",
    "<|ht|>", "<|ps|>", "<|tk|>", "<|nn|>", "<|mt|>", "<|sa|>", "<|lb|>", "<|my|>", "<|bo|>", "<|tl|>",
    "<|mg|>", "<|as|>", "<|tt|>", "<|haw|>", "<|ln|>", "<|ha|>", "<|ba|>", "<|jw|>", "<|su|>", "<|yue|>",
    "<|translate|>", "<|transcribe|>", "<|startoflm|>", "<|startofprev|>", "<|nospeech|>", "<|notimestamps|>"
  ],
  "bos_token": {"content": "<|endoftext|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false},
  "eos_token": {"content": "<|endoftext|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false},
  "pad_token": {"content": "<|endoftext|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false},
  "unk_token": {"content": "<|endoftext|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}
}
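These special tokens are resolved automatically when the checkpoint's tokenizer files are loaded. A minimal sketch, using this repository's hub id (taken from the git remote recorded in wandb-metadata.json below); any local checkout of the repo works the same way:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("sanchit-gandhi/distil-large-v3-hi-ft")

# The Hindi language tag and the task tokens above map to fixed ids:
print(tokenizer.convert_tokens_to_ids("<|hi|>"))
print(tokenizer.convert_tokens_to_ids("<|transcribe|>"))
# bos/eos/pad/unk all alias "<|endoftext|>" per special_tokens_map.json:
print(tokenizer.eos_token, tokenizer.pad_token)
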
tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff

tokenizer_config.json
ADDED
The diff for this file is too large to render.
See raw diff
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c8fef62a116fd373aed4cbee49637759d2e8da863586704c7abd35aeea4042a0
size 5048
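training_args.bin is the pickled Seq2SeqTrainingArguments object that the Trainer writes next to every checkpoint; once the LFS payload has been pulled it can be inspected directly. A minimal sketch (transformers must be importable, since unpickling references its classes):

import torch

args = torch.load("training_args.bin")   # a Seq2SeqTrainingArguments instance
print(type(args).__name__)
print(args.learning_rate, args.max_steps, args.warmup_steps)
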
vocab.json
ADDED
The diff for this file is too large to render.
See raw diff

wandb/debug-cli.sanchit.log
ADDED
File without changes

wandb/debug-internal.log
ADDED
The diff for this file is too large to render.
See raw diff
wandb/debug.log
ADDED
@@ -0,0 +1,28 @@
2024-03-27 14:10:31,568 INFO MainThread:1482719 [wandb_setup.py:_flush():76] Current SDK version is 0.16.2
2024-03-27 14:10:31,568 INFO MainThread:1482719 [wandb_setup.py:_flush():76] Configure stats pid to 1482719
2024-03-27 14:10:31,568 INFO MainThread:1482719 [wandb_setup.py:_flush():76] Loading settings from /home/sanchit/.config/wandb/settings
2024-03-27 14:10:31,568 INFO MainThread:1482719 [wandb_setup.py:_flush():76] Loading settings from /home/sanchit/distil-large-v3-hi-ft/wandb/settings
2024-03-27 14:10:31,568 INFO MainThread:1482719 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
2024-03-27 14:10:31,568 INFO MainThread:1482719 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
2024-03-27 14:10:31,568 INFO MainThread:1482719 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program_relpath': 'run_speech_recognition_seq2seq.py', 'program_abspath': '/home/sanchit/distil-large-v3-hi-ft/run_speech_recognition_seq2seq.py', 'program': 'run_speech_recognition_seq2seq.py'}
2024-03-27 14:10:31,568 INFO MainThread:1482719 [wandb_init.py:_log_setup():526] Logging user logs to /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_141031-aoxf8fxn/logs/debug.log
2024-03-27 14:10:31,568 INFO MainThread:1482719 [wandb_init.py:_log_setup():527] Logging internal logs to /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_141031-aoxf8fxn/logs/debug-internal.log
2024-03-27 14:10:31,569 INFO MainThread:1482719 [wandb_init.py:init():566] calling init triggers
2024-03-27 14:10:31,569 INFO MainThread:1482719 [wandb_init.py:init():573] wandb.init called with sweep_config: {}
config: {}
2024-03-27 14:10:31,569 INFO MainThread:1482719 [wandb_init.py:init():616] starting backend
2024-03-27 14:10:31,569 INFO MainThread:1482719 [wandb_init.py:init():620] setting up manager
2024-03-27 14:10:31,569 INFO MainThread:1482719 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
2024-03-27 14:10:31,570 INFO MainThread:1482719 [wandb_init.py:init():628] backend started and connected
2024-03-27 14:10:31,574 INFO MainThread:1482719 [wandb_init.py:init():720] updated telemetry
2024-03-27 14:10:31,631 INFO MainThread:1482719 [wandb_init.py:init():753] communicating run to backend with 90.0 second timeout
2024-03-27 14:10:31,976 INFO MainThread:1482719 [wandb_run.py:_on_init():2254] communicating current version
2024-03-27 14:10:32,002 INFO MainThread:1482719 [wandb_run.py:_on_init():2263] got version response upgrade_message: "wandb version 0.16.5 is available! To upgrade, please run:\n $ pip install wandb --upgrade"

2024-03-27 14:10:32,002 INFO MainThread:1482719 [wandb_init.py:init():804] starting run threads in backend
2024-03-27 14:10:32,531 INFO MainThread:1482719 [wandb_run.py:_console_start():2233] atexit reg
2024-03-27 14:10:32,531 INFO MainThread:1482719 [wandb_run.py:_redirect():2088] redirect: wrap_raw
2024-03-27 14:10:32,531 INFO MainThread:1482719 [wandb_run.py:_redirect():2153] Wrapping output streams.
2024-03-27 14:10:32,531 INFO MainThread:1482719 [wandb_run.py:_redirect():2178] Redirects installed.
2024-03-27 14:10:32,532 INFO MainThread:1482719 [wandb_init.py:init():847] run started, returning control to user process
2024-03-27 14:10:32,534 INFO MainThread:1482719 [wandb_run.py:_config_callback():1342] config_cb None None {'vocab_size': 51866, 'num_mel_bins': 128, 'd_model': 1280, 'encoder_layers': 32, 'encoder_attention_heads': 20, 'decoder_layers': 2, 'decoder_attention_heads': 20, 'decoder_ffn_dim': 5120, 'encoder_ffn_dim': 5120, 'dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.0, 'activation_function': 'gelu', 'init_std': 0.02, 'encoder_layerdrop': 0.0, 'decoder_layerdrop': 0.0, 'use_cache': True, 'num_hidden_layers': 32, 'scale_embedding': False, 'max_source_positions': 1500, 'max_target_positions': 448, 'classifier_proj_size': 256, 'use_weighted_layer_sum': False, 'apply_spec_augment': False, 'mask_time_prob': 0.05, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.0, 'mask_feature_length': 10, 'mask_feature_min_masks': 0, 'median_filter_width': 7, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float16', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': True, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 448, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': [220, 50257], 'architectures': ['WhisperForConditionalGeneration'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 50257, 'pad_token_id': 50256, 'eos_token_id': 50257, 'sep_token_id': None, 'decoder_start_token_id': 50258, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'distil-whisper/distil-large-v3', 'transformers_version': '4.40.0.dev0', 'model_type': 'whisper', 'forced_decoder_ids': None, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': True, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 32, 'per_device_eval_batch_size': 32, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 0.0001, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 3.0, 'max_steps': 5000, 'lr_scheduler_type': 'linear', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.0, 'warmup_steps': 500, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': './runs/Mar27_14-10-21_hf-dgx-01', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 25, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 1000, 'save_total_limit': None, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 
'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': 1000, 'dataloader_num_workers': 4, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': './', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'adamw_torch', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'input_length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': False, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None, 'sortish_sampler': False, 'predict_with_generate': True, 'generation_max_length': 225, 'generation_num_beams': None, 'generation_config': None}
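This init sequence is driven by the Trainer's built-in W&B integration (report_to lists both tensorboard and wandb in the final config entry above), not by an explicit wandb.init call in the training script. A minimal sketch of steering that integration, with the project name a made-up assumption since the logs do not record one:

import os

# Hypothetical project name: debug.log does not show which W&B project was used.
os.environ["WANDB_PROJECT"] = "distil-large-v3-hi-ft"

# transformers' WandbCallback reads this variable and calls wandb.init itself
# when training starts, logging the configuration dict shown in the last
# debug.log entry above.
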
wandb/run-20240327_123544-5vwxt2ut/files/config.yaml
ADDED
@@ -0,0 +1,731 @@
wandb_version: 1

_wandb:
  desc: null
  value:
    python_version: 3.8.10
    cli_version: 0.16.2
    framework: huggingface
    huggingface_version: 4.40.0.dev0
    is_jupyter_run: false
    is_kaggle_kernel: false
    start_time: 1711539344.066994
    t:
      1: [1, 2, 3, 5, 11, 12, 49, 51, 53, 55, 71, 98, 100]
      2: [1, 2, 3, 5, 11, 12, 49, 51, 53, 55, 71, 98, 100]
      3: [7, 23]
      4: 3.8.10
      5: 0.16.2
      6: 4.40.0.dev0
      8: [5]
      9: {1: transformers_trainer}
      13: linux-x86_64
      m:
      - {1: train/global_step, 6: [3]}
      - {1: train/loss, 5: 1, 6: [1]}
      - {1: train/grad_norm, 5: 1, 6: [1]}
      - {1: train/learning_rate, 5: 1, 6: [1]}
      - {1: train/epoch, 5: 1, 6: [1]}
vocab_size: 51866
num_mel_bins: 128
d_model: 1280
encoder_layers: 32
encoder_attention_heads: 20
decoder_layers: 2
decoder_attention_heads: 20
decoder_ffn_dim: 5120
encoder_ffn_dim: 5120
dropout: 0.0
attention_dropout: 0.0
activation_dropout: 0.0
activation_function: gelu
init_std: 0.02
encoder_layerdrop: 0.0
decoder_layerdrop: 0.0
use_cache: true
num_hidden_layers: 32
scale_embedding: false
max_source_positions: 1500
max_target_positions: 448
classifier_proj_size: 256
use_weighted_layer_sum: false
apply_spec_augment: false
mask_time_prob: 0.05
mask_time_length: 10
mask_time_min_masks: 2
mask_feature_prob: 0.0
mask_feature_length: 10
mask_feature_min_masks: 0
median_filter_width: 7
return_dict: true
output_hidden_states: false
output_attentions: false
torchscript: false
torch_dtype: float16
use_bfloat16: false
tf_legacy_loss: false
pruned_heads: {}
tie_word_embeddings: true
chunk_size_feed_forward: 0
is_encoder_decoder: true
is_decoder: false
cross_attention_hidden_size: null
add_cross_attention: false
tie_encoder_decoder: false
max_length: 448
min_length: 0
do_sample: false
early_stopping: false
num_beams: 1
num_beam_groups: 1
diversity_penalty: 0.0
temperature: 1.0
top_k: 50
top_p: 1.0
typical_p: 1.0
repetition_penalty: 1.0
length_penalty: 1.0
no_repeat_ngram_size: 0
encoder_no_repeat_ngram_size: 0
bad_words_ids: null
num_return_sequences: 1
output_scores: false
return_dict_in_generate: false
forced_bos_token_id: null
forced_eos_token_id: null
remove_invalid_values: false
exponential_decay_length_penalty: null
suppress_tokens: null
begin_suppress_tokens: [220, 50257]
architectures: [WhisperForConditionalGeneration]
finetuning_task: null
id2label: {'0': LABEL_0, '1': LABEL_1}
label2id: {LABEL_0: 0, LABEL_1: 1}
tokenizer_class: null
prefix: null
bos_token_id: 50257
pad_token_id: 50256
eos_token_id: 50257
sep_token_id: null
decoder_start_token_id: 50258
task_specific_params: null
problem_type: null
_name_or_path: distil-whisper/distil-large-v3
transformers_version: 4.40.0.dev0
model_type: whisper
forced_decoder_ids: null
output_dir: ./
overwrite_output_dir: true
do_train: true
do_eval: true
do_predict: false
evaluation_strategy: steps
prediction_loss_only: false
per_device_train_batch_size: 32
per_device_eval_batch_size: 16
per_gpu_train_batch_size: null
per_gpu_eval_batch_size: null
gradient_accumulation_steps: 1
eval_accumulation_steps: null
eval_delay: 0
learning_rate: 1.0e-05
weight_decay: 0.0
adam_beta1: 0.9
adam_beta2: 0.999
adam_epsilon: 1.0e-08
max_grad_norm: 1.0
num_train_epochs: 3.0
max_steps: 5000
lr_scheduler_type: linear
lr_scheduler_kwargs: {}
warmup_ratio: 0.0
warmup_steps: 500
log_level: passive
log_level_replica: warning
log_on_each_node: true
logging_dir: ./runs/Mar27_12-33-46_hf-dgx-01
logging_strategy: steps
logging_first_step: false
logging_steps: 25
logging_nan_inf_filter: true
save_strategy: steps
save_steps: 1000
save_total_limit: null
save_safetensors: true
save_on_each_node: false
save_only_model: false
no_cuda: false
use_cpu: false
use_mps_device: false
seed: 42
data_seed: null
jit_mode_eval: false
use_ipex: false
bf16: false
fp16: true
fp16_opt_level: O1
half_precision_backend: auto
bf16_full_eval: false
fp16_full_eval: false
tf32: null
local_rank: 0
ddp_backend: null
tpu_num_cores: null
tpu_metrics_debug: false
debug: []
dataloader_drop_last: false
eval_steps: 1000
dataloader_num_workers: 0
dataloader_prefetch_factor: null
past_index: -1
run_name: ./
disable_tqdm: false
remove_unused_columns: true
label_names: null
load_best_model_at_end: false
metric_for_best_model: null
greater_is_better: null
ignore_data_skip: false
fsdp: []
fsdp_min_num_params: 0
fsdp_config: {min_num_params: 0, xla: false, xla_fsdp_v2: false, xla_fsdp_grad_ckpt: false}
fsdp_transformer_layer_cls_to_wrap: null
accelerator_config: {split_batches: false, dispatch_batches: null, even_batches: true, use_seedable_sampler: true}
deepspeed: null
label_smoothing_factor: 0.0
optim: adamw_torch
optim_args: null
adafactor: false
group_by_length: true
length_column_name: input_length
report_to: [tensorboard, wandb]
ddp_find_unused_parameters: null
ddp_bucket_cap_mb: null
ddp_broadcast_buffers: null
dataloader_pin_memory: true
dataloader_persistent_workers: false
skip_memory_metrics: true
use_legacy_prediction_loop: false
push_to_hub: true
resume_from_checkpoint: null
hub_model_id: null
hub_strategy: every_save
hub_token: <HUB_TOKEN>
hub_private_repo: false
hub_always_push: false
gradient_checkpointing: true
gradient_checkpointing_kwargs: null
include_inputs_for_metrics: false
fp16_backend: auto
push_to_hub_model_id: null
push_to_hub_organization: null
push_to_hub_token: <PUSH_TO_HUB_TOKEN>
mp_parameters: ''
auto_find_batch_size: false
full_determinism: false
torchdynamo: null
ray_scope: last
ddp_timeout: 1800
torch_compile: false
torch_compile_backend: null
torch_compile_mode: null
dispatch_batches: null
split_batches: null
include_tokens_per_second: false
include_num_input_tokens_seen: false
neftune_noise_alpha: null
optim_target_modules: null
sortish_sampler: false
predict_with_generate: true
generation_max_length: 225
generation_num_beams: null
generation_config: null
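The training-side keys in this config reduce to a fairly compact Seq2SeqTrainingArguments call. A minimal sketch of the equivalent construction, showing only the values above that differ from the library's defaults:

from transformers import Seq2SeqTrainingArguments

training_args = Seq2SeqTrainingArguments(
    output_dir="./",
    per_device_train_batch_size=32,
    per_device_eval_batch_size=16,
    learning_rate=1e-5,
    warmup_steps=500,
    max_steps=5000,
    evaluation_strategy="steps",
    eval_steps=1000,
    save_steps=1000,
    logging_steps=25,
    fp16=True,
    gradient_checkpointing=True,
    group_by_length=True,
    length_column_name="input_length",
    predict_with_generate=True,
    generation_max_length=225,
    push_to_hub=True,
)
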
wandb/run-20240327_123544-5vwxt2ut/files/output.log
ADDED
@@ -0,0 +1,164 @@
  0%|          | 0/5000 [00:00<?, ?it/s]/home/sanchit/hf/lib/python3.8/site-packages/torch/utils/checkpoint.py:460: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
  warnings.warn(
[WARNING|logging.py:329] 2024-03-27 12:35:58,263 >> `use_cache = True` is incompatible with gradient checkpointing. Setting `use_cache = False`...
  0%|▍         | 25/5000 [03:38<11:34:32, 8.38s/it]
  1%|▊         | 49/5000 [06:58<11:27:58, 8.34s/it]
  1%|█▏        | 74/5000 [10:27<11:22:31, 8.31s/it]
  2%|█▌        | 99/5000 [13:55<11:20:20, 8.33s/it]
  2%|█▉        | 124/5000 [17:24<11:17:05, 8.33s/it]
  3%|██▏       | 140/5000 [19:37<11:16:28, 8.35s/it]Traceback (most recent call last):
  File "run_speech_recognition_seq2seq.py", line 627, in <module>
    main()
  File "run_speech_recognition_seq2seq.py", line 577, in main
    train_result = trainer.train(resume_from_checkpoint=checkpoint)
  File "/home/sanchit/transformers/src/transformers/trainer.py", line 1774, in train
    return inner_training_loop(
  File "/home/sanchit/transformers/src/transformers/trainer.py", line 2088, in _inner_training_loop
    for step, inputs in enumerate(epoch_iterator):
  File "/home/sanchit/hf/lib/python3.8/site-packages/accelerate/data_loader.py", line 462, in __iter__
    next_batch = next(dataloader_iter)
  File "/home/sanchit/hf/lib/python3.8/site-packages/torch/utils/data/dataloader.py", line 631, in __next__
    data = self._next_data()
  File "/home/sanchit/hf/lib/python3.8/site-packages/torch/utils/data/dataloader.py", line 675, in _next_data
    data = self._dataset_fetcher.fetch(index)  # may raise StopIteration
KeyboardInterrupt
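The UserWarning at the top of this log is torch's checkpointing deprecation notice; this run left it at the default (the config above records gradient_checkpointing_kwargs: null). With this transformers version the flag can be set explicitly through the training arguments, as in this minimal sketch:

from transformers import Seq2SeqTrainingArguments

# Opting in to the recommended non-reentrant checkpointing explicitly
# silences the torch.utils.checkpoint UserWarning seen in output.log.
training_args = Seq2SeqTrainingArguments(
    output_dir="./",
    gradient_checkpointing=True,
    gradient_checkpointing_kwargs={"use_reentrant": False},
)
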
wandb/run-20240327_123544-5vwxt2ut/files/requirements.txt
ADDED
@@ -0,0 +1,247 @@
absl-py==2.1.0
accelerate==0.27.2
aiohttp==3.9.3
aiosignal==1.3.1
anyio==4.2.0
appdirs==1.4.4
argon2-cffi-bindings==21.2.0
argon2-cffi==23.1.0
arrow==1.3.0
asttokens==2.4.1
astunparse==1.6.3
async-lru==2.0.4
async-timeout==4.0.3
attrs==23.2.0
audioread==3.0.1
av==11.0.0
babel==2.14.0
backcall==0.2.0
beautifulsoup4==4.12.3
bitsandbytes==0.42.0
bleach==6.1.0
cached-property==1.5.2
cachetools==5.3.2
certifi==2024.2.2
cffi==1.16.0
charset-normalizer==3.3.2
chex==0.1.7
click==8.1.7
coloredlogs==15.0.1
comm==0.2.1
contourpy==1.1.1
ctranslate2==4.1.0
cycler==0.12.1
datasets==2.18.0
debugpy==1.8.0
decorator==5.1.1
defusedxml==0.7.1
dill==0.3.7
dm-tree==0.1.8
docker-pycreds==0.4.0
docstring-parser==0.15
einops==0.7.0
etils==1.3.0
evaluate==0.4.1
exceptiongroup==1.2.0
executing==2.0.1
faster-whisper==1.0.1
fastjsonschema==2.19.1
filelock==3.13.1
flash-attn==2.5.3
flatbuffers==23.5.26
flax==0.7.2
fonttools==4.48.1
fqdn==1.5.1
frozenlist==1.4.1
fsspec==2024.2.0
gast==0.4.0
gitdb==4.0.11
gitpython==3.1.41
google-auth-oauthlib==1.0.0
google-auth==2.27.0
google-pasta==0.2.0
grpcio==1.60.1
h11==0.14.0
h5py==3.10.0
httpcore==1.0.2
httpx==0.26.0
huggingface-hub==0.21.4
humanfriendly==10.0
idna==3.6
importlib-metadata==7.0.1
importlib-resources==6.1.1
iniconfig==2.0.0
ipdb==0.13.13
ipykernel==6.29.2
ipython==8.12.3
isoduration==20.11.0
jax==0.4.13
jaxlib==0.4.13
jedi==0.19.1
jinja2==3.1.2
jiwer==3.0.3
joblib==1.3.2
json5==0.9.14
jsonpointer==2.4
jsonschema-specifications==2023.12.1
jsonschema==4.21.1
jupyter-client==8.6.0
jupyter-core==5.7.1
jupyter-events==0.9.0
jupyter-lsp==2.2.2
jupyter-server-terminals==0.5.2
jupyter-server==2.12.5
jupyterlab-pygments==0.3.0
jupyterlab-server==2.25.2
jupyterlab==4.1.0
keras==2.13.1
kiwisolver==1.4.5
lazy-loader==0.3
libclang==16.0.6
librosa==0.10.1
llvmlite==0.41.1
markdown-it-py==3.0.0
markdown==3.5.2
markupsafe==2.1.3
matplotlib-inline==0.1.6
matplotlib==3.7.4
mdurl==0.1.2
mistune==3.0.2
ml-dtypes==0.2.0
more-itertools==10.2.0
mpmath==1.2.1
msclap==1.3.3
msgpack==1.0.7
multidict==6.0.5
multiprocess==0.70.15
nbclient==0.9.0
nbconvert==7.16.0
nbformat==5.9.2
nest-asyncio==1.6.0
networkx==3.0rc1
ninja==1.11.1.1
notebook-shim==0.2.3
numba==0.58.1
numpy==1.24.3
nvidia-cublas-cu12==12.1.3.1
nvidia-cuda-cupti-cu12==12.1.105
nvidia-cuda-nvrtc-cu12==12.1.105
nvidia-cuda-runtime-cu12==12.1.105
nvidia-cudnn-cu12==8.9.2.26
nvidia-cufft-cu12==11.0.2.54
nvidia-curand-cu12==10.3.2.106
nvidia-cusolver-cu12==11.4.5.107
nvidia-cusparse-cu12==12.1.0.106
nvidia-nccl-cu12==2.19.3
nvidia-nvjitlink-cu12==12.1.105
nvidia-nvtx-cu12==12.1.105
oauthlib==3.2.2
onnxruntime==1.17.1
openai-whisper==20231117
opt-einsum==3.3.0
optax==0.1.8
orbax-checkpoint==0.2.3
overrides==7.7.0
packaging==23.2
pandas==2.0.3
pandocfilters==1.5.1
parameterized==0.9.0
parso==0.8.3
peft==0.8.2
pexpect==4.9.0
pickleshare==0.7.5
pillow==9.3.0
pip==24.0
pkg-resources==0.0.0
pkgutil-resolve-name==1.3.10
platformdirs==4.2.0
pluggy==1.4.0
pooch==1.8.0
prometheus-client==0.19.0
prompt-toolkit==3.0.43
protobuf==4.25.2
psutil==5.9.8
ptyprocess==0.7.0
pure-eval==0.2.2
pyarrow-hotfix==0.6
pyarrow==15.0.0
pyasn1-modules==0.3.0
pyasn1==0.5.1
pycparser==2.21
pygments==2.17.2
pyparsing==3.1.1
pytest==7.4.4
python-dateutil==2.8.2
python-json-logger==2.0.7
pytorch-triton==3.0.0+901819d2b6
pytz==2024.1
pyyaml==6.0.1
pyzmq==25.1.2
rapidfuzz==3.6.1
referencing==0.33.0
regex==2023.12.25
requests-oauthlib==1.3.1
requests==2.31.0
responses==0.18.0
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rich==13.7.0
rpds-py==0.17.1
rsa==4.9
safetensors==0.4.2
scikit-learn==1.3.2
scipy==1.10.1
send2trash==1.8.2
sentry-sdk==1.40.0
setproctitle==1.3.3
setuptools==44.0.0
shtab==1.7.0
six==1.16.0
smmap==5.0.1
sniffio==1.3.0
soundfile==0.12.1
soupsieve==2.5
soxr==0.3.7
stack-data==0.6.3
sympy==1.11.1
tensorboard-data-server==0.7.2
tensorboard==2.13.0
tensorflow-cpu==2.13.1
tensorflow-estimator==2.13.0
tensorflow-io-gcs-filesystem==0.34.0
tensorstore==0.1.45
termcolor==2.4.0
terminado==0.18.0
threadpoolctl==3.2.0
tiktoken==0.6.0
tinycss2==1.2.1
tokenizers==0.15.1
tomli==2.0.1
toolz==0.12.1
torch==2.2.1
torchaudio==2.2.1
torchlibrosa==0.1.0
torchvision==0.17.1
tornado==6.4
tqdm==4.66.1
traitlets==5.14.1
transformers==4.39.0.dev0
triton==2.2.0
trl==0.7.11
types-python-dateutil==2.8.19.20240106
typing-extensions==4.9.0
tyro==0.7.3
tzdata==2023.4
uri-template==1.3.0
urllib3==2.2.0
wandb==0.16.2
wcwidth==0.2.13
webcolors==1.13
webencodings==0.5.1
websocket-client==1.7.0
werkzeug==3.0.1
wheel==0.42.0
wrapt==1.16.0
xxhash==3.4.1
yarl==1.9.4
zipp==3.17.0
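A quick way to confirm a live environment matches these pins is the standard-library importlib.metadata lookup; a minimal sketch, with the package names and versions taken from the list above:

from importlib.metadata import version

for pkg, pinned in [("transformers", "4.39.0.dev0"), ("torch", "2.2.1"),
                    ("datasets", "2.18.0"), ("wandb", "0.16.2")]:
    installed = version(pkg)
    status = "ok" if installed == pinned else f"mismatch (pinned {pinned})"
    print(f"{pkg}=={installed}: {status}")
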
wandb/run-20240327_123544-5vwxt2ut/files/wandb-metadata.json
ADDED
@@ -0,0 +1,737 @@
{
  "os": "Linux-5.4.0-166-generic-x86_64-with-glibc2.29",
  "python": "3.8.10",
  "heartbeatAt": "2024-03-27T11:35:44.688111",
  "startedAt": "2024-03-27T11:35:44.063111",
  "docker": null,
  "cuda": null,
  "args": [
    "--model_name_or_path=distil-whisper/distil-large-v3",
    "--dataset_name=mozilla-foundation/common_voice_16_1",
    "--dataset_config_name=hi",
    "--language=hindi",
    "--train_split_name=train+validation",
    "--eval_split_name=test",
    "--max_steps=5000",
    "--output_dir=./",
    "--per_device_train_batch_size=32",
    "--per_device_eval_batch_size=16",
    "--logging_steps=25",
    "--learning_rate=1e-5",
    "--warmup_steps=500",
    "--evaluation_strategy=steps",
    "--eval_steps=1000",
    "--save_strategy=steps",
    "--save_steps=1000",
    "--generation_max_length=225",
    "--preprocessing_num_workers=1",
    "--length_column_name=input_length",
    "--max_duration_in_seconds=30",
    "--text_column_name=sentence",
    "--freeze_feature_encoder=False",
    "--gradient_checkpointing",
    "--group_by_length",
    "--fp16",
    "--overwrite_output_dir",
    "--do_train",
    "--do_eval",
    "--predict_with_generate",
    "--use_auth_token",
    "--push_to_hub"
  ],
  "state": "running",
  "program": "run_speech_recognition_seq2seq.py",
  "codePathLocal": "run_speech_recognition_seq2seq.py",
  "codePath": "run_speech_recognition_seq2seq.py",
  "git": {
    "remote": "https://huggingface.co/sanchit-gandhi/distil-large-v3-hi-ft",
    "commit": "40c686df113c0e98e7363c1bd523f58d11848fc0"
  },
  "email": "sanchit@huggingface.co",
  "root": "/home/sanchit/distil-large-v3-hi-ft",
  "host": "hf-dgx-01",
  "username": "sanchit",
  "executable": "/home/sanchit/hf/bin/python",
  "cpu_count": 64,
  "cpu_count_logical": 128,
  "cpu_freq": {
    "current": 2320.8891093750008,
    "min": 1500.0,
    "max": 2250.0
  },
  "cpu_freq_per_core": [
    {"current": 1720.022, "min": 1500.0, "max": 2250.0}, {"current": 3381.801, "min": 1500.0, "max": 2250.0}, {"current": 1739.128, "min": 1500.0, "max": 2250.0},
    {"current": 3391.513, "min": 1500.0, "max": 2250.0}, {"current": 3157.808, "min": 1500.0, "max": 2250.0}, {"current": 1901.238, "min": 1500.0, "max": 2250.0},
    {"current": 2217.668, "min": 1500.0, "max": 2250.0}, {"current": 1915.314, "min": 1500.0, "max": 2250.0}, {"current": 1752.905, "min": 1500.0, "max": 2250.0},
    {"current": 2292.065, "min": 1500.0, "max": 2250.0}, {"current": 1948.336, "min": 1500.0, "max": 2250.0}, {"current": 1751.862, "min": 1500.0, "max": 2250.0},
    {"current": 1919.808, "min": 1500.0, "max": 2250.0}, {"current": 1754.656, "min": 1500.0, "max": 2250.0}, {"current": 2313.669, "min": 1500.0, "max": 2250.0},
    {"current": 1811.724, "min": 1500.0, "max": 2250.0}, {"current": 1807.199, "min": 1500.0, "max": 2250.0}, {"current": 1692.815, "min": 1500.0, "max": 2250.0},
    {"current": 1842.85, "min": 1500.0, "max": 2250.0}, {"current": 1893.814, "min": 1500.0, "max": 2250.0}, {"current": 1925.522, "min": 1500.0, "max": 2250.0},
    {"current": 1756.882, "min": 1500.0, "max": 2250.0}, {"current": 2014.845, "min": 1500.0, "max": 2250.0}, {"current": 1937.037, "min": 1500.0, "max": 2250.0},
    {"current": 1938.273, "min": 1500.0, "max": 2250.0}, {"current": 2292.805, "min": 1500.0, "max": 2250.0}, {"current": 1753.629, "min": 1500.0, "max": 2250.0},
    {"current": 1753.05, "min": 1500.0, "max": 2250.0}, {"current": 1944.417, "min": 1500.0, "max": 2250.0}, {"current": 1754.514, "min": 1500.0, "max": 2250.0},
    {"current": 2280.397, "min": 1500.0, "max": 2250.0}, {"current": 1750.289, "min": 1500.0, "max": 2250.0}, {"current": 1751.461, "min": 1500.0, "max": 2250.0},
    {"current": 1753.829, "min": 1500.0, "max": 2250.0}, {"current": 2702.274, "min": 1500.0, "max": 2250.0}, {"current": 1921.157, "min": 1500.0, "max": 2250.0},
    {"current": 1741.841, "min": 1500.0, "max": 2250.0}, {"current": 2111.679, "min": 1500.0, "max": 2250.0}, {"current": 1708.371, "min": 1500.0, "max": 2250.0},
    {"current": 3387.844, "min": 1500.0, "max": 2250.0}, {"current": 1696.851, "min": 1500.0, "max": 2250.0}, {"current": 2832.842, "min": 1500.0, "max": 2250.0},
    {"current": 1694.055, "min": 1500.0, "max": 2250.0}, {"current": 2207.381, "min": 1500.0, "max": 2250.0}, {"current": 1755.308, "min": 1500.0, "max": 2250.0},
    {"current": 1754.396, "min": 1500.0, "max": 2250.0}, {"current": 1932.895, "min": 1500.0, "max": 2250.0}, {"current": 1752.406, "min": 1500.0, "max": 2250.0},
    {"current": 1868.451, "min": 1500.0, "max": 2250.0}, {"current": 2009.47, "min": 1500.0, "max": 2250.0}, {"current": 1693.917, "min": 1500.0, "max": 2250.0},
    {"current": 1694.149, "min": 1500.0, "max": 2250.0}, {"current": 2241.039, "min": 1500.0, "max": 2250.0}, {"current": 2218.22, "min": 1500.0, "max": 2250.0},
    {"current": 2166.763, "min": 1500.0, "max": 2250.0}, {"current": 1694.42, "min": 1500.0, "max": 2250.0}, {"current": 1735.263, "min": 1500.0, "max": 2250.0},
    {"current": 2342.293, "min": 1500.0, "max": 2250.0}, {"current": 1773.941, "min": 1500.0, "max": 2250.0}, {"current": 1736.693, "min": 1500.0, "max": 2250.0},
    {"current": 2299.762, "min": 1500.0, "max": 2250.0}, {"current": 1929.979, "min": 1500.0, "max": 2250.0}, {"current": 2270.019, "min": 1500.0, "max": 2250.0},
    {"current": 1947.148, "min": 1500.0, "max": 2250.0}, {"current": 2032.551, "min": 1500.0, "max": 2250.0}, {"current": 1742.768, "min": 1500.0, "max": 2250.0},
    {"current": 1960.724, "min": 1500.0, "max": 2250.0}, {"current": 3357.093, "min": 1500.0, "max": 2250.0}, {"current": 3162.732, "min": 1500.0, "max": 2250.0},
    {"current": 2133.692, "min": 1500.0, "max": 2250.0}, {"current": 2005.547, "min": 1500.0, "max": 2250.0}, {"current": 1860.04, "min": 1500.0, "max": 2250.0},
    {"current": 2274.058, "min": 1500.0, "max": 2250.0}, {"current": 1883.804, "min": 1500.0, "max": 2250.0}, {"current": 2102.025, "min": 1500.0, "max": 2250.0},
    {"current": 3038.986, "min": 1500.0, "max": 2250.0}, {"current": 1850.937, "min": 1500.0, "max": 2250.0}, {"current": 2286.556, "min": 1500.0, "max": 2250.0},
    {"current": 2093.112, "min": 1500.0, "max": 2250.0}, {"current": 3080.388, "min": 1500.0, "max": 2250.0}, {"current": 3014.044, "min": 1500.0, "max": 2250.0},
    {"current": 3067.336, "min": 1500.0, "max": 2250.0}, {"current": 2955.438, "min": 1500.0, "max": 2250.0}, {"current": 2070.708, "min": 1500.0, "max": 2250.0},
    {"current": 3033.019, "min": 1500.0, "max": 2250.0}, {"current": 3016.806, "min": 1500.0, "max": 2250.0}, {"current": 2086.652, "min": 1500.0, "max": 2250.0},
    {"current": 3038.198, "min": 1500.0, "max": 2250.0}, {"current": 3022.183, "min": 1500.0, "max": 2250.0}, {"current": 2151.059, "min": 1500.0, "max": 2250.0},
    {"current": 3362.174, "min": 1500.0, "max": 2250.0}, {"current": 3382.004, "min": 1500.0, "max": 2250.0}, {"current": 3369.6, "min": 1500.0, "max": 2250.0},
    {"current": 3367.906, "min": 1500.0, "max": 2250.0}, {"current": 2176.994, "min": 1500.0, "max": 2250.0}, {"current": 3368.516, "min": 1500.0, "max": 2250.0},
    {"current": 3358.119, "min": 1500.0, "max": 2250.0}, {"current": 2422.973, "min": 1500.0, "max": 2250.0}, {"current": 1943.984, "min": 1500.0, "max": 2250.0},
    {"current": 1681.849, "min": 1500.0, "max": 2250.0}, {"current": 1630.654, "min": 1500.0, "max": 2250.0}, {"current": 1691.891, "min": 1500.0, "max": 2250.0},
    {"current": 1691.917, "min": 1500.0, "max": 2250.0}, {"current": 3265.59, "min": 1500.0, "max": 2250.0}, {"current": 1692.935, "min": 1500.0, "max": 2250.0},
    {"current": 3368.811, "min": 1500.0, "max": 2250.0}, {"current": 1687.014, "min": 1500.0, "max": 2250.0}, {"current": 2499.16, "min": 1500.0, "max": 2250.0},
    {"current": 1792.677, "min": 1500.0, "max": 2250.0}, {"current": 1795.963, "min": 1500.0, "max": 2250.0}, {"current": 1795.49, "min": 1500.0, "max": 2250.0},
    {"current": 1793.159, "min": 1500.0,
|
621 |
+
"max": 2250.0
|
622 |
+
},
|
623 |
+
{
|
624 |
+
"current": 1845.387,
|
625 |
+
"min": 1500.0,
|
626 |
+
"max": 2250.0
|
627 |
+
},
|
628 |
+
{
|
629 |
+
"current": 2385.622,
|
630 |
+
"min": 1500.0,
|
631 |
+
"max": 2250.0
|
632 |
+
},
|
633 |
+
{
|
634 |
+
"current": 1683.197,
|
635 |
+
"min": 1500.0,
|
636 |
+
"max": 2250.0
|
637 |
+
},
|
638 |
+
{
|
639 |
+
"current": 1683.711,
|
640 |
+
"min": 1500.0,
|
641 |
+
"max": 2250.0
|
642 |
+
},
|
643 |
+
{
|
644 |
+
"current": 1777.23,
|
645 |
+
"min": 1500.0,
|
646 |
+
"max": 2250.0
|
647 |
+
},
|
648 |
+
{
|
649 |
+
"current": 1778.423,
|
650 |
+
"min": 1500.0,
|
651 |
+
"max": 2250.0
|
652 |
+
},
|
653 |
+
{
|
654 |
+
"current": 1916.687,
|
655 |
+
"min": 1500.0,
|
656 |
+
"max": 2250.0
|
657 |
+
},
|
658 |
+
{
|
659 |
+
"current": 1736.278,
|
660 |
+
"min": 1500.0,
|
661 |
+
"max": 2250.0
|
662 |
+
},
|
663 |
+
{
|
664 |
+
"current": 1776.15,
|
665 |
+
"min": 1500.0,
|
666 |
+
"max": 2250.0
|
667 |
+
},
|
668 |
+
{
|
669 |
+
"current": 2028.709,
|
670 |
+
"min": 1500.0,
|
671 |
+
"max": 2250.0
|
672 |
+
},
|
673 |
+
{
|
674 |
+
"current": 1736.557,
|
675 |
+
"min": 1500.0,
|
676 |
+
"max": 2250.0
|
677 |
+
},
|
678 |
+
{
|
679 |
+
"current": 1777.429,
|
680 |
+
"min": 1500.0,
|
681 |
+
"max": 2250.0
|
682 |
+
},
|
683 |
+
{
|
684 |
+
"current": 1796.342,
|
685 |
+
"min": 1500.0,
|
686 |
+
"max": 2250.0
|
687 |
+
},
|
688 |
+
{
|
689 |
+
"current": 1793.553,
|
690 |
+
"min": 1500.0,
|
691 |
+
"max": 2250.0
|
692 |
+
},
|
693 |
+
{
|
694 |
+
"current": 1796.501,
|
695 |
+
"min": 1500.0,
|
696 |
+
"max": 2250.0
|
697 |
+
},
|
698 |
+
{
|
699 |
+
"current": 1795.512,
|
700 |
+
"min": 1500.0,
|
701 |
+
"max": 2250.0
|
702 |
+
}
|
703 |
+
],
|
704 |
+
"disk": {
|
705 |
+
"/": {
|
706 |
+
"total": 1757.8785285949707,
|
707 |
+
"used": 1499.6025924682617
|
708 |
+
}
|
709 |
+
},
|
710 |
+
"gpu": "NVIDIA A100-SXM4-80GB",
|
711 |
+
"gpu_count": 5,
|
712 |
+
"gpu_devices": [
|
713 |
+
{
|
714 |
+
"name": "NVIDIA A100-SXM4-80GB",
|
715 |
+
"memory_total": 85899345920
|
716 |
+
},
|
717 |
+
{
|
718 |
+
"name": "NVIDIA A100-SXM4-80GB",
|
719 |
+
"memory_total": 85899345920
|
720 |
+
},
|
721 |
+
{
|
722 |
+
"name": "NVIDIA A100-SXM4-80GB",
|
723 |
+
"memory_total": 85899345920
|
724 |
+
},
|
725 |
+
{
|
726 |
+
"name": "NVIDIA DGX Display",
|
727 |
+
"memory_total": 4294967296
|
728 |
+
},
|
729 |
+
{
|
730 |
+
"name": "NVIDIA A100-SXM4-80GB",
|
731 |
+
"memory_total": 85899345920
|
732 |
+
}
|
733 |
+
],
|
734 |
+
"memory": {
|
735 |
+
"total": 503.5396919250488
|
736 |
+
}
|
737 |
+
}
|
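The "memory_total" values above are raw byte counts. As a quick sanity check (a minimal sketch, not part of the repository), converting them to GiB confirms the 80 GB A100s plus the small DGX Display device:

# Hypothetical helper, not part of this repo: convert wandb's raw
# "memory_total" byte counts to GiB for readability.
for name, total in [("NVIDIA A100-SXM4-80GB", 85899345920),
                    ("NVIDIA DGX Display", 4294967296)]:
    print(f"{name}: {total / 2**30:.0f} GiB")  # -> 80 GiB and 4 GiB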
wandb/run-20240327_123544-5vwxt2ut/files/wandb-summary.json
ADDED
@@ -0,0 +1 @@
+{"train/loss": 3.196, "train/grad_norm": 8.523088455200195, "train/learning_rate": 2.4000000000000003e-06, "train/epoch": 0.56, "train/global_step": 125, "_timestamp": 1711540397.3985164, "_runtime": 1053.3315224647522, "_step": 4, "_wandb": {"runtime": 1183}}
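Two consistency checks on this summary: "_step": 4 means five logging events so far (_step is zero-indexed), which matches logging_steps=25 with "train/global_step": 125 (logs at steps 25, 50, 75, 100, 125); and the logged rate lies on the linear warmup ramp recorded in debug.log below, since with a 1e-05 peak reached over 500 warmup steps, 1e-05 × (120 / 500) = 2.4e-06.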
wandb/run-20240327_123544-5vwxt2ut/logs/debug-internal.log
ADDED
The diff for this file is too large to render. See raw diff
wandb/run-20240327_123544-5vwxt2ut/logs/debug.log
ADDED
@@ -0,0 +1,28 @@
+2024-03-27 12:35:44,064 INFO MainThread:1366875 [wandb_setup.py:_flush():76] Current SDK version is 0.16.2
+2024-03-27 12:35:44,064 INFO MainThread:1366875 [wandb_setup.py:_flush():76] Configure stats pid to 1366875
+2024-03-27 12:35:44,064 INFO MainThread:1366875 [wandb_setup.py:_flush():76] Loading settings from /home/sanchit/.config/wandb/settings
+2024-03-27 12:35:44,064 INFO MainThread:1366875 [wandb_setup.py:_flush():76] Loading settings from /home/sanchit/distil-large-v3-hi-ft/wandb/settings
+2024-03-27 12:35:44,064 INFO MainThread:1366875 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
+2024-03-27 12:35:44,064 INFO MainThread:1366875 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
+2024-03-27 12:35:44,064 INFO MainThread:1366875 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program_relpath': 'run_speech_recognition_seq2seq.py', 'program_abspath': '/home/sanchit/distil-large-v3-hi-ft/run_speech_recognition_seq2seq.py', 'program': 'run_speech_recognition_seq2seq.py'}
+2024-03-27 12:35:44,064 INFO MainThread:1366875 [wandb_init.py:_log_setup():526] Logging user logs to /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_123544-5vwxt2ut/logs/debug.log
+2024-03-27 12:35:44,064 INFO MainThread:1366875 [wandb_init.py:_log_setup():527] Logging internal logs to /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_123544-5vwxt2ut/logs/debug-internal.log
+2024-03-27 12:35:44,064 INFO MainThread:1366875 [wandb_init.py:init():566] calling init triggers
+2024-03-27 12:35:44,064 INFO MainThread:1366875 [wandb_init.py:init():573] wandb.init called with sweep_config: {}
+config: {}
+2024-03-27 12:35:44,064 INFO MainThread:1366875 [wandb_init.py:init():616] starting backend
+2024-03-27 12:35:44,064 INFO MainThread:1366875 [wandb_init.py:init():620] setting up manager
+2024-03-27 12:35:44,065 INFO MainThread:1366875 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
+2024-03-27 12:35:44,066 INFO MainThread:1366875 [wandb_init.py:init():628] backend started and connected
+2024-03-27 12:35:44,070 INFO MainThread:1366875 [wandb_init.py:init():720] updated telemetry
+2024-03-27 12:35:44,238 INFO MainThread:1366875 [wandb_init.py:init():753] communicating run to backend with 90.0 second timeout
+2024-03-27 12:35:44,589 INFO MainThread:1366875 [wandb_run.py:_on_init():2254] communicating current version
+2024-03-27 12:35:44,629 INFO MainThread:1366875 [wandb_run.py:_on_init():2263] got version response upgrade_message: "wandb version 0.16.5 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
+
+2024-03-27 12:35:44,629 INFO MainThread:1366875 [wandb_init.py:init():804] starting run threads in backend
+2024-03-27 12:35:44,716 INFO MainThread:1366875 [wandb_run.py:_console_start():2233] atexit reg
+2024-03-27 12:35:44,716 INFO MainThread:1366875 [wandb_run.py:_redirect():2088] redirect: wrap_raw
+2024-03-27 12:35:44,716 INFO MainThread:1366875 [wandb_run.py:_redirect():2153] Wrapping output streams.
+2024-03-27 12:35:44,716 INFO MainThread:1366875 [wandb_run.py:_redirect():2178] Redirects installed.
+2024-03-27 12:35:44,717 INFO MainThread:1366875 [wandb_init.py:init():847] run started, returning control to user process
+2024-03-27 12:35:44,719 INFO MainThread:1366875 [wandb_run.py:_config_callback():1342] config_cb None None {'vocab_size': 51866, 'num_mel_bins': 128, 'd_model': 1280, 'encoder_layers': 32, 'encoder_attention_heads': 20, 'decoder_layers': 2, 'decoder_attention_heads': 20, 'decoder_ffn_dim': 5120, 'encoder_ffn_dim': 5120, 'dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.0, 'activation_function': 'gelu', 'init_std': 0.02, 'encoder_layerdrop': 0.0, 'decoder_layerdrop': 0.0, 'use_cache': True, 'num_hidden_layers': 32, 'scale_embedding': False, 'max_source_positions': 1500, 'max_target_positions': 448, 'classifier_proj_size': 256, 'use_weighted_layer_sum': False, 'apply_spec_augment': False, 'mask_time_prob': 0.05, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.0, 'mask_feature_length': 10, 'mask_feature_min_masks': 0, 'median_filter_width': 7, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float16', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': True, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 448, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': [220, 50257], 'architectures': ['WhisperForConditionalGeneration'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 50257, 'pad_token_id': 50256, 'eos_token_id': 50257, 'sep_token_id': None, 'decoder_start_token_id': 50258, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'distil-whisper/distil-large-v3', 'transformers_version': '4.40.0.dev0', 'model_type': 'whisper', 'forced_decoder_ids': None, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': True, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 32, 'per_device_eval_batch_size': 16, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 1e-05, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 3.0, 'max_steps': 5000, 'lr_scheduler_type': 'linear', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.0, 'warmup_steps': 500, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': './runs/Mar27_12-33-46_hf-dgx-01', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 25, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 1000, 'save_total_limit': None, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': 1000, 'dataloader_num_workers': 0, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': './', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'adamw_torch', 'optim_args': None, 'adafactor': False, 'group_by_length': True, 'length_column_name': 'input_length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': False, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None, 'sortish_sampler': False, 'predict_with_generate': True, 'generation_max_length': 225, 'generation_num_beams': None, 'generation_config': None}
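The config_cb line above is where the Trainer hands wandb its flattened model config and Seq2SeqTrainingArguments. Picking out just the training arguments, this first run corresponds roughly to the following (a sketch reconstructed from the logged values, not a file in this repo):

from transformers import Seq2SeqTrainingArguments

# Reconstructed from the config_cb dict logged above (run 5vwxt2ut only).
training_args = Seq2SeqTrainingArguments(
    output_dir="./",
    per_device_train_batch_size=32,
    per_device_eval_batch_size=16,
    learning_rate=1e-5,
    warmup_steps=500,
    max_steps=5000,
    evaluation_strategy="steps",
    eval_steps=1000,
    save_steps=1000,
    logging_steps=25,
    fp16=True,
    gradient_checkpointing=True,
    group_by_length=True,
    length_column_name="input_length",
    predict_with_generate=True,
    generation_max_length=225,
    push_to_hub=True,
)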
wandb/run-20240327_123544-5vwxt2ut/run-5vwxt2ut.wandb
ADDED
Binary file (231 kB). View file
wandb/run-20240327_125651-wqmi98ok/files/config.yaml
ADDED
@@ -0,0 +1,715 @@
+wandb_version: 1
+
+_wandb:
+  desc: null
+  value:
+    python_version: 3.8.10
+    cli_version: 0.16.2
+    framework: huggingface
+    huggingface_version: 4.40.0.dev0
+    is_jupyter_run: false
+    is_kaggle_kernel: false
+    start_time: 1711540611.622937
[... wandb telemetry block, then one desc/value pair per model-config and training-argument key, mirroring the config_cb dict in debug.log above; the values specific to this second run are per_device_train_batch_size: 128, per_device_eval_batch_size: 128, learning_rate: 0.0001, dataloader_num_workers: 4 and logging_dir: ./runs/Mar27_12-56-36_hf-dgx-01 ...]
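Relative to the first run, this configuration quadruples the per-device batch size (32 -> 128) and raises the peak learning rate tenfold (1e-5 -> 1e-4). The global batch size per optimizer step is a simple product; a minimal sketch of the arithmetic, with the process count left as an assumption since the config does not record it:

# per_device_train_batch_size and gradient_accumulation_steps come from
# the config above; world_size is hypothetical (not recorded in the YAML).
per_device, grad_accum, world_size = 128, 1, 1
print(per_device * grad_accum * world_size)  # 128 examples per optimizer step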
wandb/run-20240327_125651-wqmi98ok/files/output.log
ADDED
@@ -0,0 +1,25 @@
+
+0%| | 0/5000 [00:00<?, ?it/s]Traceback (most recent call last):
+  File "run_speech_recognition_seq2seq.py", line 627, in <module>
+    main()
+  File "run_speech_recognition_seq2seq.py", line 577, in main
+    train_result = trainer.train(resume_from_checkpoint=checkpoint)
+  File "/home/sanchit/transformers/src/transformers/trainer.py", line 1774, in train
+    return inner_training_loop(
+  File "/home/sanchit/transformers/src/transformers/trainer.py", line 2088, in _inner_training_loop
+    for step, inputs in enumerate(epoch_iterator):
+  File "/home/sanchit/hf/lib/python3.8/site-packages/accelerate/data_loader.py", line 452, in __iter__
+    current_batch = next(dataloader_iter)
+  File "/home/sanchit/hf/lib/python3.8/site-packages/torch/utils/data/dataloader.py", line 631, in __next__
+    data = self._next_data()
+  File "/home/sanchit/hf/lib/python3.8/site-packages/torch/utils/data/dataloader.py", line 1329, in _next_data
+    idx, data = self._get_data()
+  File "/home/sanchit/hf/lib/python3.8/site-packages/torch/utils/data/dataloader.py", line 1285, in _get_data
+    success, data = self._try_get_data()
+  File "/home/sanchit/hf/lib/python3.8/site-packages/torch/utils/data/dataloader.py", line 1133, in _try_get_data
+    data = self._data_queue.get(timeout=timeout)
+  File "/usr/lib/python3.8/queue.py", line 179, in get
+    self.not_empty.wait(remaining)
+  File "/usr/lib/python3.8/threading.py", line 306, in wait
+    gotit = waiter.acquire(True, timeout)
+KeyboardInterrupt
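This run was stopped by hand (Ctrl-C) before completing its first step: the stack shows the main process blocked in DataLoader._try_get_data, waiting on the worker result queue while the dataloader workers were still preparing the first batch. Line 6 of the traceback also shows the hook the script exposes for picking a run back up; a minimal sketch, where the checkpoint path is hypothetical (named after the save_steps=1000 convention used in this repo):

# Hypothetical resume; "./checkpoint-1000" assumes a checkpoint written by
# the save_steps=1000 setting. `trainer` is the Seq2SeqTrainer built in
# run_speech_recognition_seq2seq.py.
train_result = trainer.train(resume_from_checkpoint="./checkpoint-1000")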
wandb/run-20240327_125651-wqmi98ok/files/requirements.txt
ADDED
@@ -0,0 +1,247 @@
+absl-py==2.1.0
+accelerate==0.27.2
+aiohttp==3.9.3
+aiosignal==1.3.1
+anyio==4.2.0
+appdirs==1.4.4
+argon2-cffi-bindings==21.2.0
+argon2-cffi==23.1.0
+arrow==1.3.0
+asttokens==2.4.1
+astunparse==1.6.3
+async-lru==2.0.4
+async-timeout==4.0.3
+attrs==23.2.0
+audioread==3.0.1
+av==11.0.0
+babel==2.14.0
+backcall==0.2.0
+beautifulsoup4==4.12.3
+bitsandbytes==0.42.0
+bleach==6.1.0
+cached-property==1.5.2
+cachetools==5.3.2
+certifi==2024.2.2
+cffi==1.16.0
+charset-normalizer==3.3.2
+chex==0.1.7
+click==8.1.7
+coloredlogs==15.0.1
+comm==0.2.1
+contourpy==1.1.1
+ctranslate2==4.1.0
+cycler==0.12.1
+datasets==2.18.0
+debugpy==1.8.0
+decorator==5.1.1
+defusedxml==0.7.1
+dill==0.3.7
+dm-tree==0.1.8
+docker-pycreds==0.4.0
+docstring-parser==0.15
+einops==0.7.0
+etils==1.3.0
+evaluate==0.4.1
+exceptiongroup==1.2.0
+executing==2.0.1
+faster-whisper==1.0.1
+fastjsonschema==2.19.1
+filelock==3.13.1
+flash-attn==2.5.3
+flatbuffers==23.5.26
+flax==0.7.2
+fonttools==4.48.1
+fqdn==1.5.1
+frozenlist==1.4.1
+fsspec==2024.2.0
+gast==0.4.0
+gitdb==4.0.11
+gitpython==3.1.41
+google-auth-oauthlib==1.0.0
+google-auth==2.27.0
+google-pasta==0.2.0
+grpcio==1.60.1
+h11==0.14.0
+h5py==3.10.0
+httpcore==1.0.2
+httpx==0.26.0
+huggingface-hub==0.21.4
+humanfriendly==10.0
+idna==3.6
+importlib-metadata==7.0.1
+importlib-resources==6.1.1
+iniconfig==2.0.0
+ipdb==0.13.13
+ipykernel==6.29.2
+ipython==8.12.3
+isoduration==20.11.0
+jax==0.4.13
+jaxlib==0.4.13
+jedi==0.19.1
+jinja2==3.1.2
+jiwer==3.0.3
+joblib==1.3.2
+json5==0.9.14
+jsonpointer==2.4
+jsonschema-specifications==2023.12.1
+jsonschema==4.21.1
+jupyter-client==8.6.0
+jupyter-core==5.7.1
+jupyter-events==0.9.0
+jupyter-lsp==2.2.2
+jupyter-server-terminals==0.5.2
+jupyter-server==2.12.5
+jupyterlab-pygments==0.3.0
+jupyterlab-server==2.25.2
+jupyterlab==4.1.0
+keras==2.13.1
+kiwisolver==1.4.5
+lazy-loader==0.3
+libclang==16.0.6
+librosa==0.10.1
+llvmlite==0.41.1
+markdown-it-py==3.0.0
+markdown==3.5.2
+markupsafe==2.1.3
+matplotlib-inline==0.1.6
+matplotlib==3.7.4
+mdurl==0.1.2
+mistune==3.0.2
+ml-dtypes==0.2.0
+more-itertools==10.2.0
+mpmath==1.2.1
+msclap==1.3.3
+msgpack==1.0.7
+multidict==6.0.5
+multiprocess==0.70.15
+nbclient==0.9.0
+nbconvert==7.16.0
+nbformat==5.9.2
+nest-asyncio==1.6.0
+networkx==3.0rc1
+ninja==1.11.1.1
+notebook-shim==0.2.3
+numba==0.58.1
+numpy==1.24.3
+nvidia-cublas-cu12==12.1.3.1
+nvidia-cuda-cupti-cu12==12.1.105
+nvidia-cuda-nvrtc-cu12==12.1.105
+nvidia-cuda-runtime-cu12==12.1.105
+nvidia-cudnn-cu12==8.9.2.26
+nvidia-cufft-cu12==11.0.2.54
+nvidia-curand-cu12==10.3.2.106
+nvidia-cusolver-cu12==11.4.5.107
+nvidia-cusparse-cu12==12.1.0.106
+nvidia-nccl-cu12==2.19.3
+nvidia-nvjitlink-cu12==12.1.105
+nvidia-nvtx-cu12==12.1.105
+oauthlib==3.2.2
+onnxruntime==1.17.1
+openai-whisper==20231117
+opt-einsum==3.3.0
+optax==0.1.8
+orbax-checkpoint==0.2.3
+overrides==7.7.0
+packaging==23.2
+pandas==2.0.3
+pandocfilters==1.5.1
+parameterized==0.9.0
+parso==0.8.3
+peft==0.8.2
+pexpect==4.9.0
+pickleshare==0.7.5
+pillow==9.3.0
+pip==24.0
+pkg-resources==0.0.0
+pkgutil-resolve-name==1.3.10
+platformdirs==4.2.0
+pluggy==1.4.0
+pooch==1.8.0
+prometheus-client==0.19.0
+prompt-toolkit==3.0.43
+protobuf==4.25.2
+psutil==5.9.8
+ptyprocess==0.7.0
+pure-eval==0.2.2
+pyarrow-hotfix==0.6
+pyarrow==15.0.0
+pyasn1-modules==0.3.0
+pyasn1==0.5.1
+pycparser==2.21
+pygments==2.17.2
+pyparsing==3.1.1
+pytest==7.4.4
+python-dateutil==2.8.2
+python-json-logger==2.0.7
+pytorch-triton==3.0.0+901819d2b6
+pytz==2024.1
+pyyaml==6.0.1
+pyzmq==25.1.2
+rapidfuzz==3.6.1
+referencing==0.33.0
+regex==2023.12.25
+requests-oauthlib==1.3.1
+requests==2.31.0
+responses==0.18.0
+rfc3339-validator==0.1.4
+rfc3986-validator==0.1.1
+rich==13.7.0
+rpds-py==0.17.1
+rsa==4.9
+safetensors==0.4.2
+scikit-learn==1.3.2
+scipy==1.10.1
+send2trash==1.8.2
+sentry-sdk==1.40.0
+setproctitle==1.3.3
+setuptools==44.0.0
+shtab==1.7.0
+six==1.16.0
+smmap==5.0.1
+sniffio==1.3.0
+soundfile==0.12.1
+soupsieve==2.5
+soxr==0.3.7
+stack-data==0.6.3
+sympy==1.11.1
+tensorboard-data-server==0.7.2
+tensorboard==2.13.0
+tensorflow-cpu==2.13.1
+tensorflow-estimator==2.13.0
+tensorflow-io-gcs-filesystem==0.34.0
+tensorstore==0.1.45
+termcolor==2.4.0
+terminado==0.18.0
+threadpoolctl==3.2.0
+tiktoken==0.6.0
+tinycss2==1.2.1
+tokenizers==0.15.1
+tomli==2.0.1
+toolz==0.12.1
+torch==2.2.1
+torchaudio==2.2.1
+torchlibrosa==0.1.0
+torchvision==0.17.1
+tornado==6.4
+tqdm==4.66.1
+traitlets==5.14.1
+transformers==4.39.0.dev0
+triton==2.2.0
+trl==0.7.11
+types-python-dateutil==2.8.19.20240106
+typing-extensions==4.9.0
+tyro==0.7.3
+tzdata==2023.4
+uri-template==1.3.0
+urllib3==2.2.0
+wandb==0.16.2
+wcwidth==0.2.13
+webcolors==1.13
+webencodings==0.5.1
+websocket-client==1.7.0
+werkzeug==3.0.1
+wheel==0.42.0
+wrapt==1.16.0
+xxhash==3.4.1
+yarl==1.9.4
+zipp==3.17.0
wandb/run-20240327_125651-wqmi98ok/files/wandb-metadata.json
ADDED
@@ -0,0 +1,738 @@
+{
+    "os": "Linux-5.4.0-166-generic-x86_64-with-glibc2.29",
+    "python": "3.8.10",
+    "heartbeatAt": "2024-03-27T11:56:52.112775",
+    "startedAt": "2024-03-27T11:56:51.619117",
+    "docker": null,
+    "cuda": null,
+    "args": [
+        "--model_name_or_path=distil-whisper/distil-large-v3",
+        "--dataset_name=mozilla-foundation/common_voice_16_1",
+        "--dataset_config_name=hi",
+        "--language=hindi",
+        "--train_split_name=train+validation",
+        "--eval_split_name=test",
+        "--max_steps=5000",
+        "--output_dir=./",
+        "--per_device_train_batch_size=128",
+        "--per_device_eval_batch_size=128",
+        "--logging_steps=25",
+        "--learning_rate=1e-4",
+        "--warmup_steps=500",
+        "--evaluation_strategy=steps",
+        "--eval_steps=1000",
+        "--save_strategy=steps",
+        "--save_steps=1000",
+        "--generation_max_length=225",
+        "--preprocessing_num_workers=1",
+        "--dataloader_num_workers=4",
+        "--length_column_name=input_length",
+        "--max_duration_in_seconds=30",
+        "--text_column_name=sentence",
+        "--freeze_feature_encoder=False",
+        "--gradient_checkpointing",
+        "--group_by_length",
+        "--fp16",
+        "--overwrite_output_dir",
+        "--do_train",
+        "--do_eval",
+        "--predict_with_generate",
+        "--use_auth_token",
+        "--push_to_hub"
+    ],
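The "args" array just above is the complete flag list for this second run. Stitched back onto the entry point it gives the launch command; a minimal sketch (the committed run.sh likely holds the authoritative invocation):

# Sketch only: rebuild the launch command from the "args" list above.
import shlex
args = [
    "--model_name_or_path=distil-whisper/distil-large-v3",
    "--dataset_name=mozilla-foundation/common_voice_16_1",
    # ... remaining flags exactly as listed in "args" above ...
]
print("python run_speech_recognition_seq2seq.py " + shlex.join(args))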
+    "state": "running",
+    "program": "run_speech_recognition_seq2seq.py",
+    "codePathLocal": "run_speech_recognition_seq2seq.py",
+    "codePath": "run_speech_recognition_seq2seq.py",
+    "git": {
+        "remote": "https://huggingface.co/sanchit-gandhi/distil-large-v3-hi-ft",
+        "commit": "40c686df113c0e98e7363c1bd523f58d11848fc0"
+    },
+    "email": "sanchit@huggingface.co",
+    "root": "/home/sanchit/distil-large-v3-hi-ft",
+    "host": "hf-dgx-01",
+    "username": "sanchit",
+    "executable": "/home/sanchit/hf/bin/python",
+    "cpu_count": 64,
+    "cpu_count_logical": 128,
+    "cpu_freq": {
+        "current": 2243.00703125,
+        "min": 1500.0,
+        "max": 2250.0
+    },
+    "cpu_freq_per_core": [
[... per-core frequency entries (diff lines 64 onward), one object per logical core, each of the form { "current": <MHz>, "min": 1500.0, "max": 2250.0 } ...]
|
513 |
+
},
|
514 |
+
{
|
515 |
+
"current": 3349.98,
|
516 |
+
"min": 1500.0,
|
517 |
+
"max": 2250.0
|
518 |
+
},
|
519 |
+
{
|
520 |
+
"current": 3347.133,
|
521 |
+
"min": 1500.0,
|
522 |
+
"max": 2250.0
|
523 |
+
},
|
524 |
+
{
|
525 |
+
"current": 3356.231,
|
526 |
+
"min": 1500.0,
|
527 |
+
"max": 2250.0
|
528 |
+
},
|
529 |
+
{
|
530 |
+
"current": 3342.335,
|
531 |
+
"min": 1500.0,
|
532 |
+
"max": 2250.0
|
533 |
+
},
|
534 |
+
{
|
535 |
+
"current": 3350.302,
|
536 |
+
"min": 1500.0,
|
537 |
+
"max": 2250.0
|
538 |
+
},
|
539 |
+
{
|
540 |
+
"current": 3357.191,
|
541 |
+
"min": 1500.0,
|
542 |
+
"max": 2250.0
|
543 |
+
},
|
544 |
+
{
|
545 |
+
"current": 3351.921,
|
546 |
+
"min": 1500.0,
|
547 |
+
"max": 2250.0
|
548 |
+
},
|
549 |
+
{
|
550 |
+
"current": 3240.956,
|
551 |
+
"min": 1500.0,
|
552 |
+
"max": 2250.0
|
553 |
+
},
|
554 |
+
{
|
555 |
+
"current": 3342.465,
|
556 |
+
"min": 1500.0,
|
557 |
+
"max": 2250.0
|
558 |
+
},
|
559 |
+
{
|
560 |
+
"current": 3348.204,
|
561 |
+
"min": 1500.0,
|
562 |
+
"max": 2250.0
|
563 |
+
},
|
564 |
+
{
|
565 |
+
"current": 1918.245,
|
566 |
+
"min": 1500.0,
|
567 |
+
"max": 2250.0
|
568 |
+
},
|
569 |
+
{
|
570 |
+
"current": 2365.609,
|
571 |
+
"min": 1500.0,
|
572 |
+
"max": 2250.0
|
573 |
+
},
|
574 |
+
{
|
575 |
+
"current": 3343.816,
|
576 |
+
"min": 1500.0,
|
577 |
+
"max": 2250.0
|
578 |
+
},
|
579 |
+
{
|
580 |
+
"current": 2377.081,
|
581 |
+
"min": 1500.0,
|
582 |
+
"max": 2250.0
|
583 |
+
},
|
584 |
+
{
|
585 |
+
"current": 3356.969,
|
586 |
+
"min": 1500.0,
|
587 |
+
"max": 2250.0
|
588 |
+
},
|
589 |
+
{
|
590 |
+
"current": 3344.931,
|
591 |
+
"min": 1500.0,
|
592 |
+
"max": 2250.0
|
593 |
+
},
|
594 |
+
{
|
595 |
+
"current": 2361.682,
|
596 |
+
"min": 1500.0,
|
597 |
+
"max": 2250.0
|
598 |
+
},
|
599 |
+
{
|
600 |
+
"current": 2370.457,
|
601 |
+
"min": 1500.0,
|
602 |
+
"max": 2250.0
|
603 |
+
},
|
604 |
+
{
|
605 |
+
"current": 2362.206,
|
606 |
+
"min": 1500.0,
|
607 |
+
"max": 2250.0
|
608 |
+
},
|
609 |
+
{
|
610 |
+
"current": 2376.413,
|
611 |
+
"min": 1500.0,
|
612 |
+
"max": 2250.0
|
613 |
+
},
|
614 |
+
{
|
615 |
+
"current": 2394.486,
|
616 |
+
"min": 1500.0,
|
617 |
+
"max": 2250.0
|
618 |
+
},
|
619 |
+
{
|
620 |
+
"current": 3344.484,
|
621 |
+
"min": 1500.0,
|
622 |
+
"max": 2250.0
|
623 |
+
},
|
624 |
+
{
|
625 |
+
"current": 3345.586,
|
626 |
+
"min": 1500.0,
|
627 |
+
"max": 2250.0
|
628 |
+
},
|
629 |
+
{
|
630 |
+
"current": 2206.515,
|
631 |
+
"min": 1500.0,
|
632 |
+
"max": 2250.0
|
633 |
+
},
|
634 |
+
{
|
635 |
+
"current": 1677.619,
|
636 |
+
"min": 1500.0,
|
637 |
+
"max": 2250.0
|
638 |
+
},
|
639 |
+
{
|
640 |
+
"current": 1674.904,
|
641 |
+
"min": 1500.0,
|
642 |
+
"max": 2250.0
|
643 |
+
},
|
644 |
+
{
|
645 |
+
"current": 3357.382,
|
646 |
+
"min": 1500.0,
|
647 |
+
"max": 2250.0
|
648 |
+
},
|
649 |
+
{
|
650 |
+
"current": 3340.888,
|
651 |
+
"min": 1500.0,
|
652 |
+
"max": 2250.0
|
653 |
+
},
|
654 |
+
{
|
655 |
+
"current": 1701.861,
|
656 |
+
"min": 1500.0,
|
657 |
+
"max": 2250.0
|
658 |
+
},
|
659 |
+
{
|
660 |
+
"current": 1677.818,
|
661 |
+
"min": 1500.0,
|
662 |
+
"max": 2250.0
|
663 |
+
},
|
664 |
+
{
|
665 |
+
"current": 3350.001,
|
666 |
+
"min": 1500.0,
|
667 |
+
"max": 2250.0
|
668 |
+
},
|
669 |
+
{
|
670 |
+
"current": 2190.299,
|
671 |
+
"min": 1500.0,
|
672 |
+
"max": 2250.0
|
673 |
+
},
|
674 |
+
{
|
675 |
+
"current": 1772.227,
|
676 |
+
"min": 1500.0,
|
677 |
+
"max": 2250.0
|
678 |
+
},
|
679 |
+
{
|
680 |
+
"current": 1812.92,
|
681 |
+
"min": 1500.0,
|
682 |
+
"max": 2250.0
|
683 |
+
},
|
684 |
+
{
|
685 |
+
"current": 1819.065,
|
686 |
+
"min": 1500.0,
|
687 |
+
"max": 2250.0
|
688 |
+
},
|
689 |
+
{
|
690 |
+
"current": 1674.597,
|
691 |
+
"min": 1500.0,
|
692 |
+
"max": 2250.0
|
693 |
+
},
|
694 |
+
{
|
695 |
+
"current": 1675.097,
|
696 |
+
"min": 1500.0,
|
697 |
+
"max": 2250.0
|
698 |
+
},
|
699 |
+
{
|
700 |
+
"current": 2180.015,
|
701 |
+
"min": 1500.0,
|
702 |
+
"max": 2250.0
|
703 |
+
}
|
704 |
+
],
|
705 |
+
"disk": {
|
706 |
+
"/": {
|
707 |
+
"total": 1757.8785285949707,
|
708 |
+
"used": 1497.0354919433594
|
709 |
+
}
|
710 |
+
},
|
711 |
+
"gpu": "NVIDIA A100-SXM4-80GB",
|
712 |
+
"gpu_count": 5,
|
713 |
+
"gpu_devices": [
|
714 |
+
{
|
715 |
+
"name": "NVIDIA A100-SXM4-80GB",
|
716 |
+
"memory_total": 85899345920
|
717 |
+
},
|
718 |
+
{
|
719 |
+
"name": "NVIDIA A100-SXM4-80GB",
|
720 |
+
"memory_total": 85899345920
|
721 |
+
},
|
722 |
+
{
|
723 |
+
"name": "NVIDIA A100-SXM4-80GB",
|
724 |
+
"memory_total": 85899345920
|
725 |
+
},
|
726 |
+
{
|
727 |
+
"name": "NVIDIA DGX Display",
|
728 |
+
"memory_total": 4294967296
|
729 |
+
},
|
730 |
+
{
|
731 |
+
"name": "NVIDIA A100-SXM4-80GB",
|
732 |
+
"memory_total": 85899345920
|
733 |
+
}
|
734 |
+
],
|
735 |
+
"memory": {
|
736 |
+
"total": 503.5396919250488
|
737 |
+
}
|
738 |
+
}
|
wandb/run-20240327_125651-wqmi98ok/files/wandb-summary.json
ADDED
@@ -0,0 +1 @@
{"_wandb": {"runtime": 28}}
wandb/run-20240327_125651-wqmi98ok/logs/debug-internal.log
ADDED
@@ -0,0 +1,188 @@
2024-03-27 12:56:51,624 INFO StreamThr :1387372 [internal.py:wandb_internal():86] W&B internal server running at pid: 1387372, started at: 2024-03-27 12:56:51.623842
2024-03-27 12:56:51,626 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: status
2024-03-27 12:56:51,629 INFO WriterThread:1387372 [datastore.py:open_for_write():87] open: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/run-wqmi98ok.wandb
2024-03-27 12:56:51,629 DEBUG SenderThread:1387372 [sender.py:send():382] send: header
2024-03-27 12:56:51,695 DEBUG SenderThread:1387372 [sender.py:send():382] send: run
2024-03-27 12:56:52,021 INFO SenderThread:1387372 [dir_watcher.py:__init__():211] watching files in: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files
2024-03-27 12:56:52,021 INFO SenderThread:1387372 [sender.py:_start_run_threads():1136] run started: wqmi98ok with start time 1711540611.622937
2024-03-27 12:56:52,026 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: check_version
2024-03-27 12:56:52,027 DEBUG SenderThread:1387372 [sender.py:send_request():409] send_request: check_version
2024-03-27 12:56:52,060 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: run_start
2024-03-27 12:56:52,072 DEBUG HandlerThread:1387372 [system_info.py:__init__():27] System info init
2024-03-27 12:56:52,072 DEBUG HandlerThread:1387372 [system_info.py:__init__():42] System info init done
2024-03-27 12:56:52,072 INFO HandlerThread:1387372 [system_monitor.py:start():194] Starting system monitor
2024-03-27 12:56:52,072 INFO SystemMonitor:1387372 [system_monitor.py:_start():158] Starting system asset monitoring threads
2024-03-27 12:56:52,073 INFO HandlerThread:1387372 [system_monitor.py:probe():214] Collecting system info
2024-03-27 12:56:52,073 INFO SystemMonitor:1387372 [interfaces.py:start():190] Started cpu monitoring
2024-03-27 12:56:52,074 INFO SystemMonitor:1387372 [interfaces.py:start():190] Started disk monitoring
2024-03-27 12:56:52,075 INFO SystemMonitor:1387372 [interfaces.py:start():190] Started gpu monitoring
2024-03-27 12:56:52,076 INFO SystemMonitor:1387372 [interfaces.py:start():190] Started memory monitoring
2024-03-27 12:56:52,079 INFO SystemMonitor:1387372 [interfaces.py:start():190] Started network monitoring
2024-03-27 12:56:52,112 DEBUG HandlerThread:1387372 [system_info.py:probe():151] Probing system
2024-03-27 12:56:52,119 DEBUG HandlerThread:1387372 [system_info.py:_probe_git():136] Probing git
2024-03-27 12:56:52,129 DEBUG HandlerThread:1387372 [system_info.py:_probe_git():144] Probing git done
2024-03-27 12:56:52,129 DEBUG HandlerThread:1387372 [system_info.py:probe():199] Probing system done
2024-03-27 12:56:52,129 DEBUG HandlerThread:1387372 [system_monitor.py:probe():223] {'os': 'Linux-5.4.0-166-generic-x86_64-with-glibc2.29', 'python': '3.8.10', 'heartbeatAt': '2024-03-27T11:56:52.112775', 'startedAt': '2024-03-27T11:56:51.619117', 'docker': None, 'cuda': None, 'args': ('--model_name_or_path=distil-whisper/distil-large-v3', '--dataset_name=mozilla-foundation/common_voice_16_1', '--dataset_config_name=hi', '--language=hindi', '--train_split_name=train+validation', '--eval_split_name=test', '--max_steps=5000', '--output_dir=./', '--per_device_train_batch_size=128', '--per_device_eval_batch_size=128', '--logging_steps=25', '--learning_rate=1e-4', '--warmup_steps=500', '--evaluation_strategy=steps', '--eval_steps=1000', '--save_strategy=steps', '--save_steps=1000', '--generation_max_length=225', '--preprocessing_num_workers=1', '--dataloader_num_workers=4', '--length_column_name=input_length', '--max_duration_in_seconds=30', '--text_column_name=sentence', '--freeze_feature_encoder=False', '--gradient_checkpointing', '--group_by_length', '--fp16', '--overwrite_output_dir', '--do_train', '--do_eval', '--predict_with_generate', '--use_auth_token', '--push_to_hub'), 'state': 'running', 'program': 'run_speech_recognition_seq2seq.py', 'codePathLocal': 'run_speech_recognition_seq2seq.py', 'codePath': 'run_speech_recognition_seq2seq.py', 'git': {'remote': 'https://huggingface.co/sanchit-gandhi/distil-large-v3-hi-ft', 'commit': '40c686df113c0e98e7363c1bd523f58d11848fc0'}, 'email': 'sanchit@huggingface.co', 'root': '/home/sanchit/distil-large-v3-hi-ft', 'host': 'hf-dgx-01', 'username': 'sanchit', 'executable': '/home/sanchit/hf/bin/python', 'cpu_count': 64, 'cpu_count_logical': 128, 'cpu_freq': {'current': 2243.00703125, 'min': 1500.0, 'max': 2250.0}, 'cpu_freq_per_core': [{'current': 1964.281, 'min': 1500.0, 'max': 2250.0}, {'current': 1689.142, 'min': 1500.0, 'max': 2250.0}, {'current': 3345.39, 'min': 1500.0, 'max': 2250.0}, {'current': 3006.418, 'min': 1500.0, 'max': 2250.0}, {'current': 3380.779, 'min': 1500.0, 'max': 2250.0}, {'current': 1688.238, 'min': 1500.0, 'max': 2250.0}, {'current': 1685.704, 'min': 1500.0, 'max': 2250.0}, {'current': 1687.499, 'min': 1500.0, 'max': 2250.0}, {'current': 3376.761, 'min': 1500.0, 'max': 2250.0}, {'current': 1752.087, 'min': 1500.0, 'max': 2250.0}, {'current': 1739.572, 'min': 1500.0, 'max': 2250.0}, {'current': 1734.478, 'min': 1500.0, 'max': 2250.0}, {'current': 1865.962, 'min': 1500.0, 'max': 2250.0}, {'current': 1672.752, 'min': 1500.0, 'max': 2250.0}, {'current': 1405.724, 'min': 1500.0, 'max': 2250.0}, {'current': 1615.437, 'min': 1500.0, 'max': 2250.0}, {'current': 1794.997, 'min': 1500.0, 'max': 2250.0}, {'current': 1794.034, 'min': 1500.0, 'max': 2250.0}, {'current': 1796.085, 'min': 1500.0, 'max': 2250.0}, {'current': 1795.252, 'min': 1500.0, 'max': 2250.0}, {'current': 1795.636, 'min': 1500.0, 'max': 2250.0}, {'current': 1792.425, 'min': 1500.0, 'max': 2250.0}, {'current': 1796.274, 'min': 1500.0, 'max': 2250.0}, {'current': 1791.019, 'min': 1500.0, 'max': 2250.0}, {'current': 1794.279, 'min': 1500.0, 'max': 2250.0}, {'current': 1794.021, 'min': 1500.0, 'max': 2250.0}, {'current': 1795.141, 'min': 1500.0, 'max': 2250.0}, {'current': 1794.416, 'min': 1500.0, 'max': 2250.0}, {'current': 1794.591, 'min': 1500.0, 'max': 2250.0}, {'current': 2119.227, 'min': 1500.0, 'max': 2250.0}, {'current': 1794.947, 'min': 1500.0, 'max': 2250.0}, {'current': 1791.688, 'min': 1500.0, 'max': 2250.0}, {'current': 1695.926, 'min': 1500.0, 'max': 2250.0}, 
{'current': 3389.658, 'min': 1500.0, 'max': 2250.0}, {'current': 1695.382, 'min': 1500.0, 'max': 2250.0}, {'current': 2841.566, 'min': 1500.0, 'max': 2250.0}, {'current': 1736.243, 'min': 1500.0, 'max': 2250.0}, {'current': 1735.534, 'min': 1500.0, 'max': 2250.0}, {'current': 2177.597, 'min': 1500.0, 'max': 2250.0}, {'current': 1733.217, 'min': 1500.0, 'max': 2250.0}, {'current': 1693.939, 'min': 1500.0, 'max': 2250.0}, {'current': 2565.181, 'min': 1500.0, 'max': 2250.0}, {'current': 1691.109, 'min': 1500.0, 'max': 2250.0}, {'current': 1694.235, 'min': 1500.0, 'max': 2250.0}, {'current': 1733.402, 'min': 1500.0, 'max': 2250.0}, {'current': 1735.221, 'min': 1500.0, 'max': 2250.0}, {'current': 1738.531, 'min': 1500.0, 'max': 2250.0}, {'current': 1952.321, 'min': 1500.0, 'max': 2250.0}, {'current': 1940.147, 'min': 1500.0, 'max': 2250.0}, {'current': 1739.762, 'min': 1500.0, 'max': 2250.0}, {'current': 1739.459, 'min': 1500.0, 'max': 2250.0}, {'current': 1737.067, 'min': 1500.0, 'max': 2250.0}, {'current': 3380.82, 'min': 1500.0, 'max': 2250.0}, {'current': 3319.789, 'min': 1500.0, 'max': 2250.0}, {'current': 1694.445, 'min': 1500.0, 'max': 2250.0}, {'current': 1693.778, 'min': 1500.0, 'max': 2250.0}, {'current': 2277.268, 'min': 1500.0, 'max': 2250.0}, {'current': 1735.253, 'min': 1500.0, 'max': 2250.0}, {'current': 1726.248, 'min': 1500.0, 'max': 2250.0}, {'current': 1762.84, 'min': 1500.0, 'max': 2250.0}, {'current': 1795.057, 'min': 1500.0, 'max': 2250.0}, {'current': 1794.362, 'min': 1500.0, 'max': 2250.0}, {'current': 1792.493, 'min': 1500.0, 'max': 2250.0}, {'current': 1796.363, 'min': 1500.0, 'max': 2250.0}, {'current': 2225.456, 'min': 1500.0, 'max': 2250.0}, {'current': 1693.574, 'min': 1500.0, 'max': 2250.0}, {'current': 3348.191, 'min': 1500.0, 'max': 2250.0}, {'current': 1693.601, 'min': 1500.0, 'max': 2250.0}, {'current': 3387.356, 'min': 1500.0, 'max': 2250.0}, {'current': 1696.518, 'min': 1500.0, 'max': 2250.0}, {'current': 1692.69, 'min': 1500.0, 'max': 2250.0}, {'current': 1694.38, 'min': 1500.0, 'max': 2250.0}, {'current': 2428.729, 'min': 1500.0, 'max': 2250.0}, {'current': 2289.127, 'min': 1500.0, 'max': 2250.0}, {'current': 1893.633, 'min': 1500.0, 'max': 2250.0}, {'current': 2281.235, 'min': 1500.0, 'max': 2250.0}, {'current': 1614.407, 'min': 1500.0, 'max': 2250.0}, {'current': 2290.923, 'min': 1500.0, 'max': 2250.0}, {'current': 2286.246, 'min': 1500.0, 'max': 2250.0}, {'current': 1821.797, 'min': 1500.0, 'max': 2250.0}, {'current': 2017.471, 'min': 1500.0, 'max': 2250.0}, {'current': 2290.154, 'min': 1500.0, 'max': 2250.0}, {'current': 1823.286, 'min': 1500.0, 'max': 2250.0}, {'current': 2271.526, 'min': 1500.0, 'max': 2250.0}, {'current': 2287.721, 'min': 1500.0, 'max': 2250.0}, {'current': 2286.719, 'min': 1500.0, 'max': 2250.0}, {'current': 2922.541, 'min': 1500.0, 'max': 2250.0}, {'current': 2281.65, 'min': 1500.0, 'max': 2250.0}, {'current': 3350.967, 'min': 1500.0, 'max': 2250.0}, {'current': 3342.277, 'min': 1500.0, 'max': 2250.0}, {'current': 3349.98, 'min': 1500.0, 'max': 2250.0}, {'current': 3347.133, 'min': 1500.0, 'max': 2250.0}, {'current': 3356.231, 'min': 1500.0, 'max': 2250.0}, {'current': 3342.335, 'min': 1500.0, 'max': 2250.0}, {'current': 3350.302, 'min': 1500.0, 'max': 2250.0}, {'current': 3357.191, 'min': 1500.0, 'max': 2250.0}, {'current': 3351.921, 'min': 1500.0, 'max': 2250.0}, {'current': 3240.956, 'min': 1500.0, 'max': 2250.0}, {'current': 3342.465, 'min': 1500.0, 'max': 2250.0}, {'current': 3348.204, 'min': 1500.0, 'max': 2250.0}, 
{'current': 1918.245, 'min': 1500.0, 'max': 2250.0}, {'current': 2365.609, 'min': 1500.0, 'max': 2250.0}, {'current': 3343.816, 'min': 1500.0, 'max': 2250.0}, {'current': 2377.081, 'min': 1500.0, 'max': 2250.0}, {'current': 3356.969, 'min': 1500.0, 'max': 2250.0}, {'current': 3344.931, 'min': 1500.0, 'max': 2250.0}, {'current': 2361.682, 'min': 1500.0, 'max': 2250.0}, {'current': 2370.457, 'min': 1500.0, 'max': 2250.0}, {'current': 2362.206, 'min': 1500.0, 'max': 2250.0}, {'current': 2376.413, 'min': 1500.0, 'max': 2250.0}, {'current': 2394.486, 'min': 1500.0, 'max': 2250.0}, {'current': 3344.484, 'min': 1500.0, 'max': 2250.0}, {'current': 3345.586, 'min': 1500.0, 'max': 2250.0}, {'current': 2206.515, 'min': 1500.0, 'max': 2250.0}, {'current': 1677.619, 'min': 1500.0, 'max': 2250.0}, {'current': 1674.904, 'min': 1500.0, 'max': 2250.0}, {'current': 3357.382, 'min': 1500.0, 'max': 2250.0}, {'current': 3340.888, 'min': 1500.0, 'max': 2250.0}, {'current': 1701.861, 'min': 1500.0, 'max': 2250.0}, {'current': 1677.818, 'min': 1500.0, 'max': 2250.0}, {'current': 3350.001, 'min': 1500.0, 'max': 2250.0}, {'current': 2190.299, 'min': 1500.0, 'max': 2250.0}, {'current': 1772.227, 'min': 1500.0, 'max': 2250.0}, {'current': 1812.92, 'min': 1500.0, 'max': 2250.0}, {'current': 1819.065, 'min': 1500.0, 'max': 2250.0}, {'current': 1674.597, 'min': 1500.0, 'max': 2250.0}, {'current': 1675.097, 'min': 1500.0, 'max': 2250.0}, {'current': 2180.015, 'min': 1500.0, 'max': 2250.0}], 'disk': {'/': {'total': 1757.8785285949707, 'used': 1497.0354919433594}}, 'gpu': 'NVIDIA A100-SXM4-80GB', 'gpu_count': 5, 'gpu_devices': [{'name': 'NVIDIA A100-SXM4-80GB', 'memory_total': 85899345920}, {'name': 'NVIDIA A100-SXM4-80GB', 'memory_total': 85899345920}, {'name': 'NVIDIA A100-SXM4-80GB', 'memory_total': 85899345920}, {'name': 'NVIDIA DGX Display', 'memory_total': 4294967296}, {'name': 'NVIDIA A100-SXM4-80GB', 'memory_total': 85899345920}], 'memory': {'total': 503.5396919250488}}
2024-03-27 12:56:52,129 INFO HandlerThread:1387372 [system_monitor.py:probe():224] Finished collecting system info
2024-03-27 12:56:52,129 INFO HandlerThread:1387372 [system_monitor.py:probe():227] Publishing system info
2024-03-27 12:56:52,130 INFO HandlerThread:1387372 [system_monitor.py:probe():229] Finished publishing system info
2024-03-27 12:56:52,134 DEBUG SenderThread:1387372 [sender.py:send():382] send: files
2024-03-27 12:56:52,134 INFO SenderThread:1387372 [sender.py:_save_file():1403] saving file wandb-metadata.json with policy now
2024-03-27 12:56:52,139 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: python_packages
2024-03-27 12:56:52,139 DEBUG SenderThread:1387372 [sender.py:send_request():409] send_request: python_packages
2024-03-27 12:56:52,140 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: stop_status
2024-03-27 12:56:52,140 DEBUG SenderThread:1387372 [sender.py:send_request():409] send_request: stop_status
2024-03-27 12:56:52,143 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:56:52,144 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: internal_messages
2024-03-27 12:56:52,153 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:56:52,353 DEBUG SenderThread:1387372 [sender.py:send():382] send: telemetry
2024-03-27 12:56:52,354 DEBUG SenderThread:1387372 [sender.py:send():382] send: config
2024-03-27 12:56:52,355 DEBUG SenderThread:1387372 [sender.py:send():382] send: metric
2024-03-27 12:56:52,355 DEBUG SenderThread:1387372 [sender.py:send():382] send: telemetry
2024-03-27 12:56:52,355 DEBUG SenderThread:1387372 [sender.py:send():382] send: metric
2024-03-27 12:56:52,355 WARNING SenderThread:1387372 [sender.py:send_metric():1354] Seen metric with glob (shouldn't happen)
2024-03-27 12:56:52,355 DEBUG SenderThread:1387372 [sender.py:send():382] send: telemetry
2024-03-27 12:56:52,656 INFO wandb-upload_0:1387372 [upload_job.py:push():131] Uploaded file /tmp/tmp_obn2emjwandb/2rxvos1l-wandb-metadata.json
2024-03-27 12:56:53,023 INFO Thread-13 :1387372 [dir_watcher.py:_on_file_created():271] file/dir created: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/wandb-metadata.json
2024-03-27 12:56:53,023 INFO Thread-13 :1387372 [dir_watcher.py:_on_file_created():271] file/dir created: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/output.log
2024-03-27 12:56:53,023 INFO Thread-13 :1387372 [dir_watcher.py:_on_file_created():271] file/dir created: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/requirements.txt
2024-03-27 12:56:54,912 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:56:54,930 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:56:55,022 INFO Thread-13 :1387372 [dir_watcher.py:_on_file_modified():288] file/dir modified: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/output.log
2024-03-27 12:56:56,950 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:56:56,964 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:56:57,356 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: status_report
2024-03-27 12:56:59,508 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:56:59,521 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:01,538 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:01,549 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:02,357 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: status_report
2024-03-27 12:57:04,091 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:04,101 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:06,120 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:06,130 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:07,140 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: stop_status
2024-03-27 12:57:07,141 DEBUG SenderThread:1387372 [sender.py:send_request():409] send_request: stop_status
2024-03-27 12:57:07,141 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: internal_messages
2024-03-27 12:57:08,326 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: status_report
2024-03-27 12:57:08,686 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:08,698 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:10,717 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:10,727 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:13,225 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:13,236 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:13,327 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: status_report
2024-03-27 12:57:15,265 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:15,275 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:17,746 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:17,760 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:18,328 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: status_report
2024-03-27 12:57:19,779 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:19,794 ERROR gpu :1387372 [interfaces.py:monitor():144] Failed to sample metric: Not Supported
2024-03-27 12:57:20,371 DEBUG SenderThread:1387372 [sender.py:send():382] send: exit
2024-03-27 12:57:20,372 INFO SenderThread:1387372 [sender.py:send_exit():589] handling exit code: 255
2024-03-27 12:57:20,372 INFO SenderThread:1387372 [sender.py:send_exit():591] handling runtime: 28
2024-03-27 12:57:20,374 INFO SenderThread:1387372 [sender.py:_save_file():1403] saving file wandb-summary.json with policy end
2024-03-27 12:57:20,374 INFO SenderThread:1387372 [sender.py:send_exit():597] send defer
2024-03-27 12:57:20,374 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: defer
2024-03-27 12:57:20,374 INFO HandlerThread:1387372 [handler.py:handle_request_defer():172] handle defer: 0
2024-03-27 12:57:20,374 DEBUG SenderThread:1387372 [sender.py:send_request():409] send_request: defer
2024-03-27 12:57:20,374 INFO SenderThread:1387372 [sender.py:send_request_defer():613] handle sender defer: 0
2024-03-27 12:57:20,375 INFO SenderThread:1387372 [sender.py:transition_state():617] send defer: 1
2024-03-27 12:57:20,375 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: defer
2024-03-27 12:57:20,375 INFO HandlerThread:1387372 [handler.py:handle_request_defer():172] handle defer: 1
2024-03-27 12:57:20,375 DEBUG SenderThread:1387372 [sender.py:send_request():409] send_request: defer
2024-03-27 12:57:20,375 INFO SenderThread:1387372 [sender.py:send_request_defer():613] handle sender defer: 1
2024-03-27 12:57:20,375 INFO SenderThread:1387372 [sender.py:transition_state():617] send defer: 2
2024-03-27 12:57:20,375 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: defer
2024-03-27 12:57:20,375 INFO HandlerThread:1387372 [handler.py:handle_request_defer():172] handle defer: 2
2024-03-27 12:57:20,375 INFO HandlerThread:1387372 [system_monitor.py:finish():203] Stopping system monitor
2024-03-27 12:57:20,376 DEBUG SystemMonitor:1387372 [system_monitor.py:_start():172] Starting system metrics aggregation loop
2024-03-27 12:57:20,377 DEBUG SystemMonitor:1387372 [system_monitor.py:_start():179] Finished system metrics aggregation loop
2024-03-27 12:57:20,377 DEBUG SystemMonitor:1387372 [system_monitor.py:_start():183] Publishing last batch of metrics
2024-03-27 12:57:20,379 INFO HandlerThread:1387372 [interfaces.py:finish():202] Joined cpu monitor
2024-03-27 12:57:20,379 INFO HandlerThread:1387372 [interfaces.py:finish():202] Joined disk monitor
2024-03-27 12:57:21,030 INFO Thread-13 :1387372 [dir_watcher.py:_on_file_created():271] file/dir created: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/wandb-summary.json
2024-03-27 12:57:21,316 ERROR gpu :1387372 [interfaces.py:aggregate():161] Failed to serialize metric: division by zero
2024-03-27 12:57:21,317 INFO HandlerThread:1387372 [interfaces.py:finish():202] Joined gpu monitor
2024-03-27 12:57:21,317 INFO HandlerThread:1387372 [interfaces.py:finish():202] Joined memory monitor
2024-03-27 12:57:21,317 INFO HandlerThread:1387372 [interfaces.py:finish():202] Joined network monitor
2024-03-27 12:57:21,317 DEBUG SenderThread:1387372 [sender.py:send_request():409] send_request: defer
2024-03-27 12:57:21,317 INFO SenderThread:1387372 [sender.py:send_request_defer():613] handle sender defer: 2
2024-03-27 12:57:21,317 INFO SenderThread:1387372 [sender.py:transition_state():617] send defer: 3
2024-03-27 12:57:21,317 DEBUG SenderThread:1387372 [sender.py:send():382] send: stats
2024-03-27 12:57:21,318 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: defer
2024-03-27 12:57:21,318 INFO HandlerThread:1387372 [handler.py:handle_request_defer():172] handle defer: 3
2024-03-27 12:57:21,318 DEBUG SenderThread:1387372 [sender.py:send_request():409] send_request: defer
2024-03-27 12:57:21,318 INFO SenderThread:1387372 [sender.py:send_request_defer():613] handle sender defer: 3
2024-03-27 12:57:21,318 INFO SenderThread:1387372 [sender.py:transition_state():617] send defer: 4
2024-03-27 12:57:21,319 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: defer
2024-03-27 12:57:21,319 INFO HandlerThread:1387372 [handler.py:handle_request_defer():172] handle defer: 4
2024-03-27 12:57:21,319 DEBUG SenderThread:1387372 [sender.py:send_request():409] send_request: defer
2024-03-27 12:57:21,319 INFO SenderThread:1387372 [sender.py:send_request_defer():613] handle sender defer: 4
2024-03-27 12:57:21,319 INFO SenderThread:1387372 [sender.py:transition_state():617] send defer: 5
2024-03-27 12:57:21,319 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: defer
2024-03-27 12:57:21,319 INFO HandlerThread:1387372 [handler.py:handle_request_defer():172] handle defer: 5
2024-03-27 12:57:21,319 DEBUG SenderThread:1387372 [sender.py:send():382] send: summary
2024-03-27 12:57:21,320 INFO SenderThread:1387372 [sender.py:_save_file():1403] saving file wandb-summary.json with policy end
2024-03-27 12:57:21,320 DEBUG SenderThread:1387372 [sender.py:send_request():409] send_request: defer
2024-03-27 12:57:21,320 INFO SenderThread:1387372 [sender.py:send_request_defer():613] handle sender defer: 5
2024-03-27 12:57:21,320 INFO SenderThread:1387372 [sender.py:transition_state():617] send defer: 6
2024-03-27 12:57:21,320 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: defer
2024-03-27 12:57:21,320 INFO HandlerThread:1387372 [handler.py:handle_request_defer():172] handle defer: 6
2024-03-27 12:57:21,320 DEBUG SenderThread:1387372 [sender.py:send_request():409] send_request: defer
2024-03-27 12:57:21,320 INFO SenderThread:1387372 [sender.py:send_request_defer():613] handle sender defer: 6
2024-03-27 12:57:21,324 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: status_report
2024-03-27 12:57:21,372 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: poll_exit
2024-03-27 12:57:21,500 INFO SenderThread:1387372 [sender.py:transition_state():617] send defer: 7
2024-03-27 12:57:21,500 DEBUG SenderThread:1387372 [sender.py:send_request():409] send_request: poll_exit
2024-03-27 12:57:21,500 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: defer
2024-03-27 12:57:21,500 INFO HandlerThread:1387372 [handler.py:handle_request_defer():172] handle defer: 7
2024-03-27 12:57:21,501 DEBUG SenderThread:1387372 [sender.py:send_request():409] send_request: defer
2024-03-27 12:57:21,501 INFO SenderThread:1387372 [sender.py:send_request_defer():613] handle sender defer: 7
2024-03-27 12:57:22,030 INFO Thread-13 :1387372 [dir_watcher.py:_on_file_modified():288] file/dir modified: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/config.yaml
2024-03-27 12:57:22,030 INFO Thread-13 :1387372 [dir_watcher.py:_on_file_modified():288] file/dir modified: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/wandb-summary.json
2024-03-27 12:57:22,373 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: poll_exit
2024-03-27 12:57:22,396 INFO SenderThread:1387372 [sender.py:transition_state():617] send defer: 8
2024-03-27 12:57:22,397 DEBUG SenderThread:1387372 [sender.py:send_request():409] send_request: poll_exit
2024-03-27 12:57:22,397 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: defer
2024-03-27 12:57:22,397 INFO HandlerThread:1387372 [handler.py:handle_request_defer():172] handle defer: 8
2024-03-27 12:57:22,397 DEBUG SenderThread:1387372 [sender.py:send_request():409] send_request: defer
2024-03-27 12:57:22,397 INFO SenderThread:1387372 [sender.py:send_request_defer():613] handle sender defer: 8
2024-03-27 12:57:22,397 INFO SenderThread:1387372 [job_builder.py:build():296] Attempting to build job artifact
2024-03-27 12:57:22,398 INFO SenderThread:1387372 [job_builder.py:_get_source_type():426] is repo sourced job
2024-03-27 12:57:22,419 INFO SenderThread:1387372 [job_builder.py:build():402] adding wandb-job metadata file
2024-03-27 12:57:22,422 INFO SenderThread:1387372 [sender.py:transition_state():617] send defer: 9
2024-03-27 12:57:22,423 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: defer
2024-03-27 12:57:22,423 DEBUG SenderThread:1387372 [sender.py:send():382] send: artifact
2024-03-27 12:57:22,423 INFO HandlerThread:1387372 [handler.py:handle_request_defer():172] handle defer: 9
2024-03-27 12:57:23,031 INFO Thread-13 :1387372 [dir_watcher.py:_on_file_modified():288] file/dir modified: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/output.log
2024-03-27 12:57:23,374 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: poll_exit
2024-03-27 12:57:23,467 INFO wandb-upload_0:1387372 [upload_job.py:push():89] Uploaded file /home/sanchit/.local/share/wandb/artifacts/staging/tmpwv2oidgf
2024-03-27 12:57:23,516 INFO wandb-upload_1:1387372 [upload_job.py:push():89] Uploaded file /home/sanchit/.local/share/wandb/artifacts/staging/tmpxlb7y42p
2024-03-27 12:57:24,407 INFO SenderThread:1387372 [sender.py:send_artifact():1494] sent artifact job-https___huggingface.co_sanchit-gandhi_distil-large-v3-hi-ft_run_speech_recognition_seq2seq.py - {'id': 'QXJ0aWZhY3Q6NzcyODMxMDMw', 'state': 'PENDING', 'artifactSequence': {'id': 'QXJ0aWZhY3RDb2xsZWN0aW9uOjE1MzEyNTQyNg==', 'latestArtifact': None}}
2024-03-27 12:57:24,407 DEBUG SenderThread:1387372 [sender.py:send_request():409] send_request: defer
2024-03-27 12:57:24,407 INFO SenderThread:1387372 [sender.py:send_request_defer():613] handle sender defer: 9
2024-03-27 12:57:24,407 INFO SenderThread:1387372 [dir_watcher.py:finish():358] shutting down directory watcher
2024-03-27 12:57:24,664 WARNING StreamThr :1387372 [internal.py:is_dead():414] Internal process exiting, parent pid 1386946 disappeared
2024-03-27 12:57:24,664 ERROR StreamThr :1387372 [internal.py:wandb_internal():152] Internal process shutdown.
2024-03-27 12:57:25,031 INFO SenderThread:1387372 [dir_watcher.py:finish():388] scan: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files
2024-03-27 12:57:25,032 INFO SenderThread:1387372 [dir_watcher.py:finish():402] scan save: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/requirements.txt requirements.txt
2024-03-27 12:57:25,032 INFO SenderThread:1387372 [dir_watcher.py:finish():402] scan save: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/wandb-metadata.json wandb-metadata.json
2024-03-27 12:57:25,032 INFO SenderThread:1387372 [dir_watcher.py:finish():402] scan save: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/wandb-summary.json wandb-summary.json
2024-03-27 12:57:25,032 INFO SenderThread:1387372 [dir_watcher.py:finish():402] scan save: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/output.log output.log
2024-03-27 12:57:25,033 INFO SenderThread:1387372 [dir_watcher.py:finish():402] scan save: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/config.yaml config.yaml
2024-03-27 12:57:25,033 INFO SenderThread:1387372 [sender.py:transition_state():617] send defer: 10
2024-03-27 12:57:25,033 INFO SenderThread:1387372 [sender.py:finish():1572] shutting down sender
2024-03-27 12:57:25,033 INFO SenderThread:1387372 [file_pusher.py:finish():175] shutting down file pusher
2024-03-27 12:57:25,033 INFO SenderThread:1387372 [file_pusher.py:join():181] waiting for file pusher
2024-03-27 12:57:25,035 DEBUG HandlerThread:1387372 [handler.py:handle_request():146] handle_request: defer
2024-03-27 12:57:25,035 INFO HandlerThread:1387372 [handler.py:handle_request_defer():172] handle defer: 10
2024-03-27 12:57:25,036 INFO HandlerThread:1387372 [handler.py:finish():869] shutting down handler
2024-03-27 12:57:25,038 INFO WriterThread:1387372 [datastore.py:close():296] close: /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/run-wqmi98ok.wandb
2024-03-27 12:57:25,369 INFO wandb-upload_1:1387372 [upload_job.py:push():131] Uploaded file /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/wandb-summary.json
2024-03-27 12:57:25,401 INFO wandb-upload_0:1387372 [upload_job.py:push():131] Uploaded file /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/requirements.txt
2024-03-27 12:57:25,424 INFO wandb-upload_3:1387372 [upload_job.py:push():131] Uploaded file /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/config.yaml
2024-03-27 12:57:25,697 INFO wandb-upload_2:1387372 [upload_job.py:push():131] Uploaded file /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/files/output.log
2024-03-27 12:57:26,034 INFO SenderThread:1387372 [file_stream.py:finish():595] file stream finish called
2024-03-27 12:57:26,199 INFO SenderThread:1387372 [file_stream.py:finish():599] file stream finish is done
wandb/run-20240327_125651-wqmi98ok/logs/debug.log
ADDED
@@ -0,0 +1,28 @@
2024-03-27 12:56:51,620 INFO MainThread:1386946 [wandb_setup.py:_flush():76] Current SDK version is 0.16.2
2024-03-27 12:56:51,620 INFO MainThread:1386946 [wandb_setup.py:_flush():76] Configure stats pid to 1386946
2024-03-27 12:56:51,620 INFO MainThread:1386946 [wandb_setup.py:_flush():76] Loading settings from /home/sanchit/.config/wandb/settings
2024-03-27 12:56:51,620 INFO MainThread:1386946 [wandb_setup.py:_flush():76] Loading settings from /home/sanchit/distil-large-v3-hi-ft/wandb/settings
2024-03-27 12:56:51,620 INFO MainThread:1386946 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
2024-03-27 12:56:51,620 INFO MainThread:1386946 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
2024-03-27 12:56:51,620 INFO MainThread:1386946 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program_relpath': 'run_speech_recognition_seq2seq.py', 'program_abspath': '/home/sanchit/distil-large-v3-hi-ft/run_speech_recognition_seq2seq.py', 'program': 'run_speech_recognition_seq2seq.py'}
2024-03-27 12:56:51,620 INFO MainThread:1386946 [wandb_init.py:_log_setup():526] Logging user logs to /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/logs/debug.log
2024-03-27 12:56:51,620 INFO MainThread:1386946 [wandb_init.py:_log_setup():527] Logging internal logs to /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125651-wqmi98ok/logs/debug-internal.log
2024-03-27 12:56:51,620 INFO MainThread:1386946 [wandb_init.py:init():566] calling init triggers
2024-03-27 12:56:51,621 INFO MainThread:1386946 [wandb_init.py:init():573] wandb.init called with sweep_config: {}
config: {}
2024-03-27 12:56:51,621 INFO MainThread:1386946 [wandb_init.py:init():616] starting backend
2024-03-27 12:56:51,621 INFO MainThread:1386946 [wandb_init.py:init():620] setting up manager
2024-03-27 12:56:51,621 INFO MainThread:1386946 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
2024-03-27 12:56:51,622 INFO MainThread:1386946 [wandb_init.py:init():628] backend started and connected
2024-03-27 12:56:51,626 INFO MainThread:1386946 [wandb_init.py:init():720] updated telemetry
2024-03-27 12:56:51,694 INFO MainThread:1386946 [wandb_init.py:init():753] communicating run to backend with 90.0 second timeout
2024-03-27 12:56:52,026 INFO MainThread:1386946 [wandb_run.py:_on_init():2254] communicating current version
2024-03-27 12:56:52,056 INFO MainThread:1386946 [wandb_run.py:_on_init():2263] got version response upgrade_message: "wandb version 0.16.5 is available! To upgrade, please run:\n $ pip install wandb --upgrade"

2024-03-27 12:56:52,056 INFO MainThread:1386946 [wandb_init.py:init():804] starting run threads in backend
2024-03-27 12:56:52,140 INFO MainThread:1386946 [wandb_run.py:_console_start():2233] atexit reg
2024-03-27 12:56:52,140 INFO MainThread:1386946 [wandb_run.py:_redirect():2088] redirect: wrap_raw
2024-03-27 12:56:52,140 INFO MainThread:1386946 [wandb_run.py:_redirect():2153] Wrapping output streams.
2024-03-27 12:56:52,140 INFO MainThread:1386946 [wandb_run.py:_redirect():2178] Redirects installed.
2024-03-27 12:56:52,141 INFO MainThread:1386946 [wandb_init.py:init():847] run started, returning control to user process
2024-03-27 12:56:52,142 INFO MainThread:1386946 [wandb_run.py:_config_callback():1342] config_cb None None {'vocab_size': 51866, 'num_mel_bins': 128, 'd_model': 1280, 'encoder_layers': 32, 'encoder_attention_heads': 20, 'decoder_layers': 2, 'decoder_attention_heads': 20, 'decoder_ffn_dim': 5120, 'encoder_ffn_dim': 5120, 'dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.0, 'activation_function': 'gelu', 'init_std': 0.02, 'encoder_layerdrop': 0.0, 'decoder_layerdrop': 0.0, 'use_cache': True, 'num_hidden_layers': 32, 'scale_embedding': False, 'max_source_positions': 1500, 'max_target_positions': 448, 'classifier_proj_size': 256, 'use_weighted_layer_sum': False, 'apply_spec_augment': False, 'mask_time_prob': 0.05, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.0, 'mask_feature_length': 10, 'mask_feature_min_masks': 0, 'median_filter_width': 7, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float16', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': True, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 448, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': [220, 50257], 'architectures': ['WhisperForConditionalGeneration'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 50257, 'pad_token_id': 50256, 'eos_token_id': 50257, 'sep_token_id': None, 'decoder_start_token_id': 50258, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'distil-whisper/distil-large-v3', 'transformers_version': '4.40.0.dev0', 'model_type': 'whisper', 'forced_decoder_ids': None, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': True, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 128, 'per_device_eval_batch_size': 128, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 0.0001, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 3.0, 'max_steps': 5000, 'lr_scheduler_type': 'linear', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.0, 'warmup_steps': 500, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': './runs/Mar27_12-56-36_hf-dgx-01', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 25, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 1000, 'save_total_limit': None, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 
'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': 1000, 'dataloader_num_workers': 4, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': './', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'adamw_torch', 'optim_args': None, 'adafactor': False, 'group_by_length': True, 'length_column_name': 'input_length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': False, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None, 'sortish_sampler': False, 'predict_with_generate': True, 'generation_max_length': 225, 'generation_num_beams': None, 'generation_config': None}
wandb/run-20240327_125651-wqmi98ok/run-wqmi98ok.wandb
ADDED
Binary file (15.8 kB)
wandb/run-20240327_125828-wlmqyk6v/files/config.yaml
ADDED
@@ -0,0 +1,731 @@
wandb_version: 1

_wandb:
  desc: null
  value:
    python_version: 3.8.10
    cli_version: 0.16.2
    framework: huggingface
    huggingface_version: 4.40.0.dev0
    is_jupyter_run: false
    is_kaggle_kernel: false
    start_time: 1711540708.740468
    t:
      1: [1, 2, 3, 5, 11, 12, 49, 51, 53, 55, 71, 98, 100]
      2: [1, 2, 3, 5, 11, 12, 49, 51, 53, 55, 71, 98, 100]
      3: [7, 23]
      4: 3.8.10
      5: 0.16.2
      6: 4.40.0.dev0
      8: [5]
      9: {1: transformers_trainer}
      13: linux-x86_64
    m:
      - {1: train/global_step, 6: [3]}
      - {1: train/loss, 5: 1, 6: [1]}
      - {1: train/grad_norm, 5: 1, 6: [1]}
      - {1: train/learning_rate, 5: 1, 6: [1]}
      - {1: train/epoch, 5: 1, 6: [1]}
vocab_size: {desc: null, value: 51866}
num_mel_bins: {desc: null, value: 128}
d_model: {desc: null, value: 1280}
encoder_layers: {desc: null, value: 32}
encoder_attention_heads: {desc: null, value: 20}
decoder_layers: {desc: null, value: 2}
decoder_attention_heads: {desc: null, value: 20}
decoder_ffn_dim: {desc: null, value: 5120}
encoder_ffn_dim: {desc: null, value: 5120}
dropout: {desc: null, value: 0.0}
attention_dropout: {desc: null, value: 0.0}
activation_dropout: {desc: null, value: 0.0}
activation_function: {desc: null, value: gelu}
init_std: {desc: null, value: 0.02}
encoder_layerdrop: {desc: null, value: 0.0}
decoder_layerdrop: {desc: null, value: 0.0}
use_cache: {desc: null, value: true}
num_hidden_layers: {desc: null, value: 32}
scale_embedding: {desc: null, value: false}
max_source_positions: {desc: null, value: 1500}
max_target_positions: {desc: null, value: 448}
classifier_proj_size: {desc: null, value: 256}
use_weighted_layer_sum: {desc: null, value: false}
apply_spec_augment: {desc: null, value: false}
mask_time_prob: {desc: null, value: 0.05}
mask_time_length: {desc: null, value: 10}
mask_time_min_masks: {desc: null, value: 2}
mask_feature_prob: {desc: null, value: 0.0}
mask_feature_length: {desc: null, value: 10}
mask_feature_min_masks: {desc: null, value: 0}
median_filter_width: {desc: null, value: 7}
return_dict: {desc: null, value: true}
output_hidden_states: {desc: null, value: false}
output_attentions: {desc: null, value: false}
torchscript: {desc: null, value: false}
torch_dtype: {desc: null, value: float16}
use_bfloat16: {desc: null, value: false}
tf_legacy_loss: {desc: null, value: false}
pruned_heads: {desc: null, value: {}}
tie_word_embeddings: {desc: null, value: true}
chunk_size_feed_forward: {desc: null, value: 0}
is_encoder_decoder: {desc: null, value: true}
is_decoder: {desc: null, value: false}
cross_attention_hidden_size: {desc: null, value: null}
add_cross_attention: {desc: null, value: false}
tie_encoder_decoder: {desc: null, value: false}
max_length: {desc: null, value: 448}
min_length: {desc: null, value: 0}
do_sample: {desc: null, value: false}
early_stopping: {desc: null, value: false}
num_beams: {desc: null, value: 1}
num_beam_groups: {desc: null, value: 1}
diversity_penalty: {desc: null, value: 0.0}
temperature: {desc: null, value: 1.0}
top_k: {desc: null, value: 50}
top_p: {desc: null, value: 1.0}
typical_p: {desc: null, value: 1.0}
repetition_penalty: {desc: null, value: 1.0}
length_penalty: {desc: null, value: 1.0}
no_repeat_ngram_size: {desc: null, value: 0}
encoder_no_repeat_ngram_size: {desc: null, value: 0}
bad_words_ids: {desc: null, value: null}
num_return_sequences: {desc: null, value: 1}
output_scores: {desc: null, value: false}
return_dict_in_generate: {desc: null, value: false}
forced_bos_token_id: {desc: null, value: null}
forced_eos_token_id: {desc: null, value: null}
remove_invalid_values: {desc: null, value: false}
exponential_decay_length_penalty: {desc: null, value: null}
suppress_tokens: {desc: null, value: null}
begin_suppress_tokens: {desc: null, value: [220, 50257]}
architectures: {desc: null, value: [WhisperForConditionalGeneration]}
finetuning_task: {desc: null, value: null}
id2label: {desc: null, value: {'0': LABEL_0, '1': LABEL_1}}
label2id: {desc: null, value: {LABEL_0: 0, LABEL_1: 1}}
tokenizer_class: {desc: null, value: null}
prefix: {desc: null, value: null}
bos_token_id: {desc: null, value: 50257}
pad_token_id: {desc: null, value: 50256}
eos_token_id: {desc: null, value: 50257}
sep_token_id: {desc: null, value: null}
decoder_start_token_id: {desc: null, value: 50258}
task_specific_params: {desc: null, value: null}
problem_type: {desc: null, value: null}
_name_or_path: {desc: null, value: distil-whisper/distil-large-v3}
transformers_version: {desc: null, value: 4.40.0.dev0}
model_type: {desc: null, value: whisper}
forced_decoder_ids: {desc: null, value: null}
output_dir: {desc: null, value: "./"}
overwrite_output_dir: {desc: null, value: true}
do_train: {desc: null, value: true}
do_eval: {desc: null, value: true}
do_predict: {desc: null, value: false}
evaluation_strategy: {desc: null, value: steps}
prediction_loss_only: {desc: null, value: false}
per_device_train_batch_size: {desc: null, value: 32}
per_device_eval_batch_size: {desc: null, value: 32}
per_gpu_train_batch_size: {desc: null, value: null}
per_gpu_eval_batch_size: {desc: null, value: null}
gradient_accumulation_steps: {desc: null, value: 1}
eval_accumulation_steps: {desc: null, value: null}
eval_delay: {desc: null, value: 0}
learning_rate: {desc: null, value: 0.0001}
weight_decay: {desc: null, value: 0.0}
adam_beta1: {desc: null, value: 0.9}
adam_beta2: {desc: null, value: 0.999}
adam_epsilon: {desc: null, value: 1.0e-08}
max_grad_norm: {desc: null, value: 1.0}
num_train_epochs: {desc: null, value: 3.0}
max_steps: {desc: null, value: 5000}
lr_scheduler_type: {desc: null, value: linear}
lr_scheduler_kwargs: {desc: null, value: {}}
warmup_ratio: {desc: null, value: 0.0}
warmup_steps: {desc: null, value: 500}
log_level: {desc: null, value: passive}
log_level_replica: {desc: null, value: warning}
log_on_each_node: {desc: null, value: true}
logging_dir: {desc: null, value: "./runs/Mar27_12-58-17_hf-dgx-01"}
logging_strategy: {desc: null, value: steps}
logging_first_step: {desc: null, value: false}
logging_steps: {desc: null, value: 25}
logging_nan_inf_filter: {desc: null, value: true}
save_strategy: {desc: null, value: steps}
save_steps: {desc: null, value: 1000}
save_total_limit: {desc: null, value: null}
save_safetensors: {desc: null, value: true}
save_on_each_node: {desc: null, value: false}
save_only_model: {desc: null, value: false}
no_cuda: {desc: null, value: false}
use_cpu: {desc: null, value: false}
use_mps_device: {desc: null, value: false}
seed: {desc: null, value: 42}
data_seed: {desc: null, value: null}
jit_mode_eval: {desc: null, value: false}
use_ipex: {desc: null, value: false}
bf16: {desc: null, value: false}
fp16: {desc: null, value: true}
fp16_opt_level: {desc: null, value: O1}
half_precision_backend: {desc: null, value: auto}
bf16_full_eval: {desc: null, value: false}
fp16_full_eval: {desc: null, value: false}
tf32: {desc: null, value: null}
local_rank: {desc: null, value: 0}
ddp_backend: {desc: null, value: null}
tpu_num_cores: {desc: null, value: null}
tpu_metrics_debug: {desc: null, value: false}
debug: {desc: null, value: []}
dataloader_drop_last: {desc: null, value: false}
eval_steps: {desc: null, value: 1000}
dataloader_num_workers: {desc: null, value: 4}
dataloader_prefetch_factor: {desc: null, value: null}
past_index: {desc: null, value: -1}
run_name: {desc: null, value: "./"}
disable_tqdm: {desc: null, value: false}
remove_unused_columns: {desc: null, value: true}
label_names: {desc: null, value: null}
load_best_model_at_end: {desc: null, value: false}
metric_for_best_model: {desc: null, value: null}
greater_is_better: {desc: null, value: null}
ignore_data_skip: {desc: null, value: false}
fsdp: {desc: null, value: []}
fsdp_min_num_params: {desc: null, value: 0}
fsdp_config: {desc: null, value: {min_num_params: 0, xla: false, xla_fsdp_v2: false, xla_fsdp_grad_ckpt: false}}
fsdp_transformer_layer_cls_to_wrap: {desc: null, value: null}
accelerator_config: {desc: null, value: {split_batches: false, dispatch_batches: null, even_batches: true, use_seedable_sampler: true}}
deepspeed: {desc: null, value: null}
label_smoothing_factor: {desc: null, value: 0.0}
optim: {desc: null, value: adamw_torch}
optim_args: {desc: null, value: null}
adafactor: {desc: null, value: false}
group_by_length: {desc: null, value: false}
length_column_name: {desc: null, value: input_length}
report_to: {desc: null, value: [tensorboard, wandb]}
ddp_find_unused_parameters: {desc: null, value: null}
ddp_bucket_cap_mb: {desc: null, value: null}
ddp_broadcast_buffers: {desc: null, value: null}
dataloader_pin_memory: {desc: null, value: true}
dataloader_persistent_workers: {desc: null, value: false}
skip_memory_metrics: {desc: null, value: true}
use_legacy_prediction_loop: {desc: null, value: false}
push_to_hub: {desc: null, value: true}
resume_from_checkpoint: {desc: null, value: null}
hub_model_id: {desc: null, value: null}
hub_strategy: {desc: null, value: every_save}
hub_token: {desc: null, value: "<HUB_TOKEN>"}
hub_private_repo: {desc: null, value: false}
hub_always_push: {desc: null, value: false}
gradient_checkpointing: {desc: null, value: true}
gradient_checkpointing_kwargs: {desc: null, value: null}
include_inputs_for_metrics: {desc: null, value: false}
fp16_backend: {desc: null, value: auto}
push_to_hub_model_id: {desc: null, value: null}
push_to_hub_organization: {desc: null, value: null}
push_to_hub_token: {desc: null, value: "<PUSH_TO_HUB_TOKEN>"}
mp_parameters: {desc: null, value: ''}
auto_find_batch_size: {desc: null, value: false}
full_determinism: {desc: null, value: false}
torchdynamo: {desc: null, value: null}
ray_scope: {desc: null, value: last}
ddp_timeout: {desc: null, value: 1800}
torch_compile: {desc: null, value: false}
torch_compile_backend: {desc: null, value: null}
torch_compile_mode: {desc: null, value: null}
dispatch_batches: {desc: null, value: null}
split_batches: {desc: null, value: null}
include_tokens_per_second: {desc: null, value: false}
include_num_input_tokens_seen: {desc: null, value: false}
neftune_noise_alpha: {desc: null, value: null}
optim_target_modules: {desc: null, value: null}
sortish_sampler: {desc: null, value: false}
predict_with_generate: {desc: null, value: true}
generation_max_length: {desc: null, value: 225}
generation_num_beams: {desc: null, value: null}
generation_config: {desc: null, value: null}
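Note: for readers reconstructing the run from the logged config above, a minimal sketch of how its key training values map onto `transformers` training arguments. This is not the repository's exact invocation (that lives in run.sh / run_speech_recognition_seq2seq.py); only a representative subset of keys is shown, and the variable name is illustrative.

# Minimal sketch, assuming the standard transformers Seq2SeqTrainingArguments API.
from transformers import Seq2SeqTrainingArguments

training_args = Seq2SeqTrainingArguments(
    output_dir="./",                      # output_dir / run_name in the config
    per_device_train_batch_size=32,
    per_device_eval_batch_size=32,
    learning_rate=1e-4,
    warmup_steps=500,
    max_steps=5000,
    evaluation_strategy="steps",
    eval_steps=1000,
    save_strategy="steps",
    save_steps=1000,
    logging_steps=25,
    fp16=True,
    gradient_checkpointing=True,
    predict_with_generate=True,
    generation_max_length=225,
    report_to=["tensorboard", "wandb"],
    push_to_hub=True,
)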
wandb/run-20240327_125828-wlmqyk6v/files/output.log
ADDED
@@ -0,0 +1,1071 @@
  0%|          | 0/5000 [00:00<?, ?it/s]/home/sanchit/hf/lib/python3.8/site-packages/torch/utils/checkpoint.py:460: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
  warnings.warn(
[WARNING|logging.py:329] 2024-03-27 12:58:42,468 >> `use_cache = True` is incompatible with gradient checkpointing. Setting `use_cache = False`...
  0%|▍          | 25/5000 [01:24<3:47:02, 2.74s/it]
  1%|▊          | 50/5000 [02:32<3:45:18, 2.73s/it]
  2%|█▏         | 75/5000 [03:41<3:44:12, 2.73s/it]
  2%|█▌        | 100/5000 [04:49<3:43:05, 2.73s/it]
  2%|█▉        | 125/5000 [05:57<3:41:50, 2.73s/it]
  3%|██▎       | 150/5000 [07:06<3:40:44, 2.73s/it]
  4%|██▋       | 175/5000 [08:14<3:39:44, 2.73s/it]
  4%|███       | 200/5000 [09:22<3:38:48, 2.74s/it]
  4%|███▌      | 225/5000 [10:42<5:52:41, 4.43s/it]
  5%|███▉      | 250/5000 [11:51<3:38:16, 2.76s/it]
  6%|████▎     | 275/5000 [14:04<7:56:27, 6.05s/it]
  6%|████▋     | 299/5000 [16:29<7:54:14, 6.05s/it]
  6%|█████     | 324/5000 [18:45<6:47:35, 5.23s/it]
  7%|█████▍    | 349/5000 [21:17<7:49:34, 6.06s/it]
  7%|█████▊    | 374/5000 [23:48<7:46:46, 6.05s/it]
  8%|██████▏   | 399/5000 [26:05<7:43:02, 6.04s/it]
  8%|██████▌   | 424/5000 [28:37<7:42:37, 6.07s/it]
  9%|███████   | 449/5000 [31:11<7:34:02, 5.99s/it]
  9%|███████▍  | 474/5000 [33:42<7:37:08, 6.06s/it]
 10%|███████▊  | 499/5000 [36:14<7:34:28, 6.06s/it]
 10%|████████▏ | 525/5000 [38:37<7:30:01, 6.03s/it]
 11%|████████▌ | 549/5000 [40:39<3:43:32, 3.01s/it]
 11%|████████▉ | 574/5000 [41:47<3:21:19, 2.73s/it]
 12%|█████████▎| 599/5000 [42:55<3:20:28, 2.73s/it]
 12%|█████████▋| 624/5000 [44:04<3:19:12, 2.73s/it]
 13%|██████████| 649/5000 [45:12<3:18:00, 2.73s/it]
 13%|██████████▌| 674/5000 [46:31<3:39:21, 3.04s/it]
 14%|██████████▉| 699/5000 [47:39<3:15:13, 2.72s/it]
 14%|███████████▎| 724/5000 [48:48<3:14:19, 2.73s/it]
 15%|███████████▋| 749/5000 [49:56<3:13:25, 2.73s/it]
 15%|████████████| 774/5000 [51:04<3:12:09, 2.73s/it]
 16%|████████████▍| 799/5000 [52:12<3:10:33, 2.72s/it]
 16%|████████████▊| 824/5000 [53:20<3:09:50, 2.73s/it]
 17%|█████████████▏| 849/5000 [54:29<3:08:52, 2.73s/it]
 17%|█████████████▋| 874/5000 [55:37<3:07:48, 2.73s/it]
 18%|██████████████| 899/5000 [56:56<3:12:22, 2.81s/it]
 18%|██████████████▍| 924/5000 [58:04<3:04:42, 2.72s/it]
 19%|██████████████▊| 949/5000 [59:13<3:04:17, 2.73s/it]
 19%|██████████████▊| 974/5000 [1:00:21<3:03:19, 2.73s/it]
 20%|███████████████▏| 999/5000 [1:01:29<3:02:09, 2.73s/it]
 20%|███████████████| 1000/5000 [1:01:32<3:01:55, 2.73s/it][INFO|trainer.py:768] 2024-03-27 14:00:01,697 >> The following columns in the evaluation set don't have a corresponding argument in `WhisperForConditionalGeneration.forward` and have been ignored: input_length. If input_length are not expected by `WhisperForConditionalGeneration.forward`, you can safely ignore this message.
[INFO|trainer.py:3515] 2024-03-27 14:00:01,699 >> ***** Running Evaluation *****
[INFO|trainer.py:3517] 2024-03-27 14:00:01,699 >>   Num examples = 3123
[INFO|trainer.py:3520] 2024-03-27 14:00:01,699 >>   Batch size = 32
[INFO|generation_whisper.py:1111] 2024-03-27 14:00:13,927 >> You have passed task=50360, but also have set `forced_decoder_ids` to [[1, None], [2, 50360]] which creates a conflict. `forced_decoder_ids` will be ignored in favor of task=50360.
Traceback (most recent call last):
  File "run_speech_recognition_seq2seq.py", line 627, in <module>
    main()
  File "run_speech_recognition_seq2seq.py", line 577, in main
    train_result = trainer.train(resume_from_checkpoint=checkpoint)
  File "/home/sanchit/transformers/src/transformers/trainer.py", line 1774, in train
    return inner_training_loop(
  File "/home/sanchit/transformers/src/transformers/trainer.py", line 2196, in _inner_training_loop
    self._maybe_log_save_evaluate(tr_loss, grad_norm, model, trial, epoch, ignore_keys_for_eval)
  File "/home/sanchit/transformers/src/transformers/trainer.py", line 2580, in _maybe_log_save_evaluate
    metrics = self.evaluate(ignore_keys=ignore_keys_for_eval)
  File "/home/sanchit/transformers/src/transformers/trainer_seq2seq.py", line 180, in evaluate
    return super().evaluate(eval_dataset, ignore_keys=ignore_keys, metric_key_prefix=metric_key_prefix)
  File "/home/sanchit/transformers/src/transformers/trainer.py", line 3368, in evaluate
    output = eval_loop(
  File "/home/sanchit/transformers/src/transformers/trainer.py", line 3557, in evaluation_loop
    loss, logits, labels = self.prediction_step(model, inputs, prediction_loss_only, ignore_keys=ignore_keys)
  File "/home/sanchit/transformers/src/transformers/trainer_seq2seq.py", line 310, in prediction_step
    generated_tokens = self.model.generate(**generation_inputs, **gen_kwargs)
  File "/home/sanchit/transformers/src/transformers/models/whisper/generation_whisper.py", line 534, in generate
    init_tokens = self._retrieve_init_tokens(
  File "/home/sanchit/transformers/src/transformers/models/whisper/generation_whisper.py", line 1146, in _retrieve_init_tokens
    is_language_code = len(language) == 2
TypeError: object of type 'int' has no len()
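Note: the `TypeError` that ends the log above can be reproduced in isolation. `_retrieve_init_tokens` received the language as an integer token id rather than a string (compare the task=50360 warning a few lines earlier), and `len()` is undefined for `int`. A minimal sketch of the failure mode; the values are illustrative, not taken from the run.

# Minimal repro of the failure mode seen in the traceback above.
language = 50360                        # an int token id instead of a string like "hindi" or "hi"
try:
    is_language_code = len(language) == 2   # raises: len() is undefined for int
except TypeError as err:
    print(err)                          # -> object of type 'int' has no len()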
wandb/run-20240327_125828-wlmqyk6v/files/requirements.txt
ADDED
@@ -0,0 +1,247 @@
absl-py==2.1.0
accelerate==0.27.2
aiohttp==3.9.3
aiosignal==1.3.1
anyio==4.2.0
appdirs==1.4.4
argon2-cffi-bindings==21.2.0
argon2-cffi==23.1.0
arrow==1.3.0
asttokens==2.4.1
astunparse==1.6.3
async-lru==2.0.4
async-timeout==4.0.3
attrs==23.2.0
audioread==3.0.1
av==11.0.0
babel==2.14.0
backcall==0.2.0
beautifulsoup4==4.12.3
bitsandbytes==0.42.0
bleach==6.1.0
cached-property==1.5.2
cachetools==5.3.2
certifi==2024.2.2
cffi==1.16.0
charset-normalizer==3.3.2
chex==0.1.7
click==8.1.7
coloredlogs==15.0.1
comm==0.2.1
contourpy==1.1.1
ctranslate2==4.1.0
cycler==0.12.1
datasets==2.18.0
debugpy==1.8.0
decorator==5.1.1
defusedxml==0.7.1
dill==0.3.7
dm-tree==0.1.8
docker-pycreds==0.4.0
docstring-parser==0.15
einops==0.7.0
etils==1.3.0
evaluate==0.4.1
exceptiongroup==1.2.0
executing==2.0.1
faster-whisper==1.0.1
fastjsonschema==2.19.1
filelock==3.13.1
flash-attn==2.5.3
flatbuffers==23.5.26
flax==0.7.2
fonttools==4.48.1
fqdn==1.5.1
frozenlist==1.4.1
fsspec==2024.2.0
gast==0.4.0
gitdb==4.0.11
gitpython==3.1.41
google-auth-oauthlib==1.0.0
google-auth==2.27.0
google-pasta==0.2.0
grpcio==1.60.1
h11==0.14.0
h5py==3.10.0
httpcore==1.0.2
httpx==0.26.0
huggingface-hub==0.21.4
humanfriendly==10.0
idna==3.6
importlib-metadata==7.0.1
importlib-resources==6.1.1
iniconfig==2.0.0
ipdb==0.13.13
ipykernel==6.29.2
ipython==8.12.3
isoduration==20.11.0
jax==0.4.13
jaxlib==0.4.13
jedi==0.19.1
jinja2==3.1.2
jiwer==3.0.3
joblib==1.3.2
json5==0.9.14
jsonpointer==2.4
jsonschema-specifications==2023.12.1
jsonschema==4.21.1
jupyter-client==8.6.0
jupyter-core==5.7.1
jupyter-events==0.9.0
jupyter-lsp==2.2.2
jupyter-server-terminals==0.5.2
jupyter-server==2.12.5
jupyterlab-pygments==0.3.0
jupyterlab-server==2.25.2
jupyterlab==4.1.0
keras==2.13.1
kiwisolver==1.4.5
lazy-loader==0.3
libclang==16.0.6
librosa==0.10.1
llvmlite==0.41.1
markdown-it-py==3.0.0
markdown==3.5.2
markupsafe==2.1.3
matplotlib-inline==0.1.6
matplotlib==3.7.4
mdurl==0.1.2
mistune==3.0.2
ml-dtypes==0.2.0
more-itertools==10.2.0
mpmath==1.2.1
msclap==1.3.3
msgpack==1.0.7
multidict==6.0.5
multiprocess==0.70.15
nbclient==0.9.0
nbconvert==7.16.0
nbformat==5.9.2
nest-asyncio==1.6.0
networkx==3.0rc1
ninja==1.11.1.1
notebook-shim==0.2.3
numba==0.58.1
numpy==1.24.3
nvidia-cublas-cu12==12.1.3.1
nvidia-cuda-cupti-cu12==12.1.105
nvidia-cuda-nvrtc-cu12==12.1.105
nvidia-cuda-runtime-cu12==12.1.105
nvidia-cudnn-cu12==8.9.2.26
nvidia-cufft-cu12==11.0.2.54
nvidia-curand-cu12==10.3.2.106
nvidia-cusolver-cu12==11.4.5.107
nvidia-cusparse-cu12==12.1.0.106
nvidia-nccl-cu12==2.19.3
nvidia-nvjitlink-cu12==12.1.105
nvidia-nvtx-cu12==12.1.105
oauthlib==3.2.2
onnxruntime==1.17.1
openai-whisper==20231117
opt-einsum==3.3.0
optax==0.1.8
orbax-checkpoint==0.2.3
overrides==7.7.0
packaging==23.2
pandas==2.0.3
pandocfilters==1.5.1
parameterized==0.9.0
parso==0.8.3
peft==0.8.2
pexpect==4.9.0
pickleshare==0.7.5
pillow==9.3.0
pip==24.0
pkg-resources==0.0.0
pkgutil-resolve-name==1.3.10
platformdirs==4.2.0
pluggy==1.4.0
pooch==1.8.0
prometheus-client==0.19.0
prompt-toolkit==3.0.43
protobuf==4.25.2
psutil==5.9.8
ptyprocess==0.7.0
pure-eval==0.2.2
pyarrow-hotfix==0.6
pyarrow==15.0.0
pyasn1-modules==0.3.0
pyasn1==0.5.1
pycparser==2.21
pygments==2.17.2
pyparsing==3.1.1
pytest==7.4.4
python-dateutil==2.8.2
python-json-logger==2.0.7
pytorch-triton==3.0.0+901819d2b6
pytz==2024.1
pyyaml==6.0.1
pyzmq==25.1.2
rapidfuzz==3.6.1
referencing==0.33.0
regex==2023.12.25
requests-oauthlib==1.3.1
requests==2.31.0
responses==0.18.0
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rich==13.7.0
rpds-py==0.17.1
rsa==4.9
safetensors==0.4.2
scikit-learn==1.3.2
scipy==1.10.1
send2trash==1.8.2
sentry-sdk==1.40.0
setproctitle==1.3.3
setuptools==44.0.0
shtab==1.7.0
six==1.16.0
smmap==5.0.1
sniffio==1.3.0
soundfile==0.12.1
soupsieve==2.5
soxr==0.3.7
stack-data==0.6.3
sympy==1.11.1
tensorboard-data-server==0.7.2
tensorboard==2.13.0
tensorflow-cpu==2.13.1
tensorflow-estimator==2.13.0
tensorflow-io-gcs-filesystem==0.34.0
tensorstore==0.1.45
termcolor==2.4.0
terminado==0.18.0
threadpoolctl==3.2.0
tiktoken==0.6.0
tinycss2==1.2.1
tokenizers==0.15.1
tomli==2.0.1
toolz==0.12.1
torch==2.2.1
torchaudio==2.2.1
torchlibrosa==0.1.0
torchvision==0.17.1
tornado==6.4
tqdm==4.66.1
traitlets==5.14.1
transformers==4.39.0.dev0
triton==2.2.0
trl==0.7.11
types-python-dateutil==2.8.19.20240106
typing-extensions==4.9.0
tyro==0.7.3
tzdata==2023.4
uri-template==1.3.0
urllib3==2.2.0
wandb==0.16.2
wcwidth==0.2.13
webcolors==1.13
webencodings==0.5.1
websocket-client==1.7.0
werkzeug==3.0.1
wheel==0.42.0
wrapt==1.16.0
xxhash==3.4.1
yarl==1.9.4
zipp==3.17.0
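Note: the pin list above can be checked against a live environment with a small hypothetical helper (not part of this repository); `importlib.metadata` ships with Python 3.8, matching the interpreter recorded in the run metadata below.

# Hypothetical helper: verify installed versions against the '==' pins in a requirements file.
from importlib.metadata import version, PackageNotFoundError

def check_pins(path="requirements.txt"):
    """Print every package whose installed version differs from (or is missing for) its pin."""
    for line in open(path):
        line = line.strip()
        if not line or "==" not in line:
            continue
        name, _, pinned = line.partition("==")
        try:
            installed = version(name)
        except PackageNotFoundError:
            print(f"{name}: not installed (pinned {pinned})")
            continue
        if installed != pinned:
            print(f"{name}: installed {installed}, pinned {pinned}")

check_pins()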
wandb/run-20240327_125828-wlmqyk6v/files/wandb-metadata.json
ADDED
@@ -0,0 +1,737 @@
{
  "os": "Linux-5.4.0-166-generic-x86_64-with-glibc2.29",
  "python": "3.8.10",
  "heartbeatAt": "2024-03-27T11:58:29.232856",
  "startedAt": "2024-03-27T11:58:28.736770",
  "docker": null,
  "cuda": null,
  "args": [
    "--model_name_or_path=distil-whisper/distil-large-v3",
    "--dataset_name=mozilla-foundation/common_voice_16_1",
    "--dataset_config_name=hi",
    "--language=hindi",
    "--train_split_name=train+validation",
    "--eval_split_name=test",
    "--max_steps=5000",
    "--output_dir=./",
    "--per_device_train_batch_size=32",
    "--per_device_eval_batch_size=32",
    "--logging_steps=25",
    "--learning_rate=1e-4",
    "--warmup_steps=500",
    "--evaluation_strategy=steps",
    "--eval_steps=1000",
    "--save_strategy=steps",
    "--save_steps=1000",
    "--generation_max_length=225",
    "--preprocessing_num_workers=1",
    "--dataloader_num_workers=4",
    "--length_column_name=input_length",
    "--max_duration_in_seconds=30",
    "--text_column_name=sentence",
    "--freeze_feature_encoder=False",
    "--gradient_checkpointing",
    "--fp16",
    "--overwrite_output_dir",
    "--do_train",
    "--do_eval",
    "--predict_with_generate",
    "--use_auth_token",
    "--push_to_hub"
  ],
  "state": "running",
  "program": "run_speech_recognition_seq2seq.py",
  "codePathLocal": "run_speech_recognition_seq2seq.py",
  "codePath": "run_speech_recognition_seq2seq.py",
  "git": {
    "remote": "https://huggingface.co/sanchit-gandhi/distil-large-v3-hi-ft",
    "commit": "40c686df113c0e98e7363c1bd523f58d11848fc0"
  },
  "email": "sanchit@huggingface.co",
  "root": "/home/sanchit/distil-large-v3-hi-ft",
  "host": "hf-dgx-01",
  "username": "sanchit",
  "executable": "/home/sanchit/hf/bin/python",
  "cpu_count": 64,
  "cpu_count_logical": 128,
  "cpu_freq": {"current": 2170.403765625001, "min": 1500.0, "max": 2250.0},
  "cpu_freq_per_core": [
    {"current": 3142.167, "min": 1500.0, "max": 2250.0},
    {"current": 1693.807, "min": 1500.0, "max": 2250.0},
    {"current": 1693.66, "min": 1500.0, "max": 2250.0},
    {"current": 1694.174, "min": 1500.0, "max": 2250.0},
    {"current": 1756.198, "min": 1500.0, "max": 2250.0},
    {"current": 1659.0, "min": 1500.0, "max": 2250.0},
    {"current": 1650.564, "min": 1500.0, "max": 2250.0},
    {"current": 1661.469, "min": 1500.0, "max": 2250.0},
    {"current": 1782.824, "min": 1500.0, "max": 2250.0},
    {"current": 1795.775, "min": 1500.0, "max": 2250.0},
    {"current": 1796.555, "min": 1500.0, "max": 2250.0},
    {"current": 1795.692, "min": 1500.0, "max": 2250.0},
    {"current": 2239.537, "min": 1500.0, "max": 2250.0},
    {"current": 1717.572, "min": 1500.0, "max": 2250.0},
    {"current": 1740.418, "min": 1500.0, "max": 2250.0},
    {"current": 1736.353, "min": 1500.0, "max": 2250.0},
    {"current": 1795.306, "min": 1500.0, "max": 2250.0},
    {"current": 1795.383, "min": 1500.0, "max": 2250.0},
    {"current": 1797.94, "min": 1500.0, "max": 2250.0},
    {"current": 1791.563, "min": 1500.0, "max": 2250.0},
    {"current": 1795.737, "min": 1500.0, "max": 2250.0},
    {"current": 1793.785, "min": 1500.0, "max": 2250.0},
    {"current": 1794.26, "min": 1500.0, "max": 2250.0},
    {"current": 1790.759, "min": 1500.0, "max": 2250.0},
    {"current": 1794.862, "min": 1500.0, "max": 2250.0},
    {"current": 1656.393, "min": 1500.0, "max": 2250.0},
    {"current": 1778.215, "min": 1500.0, "max": 2250.0},
    {"current": 1797.531, "min": 1500.0, "max": 2250.0},
    {"current": 1651.956, "min": 1500.0, "max": 2250.0},
    {"current": 1794.074, "min": 1500.0, "max": 2250.0},
    {"current": 1793.083, "min": 1500.0, "max": 2250.0},
    {"current": 1795.94, "min": 1500.0, "max": 2250.0},
    {"current": 3295.364, "min": 1500.0, "max": 2250.0},
    {"current": 1694.524, "min": 1500.0, "max": 2250.0},
    {"current": 2694.844, "min": 1500.0, "max": 2250.0},
    {"current": 1696.681, "min": 1500.0, "max": 2250.0},
    {"current": 1794.48, "min": 1500.0, "max": 2250.0},
    {"current": 1796.042, "min": 1500.0, "max": 2250.0},
    {"current": 1796.014, "min": 1500.0, "max": 2250.0},
    {"current": 1793.604, "min": 1500.0, "max": 2250.0},
    {"current": 1694.327, "min": 1500.0, "max": 2250.0},
    {"current": 1694.912, "min": 1500.0, "max": 2250.0},
    {"current": 1693.123, "min": 1500.0, "max": 2250.0},
    {"current": 3302.73, "min": 1500.0, "max": 2250.0},
    {"current": 1794.31, "min": 1500.0, "max": 2250.0},
    {"current": 1796.051, "min": 1500.0, "max": 2250.0},
    {"current": 1792.762, "min": 1500.0, "max": 2250.0},
    {"current": 1659.911, "min": 1500.0, "max": 2250.0},
    {"current": 1658.156, "min": 1500.0, "max": 2250.0},
    {"current": 1665.033, "min": 1500.0, "max": 2250.0},
    {"current": 1730.449, "min": 1500.0, "max": 2250.0},
    {"current": 1667.591, "min": 1500.0, "max": 2250.0},
    {"current": 1678.524, "min": 1500.0, "max": 2250.0},
    {"current": 3380.556, "min": 1500.0, "max": 2250.0},
    {"current": 1690.177, "min": 1500.0, "max": 2250.0},
    {"current": 1694.329, "min": 1500.0, "max": 2250.0},
    {"current": 1689.671, "min": 1500.0, "max": 2250.0},
    {"current": 1687.925, "min": 1500.0, "max": 2250.0},
    {"current": 1688.548, "min": 1500.0, "max": 2250.0},
    {"current": 3388.168, "min": 1500.0, "max": 2250.0},
    {"current": 1692.888, "min": 1500.0, "max": 2250.0},
    {"current": 1690.168, "min": 1500.0, "max": 2250.0},
    {"current": 3388.021, "min": 1500.0, "max": 2250.0},
    {"current": 1692.056, "min": 1500.0, "max": 2250.0},
    {"current": 3387.636, "min": 1500.0, "max": 2250.0},
    {"current": 1689.004, "min": 1500.0, "max": 2250.0},
    {"current": 1687.968, "min": 1500.0, "max": 2250.0},
    {"current": 1687.714, "min": 1500.0, "max": 2250.0},
    {"current": 1792.29, "min": 1500.0, "max": 2250.0},
    {"current": 1793.495, "min": 1500.0, "max": 2250.0},
    {"current": 1791.513, "min": 1500.0, "max": 2250.0},
    {"current": 1452.265, "min": 1500.0, "max": 2250.0},
    {"current": 1794.895, "min": 1500.0, "max": 2250.0},
    {"current": 1791.828, "min": 1500.0, "max": 2250.0},
    {"current": 1794.173, "min": 1500.0, "max": 2250.0},
    {"current": 1796.185, "min": 1500.0, "max": 2250.0},
    {"current": 2921.311, "min": 1500.0, "max": 2250.0},
    {"current": 1684.587, "min": 1500.0, "max": 2250.0},
    {"current": 1684.206, "min": 1500.0, "max": 2250.0},
    {"current": 1683.624, "min": 1500.0, "max": 2250.0},
    {"current": 2446.569, "min": 1500.0, "max": 2250.0},
    {"current": 2445.849, "min": 1500.0, "max": 2250.0},
    {"current": 2440.241, "min": 1500.0, "max": 2250.0},
    {"current": 2465.162, "min": 1500.0, "max": 2250.0},
    {"current": 2450.373, "min": 1500.0, "max": 2250.0},
    {"current": 1962.985, "min": 1500.0, "max": 2250.0},
    {"current": 2409.641, "min": 1500.0, "max": 2250.0},
    {"current": 2471.639, "min": 1500.0, "max": 2250.0},
    {"current": 2460.459, "min": 1500.0, "max": 2250.0},
    {"current": 2464.668, "min": 1500.0, "max": 2250.0},
    {"current": 2475.763, "min": 1500.0, "max": 2250.0},
    {"current": 2467.527, "min": 1500.0, "max": 2250.0},
    {"current": 2201.123, "min": 1500.0, "max": 2250.0},
    {"current": 1895.971, "min": 1500.0, "max": 2250.0},
    {"current": 2443.965, "min": 1500.0, "max": 2250.0},
    {"current": 2476.288, "min": 1500.0, "max": 2250.0},
    {"current": 3288.307, "min": 1500.0, "max": 2250.0},
    {"current": 2493.81, "min": 1500.0, "max": 2250.0},
    {"current": 2565.406, "min": 1500.0, "max": 2250.0},
    {"current": 2039.03, "min": 1500.0, "max": 2250.0},
    {"current": 2453.201, "min": 1500.0, "max": 2250.0},
    {"current": 2474.673, "min": 1500.0, "max": 2250.0},
    {"current": 2440.26, "min": 1500.0, "max": 2250.0},
    {"current": 2459.283, "min": 1500.0, "max": 2250.0},
    {"current": 1695.187, "min": 1500.0, "max": 2250.0},
    {"current": 1685.3, "min": 1500.0, "max": 2250.0},
    {"current": 1676.674, "min": 1500.0, "max": 2250.0},
    {"current": 3348.729, "min": 1500.0, "max": 2250.0},
    {"current": 1794.959, "min": 1500.0, "max": 2250.0},
    {"current": 1795.686, "min": 1500.0, "max": 2250.0},
    {"current": 1793.511, "min": 1500.0, "max": 2250.0},
    {"current": 1793.74, "min": 1500.0,
|
621 |
+
"max": 2250.0
|
622 |
+
},
|
623 |
+
{
|
624 |
+
"current": 1549.158,
|
625 |
+
"min": 1500.0,
|
626 |
+
"max": 2250.0
|
627 |
+
},
|
628 |
+
{
|
629 |
+
"current": 1835.582,
|
630 |
+
"min": 1500.0,
|
631 |
+
"max": 2250.0
|
632 |
+
},
|
633 |
+
{
|
634 |
+
"current": 1728.186,
|
635 |
+
"min": 1500.0,
|
636 |
+
"max": 2250.0
|
637 |
+
},
|
638 |
+
{
|
639 |
+
"current": 1728.447,
|
640 |
+
"min": 1500.0,
|
641 |
+
"max": 2250.0
|
642 |
+
},
|
643 |
+
{
|
644 |
+
"current": 1693.83,
|
645 |
+
"min": 1500.0,
|
646 |
+
"max": 2250.0
|
647 |
+
},
|
648 |
+
{
|
649 |
+
"current": 3377.374,
|
650 |
+
"min": 1500.0,
|
651 |
+
"max": 2250.0
|
652 |
+
},
|
653 |
+
{
|
654 |
+
"current": 1693.537,
|
655 |
+
"min": 1500.0,
|
656 |
+
"max": 2250.0
|
657 |
+
},
|
658 |
+
{
|
659 |
+
"current": 1739.397,
|
660 |
+
"min": 1500.0,
|
661 |
+
"max": 2250.0
|
662 |
+
},
|
663 |
+
{
|
664 |
+
"current": 1695.165,
|
665 |
+
"min": 1500.0,
|
666 |
+
"max": 2250.0
|
667 |
+
},
|
668 |
+
{
|
669 |
+
"current": 1693.65,
|
670 |
+
"min": 1500.0,
|
671 |
+
"max": 2250.0
|
672 |
+
},
|
673 |
+
{
|
674 |
+
"current": 1695.685,
|
675 |
+
"min": 1500.0,
|
676 |
+
"max": 2250.0
|
677 |
+
},
|
678 |
+
{
|
679 |
+
"current": 3401.224,
|
680 |
+
"min": 1500.0,
|
681 |
+
"max": 2250.0
|
682 |
+
},
|
683 |
+
{
|
684 |
+
"current": 1695.872,
|
685 |
+
"min": 1500.0,
|
686 |
+
"max": 2250.0
|
687 |
+
},
|
688 |
+
{
|
689 |
+
"current": 1695.855,
|
690 |
+
"min": 1500.0,
|
691 |
+
"max": 2250.0
|
692 |
+
},
|
693 |
+
{
|
694 |
+
"current": 3330.83,
|
695 |
+
"min": 1500.0,
|
696 |
+
"max": 2250.0
|
697 |
+
},
|
698 |
+
{
|
699 |
+
"current": 1694.199,
|
700 |
+
"min": 1500.0,
|
701 |
+
"max": 2250.0
|
702 |
+
}
|
703 |
+
],
|
704 |
+
"disk": {
|
705 |
+
"/": {
|
706 |
+
"total": 1757.8785285949707,
|
707 |
+
"used": 1497.0357131958008
|
708 |
+
}
|
709 |
+
},
|
710 |
+
"gpu": "NVIDIA A100-SXM4-80GB",
|
711 |
+
"gpu_count": 5,
|
712 |
+
"gpu_devices": [
|
713 |
+
{
|
714 |
+
"name": "NVIDIA A100-SXM4-80GB",
|
715 |
+
"memory_total": 85899345920
|
716 |
+
},
|
717 |
+
{
|
718 |
+
"name": "NVIDIA A100-SXM4-80GB",
|
719 |
+
"memory_total": 85899345920
|
720 |
+
},
|
721 |
+
{
|
722 |
+
"name": "NVIDIA A100-SXM4-80GB",
|
723 |
+
"memory_total": 85899345920
|
724 |
+
},
|
725 |
+
{
|
726 |
+
"name": "NVIDIA DGX Display",
|
727 |
+
"memory_total": 4294967296
|
728 |
+
},
|
729 |
+
{
|
730 |
+
"name": "NVIDIA A100-SXM4-80GB",
|
731 |
+
"memory_total": 85899345920
|
732 |
+
}
|
733 |
+
],
|
734 |
+
"memory": {
|
735 |
+
"total": 503.5396919250488
|
736 |
+
}
|
737 |
+
}
|
wandb/run-20240327_125828-wlmqyk6v/files/wandb-summary.json
ADDED
@@ -0,0 +1 @@
{"train/loss": 0.1298, "train/grad_norm": 1.4711946249008179, "train/learning_rate": 8.900000000000001e-05, "train/epoch": 4.5, "train/global_step": 1000, "_timestamp": 1711544401.6963835, "_runtime": 3692.95591545105, "_step": 39, "_wandb": {"runtime": 3710}}
wandb/run-20240327_125828-wlmqyk6v/logs/debug-internal.log
ADDED
The diff for this file is too large to render.
See raw diff
wandb/run-20240327_125828-wlmqyk6v/logs/debug.log
ADDED
@@ -0,0 +1,29 @@
2024-03-27 12:58:28,738 INFO MainThread:1389604 [wandb_setup.py:_flush():76] Current SDK version is 0.16.2
2024-03-27 12:58:28,738 INFO MainThread:1389604 [wandb_setup.py:_flush():76] Configure stats pid to 1389604
2024-03-27 12:58:28,738 INFO MainThread:1389604 [wandb_setup.py:_flush():76] Loading settings from /home/sanchit/.config/wandb/settings
2024-03-27 12:58:28,738 INFO MainThread:1389604 [wandb_setup.py:_flush():76] Loading settings from /home/sanchit/distil-large-v3-hi-ft/wandb/settings
2024-03-27 12:58:28,738 INFO MainThread:1389604 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
2024-03-27 12:58:28,738 INFO MainThread:1389604 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
2024-03-27 12:58:28,738 INFO MainThread:1389604 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program_relpath': 'run_speech_recognition_seq2seq.py', 'program_abspath': '/home/sanchit/distil-large-v3-hi-ft/run_speech_recognition_seq2seq.py', 'program': 'run_speech_recognition_seq2seq.py'}
2024-03-27 12:58:28,738 INFO MainThread:1389604 [wandb_init.py:_log_setup():526] Logging user logs to /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125828-wlmqyk6v/logs/debug.log
2024-03-27 12:58:28,738 INFO MainThread:1389604 [wandb_init.py:_log_setup():527] Logging internal logs to /home/sanchit/distil-large-v3-hi-ft/wandb/run-20240327_125828-wlmqyk6v/logs/debug-internal.log
2024-03-27 12:58:28,738 INFO MainThread:1389604 [wandb_init.py:init():566] calling init triggers
2024-03-27 12:58:28,738 INFO MainThread:1389604 [wandb_init.py:init():573] wandb.init called with sweep_config: {}
config: {}
2024-03-27 12:58:28,738 INFO MainThread:1389604 [wandb_init.py:init():616] starting backend
2024-03-27 12:58:28,738 INFO MainThread:1389604 [wandb_init.py:init():620] setting up manager
2024-03-27 12:58:28,739 INFO MainThread:1389604 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
2024-03-27 12:58:28,740 INFO MainThread:1389604 [wandb_init.py:init():628] backend started and connected
2024-03-27 12:58:28,743 INFO MainThread:1389604 [wandb_init.py:init():720] updated telemetry
2024-03-27 12:58:28,815 INFO MainThread:1389604 [wandb_init.py:init():753] communicating run to backend with 90.0 second timeout
2024-03-27 12:58:29,137 INFO MainThread:1389604 [wandb_run.py:_on_init():2254] communicating current version
2024-03-27 12:58:29,161 INFO MainThread:1389604 [wandb_run.py:_on_init():2263] got version response upgrade_message: "wandb version 0.16.5 is available! To upgrade, please run:\n $ pip install wandb --upgrade"

2024-03-27 12:58:29,161 INFO MainThread:1389604 [wandb_init.py:init():804] starting run threads in backend
2024-03-27 12:58:29,259 INFO MainThread:1389604 [wandb_run.py:_console_start():2233] atexit reg
2024-03-27 12:58:29,259 INFO MainThread:1389604 [wandb_run.py:_redirect():2088] redirect: wrap_raw
2024-03-27 12:58:29,259 INFO MainThread:1389604 [wandb_run.py:_redirect():2153] Wrapping output streams.
2024-03-27 12:58:29,259 INFO MainThread:1389604 [wandb_run.py:_redirect():2178] Redirects installed.
2024-03-27 12:58:29,259 INFO MainThread:1389604 [wandb_init.py:init():847] run started, returning control to user process
2024-03-27 12:58:29,261 INFO MainThread:1389604 [wandb_run.py:_config_callback():1342] config_cb None None {'vocab_size': 51866, 'num_mel_bins': 128, 'd_model': 1280, 'encoder_layers': 32, 'encoder_attention_heads': 20, 'decoder_layers': 2, 'decoder_attention_heads': 20, 'decoder_ffn_dim': 5120, 'encoder_ffn_dim': 5120, 'dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.0, 'activation_function': 'gelu', 'init_std': 0.02, 'encoder_layerdrop': 0.0, 'decoder_layerdrop': 0.0, 'use_cache': True, 'num_hidden_layers': 32, 'scale_embedding': False, 'max_source_positions': 1500, 'max_target_positions': 448, 'classifier_proj_size': 256, 'use_weighted_layer_sum': False, 'apply_spec_augment': False, 'mask_time_prob': 0.05, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.0, 'mask_feature_length': 10, 'mask_feature_min_masks': 0, 'median_filter_width': 7, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float16', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': True, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 448, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': [220, 50257], 'architectures': ['WhisperForConditionalGeneration'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 50257, 'pad_token_id': 50256, 'eos_token_id': 50257, 'sep_token_id': None, 'decoder_start_token_id': 50258, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'distil-whisper/distil-large-v3', 'transformers_version': '4.40.0.dev0', 'model_type': 'whisper', 'forced_decoder_ids': None, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': True, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 32, 'per_device_eval_batch_size': 32, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 0.0001, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 3.0, 'max_steps': 5000, 'lr_scheduler_type': 'linear', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.0, 'warmup_steps': 500, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': './runs/Mar27_12-58-17_hf-dgx-01', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 25, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 1000, 'save_total_limit': None, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 
'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': 1000, 'dataloader_num_workers': 4, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': './', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'adamw_torch', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'input_length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': False, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None, 'sortish_sampler': False, 'predict_with_generate': True, 'generation_max_length': 225, 'generation_num_beams': None, 'generation_config': None}
2024-03-27 14:00:26,596 WARNING MsgRouterThr:1389604 [router.py:message_loop():77] message_loop has been closed
wandb/run-20240327_125828-wlmqyk6v/run-wlmqyk6v.wandb
ADDED
Binary file (863 kB)
wandb/run-20240327_141031-aoxf8fxn/files/config.yaml
ADDED
@@ -0,0 +1,751 @@
wandb_version: 1

_wandb:
  desc: null
  value:
    python_version: 3.8.10
    cli_version: 0.16.2
    framework: huggingface
    huggingface_version: 4.40.0.dev0
    is_jupyter_run: false
    is_kaggle_kernel: false
    start_time: 1711545031.570969
    t:
      1: [1, 2, 3, 5, 11, 12, 49, 51, 53, 55, 71, 98, 100]
      2: [1, 2, 3, 5, 11, 12, 49, 51, 53, 55, 71, 98, 100]
      3: [7, 23]
      4: 3.8.10
      5: 0.16.2
      6: 4.40.0.dev0
      8: [5]
      9: {1: transformers_trainer}
      13: linux-x86_64
    m:
      - {1: train/global_step, 6: [3]}
      - {1: train/loss, 5: 1, 6: [1]}
      - {1: train/grad_norm, 5: 1, 6: [1]}
      - {1: train/learning_rate, 5: 1, 6: [1]}
      - {1: train/epoch, 5: 1, 6: [1]}
      - {1: eval/loss, 5: 1, 6: [1]}
      - {1: eval/wer, 5: 1, 6: [1]}
      - {1: eval/runtime, 5: 1, 6: [1]}
      - {1: eval/samples_per_second, 5: 1, 6: [1]}
      - {1: eval/steps_per_second, 5: 1, 6: [1]}
vocab_size: {desc: null, value: 51866}
num_mel_bins: {desc: null, value: 128}
d_model: {desc: null, value: 1280}
encoder_layers: {desc: null, value: 32}
encoder_attention_heads: {desc: null, value: 20}
decoder_layers: {desc: null, value: 2}
decoder_attention_heads: {desc: null, value: 20}
decoder_ffn_dim: {desc: null, value: 5120}
encoder_ffn_dim: {desc: null, value: 5120}
dropout: {desc: null, value: 0.0}
attention_dropout: {desc: null, value: 0.0}
activation_dropout: {desc: null, value: 0.0}
activation_function: {desc: null, value: gelu}
init_std: {desc: null, value: 0.02}
encoder_layerdrop: {desc: null, value: 0.0}
decoder_layerdrop: {desc: null, value: 0.0}
use_cache: {desc: null, value: true}
num_hidden_layers: {desc: null, value: 32}
scale_embedding: {desc: null, value: false}
max_source_positions: {desc: null, value: 1500}
max_target_positions: {desc: null, value: 448}
classifier_proj_size: {desc: null, value: 256}
use_weighted_layer_sum: {desc: null, value: false}
apply_spec_augment: {desc: null, value: false}
mask_time_prob: {desc: null, value: 0.05}
mask_time_length: {desc: null, value: 10}
mask_time_min_masks: {desc: null, value: 2}
mask_feature_prob: {desc: null, value: 0.0}
mask_feature_length: {desc: null, value: 10}
mask_feature_min_masks: {desc: null, value: 0}
median_filter_width: {desc: null, value: 7}
return_dict: {desc: null, value: true}
output_hidden_states: {desc: null, value: false}
output_attentions: {desc: null, value: false}
torchscript: {desc: null, value: false}
torch_dtype: {desc: null, value: float16}
use_bfloat16: {desc: null, value: false}
tf_legacy_loss: {desc: null, value: false}
pruned_heads: {desc: null, value: {}}
tie_word_embeddings: {desc: null, value: true}
chunk_size_feed_forward: {desc: null, value: 0}
is_encoder_decoder: {desc: null, value: true}
is_decoder: {desc: null, value: false}
cross_attention_hidden_size: {desc: null, value: null}
add_cross_attention: {desc: null, value: false}
tie_encoder_decoder: {desc: null, value: false}
max_length: {desc: null, value: 448}
min_length: {desc: null, value: 0}
do_sample: {desc: null, value: false}
early_stopping: {desc: null, value: false}
num_beams: {desc: null, value: 1}
num_beam_groups: {desc: null, value: 1}
diversity_penalty: {desc: null, value: 0.0}
temperature: {desc: null, value: 1.0}
top_k: {desc: null, value: 50}
top_p: {desc: null, value: 1.0}
typical_p: {desc: null, value: 1.0}
repetition_penalty: {desc: null, value: 1.0}
length_penalty: {desc: null, value: 1.0}
no_repeat_ngram_size: {desc: null, value: 0}
encoder_no_repeat_ngram_size: {desc: null, value: 0}
bad_words_ids: {desc: null, value: null}
num_return_sequences: {desc: null, value: 1}
output_scores: {desc: null, value: false}
return_dict_in_generate: {desc: null, value: false}
forced_bos_token_id: {desc: null, value: null}
forced_eos_token_id: {desc: null, value: null}
remove_invalid_values: {desc: null, value: false}
exponential_decay_length_penalty: {desc: null, value: null}
suppress_tokens: {desc: null, value: null}
begin_suppress_tokens: {desc: null, value: [220, 50257]}
architectures: {desc: null, value: [WhisperForConditionalGeneration]}
finetuning_task: {desc: null, value: null}
id2label: {desc: null, value: {'0': LABEL_0, '1': LABEL_1}}
label2id: {desc: null, value: {LABEL_0: 0, LABEL_1: 1}}
tokenizer_class: {desc: null, value: null}
prefix: {desc: null, value: null}
bos_token_id: {desc: null, value: 50257}
pad_token_id: {desc: null, value: 50256}
eos_token_id: {desc: null, value: 50257}
sep_token_id: {desc: null, value: null}
decoder_start_token_id: {desc: null, value: 50258}
task_specific_params: {desc: null, value: null}
problem_type: {desc: null, value: null}
_name_or_path: {desc: null, value: distil-whisper/distil-large-v3}
transformers_version: {desc: null, value: 4.40.0.dev0}
model_type: {desc: null, value: whisper}
forced_decoder_ids: {desc: null, value: null}
output_dir: {desc: null, value: ./}
overwrite_output_dir: {desc: null, value: true}
do_train: {desc: null, value: true}
do_eval: {desc: null, value: true}
do_predict: {desc: null, value: false}
evaluation_strategy: {desc: null, value: steps}
prediction_loss_only: {desc: null, value: false}
per_device_train_batch_size: {desc: null, value: 32}
per_device_eval_batch_size: {desc: null, value: 32}
per_gpu_train_batch_size: {desc: null, value: null}
per_gpu_eval_batch_size: {desc: null, value: null}
gradient_accumulation_steps: {desc: null, value: 1}
eval_accumulation_steps: {desc: null, value: null}
eval_delay: {desc: null, value: 0}
learning_rate: {desc: null, value: 0.0001}
weight_decay: {desc: null, value: 0.0}
adam_beta1: {desc: null, value: 0.9}
adam_beta2: {desc: null, value: 0.999}
adam_epsilon: {desc: null, value: 1.0e-08}
max_grad_norm: {desc: null, value: 1.0}
num_train_epochs: {desc: null, value: 3.0}
max_steps: {desc: null, value: 5000}
lr_scheduler_type: {desc: null, value: linear}
lr_scheduler_kwargs: {desc: null, value: {}}
warmup_ratio: {desc: null, value: 0.0}
warmup_steps: {desc: null, value: 500}
log_level: {desc: null, value: passive}
log_level_replica: {desc: null, value: warning}
log_on_each_node: {desc: null, value: true}
logging_dir: {desc: null, value: ./runs/Mar27_14-10-21_hf-dgx-01}
logging_strategy: {desc: null, value: steps}
logging_first_step: {desc: null, value: false}
logging_steps: {desc: null, value: 25}
logging_nan_inf_filter: {desc: null, value: true}
save_strategy: {desc: null, value: steps}
save_steps: {desc: null, value: 1000}
save_total_limit: {desc: null, value: null}
save_safetensors: {desc: null, value: true}
save_on_each_node: {desc: null, value: false}
save_only_model: {desc: null, value: false}
no_cuda: {desc: null, value: false}
use_cpu: {desc: null, value: false}
use_mps_device: {desc: null, value: false}
seed: {desc: null, value: 42}
data_seed: {desc: null, value: null}
jit_mode_eval: {desc: null, value: false}
use_ipex: {desc: null, value: false}
bf16: {desc: null, value: false}
fp16: {desc: null, value: true}
fp16_opt_level: {desc: null, value: O1}
half_precision_backend: {desc: null, value: auto}
bf16_full_eval: {desc: null, value: false}
fp16_full_eval: {desc: null, value: false}
tf32: {desc: null, value: null}
local_rank: {desc: null, value: 0}
ddp_backend: {desc: null, value: null}
tpu_num_cores: {desc: null, value: null}
tpu_metrics_debug: {desc: null, value: false}
debug: {desc: null, value: []}
dataloader_drop_last: {desc: null, value: false}
eval_steps: {desc: null, value: 1000}
dataloader_num_workers: {desc: null, value: 4}
dataloader_prefetch_factor: {desc: null, value: null}
past_index: {desc: null, value: -1}
run_name: {desc: null, value: ./}
disable_tqdm: {desc: null, value: false}
remove_unused_columns: {desc: null, value: true}
label_names: {desc: null, value: null}
load_best_model_at_end: {desc: null, value: false}
metric_for_best_model: {desc: null, value: null}
greater_is_better: {desc: null, value: null}
ignore_data_skip: {desc: null, value: false}
fsdp: {desc: null, value: []}
fsdp_min_num_params: {desc: null, value: 0}
fsdp_config: {desc: null, value: {min_num_params: 0, xla: false, xla_fsdp_v2: false, xla_fsdp_grad_ckpt: false}}
fsdp_transformer_layer_cls_to_wrap: {desc: null, value: null}
accelerator_config: {desc: null, value: {split_batches: false, dispatch_batches: null, even_batches: true, use_seedable_sampler: true}}
deepspeed: {desc: null, value: null}
label_smoothing_factor: {desc: null, value: 0.0}
optim: {desc: null, value: adamw_torch}
optim_args: {desc: null, value: null}
adafactor: {desc: null, value: false}
group_by_length: {desc: null, value: false}
length_column_name: {desc: null, value: input_length}
report_to: {desc: null, value: [tensorboard, wandb]}
ddp_find_unused_parameters: {desc: null, value: null}
ddp_bucket_cap_mb: {desc: null, value: null}
ddp_broadcast_buffers: {desc: null, value: null}
dataloader_pin_memory: {desc: null, value: true}
dataloader_persistent_workers: {desc: null, value: false}
skip_memory_metrics: {desc: null, value: true}
use_legacy_prediction_loop: {desc: null, value: false}
push_to_hub: {desc: null, value: true}
resume_from_checkpoint: {desc: null, value: null}
hub_model_id: {desc: null, value: null}
hub_strategy: {desc: null, value: every_save}
hub_token: {desc: null, value: <HUB_TOKEN>}
hub_private_repo: {desc: null, value: false}
hub_always_push: {desc: null, value: false}
gradient_checkpointing: {desc: null, value: true}
gradient_checkpointing_kwargs: {desc: null, value: null}
include_inputs_for_metrics: {desc: null, value: false}
fp16_backend: {desc: null, value: auto}
push_to_hub_model_id: {desc: null, value: null}
push_to_hub_organization: {desc: null, value: null}
push_to_hub_token: {desc: null, value: <PUSH_TO_HUB_TOKEN>}
mp_parameters: {desc: null, value: ''}
auto_find_batch_size: {desc: null, value: false}
full_determinism: {desc: null, value: false}
torchdynamo: {desc: null, value: null}
ray_scope: {desc: null, value: last}
ddp_timeout: {desc: null, value: 1800}
torch_compile: {desc: null, value: false}
torch_compile_backend: {desc: null, value: null}
torch_compile_mode: {desc: null, value: null}
dispatch_batches: {desc: null, value: null}
split_batches: {desc: null, value: null}
include_tokens_per_second: {desc: null, value: false}
include_num_input_tokens_seen: {desc: null, value: false}
neftune_noise_alpha: {desc: null, value: null}
optim_target_modules: {desc: null, value: null}
sortish_sampler: {desc: null, value: false}
predict_with_generate: {desc: null, value: true}
generation_max_length: {desc: null, value: 225}
generation_num_beams: {desc: null, value: null}
generation_config: {desc: null, value: null}
wandb/run-20240327_141031-aoxf8fxn/files/output.log
ADDED
@@ -0,0 +1,1170 @@
1 |
+
|
2 |
+
0%| | 0/5000 [00:00<?, ?it/s]/home/sanchit/hf/lib/python3.8/site-packages/torch/utils/checkpoint.py:460: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
|
3 |
+
warnings.warn(
|
4 |
+
[WARNING|logging.py:329] 2024-03-27 14:10:46,121 >> `use_cache = True` is incompatible with gradient checkpointing. Setting `use_cache = False`...
|
5 |
+
|
6 |
+
|
7 |
+
|
8 |
+
|
9 |
+
|
10 |
+
|
11 |
+
|
12 |
+
|
13 |
+
|
14 |
+
|
15 |
+
|
16 |
+
|
17 |
+
|
18 |
+
|
19 |
+
|
20 |
+
|
21 |
+
|
22 |
+
|
23 |
+
|
24 |
+
|
25 |
+
|
26 |
+
|
27 |
+
|
28 |
+
|
29 |
+
0%|▍ | 25/5000 [02:39<8:21:19, 6.05s/it]
|
30 |
+
|
31 |
+
|
32 |
+
|
33 |
+
|
34 |
+
|
35 |
+
|
36 |
+
|
37 |
+
|
38 |
+
|
39 |
+
|
40 |
+
|
41 |
+
|
42 |
+
|
43 |
+
|
44 |
+
|
45 |
+
|
46 |
+
|
47 |
+
|
48 |
+
|
49 |
+
|
50 |
+
|
51 |
+
|
52 |
+
|
53 |
+
|
54 |
+
1%|▊ | 49/5000 [05:05<8:20:00, 6.06s/it]
|
55 |
+
|
56 |
+
|
57 |
+
|
58 |
+
|
59 |
+
|
60 |
+
|
61 |
+
|
62 |
+
|
63 |
+
|
64 |
+
|
65 |
+
|
66 |
+
|
67 |
+
|
68 |
+
|
69 |
+
|
70 |
+
|
71 |
+
|
72 |
+
|
73 |
+
|
74 |
+
|
75 |
+
|
76 |
+
|
77 |
+
|
78 |
+
|
79 |
+
|
80 |
+
|
81 |
+
2%|█▏ | 75/5000 [07:28<8:10:11, 5.97s/it]
|
82 |
+
|
83 |
+
|
84 |
+
|
85 |
+
|
86 |
+
|
87 |
+
|
88 |
+
|
89 |
+
|
90 |
+
|
91 |
+
|
92 |
+
|
93 |
+
|
94 |
+
|
95 |
+
|
96 |
+
|
97 |
+
|
98 |
+
|
99 |
+
|
100 |
+
|
101 |
+
|
102 |
+
|
103 |
+
|
104 |
+
|
105 |
+
|
106 |
+
|
107 |
+
2%|█▌ | 100/5000 [10:00<8:15:20, 6.07s/it]
|
108 |
+
|
109 |
+
|
110 |
+
|
111 |
+
|
112 |
+
|
113 |
+
|
114 |
+
|
115 |
+
|
116 |
+
|
117 |
+
|
118 |
+
|
119 |
+
|
120 |
+
|
121 |
+
|
122 |
+
|
123 |
+
|
124 |
+
|
125 |
+
|
126 |
+
|
127 |
+
|
128 |
+
|
129 |
+
|
130 |
+
|
131 |
+
|
132 |
+
|
133 |
+
2%|█▉ | 125/5000 [12:32<8:12:37, 6.06s/it]
|
134 |
+
|
135 |
+
|
136 |
+
|
137 |
+
|
138 |
+
|
139 |
+
|
140 |
+
|
141 |
+
|
142 |
+
|
143 |
+
|
144 |
+
|
145 |
+
|
146 |
+
|
147 |
+
|
148 |
+
|
149 |
+
|
150 |
+
|
151 |
+
|
152 |
+
|
153 |
+
|
154 |
+
|
155 |
+
|
156 |
+
|
157 |
+
|
158 |
+
3%|██▎ | 149/5000 [14:43<8:11:31, 6.08s/it]
|
159 |
+
|
160 |
+
|
161 |
+
|
162 |
+
|
163 |
+
|
164 |
+
|
165 |
+
|
166 |
+
|
167 |
+
|
168 |
+
|
169 |
+
|
170 |
+
|
171 |
+
|
172 |
+
|
173 |
+
|
174 |
+
|
175 |
+
|
176 |
+
|
177 |
+
|
178 |
+
|
179 |
+
|
180 |
+
|
181 |
+
|
182 |
+
|
183 |
+
|
184 |
+
|
185 |
+
4%|██▋ | 175/5000 [17:21<8:07:13, 6.06s/it]
|
186 |
+
|
187 |
+
|
188 |
+
|
189 |
+
|
190 |
+
|
191 |
+
|
192 |
+
|
193 |
+
|
194 |
+
|
195 |
+
|
196 |
+
|
197 |
+
|
198 |
+
|
199 |
+
|
200 |
+
|
201 |
+
|
202 |
+
|
203 |
+
|
204 |
+
|
205 |
+
|
206 |
+
|
207 |
+
|
208 |
+
|
209 |
+
|
210 |
+
4%|███ | 199/5000 [19:32<6:47:30, 5.09s/it]
|
211 |
+
|
212 |
+
|
213 |
+
|
214 |
+
|
215 |
+
|
216 |
+
|
217 |
+
|
218 |
+
|
219 |
+
|
220 |
+
|
221 |
+
|
222 |
+
|
223 |
+
|
224 |
+
|
225 |
+
|
226 |
+
|
227 |
+
|
228 |
+
|
229 |
+
|
230 |
+
|
231 |
+
|
232 |
+
|
233 |
+
|
234 |
+
|
235 |
+
|
236 |
+
|
237 |
+
4%|███▌ | 225/5000 [22:16<9:18:29, 7.02s/it]
|
238 |
+
|
239 |
+
|
240 |
+
|
241 |
+
|
242 |
+
|
243 |
+
|
244 |
+
|
245 |
+
|
246 |
+
|
247 |
+
|
248 |
+
|
249 |
+
|
250 |
+
|
251 |
+
|
252 |
+
|
253 |
+
|
254 |
+
|
255 |
+
|
256 |
+
|
257 |
+
|
258 |
+
|
259 |
+
|
260 |
+
|
261 |
+
|
262 |
+
|
263 |
+
5%|███▉ | 250/5000 [24:44<8:01:25, 6.08s/it]
|
264 |
+
|
265 |
+
|
266 |
+
|
267 |
+
|
268 |
+
|
269 |
+
|
270 |
+
|
271 |
+
|
272 |
+
|
273 |
+
|
274 |
+
|
275 |
+
|
276 |
+
|
277 |
+
|
278 |
+
|
279 |
+
|
280 |
+
|
281 |
+
|
282 |
+
|
283 |
+
|
284 |
+
|
285 |
+
|
286 |
+
|
287 |
+
|
288 |
+
|
289 |
+
6%|████▎ | 275/5000 [26:58<7:27:24, 5.68s/it]
|
290 |
+
|
291 |
+
|
292 |
+
|
293 |
+
|
294 |
+
|
295 |
+
|
296 |
+
|
297 |
+
|
298 |
+
|
299 |
+
|
300 |
+
|
301 |
+
|
302 |
+
|
303 |
+
|
304 |
+
|
305 |
+
|
306 |
+
|
307 |
+
|
308 |
+
|
309 |
+
|
310 |
+
|
311 |
+
|
312 |
+
|
313 |
+
|
314 |
+
|
315 |
+
6%|████▋ | 300/5000 [29:09<4:23:23, 3.36s/it]
|
316 |
+
|
317 |
+
|
318 |
+
|
319 |
+
|
320 |
+
|
321 |
+
|
322 |
+
|
323 |
+
|
324 |
+
|
325 |
+
|
326 |
+
|
327 |
+
|
328 |
+
|
329 |
+
|
330 |
+
|
331 |
+
|
332 |
+
|
333 |
+
|
334 |
+
|
335 |
+
|
336 |
+
|
337 |
+
|
338 |
+
|
339 |
+
|
340 |
+
|
341 |
+
6%|█████ | 325/5000 [31:26<7:29:10, 5.76s/it]
|
342 |
+
|
343 |
+
|
344 |
+
|
345 |
+
|
346 |
+
|
347 |
+
|
348 |
+
|
349 |
+
|
350 |
+
|
351 |
+
|
352 |
+
|
353 |
+
|
354 |
+
|
355 |
+
|
356 |
+
|
357 |
+
|
358 |
+
|
359 |
+
|
360 |
+
|
361 |
+
|
362 |
+
|
363 |
+
|
364 |
+
|
365 |
+
|
366 |
+
7%|█████▍ | 349/5000 [33:45<7:30:31, 5.81s/it]
|
367 |
+
|
368 |
+
|
369 |
+
|
370 |
+
|
371 |
+
|
372 |
+
|
373 |
+
|
374 |
+
|
375 |
+
|
376 |
+
|
377 |
+
|
378 |
+
|
379 |
+
|
380 |
+
|
381 |
+
|
382 |
+
|
383 |
+
|
384 |
+
|
385 |
+
|
386 |
+
|
387 |
+
|
388 |
+
|
389 |
+
|
390 |
+
|
391 |
+
|
392 |
+
|
393 |
+
8%|█████▊ | 375/5000 [36:16<7:28:38, 5.82s/it]
|
394 |
+
|
395 |
+
|
396 |
+
|
397 |
+
|
398 |
+
|
399 |
+
|
400 |
+
|
401 |
+
|
402 |
+
|
403 |
+
|
404 |
+
|
405 |
+
|
406 |
+
|
407 |
+
|
408 |
+
|
409 |
+
|
410 |
+
|
411 |
+
|
412 |
+
|
413 |
+
|
414 |
+
|
415 |
+
|
416 |
+
|
417 |
+
|
418 |
+
|
419 |
+
8%|██████▏ | 400/5000 [38:41<7:25:46, 5.81s/it]
|
420 |
+
|
421 |
+
|
422 |
+
|
423 |
+
|
424 |
+
|
425 |
+
|
426 |
+
|
427 |
+
|
428 |
+
|
429 |
+
|
430 |
+
|
431 |
+
|
432 |
+
|
433 |
+
|
434 |
+
|
435 |
+
|
436 |
+
|
437 |
+
|
438 |
+
|
439 |
+
|
440 |
+
|
441 |
+
|
442 |
+
|
443 |
+
|
444 |
+
|
445 |
+
8%|██████▋ | 425/5000 [41:05<7:22:15, 5.80s/it]
|
446 |
+
|
447 |
+
|
448 |
+
|
449 |
+
|
450 |
+
|
451 |
+
|
452 |
+
|
453 |
+
|
454 |
+
|
455 |
+
|
456 |
+
|
457 |
+
|
458 |
+
|
459 |
+
|
460 |
+
|
461 |
+
|
462 |
+
|
463 |
+
|
464 |
+
|
465 |
+
|
466 |
+
|
467 |
+
|
468 |
+
|
469 |
+
|
470 |
+
|
471 |
+
9%|███████ | 450/5000 [43:42<8:02:20, 6.36s/it]
|
472 |
+
|
473 |
+
|
474 |
+
|
475 |
+
|
476 |
+
|
477 |
+
|
478 |
+
|
479 |
+
|
480 |
+
|
481 |
+
|
482 |
+
|
483 |
+
|
484 |
+
|
485 |
+
|
486 |
+
|
487 |
+
|
488 |
+
|
489 |
+
|
490 |
+
|
491 |
+
|
492 |
+
|
493 |
+
|
494 |
+
|
495 |
+
|
496 |
+
|
497 |
+
10%|███████▍ | 475/5000 [46:06<7:11:45, 5.73s/it]
|
498 |
+
|
499 |
+
|
500 |
+
|
501 |
+
|
502 |
+
|
503 |
+
|
504 |
+
|
505 |
+
|
506 |
+
|
507 |
+
|
508 |
+
|
509 |
+
|
510 |
+
|
511 |
+
|
512 |
+
|
513 |
+
|
514 |
+
|
515 |
+
|
516 |
+
|
517 |
+
|
518 |
+
|
519 |
+
|
520 |
+
|
521 |
+
|
522 |
+
|
523 |
+
10%|███████▊ | 500/5000 [48:31<7:12:01, 5.76s/it]
|
524 |
+
|
525 |
+
|
526 |
+
|
527 |
+
|
528 |
+
|
529 |
+
|
530 |
+
|
531 |
+
|
532 |
+
|
533 |
+
|
534 |
+
|
535 |
+
|
536 |
+
|
537 |
+
|
538 |
+
|
539 |
+
|
540 |
+
|
541 |
+
|
542 |
+
|
543 |
+
|
544 |
+
|
545 |
+
|
546 |
+
|
547 |
+
|
548 |
+
|
549 |
+
10%|████████▏ | 525/5000 [50:47<4:38:57, 3.74s/it]
|
550 |
+
|
551 |
+
|
552 |
+
|
553 |
+
|
554 |
+
|
555 |
+
|
556 |
+
|
557 |
+
|
558 |
+
|
559 |
+
|
560 |
+
|
561 |
+
|
562 |
+
|
563 |
+
|
564 |
+
|
565 |
+
|
566 |
+
|
567 |
+
|
568 |
+
|
569 |
+
|
570 |
+
|
571 |
+
|
572 |
+
|
573 |
+
|
574 |
+
|
575 |
+
11%|████████▌ | 550/5000 [53:09<7:40:49, 6.21s/it]
|
576 |
+
|
577 |
+
|
578 |
+
|
579 |
+
|
580 |
+
|
581 |
+
|
582 |
+
|
583 |
+
|
584 |
+
|
585 |
+
|
586 |
+
|
587 |
+
|
588 |
+
|
589 |
+
|
590 |
+
|
591 |
+
|
592 |
+
|
593 |
+
|
594 |
+
|
595 |
+
|
596 |
+
|
597 |
+
|
598 |
+
|
599 |
+
|
600 |
+
|
601 |
+
12%|████████▉ | 575/5000 [55:27<7:30:32, 6.11s/it]
|
602 |
+
|
603 |
+
|
604 |
+
|
605 |
+
|
606 |
+
|
607 |
+
|
608 |
+
|
609 |
+
|
610 |
+
|
611 |
+
|
612 |
+
|
613 |
+
|
614 |
+
|
615 |
+
|
616 |
+
|
617 |
+
|
618 |
+
|
619 |
+
|
620 |
+
|
621 |
+
|
622 |
+
|
623 |
+
|
624 |
+
|
625 |
+
|
626 |
+
|
627 |
+
12%|█████████▎ | 600/5000 [58:01<7:30:08, 6.14s/it]
|
628 |
+
|
629 |
+
|
630 |
+
|
631 |
+
|
632 |
+
|
633 |
+
|
634 |
+
|
635 |
+
|
636 |
+
|
637 |
+
|
638 |
+
|
639 |
+
|
640 |
+
|
641 |
+
|
642 |
+
|
643 |
+
|
644 |
+
|
645 |
+
|
646 |
+
|
647 |
+
|
648 |
+
|
649 |
+
|
650 |
+
|
651 |
+
|
652 |
+
|
653 |
+
12%|█████████▌ | 625/5000 [1:00:33<6:04:58, 5.01s/it]
|
654 |
+
|
655 |
+
|
656 |
+
|
657 |
+
|
658 |
+
|
659 |
+
|
660 |
+
|
661 |
+
|
662 |
+
|
663 |
+
|
664 |
+
|
665 |
+
|
666 |
+
|
667 |
+
|
668 |
+
|
669 |
+
|
670 |
+
|
671 |
+
|
672 |
+
|
673 |
+
|
674 |
+
|
675 |
+
|
676 |
+
|
677 |
+
|
678 |
+
|
679 |
+
13%|█████████▉ | 650/5000 [1:02:56<6:58:37, 5.77s/it]
|
680 |
+
|
681 |
+
|
682 |
+
|
683 |
+
|
684 |
+
|
685 |
+
|
686 |
+
|
687 |
+
|
688 |
+
|
689 |
+
|
690 |
+
|
691 |
+
|
692 |
+
|
693 |
+
|
694 |
+
|
695 |
+
|
696 |
+
|
697 |
+
|
698 |
+
|
699 |
+
|
700 |
+
|
701 |
+
|
702 |
+
|
703 |
+
|
704 |
+
|
705 |
+
14%|██████████▎ | 675/5000 [1:05:37<7:36:17, 6.33s/it]
|
706 |
+
|
707 |
+
|
708 |
+
|
709 |
+
|
710 |
+
|
711 |
+
|
712 |
+
|
713 |
+
|
714 |
+
|
715 |
+
|
716 |
+
|
717 |
+
|
718 |
+
|
719 |
+
|
720 |
+
|
721 |
+
|
722 |
+
|
723 |
+
|
724 |
+
|
725 |
+
|
726 |
+
|
727 |
+
|
728 |
+
|
729 |
+
|
730 |
+
|
731 |
+
14%|██████████▋ | 700/5000 [1:07:55<7:00:19, 5.86s/it]
|
732 |
+
|
733 |
+
|
734 |
+
|
735 |
+
|
736 |
+
|
737 |
+
|
738 |
+
|
739 |
+
|
740 |
+
|
741 |
+
|
742 |
+
|
743 |
+
|
744 |
+
|
745 |
+
|
746 |
+
|
747 |
+
|
748 |
+
|
749 |
+
|
750 |
+
|
751 |
+
|
752 |
+
|
753 |
+
|
754 |
+
|
755 |
+
|
756 |
+
|
757 |
+
14%|███████████ | 725/5000 [1:10:29<7:18:52, 6.16s/it]
|
758 |
+
|
759 |
+
|
760 |
+
|
761 |
+
|
762 |
+
|
763 |
+
|
764 |
+
|
765 |
+
|
766 |
+
|
767 |
+
|
768 |
+
|
769 |
+
|
770 |
+
|
771 |
+
|
772 |
+
|
773 |
+
|
774 |
+
|
775 |
+
|
776 |
+
|
777 |
+
|
778 |
+
|
779 |
+
|
780 |
+
|
781 |
+
|
782 |
+
|
783 |
+
15%|███████████▍ | 750/5000 [1:13:01<7:14:17, 6.13s/it]
|
784 |
+
|
785 |
+
|
786 |
+
|
787 |
+
|
788 |
+
|
789 |
+
|
790 |
+
|
791 |
+
|
792 |
+
|
793 |
+
|
794 |
+
|
795 |
+
|
796 |
+
|
797 |
+
|
798 |
+
|
799 |
+
|
800 |
+
|
801 |
+
|
802 |
+
|
803 |
+
|
804 |
+
|
805 |
+
|
806 |
+
|
807 |
+
|
808 |
+
|
809 |
+
16%|███████████▊ | 775/5000 [1:15:21<7:12:46, 6.15s/it]
|
810 |
+
|
811 |
+
|
812 |
+
|
813 |
+
|
814 |
+
|
815 |
+
|
816 |
+
|
817 |
+
|
818 |
+
|
819 |
+
|
820 |
+
|
821 |
+
|
822 |
+
|
823 |
+
|
824 |
+
|
825 |
+
|
826 |
+
|
827 |
+
|
828 |
+
|
829 |
+
|
830 |
+
|
831 |
+
|
832 |
+
|
833 |
+
|
834 |
+
|
835 |
+
16%|████████████▏ | 800/5000 [1:17:54<7:11:25, 6.16s/it]
|
836 |
+
|
837 |
+
|
838 |
+
|
839 |
+
|
840 |
+
|
841 |
+
|
842 |
+
|
843 |
+
|
844 |
+
|
845 |
+
|
846 |
+
|
847 |
+
|
848 |
+
|
849 |
+
|
850 |
+
|
851 |
+
|
852 |
+
|
853 |
+
|
854 |
+
|
855 |
+
|
856 |
+
|
857 |
+
|
858 |
+
|
859 |
+
|
860 |
+
|
861 |
+
16%|████████████▌ | 825/5000 [1:20:13<4:32:16, 3.91s/it]
|
862 |
+
|
863 |
+
|
864 |
+
|
865 |
+
|
866 |
+
|
867 |
+
|
868 |
+
|
869 |
+
|
870 |
+
|
871 |
+
|
872 |
+
|
873 |
+
|
874 |
+
|
875 |
+
|
876 |
+
|
877 |
+
|
878 |
+
|
879 |
+
|
880 |
+
|
881 |
+
|
882 |
+
|
883 |
+
|
884 |
+
|
885 |
+
|
886 |
+
|
887 |
+
17%|████████████▉ | 850/5000 [1:22:26<6:35:50, 5.72s/it]
|
888 |
+
|
889 |
+
|
890 |
+
|
891 |
+
|
892 |
+
|
893 |
+
|
894 |
+
|
895 |
+
|
896 |
+
|
897 |
+
|
898 |
+
|
899 |
+
|
900 |
+
|
901 |
+
|
902 |
+
|
903 |
+
|
904 |
+
|
905 |
+
|
906 |
+
|
907 |
+
|
908 |
+
|
909 |
+
|
910 |
+
|
911 |
+
|
912 |
+
|
913 |
+
18%|█████████████▎ | 875/5000 [1:24:50<6:34:17, 5.74s/it]
|
914 |
+
|
915 |
+
|
916 |
+
|
917 |
+
|
918 |
+
|
919 |
+
|
920 |
+
|
921 |
+
|
922 |
+
|
923 |
+
|
924 |
+
|
925 |
+
|
926 |
+
|
927 |
+
|
928 |
+
|
929 |
+
|
930 |
+
|
931 |
+
|
932 |
+
|
933 |
+
|
934 |
+
|
935 |
+
|
936 |
+
|
937 |
+
|
938 |
+
|
939 |
+
18%|█████████████▋ | 900/5000 [1:27:26<6:44:05, 5.91s/it]
|
940 |
+
|
941 |
+
|
942 |
+
|
943 |
+
|
944 |
+
|
945 |
+
|
946 |
+
|
947 |
+
|
948 |
+
|
949 |
+
|
950 |
+
|
951 |
+
|
952 |
+
|
953 |
+
|
954 |
+
|
955 |
+
|
956 |
+
|
957 |
+
|
958 |
+
|
959 |
+
|
960 |
+
|
961 |
+
|
962 |
+
|
963 |
+
|
964 |
+
18%|██████████████ | 924/5000 [1:29:45<6:38:47, 5.87s/it]
|
965 |
+
|
966 |
+
|
967 |
+
|
968 |
+
|
969 |
+
|
970 |
+
|
971 |
+
|
972 |
+
|
973 |
+
|
974 |
+
|
975 |
+
|
976 |
+
|
977 |
+
|
978 |
+
|
979 |
+
|
980 |
+
|
981 |
+
|
982 |
+
|
983 |
+
|
984 |
+
|
985 |
+
|
986 |
+
|
987 |
+
|
988 |
+
|
989 |
+
|
990 |
+
19%|██████████████▍ | 949/5000 [1:32:12<6:39:41, 5.92s/it]
|
991 |
+
|
992 |
+
|
993 |
+
|
994 |
+
|
995 |
+
|
996 |
+
|
997 |
+
|
998 |
+
|
999 |
+
|
1000 |
+
|
1001 |
+
|
1002 |
+
|
1003 |
+
|
1004 |
+
|
1005 |
+
|
1006 |
+
|
1007 |
+
|
1008 |
+
|
1009 |
+
|
1010 |
+
|
1011 |
+
|
1012 |
+
|
1013 |
+
|
1014 |
+
|
1015 |
+
|
1016 |
+
|
1017 |
+
20%|██████████████▊ | 975/5000 [1:34:43<6:30:56, 5.83s/it]
|
1018 |
+
|
1019 |
+
|
1020 |
+
|
1021 |
+
|
1022 |
+
|
1023 |
+
|
1024 |
+
|
1025 |
+
|
1026 |
+
|
1027 |
+
|
1028 |
+
|
1029 |
+
|
1030 |
+
|
1031 |
+
|
1032 |
+
|
1033 |
+
|
1034 |
+
|
1035 |
+
|
1036 |
+
|
1037 |
+
|
1038 |
+
|
1039 |
+
|
1040 |
+
|
1041 |
+
|
1042 |
+
20%|███████████████▏ | 999/5000 [1:37:04<6:37:16, 5.96s/it]
+ 20%|███████████████ | 1000/5000 [1:37:10<6:43:54, 6.06s/it]
+[INFO|trainer.py:768] 2024-03-27 15:47:43,288 >> The following columns in the evaluation set don't have a corresponding argument in `WhisperForConditionalGeneration.forward` and have been ignored: input_length. If input_length are not expected by `WhisperForConditionalGeneration.forward`, you can safely ignore this message.
+[INFO|trainer.py:3515] 2024-03-27 15:47:43,291 >> ***** Running Evaluation *****
+[INFO|trainer.py:3517] 2024-03-27 15:47:43,291 >> Num examples = 3123
+[INFO|trainer.py:3520] 2024-03-27 15:47:43,291 >> Batch size = 32
+[INFO|generation_whisper.py:1111] 2024-03-27 15:47:55,876 >> You have passed task=transcribe, but also have set `forced_decoder_ids` to [[1, None], [2, 50360]] which creates a conflict. `forced_decoder_ids` will be ignored in favor of task=transcribe.
+[... the generation_whisper.py warning above is emitted once per evaluation batch; the repeated copies and intermediate progress lines are elided ...]
+  0%|          | 0/98 [00:00<?, ?it/s]
+ 26%|█████████████████████▏ | 25/98 [04:40<13:34, 11.16s/it]
+ 51%|██████████████████████████████████████████▎ | 50/98 [10:02<09:23, 11.74s/it]
+ 77%|███████████████████████████████████████████████████████████████▌ | 75/98 [15:12<05:04, 13.25s/it]
+100%|███████████████████████████████████████████████████████████████████████████████████| 98/98 [19:57<00:00, 11.61s/it]
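The repeated generation_whisper.py warning is cosmetic: the checkpoint's generation config still carries legacy `forced_decoder_ids`, which newer transformers versions ignore whenever task=transcribe is passed. One way to silence it at inference time, sketched under the assumption that the checkpoint saved below is loaded as-is (the path comes from the save logs that follow):

    from transformers import WhisperForConditionalGeneration

    model = WhisperForConditionalGeneration.from_pretrained("./checkpoint-1000")
    # Clear the legacy attribute so it no longer conflicts with task=transcribe.
    model.generation_config.forced_decoder_ids = None
    # Language and task are then supplied per call instead:
    #   model.generate(input_features, language="hi", task="transcribe")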
+Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.
+Non-default generation parameters: {'max_length': 448, 'begin_suppress_tokens': [220, 50257]}
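The warning above names its own fix: move the non-default parameters out of config.json into a dedicated generation config saved alongside the model. A minimal sketch using the exact values the warning reports:

    from transformers import GenerationConfig

    # Recreate the parameters flagged in the warning and persist them in
    # generation_config.json so the model config stays clean.
    generation_config = GenerationConfig(
        max_length=448,
        begin_suppress_tokens=[220, 50257],
    )
    generation_config.save_pretrained("./")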
+[INFO|configuration_utils.py:471] 2024-03-27 16:08:07,645 >> Configuration saved in ./checkpoint-1000/config.json
+[INFO|configuration_utils.py:697] 2024-03-27 16:08:07,646 >> Configuration saved in ./checkpoint-1000/generation_config.json
+[INFO|modeling_utils.py:2474] 2024-03-27 16:08:15,552 >> Model weights saved in ./checkpoint-1000/model.safetensors
+[INFO|feature_extraction_utils.py:424] 2024-03-27 16:08:15,553 >> Feature extractor saved in ./checkpoint-1000/preprocessor_config.json
+[INFO|feature_extraction_utils.py:424] 2024-03-27 16:08:45,564 >> Feature extractor saved in ./preprocessor_config.json
+/home/sanchit/hf/lib/python3.8/site-packages/torch/utils/checkpoint.py:460: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
+  warnings.warn(
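The torch UserWarning fires because gradient checkpointing (enabled via --gradient_checkpointing in this run) falls back to torch's default reentrant implementation. A sketch of the explicit opt-in PyTorch recommends, assuming a transformers version recent enough to accept gradient_checkpointing_kwargs (the pinned 4.39.0.dev0 below should be):

    # Prefer the non-reentrant checkpointing variant explicitly, rather than
    # relying on torch's changing default (illustrative snippet, not from run.sh).
    model.gradient_checkpointing_enable(
        gradient_checkpointing_kwargs={"use_reentrant": False}
    )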
wandb/run-20240327_141031-aoxf8fxn/files/requirements.txt
ADDED
@@ -0,0 +1,247 @@
+absl-py==2.1.0
+accelerate==0.27.2
+aiohttp==3.9.3
+aiosignal==1.3.1
+anyio==4.2.0
+appdirs==1.4.4
+argon2-cffi-bindings==21.2.0
+argon2-cffi==23.1.0
+arrow==1.3.0
+asttokens==2.4.1
+astunparse==1.6.3
+async-lru==2.0.4
+async-timeout==4.0.3
+attrs==23.2.0
+audioread==3.0.1
+av==11.0.0
+babel==2.14.0
+backcall==0.2.0
+beautifulsoup4==4.12.3
+bitsandbytes==0.42.0
+bleach==6.1.0
+cached-property==1.5.2
+cachetools==5.3.2
+certifi==2024.2.2
+cffi==1.16.0
+charset-normalizer==3.3.2
+chex==0.1.7
+click==8.1.7
+coloredlogs==15.0.1
+comm==0.2.1
+contourpy==1.1.1
+ctranslate2==4.1.0
+cycler==0.12.1
+datasets==2.18.0
+debugpy==1.8.0
+decorator==5.1.1
+defusedxml==0.7.1
+dill==0.3.7
+dm-tree==0.1.8
+docker-pycreds==0.4.0
+docstring-parser==0.15
+einops==0.7.0
+etils==1.3.0
+evaluate==0.4.1
+exceptiongroup==1.2.0
+executing==2.0.1
+faster-whisper==1.0.1
+fastjsonschema==2.19.1
+filelock==3.13.1
+flash-attn==2.5.3
+flatbuffers==23.5.26
+flax==0.7.2
+fonttools==4.48.1
+fqdn==1.5.1
+frozenlist==1.4.1
+fsspec==2024.2.0
+gast==0.4.0
+gitdb==4.0.11
+gitpython==3.1.41
+google-auth-oauthlib==1.0.0
+google-auth==2.27.0
+google-pasta==0.2.0
+grpcio==1.60.1
+h11==0.14.0
+h5py==3.10.0
+httpcore==1.0.2
+httpx==0.26.0
+huggingface-hub==0.21.4
+humanfriendly==10.0
+idna==3.6
+importlib-metadata==7.0.1
+importlib-resources==6.1.1
+iniconfig==2.0.0
+ipdb==0.13.13
+ipykernel==6.29.2
+ipython==8.12.3
+isoduration==20.11.0
+jax==0.4.13
+jaxlib==0.4.13
+jedi==0.19.1
+jinja2==3.1.2
+jiwer==3.0.3
+joblib==1.3.2
+json5==0.9.14
+jsonpointer==2.4
+jsonschema-specifications==2023.12.1
+jsonschema==4.21.1
+jupyter-client==8.6.0
+jupyter-core==5.7.1
+jupyter-events==0.9.0
+jupyter-lsp==2.2.2
+jupyter-server-terminals==0.5.2
+jupyter-server==2.12.5
+jupyterlab-pygments==0.3.0
+jupyterlab-server==2.25.2
+jupyterlab==4.1.0
+keras==2.13.1
+kiwisolver==1.4.5
+lazy-loader==0.3
+libclang==16.0.6
+librosa==0.10.1
+llvmlite==0.41.1
+markdown-it-py==3.0.0
+markdown==3.5.2
+markupsafe==2.1.3
+matplotlib-inline==0.1.6
+matplotlib==3.7.4
+mdurl==0.1.2
+mistune==3.0.2
+ml-dtypes==0.2.0
+more-itertools==10.2.0
+mpmath==1.2.1
+msclap==1.3.3
+msgpack==1.0.7
+multidict==6.0.5
+multiprocess==0.70.15
+nbclient==0.9.0
+nbconvert==7.16.0
+nbformat==5.9.2
+nest-asyncio==1.6.0
+networkx==3.0rc1
+ninja==1.11.1.1
+notebook-shim==0.2.3
+numba==0.58.1
+numpy==1.24.3
+nvidia-cublas-cu12==12.1.3.1
+nvidia-cuda-cupti-cu12==12.1.105
+nvidia-cuda-nvrtc-cu12==12.1.105
+nvidia-cuda-runtime-cu12==12.1.105
+nvidia-cudnn-cu12==8.9.2.26
+nvidia-cufft-cu12==11.0.2.54
+nvidia-curand-cu12==10.3.2.106
+nvidia-cusolver-cu12==11.4.5.107
+nvidia-cusparse-cu12==12.1.0.106
+nvidia-nccl-cu12==2.19.3
+nvidia-nvjitlink-cu12==12.1.105
+nvidia-nvtx-cu12==12.1.105
+oauthlib==3.2.2
+onnxruntime==1.17.1
+openai-whisper==20231117
+opt-einsum==3.3.0
+optax==0.1.8
+orbax-checkpoint==0.2.3
+overrides==7.7.0
+packaging==23.2
+pandas==2.0.3
+pandocfilters==1.5.1
+parameterized==0.9.0
+parso==0.8.3
+peft==0.8.2
+pexpect==4.9.0
+pickleshare==0.7.5
+pillow==9.3.0
+pip==24.0
+pkg-resources==0.0.0
+pkgutil-resolve-name==1.3.10
+platformdirs==4.2.0
+pluggy==1.4.0
+pooch==1.8.0
+prometheus-client==0.19.0
+prompt-toolkit==3.0.43
+protobuf==4.25.2
+psutil==5.9.8
+ptyprocess==0.7.0
+pure-eval==0.2.2
+pyarrow-hotfix==0.6
+pyarrow==15.0.0
+pyasn1-modules==0.3.0
+pyasn1==0.5.1
+pycparser==2.21
+pygments==2.17.2
+pyparsing==3.1.1
+pytest==7.4.4
+python-dateutil==2.8.2
+python-json-logger==2.0.7
+pytorch-triton==3.0.0+901819d2b6
+pytz==2024.1
+pyyaml==6.0.1
+pyzmq==25.1.2
+rapidfuzz==3.6.1
+referencing==0.33.0
+regex==2023.12.25
+requests-oauthlib==1.3.1
+requests==2.31.0
+responses==0.18.0
+rfc3339-validator==0.1.4
+rfc3986-validator==0.1.1
+rich==13.7.0
+rpds-py==0.17.1
+rsa==4.9
+safetensors==0.4.2
+scikit-learn==1.3.2
+scipy==1.10.1
+send2trash==1.8.2
+sentry-sdk==1.40.0
+setproctitle==1.3.3
+setuptools==44.0.0
+shtab==1.7.0
+six==1.16.0
+smmap==5.0.1
+sniffio==1.3.0
+soundfile==0.12.1
+soupsieve==2.5
+soxr==0.3.7
+stack-data==0.6.3
+sympy==1.11.1
+tensorboard-data-server==0.7.2
+tensorboard==2.13.0
+tensorflow-cpu==2.13.1
+tensorflow-estimator==2.13.0
+tensorflow-io-gcs-filesystem==0.34.0
+tensorstore==0.1.45
+termcolor==2.4.0
+terminado==0.18.0
+threadpoolctl==3.2.0
+tiktoken==0.6.0
+tinycss2==1.2.1
+tokenizers==0.15.1
+tomli==2.0.1
+toolz==0.12.1
+torch==2.2.1
+torchaudio==2.2.1
+torchlibrosa==0.1.0
+torchvision==0.17.1
+tornado==6.4
+tqdm==4.66.1
+traitlets==5.14.1
+transformers==4.39.0.dev0
+triton==2.2.0
+trl==0.7.11
+types-python-dateutil==2.8.19.20240106
+typing-extensions==4.9.0
+tyro==0.7.3
+tzdata==2023.4
+uri-template==1.3.0
+urllib3==2.2.0
+wandb==0.16.2
+wcwidth==0.2.13
+webcolors==1.13
+webencodings==0.5.1
+websocket-client==1.7.0
+werkzeug==3.0.1
+wheel==0.42.0
+wrapt==1.16.0
+xxhash==3.4.1
+yarl==1.9.4
+zipp==3.17.0
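Since the run's environment is fully pinned above, it can be audited against a live interpreter. A small sketch, assuming the dump is available locally as requirements.txt (names with environment markers or VCS pins would need extra handling):

    from importlib.metadata import PackageNotFoundError, version

    # Report any package whose installed version drifts from the pinned dump.
    with open("requirements.txt") as fh:
        for line in fh:
            name, _, expected = line.strip().partition("==")
            if not name or not expected:
                continue
            try:
                installed = version(name)
            except PackageNotFoundError:
                installed = "not installed"
            if installed != expected:
                print(f"{name}: pinned {expected}, found {installed}")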
wandb/run-20240327_141031-aoxf8fxn/files/wandb-metadata.json
ADDED
@@ -0,0 +1,737 @@
+{
+  "os": "Linux-5.4.0-166-generic-x86_64-with-glibc2.29",
+  "python": "3.8.10",
+  "heartbeatAt": "2024-03-27T13:10:32.506516",
+  "startedAt": "2024-03-27T13:10:31.567196",
+  "docker": null,
+  "cuda": null,
+  "args": [
+    "--model_name_or_path=distil-whisper/distil-large-v3",
+    "--dataset_name=mozilla-foundation/common_voice_16_1",
+    "--dataset_config_name=hi",
+    "--language=hindi",
+    "--train_split_name=train+validation",
+    "--eval_split_name=test",
+    "--max_steps=5000",
+    "--output_dir=./",
+    "--per_device_train_batch_size=32",
+    "--per_device_eval_batch_size=32",
+    "--logging_steps=25",
+    "--learning_rate=1e-4",
+    "--warmup_steps=500",
+    "--evaluation_strategy=steps",
+    "--eval_steps=1000",
+    "--save_strategy=steps",
+    "--save_steps=1000",
+    "--generation_max_length=225",
+    "--preprocessing_num_workers=1",
+    "--dataloader_num_workers=4",
+    "--length_column_name=input_length",
+    "--max_duration_in_seconds=30",
+    "--text_column_name=sentence",
+    "--freeze_feature_encoder=False",
+    "--gradient_checkpointing",
+    "--fp16",
+    "--overwrite_output_dir",
+    "--do_train",
+    "--do_eval",
+    "--predict_with_generate",
+    "--use_auth_token",
+    "--push_to_hub"
+  ],
+  "state": "running",
+  "program": "run_speech_recognition_seq2seq.py",
+  "codePathLocal": "run_speech_recognition_seq2seq.py",
+  "codePath": "run_speech_recognition_seq2seq.py",
+  "git": {
+    "remote": "https://huggingface.co/sanchit-gandhi/distil-large-v3-hi-ft",
+    "commit": "40c686df113c0e98e7363c1bd523f58d11848fc0"
+  },
+  "email": "sanchit@huggingface.co",
+  "root": "/home/sanchit/distil-large-v3-hi-ft",
+  "host": "hf-dgx-01",
+  "username": "sanchit",
+  "executable": "/home/sanchit/hf/bin/python",
+  "cpu_count": 64,
+  "cpu_count_logical": 128,
+  "cpu_freq": {"current": 2591.1360703124997, "min": 1500.0, "max": 2250.0},
+  "cpu_freq_per_core": [
+    {"current": 2947.674, "min": 1500.0, "max": 2250.0},
+    {"current": 2021.008, "min": 1500.0, "max": 2250.0},
+    {"current": 2933.218, "min": 1500.0, "max": 2250.0},
+    {"current": 2938.11, "min": 1500.0, "max": 2250.0},
+    {"current": 2943.021, "min": 1500.0, "max": 2250.0},
+    {"current": 2943.107, "min": 1500.0, "max": 2250.0},
+    {"current": 2943.106, "min": 1500.0, "max": 2250.0},
+    {"current": 2935.616, "min": 1500.0, "max": 2250.0},
+    {"current": 2936.719, "min": 1500.0, "max": 2250.0},
+    {"current": 2950.399, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.429, "min": 1500.0, "max": 2250.0},
+    {"current": 2935.347, "min": 1500.0, "max": 2250.0},
+    {"current": 2938.48, "min": 1500.0, "max": 2250.0},
+    {"current": 2947.593, "min": 1500.0, "max": 2250.0},
+    {"current": 2944.163, "min": 1500.0, "max": 2250.0},
+    {"current": 1688.785, "min": 1500.0, "max": 2250.0},
+    {"current": 2943.151, "min": 1500.0, "max": 2250.0},
+    {"current": 2952.753, "min": 1500.0, "max": 2250.0},
+    {"current": 2943.208, "min": 1500.0, "max": 2250.0},
+    {"current": 1868.923, "min": 1500.0, "max": 2250.0},
+    {"current": 2959.785, "min": 1500.0, "max": 2250.0},
+    {"current": 2937.134, "min": 1500.0, "max": 2250.0},
+    {"current": 2936.912, "min": 1500.0, "max": 2250.0},
+    {"current": 1982.456, "min": 1500.0, "max": 2250.0},
+    {"current": 2945.492, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.291, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.233, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.064, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.272, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.118, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.057, "min": 1500.0, "max": 2250.0},
+    {"current": 2941.903, "min": 1500.0, "max": 2250.0},
+    {"current": 2950.964, "min": 1500.0, "max": 2250.0},
+    {"current": 2952.685, "min": 1500.0, "max": 2250.0},
+    {"current": 2944.184, "min": 1500.0, "max": 2250.0},
+    {"current": 2945.032, "min": 1500.0, "max": 2250.0},
+    {"current": 2940.66, "min": 1500.0, "max": 2250.0},
+    {"current": 2941.93, "min": 1500.0, "max": 2250.0},
+    {"current": 2940.832, "min": 1500.0, "max": 2250.0},
+    {"current": 2944.647, "min": 1500.0, "max": 2250.0},
+    {"current": 2943.166, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.746, "min": 1500.0, "max": 2250.0},
+    {"current": 2943.47, "min": 1500.0, "max": 2250.0},
+    {"current": 2944.65, "min": 1500.0, "max": 2250.0},
+    {"current": 2937.31, "min": 1500.0, "max": 2250.0},
+    {"current": 2948.814, "min": 1500.0, "max": 2250.0},
+    {"current": 2939.028, "min": 1500.0, "max": 2250.0},
+    {"current": 2938.291, "min": 1500.0, "max": 2250.0},
+    {"current": 2943.541, "min": 1500.0, "max": 2250.0},
+    {"current": 2948.065, "min": 1500.0, "max": 2250.0},
+    {"current": 2943.119, "min": 1500.0, "max": 2250.0},
+    {"current": 2943.127, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.091, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.402, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.105, "min": 1500.0, "max": 2250.0},
+    {"current": 2949.13, "min": 1500.0, "max": 2250.0},
+    {"current": 2940.763, "min": 1500.0, "max": 2250.0},
+    {"current": 2940.78, "min": 1500.0, "max": 2250.0},
+    {"current": 2940.786, "min": 1500.0, "max": 2250.0},
+    {"current": 2940.657, "min": 1500.0, "max": 2250.0},
+    {"current": 1681.397, "min": 1500.0, "max": 2250.0},
+    {"current": 2937.278, "min": 1500.0, "max": 2250.0},
+    {"current": 2951.047, "min": 1500.0, "max": 2250.0},
+    {"current": 1680.886, "min": 1500.0, "max": 2250.0},
+    {"current": 2943.17, "min": 1500.0, "max": 2250.0},
+    {"current": 1754.766, "min": 1500.0, "max": 2250.0},
+    {"current": 2943.394, "min": 1500.0, "max": 2250.0},
+    {"current": 2943.181, "min": 1500.0, "max": 2250.0},
+    {"current": 2943.186, "min": 1500.0, "max": 2250.0},
+    {"current": 2945.466, "min": 1500.0, "max": 2250.0},
+    {"current": 2943.336, "min": 1500.0, "max": 2250.0},
+    {"current": 2943.198, "min": 1500.0, "max": 2250.0},
+    {"current": 2926.611, "min": 1500.0, "max": 2250.0},
+    {"current": 2939.83, "min": 1500.0, "max": 2250.0},
+    {"current": 2939.84, "min": 1500.0, "max": 2250.0},
+    {"current": 2948.356, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.765, "min": 1500.0, "max": 2250.0},
+    {"current": 2940.047, "min": 1500.0, "max": 2250.0},
+    {"current": 2469.696, "min": 1500.0, "max": 2250.0},
+    {"current": 1593.287, "min": 1500.0, "max": 2250.0},
+    {"current": 2952.788, "min": 1500.0, "max": 2250.0},
+    {"current": 2941.794, "min": 1500.0, "max": 2250.0},
+    {"current": 2949.289, "min": 1500.0, "max": 2250.0},
+    {"current": 1684.056, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.118, "min": 1500.0, "max": 2250.0},
+    {"current": 2944.309, "min": 1500.0, "max": 2250.0},
+    {"current": 2952.465, "min": 1500.0, "max": 2250.0},
+    {"current": 1682.593, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.404, "min": 1500.0, "max": 2250.0},
+    {"current": 2960.474, "min": 1500.0, "max": 2250.0},
+    {"current": 2948.539, "min": 1500.0, "max": 2250.0},
+    {"current": 2953.446, "min": 1500.0, "max": 2250.0},
+    {"current": 2968.584, "min": 1500.0, "max": 2250.0},
+    {"current": 2968.447, "min": 1500.0, "max": 2250.0},
+    {"current": 2954.762, "min": 1500.0, "max": 2250.0},
+    {"current": 2936.182, "min": 1500.0, "max": 2250.0},
+    {"current": 2944.46, "min": 1500.0, "max": 2250.0},
+    {"current": 2944.181, "min": 1500.0, "max": 2250.0},
+    {"current": 2958.541, "min": 1500.0, "max": 2250.0},
+    {"current": 2948.457, "min": 1500.0, "max": 2250.0},
+    {"current": 2953.617, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.23, "min": 1500.0, "max": 2250.0},
+    {"current": 2948.216, "min": 1500.0, "max": 2250.0},
+    {"current": 2941.896, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.828, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.837, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.776, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.719, "min": 1500.0, "max": 2250.0},
+    {"current": 2938.813, "min": 1500.0, "max": 2250.0},
+    {"current": 2938.962, "min": 1500.0, "max": 2250.0},
+    {"current": 2939.034, "min": 1500.0, "max": 2250.0},
+    {"current": 2925.934, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.86, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.866, "min": 1500.0, "max": 2250.0},
+    {"current": 2938.896, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.875, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.101, "min": 1500.0, "max": 2250.0},
+    {"current": 2949.598, "min": 1500.0, "max": 2250.0},
+    {"current": 2935.583, "min": 1500.0, "max": 2250.0},
+    {"current": 2942.305, "min": 1500.0, "max": 2250.0},
+    {"current": 2853.409, "min": 1500.0, "max": 2250.0},
+    {
+      "current": 2940.957,
+      "min": 1500.0,
+      "max": 2250.0
|
672 |
+
},
|
673 |
+
{
|
674 |
+
"current": 2940.961,
|
675 |
+
"min": 1500.0,
|
676 |
+
"max": 2250.0
|
677 |
+
},
|
678 |
+
{
|
679 |
+
"current": 2949.248,
|
680 |
+
"min": 1500.0,
|
681 |
+
"max": 2250.0
|
682 |
+
},
|
683 |
+
{
|
684 |
+
"current": 1680.132,
|
685 |
+
"min": 1500.0,
|
686 |
+
"max": 2250.0
|
687 |
+
},
|
688 |
+
{
|
689 |
+
"current": 2673.469,
|
690 |
+
"min": 1500.0,
|
691 |
+
"max": 2250.0
|
692 |
+
},
|
693 |
+
{
|
694 |
+
"current": 2940.203,
|
695 |
+
"min": 1500.0,
|
696 |
+
"max": 2250.0
|
697 |
+
},
|
698 |
+
{
|
699 |
+
"current": 1681.826,
|
700 |
+
"min": 1500.0,
|
701 |
+
"max": 2250.0
|
702 |
+
}
|
703 |
+
],
|
704 |
+
"disk": {
|
705 |
+
"/": {
|
706 |
+
"total": 1757.8785285949707,
|
707 |
+
"used": 1516.8664588928223
|
708 |
+
}
|
709 |
+
},
|
710 |
+
"gpu": "NVIDIA A100-SXM4-80GB",
|
711 |
+
"gpu_count": 5,
|
712 |
+
"gpu_devices": [
|
713 |
+
{
|
714 |
+
"name": "NVIDIA A100-SXM4-80GB",
|
715 |
+
"memory_total": 85899345920
|
716 |
+
},
|
717 |
+
{
|
718 |
+
"name": "NVIDIA A100-SXM4-80GB",
|
719 |
+
"memory_total": 85899345920
|
720 |
+
},
|
721 |
+
{
|
722 |
+
"name": "NVIDIA A100-SXM4-80GB",
|
723 |
+
"memory_total": 85899345920
|
724 |
+
},
|
725 |
+
{
|
726 |
+
"name": "NVIDIA DGX Display",
|
727 |
+
"memory_total": 4294967296
|
728 |
+
},
|
729 |
+
{
|
730 |
+
"name": "NVIDIA A100-SXM4-80GB",
|
731 |
+
"memory_total": 85899345920
|
732 |
+
}
|
733 |
+
],
|
734 |
+
"memory": {
|
735 |
+
"total": 503.5396919250488
|
736 |
+
}
|
737 |
+
}
|
wandb/run-20240327_141031-aoxf8fxn/files/wandb-summary.json
ADDED
@@ -0,0 +1 @@
1 + {"train/loss": 0.1234, "train/grad_norm": 1.3226145505905151, "train/learning_rate": 8.900000000000001e-05, "train/epoch": 4.5, "train/global_step": 1000, "_timestamp": 1711552087.6420767, "_runtime": 7056.071107625961, "_step": 40, "eval/loss": 0.4784834682941437, "eval/wer": 0.4381905276983698, "eval/runtime": 1224.3495, "eval/samples_per_second": 2.551, "eval/steps_per_second": 0.08}