> Training Environment:
| > Current device: 0
| > Num. of GPUs: 1
| > Num. of CPUs: 1
| > Num. of Torch Threads: 1
| > Torch seed: 54321
| > Torch CUDNN: True
| > Torch CUDNN deterministic: False
| > Torch CUDNN benchmark: False
> Start Tensorboard: tensorboard --logdir=/home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000
> Model has 28610257 parameters
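For reference, a parameter count like the one above can be reproduced for any PyTorch module with a one-liner (a generic sketch; `model` is a stand-in for whatever network this run instantiates, not a name from the log):

    import torch

    def count_parameters(model: torch.nn.Module) -> int:
        # Sum of element counts over every parameter tensor in the module.
        return sum(p.numel() for p in model.parameters())

    # print(f"> Model has {count_parameters(model)} parameters")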
> EPOCH: 0/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000

> TRAINING (2023-06-26 05:28:42)

 --> STEP: 0/406 -- GLOBAL_STEP: 0 | > current_lr: 0.00000 | > step_time: 79.16690 (79.16692) | > loader_time: 5.74650 (5.74649)
 --> STEP: 25/406 -- GLOBAL_STEP: 25 | > loss: 3.94976 (3.78406) | > log_mle: 0.82513 (0.82313) | > loss_dur: 3.12463 (2.96093) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 11.25827 (10.15753) | > current_lr: 0.00000 | > step_time: 0.59400 (2.96324) | > loader_time: 1.51590 (2.03268)
 --> STEP: 50/406 -- GLOBAL_STEP: 50 | > loss: 3.82559 (3.74907) | > log_mle: 0.82458 (0.82480) | > loss_dur: 3.00102 (2.92427) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.99044 (10.56035) | > current_lr: 0.00000 | > step_time: 0.38640 (1.75328) | > loader_time: 1.81580 (2.14834)
 --> STEP: 75/406 -- GLOBAL_STEP: 75 | > loss: 3.81644 (3.74541) | > log_mle: 0.83158 (0.82491) | > loss_dur: 2.98486 (2.92050) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.94210 (10.67450) | > current_lr: 0.00000 | > step_time: 0.44110 (1.31910) | > loader_time: 2.26730 (2.16377)
 --> STEP: 100/406 -- GLOBAL_STEP: 100 | > loss: 3.74355 (3.73695) | > log_mle: 0.83085 (0.82495) | > loss_dur: 2.91271 (2.91200) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.89170 (10.71725) | > current_lr: 0.00000 | > step_time: 0.47930 (1.11325) | > loader_time: 1.94830 (2.16166)
 --> STEP: 125/406 -- GLOBAL_STEP: 125 | > loss: 3.71598 (3.73164) | > log_mle: 0.82804 (0.82474) | > loss_dur: 2.88794 (2.90691) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.74962 (10.73911) | > current_lr: 0.00000 | > step_time: 0.51620 (0.99739) | > loader_time: 2.05700 (2.18027)
 --> STEP: 150/406 -- GLOBAL_STEP: 150 | > loss: 3.71293 (3.72814) | > log_mle: 0.82601 (0.82459) | > loss_dur: 2.88692 (2.90355) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.88592 (10.74974) | > current_lr: 0.00000 | > step_time: 0.59460 (0.92529) | > loader_time: 2.90450 (2.24011)
 --> STEP: 175/406 -- GLOBAL_STEP: 175 | > loss: 3.72749 (3.72533) | > log_mle: 0.82417 (0.82436) | > loss_dur: 2.90331 (2.90097) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.79335 (10.75303) | > current_lr: 0.00000 | > step_time: 0.62420 (0.87525) | > loader_time: 3.14640 (2.34661)
 --> STEP: 200/406 -- GLOBAL_STEP: 200 | > loss: 3.68366 (3.72319) | > log_mle: 0.82602 (0.82422) | > loss_dur: 2.85764 (2.89897) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.71187 (10.75198) | > current_lr: 0.00000 | > step_time: 0.61540 (0.84143) | > loader_time: 2.58790 (2.41791)
 --> STEP: 225/406 -- GLOBAL_STEP: 225 | > loss: 3.69834 (3.71959) | > log_mle: 0.82262 (0.82419) | > loss_dur: 2.87573 (2.89540) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.67498 (10.74564) | > current_lr: 0.00000 | > step_time: 0.67190 (0.82011) | > loader_time: 2.88800 (2.47938)
 --> STEP: 250/406 -- GLOBAL_STEP: 250 | > loss: 3.72714 (3.71780) | > log_mle: 0.82394 (0.82417) | > loss_dur: 2.90320 (2.89363) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.66663 (10.74011) | > current_lr: 0.00000 | > step_time: 1.09020 (0.81621) | > loader_time: 3.26330 (2.52943)
 --> STEP: 275/406 -- GLOBAL_STEP: 275 | > loss: 3.71350 (3.71480) | > log_mle: 0.82473 (0.82383) | > loss_dur: 2.88877 (2.89096) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.71189 (10.73030) | > current_lr: 0.00000 | > step_time: 1.17890 (0.81986) | > loader_time: 2.63030 (2.56145)
 --> STEP: 300/406 -- GLOBAL_STEP: 300 | > loss: 3.68987 (3.71059) | > log_mle: 0.82339 (0.82367) | > loss_dur: 2.86647 (2.88692) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.60767 (10.71504) | > current_lr: 0.00000 | > step_time: 0.79850 (0.82163) | > loader_time: 2.36810 (2.58497)
 --> STEP: 325/406 -- GLOBAL_STEP: 325 | > loss: 3.64184 (3.70649) | > log_mle: 0.82212 (0.82346) | > loss_dur: 2.81972 (2.88303) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.46201 (10.69954) | > current_lr: 0.00000 | > step_time: 0.89300 (0.82556) | > loader_time: 3.39350 (2.62371)
 --> STEP: 350/406 -- GLOBAL_STEP: 350 | > loss: 3.60014 (3.70447) | > log_mle: 0.81764 (0.82326) | > loss_dur: 2.78251 (2.88121) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.33894 (10.68521) | > current_lr: 0.00000 | > step_time: 0.97850 (0.83363) | > loader_time: 3.19540 (2.66616)
 --> STEP: 375/406 -- GLOBAL_STEP: 375 | > loss: 3.68202 (3.69995) | > log_mle: 0.81569 (0.82298) | > loss_dur: 2.86632 (2.87697) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.45899 (10.66681) | > current_lr: 0.00000 | > step_time: 0.81860 (0.84132) | > loader_time: 3.41660 (2.70840)
 --> STEP: 400/406 -- GLOBAL_STEP: 400 | > loss: 3.62027 (3.69524) | > log_mle: 0.81756 (0.82280) | > loss_dur: 2.80270 (2.87244) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.27854 (10.64640) | > current_lr: 0.00000 | > step_time: 1.14200 (0.85254) | > loader_time: 3.23870 (2.74440)

> EVALUATION

 --> EVAL PERFORMANCE | > avg_loader_time: 2.00473 (+0.00000) | > avg_loss: 3.65562 (+0.00000) | > avg_log_mle: 0.81726 (+0.00000) | > avg_loss_dur: 2.83836 (+0.00000)

> BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_406.pth
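A note on the loss columns: at every step, loss is exactly the sum of log_mle and loss_dur (at step 25 above, 0.82513 + 3.12463 = 3.94976). Despite the "tacotron" folder name, these metric names match a Glow-TTS-style objective, so, assuming the usual naming, the quantity being minimized is

    \mathcal{L} = \mathcal{L}_{\mathrm{log\_mle}} + \mathcal{L}_{\mathrm{dur}},
    \qquad \text{e.g.} \quad 0.82513 + 3.12463 = 3.94976,

with the first term the flow's negative log-likelihood and the second the duration-predictor loss.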
> EPOCH: 1/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000

> TRAINING (2023-06-26 05:55:46)

 --> STEP: 19/406 -- GLOBAL_STEP: 425 | > loss: 3.65962 (3.68245) | > log_mle: 0.81672 (0.81579) | > loss_dur: 2.84290 (2.86665) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.34681 (10.20771) | > current_lr: 0.00000 | > step_time: 0.31550 (0.34176) | > loader_time: 2.30600 (1.67893)
 --> STEP: 44/406 -- GLOBAL_STEP: 450 | > loss: 3.53872 (3.63671) | > log_mle: 0.82075 (0.81779) | > loss_dur: 2.71797 (2.81892) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.01404 (10.17054) | > current_lr: 0.00000 | > step_time: 0.33360 (0.35092) | > loader_time: 2.36250 (1.92916)
 --> STEP: 69/406 -- GLOBAL_STEP: 475 | > loss: 3.65216 (3.62507) | > log_mle: 0.82403 (0.81814) | > loss_dur: 2.82813 (2.80693) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.18318 (10.14952) | > current_lr: 0.00000 | > step_time: 0.44810 (0.37412) | > loader_time: 1.93760 (2.01139)
 --> STEP: 94/406 -- GLOBAL_STEP: 500 | > loss: 3.55572 (3.61176) | > log_mle: 0.81874 (0.81788) | > loss_dur: 2.73698 (2.79388) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.00861 (10.11863) | > current_lr: 0.00000 | > step_time: 35.74170 (0.79270) | > loader_time: 9.70000 (2.29221)
 --> STEP: 119/406 -- GLOBAL_STEP: 525 | > loss: 3.56023 (3.59951) | > log_mle: 0.81293 (0.81769) | > loss_dur: 2.74730 (2.78182) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.96277 (10.08136) | > current_lr: 0.00000 | > step_time: 0.52790 (0.75709) | > loader_time: 2.49130 (2.36663)
 --> STEP: 144/406 -- GLOBAL_STEP: 550 | > loss: 3.53837 (3.59365) | > log_mle: 0.81460 (0.81737) | > loss_dur: 2.72377 (2.77628) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.80126 (10.05367) | > current_lr: 0.00000 | > step_time: 0.59860 (0.72906) | > loader_time: 2.25370 (2.36543)
 --> STEP: 169/406 -- GLOBAL_STEP: 575 | > loss: 3.56250 (3.58757) | > log_mle: 0.82209 (0.81706) | > loss_dur: 2.74041 (2.77051) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.79955 (10.02330) | > current_lr: 0.00000 | > step_time: 0.58670 (0.70945) | > loader_time: 2.40860 (2.40226)
 --> STEP: 194/406 -- GLOBAL_STEP: 600 | > loss: 3.57549 (3.58195) | > log_mle: 0.81772 (0.81671) | > loss_dur: 2.75778 (2.76524) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.84931 (9.99123) | > current_lr: 0.00000 | > step_time: 0.66660 (0.70359) | > loader_time: 2.58820 (2.44928)
 --> STEP: 219/406 -- GLOBAL_STEP: 625 | > loss: 3.44302 (3.57462) | > log_mle: 0.81825 (0.81648) | > loss_dur: 2.62477 (2.75814) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.52012 (9.95608) | > current_lr: 0.00000 | > step_time: 0.67770 (0.70093) | > loader_time: 2.60570 (2.48239)
 --> STEP: 244/406 -- GLOBAL_STEP: 650 | > loss: 3.55473 (3.56906) | > log_mle: 0.81139 (0.81628) | > loss_dur: 2.74335 (2.75278) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.70034 (9.92165) | > current_lr: 0.00000 | > step_time: 0.75780 (0.70252) | > loader_time: 2.57140 (2.53127)
 --> STEP: 269/406 -- GLOBAL_STEP: 675 | > loss: 3.45832 (3.56188) | > log_mle: 0.81790 (0.81585) | > loss_dur: 2.64042 (2.74603) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.41434 (9.88276) | > current_lr: 0.00000 | > step_time: 0.70590 (0.71185) | > loader_time: 2.68980 (2.57178)
 --> STEP: 294/406 -- GLOBAL_STEP: 700 | > loss: 3.47896 (3.55637) | > log_mle: 0.81275 (0.81550) | > loss_dur: 2.66621 (2.74086) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.34280 (9.84752) | > current_lr: 0.00000 | > step_time: 1.14030 (0.72968) | > loader_time: 2.62780 (2.59764)
 --> STEP: 319/406 -- GLOBAL_STEP: 725 | > loss: 3.45616 (3.54955) | > log_mle: 0.80658 (0.81518) | > loss_dur: 2.64957 (2.73437) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.27808 (9.80871) | > current_lr: 0.00000 | > step_time: 0.90380 (0.74402) | > loader_time: 3.07470 (2.63182)
 --> STEP: 344/406 -- GLOBAL_STEP: 750 | > loss: 3.43717 (3.54397) | > log_mle: 0.80948 (0.81482) | > loss_dur: 2.62770 (2.72915) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.14162 (9.76920) | > current_lr: 0.00000 | > step_time: 0.94130 (0.76317) | > loader_time: 3.76150 (2.67482)
 --> STEP: 369/406 -- GLOBAL_STEP: 775 | > loss: 3.38919 (3.53834) | > log_mle: 0.80674 (0.81440) | > loss_dur: 2.58245 (2.72393) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.08179 (9.73207) | > current_lr: 0.00000 | > step_time: 1.22510 (0.78618) | > loader_time: 3.38900 (2.70520)
 --> STEP: 394/406 -- GLOBAL_STEP: 800 | > loss: 3.44702 (3.53122) | > log_mle: 0.81215 (0.81407) | > loss_dur: 2.63487 (2.71715) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.08220 (9.68997) | > current_lr: 0.00000 | > step_time: 0.91670 (0.88850) | > loader_time: 2.40840 (2.71484)

> EVALUATION

 --> EVAL PERFORMANCE | > avg_loader_time: 1.14245 (-0.86228) | > avg_loss: 3.43748 (-0.21814) | > avg_log_mle: 0.80588 (-0.01138) | > avg_loss_dur: 2.63160 (-0.20676)

> BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_812.pth
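Two reading aids for these lines. In the step rows, each metric prints as current value followed by its running average over the epoch so far; in the EVAL PERFORMANCE row, the parenthesized value is instead the change against the previous epoch's evaluation, which is why epoch 0 shows +0.00000 everywhere and epoch 1's avg_loss reads 3.43748 (-0.21814), i.e. down from 3.65562. A minimal sketch of such a meter (a hypothetical helper, not the trainer's actual class):

    class RunningAverage:
        # Formats "current (running average)" pairs like the step rows above.
        def __init__(self) -> None:
            self.total = 0.0
            self.count = 0

        def update(self, value: float) -> str:
            self.total += value
            self.count += 1
            return f"{value:.5f} ({self.total / self.count:.5f})"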
> EPOCH: 2/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000

> TRAINING (2023-06-26 06:21:20)

 --> STEP: 13/406 -- GLOBAL_STEP: 825 | > loss: 3.37253 (3.46965) | > log_mle: 0.79783 (0.80486) | > loss_dur: 2.57470 (2.66479) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 8.74653 (8.86658) | > current_lr: 0.00000 | > step_time: 1.25280 (0.91477) | > loader_time: 1.01610 (1.06082)
 --> STEP: 38/406 -- GLOBAL_STEP: 850 | > loss: 3.46289 (3.42593) | > log_mle: 0.80458 (0.80601) | > loss_dur: 2.65831 (2.61992) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 8.86531 (8.80118) | > current_lr: 0.00000 | > step_time: 0.70610 (0.80214) | > loader_time: 1.79500 (1.42786)
 --> STEP: 63/406 -- GLOBAL_STEP: 875 | > loss: 3.44279 (3.40220) | > log_mle: 0.80185 (0.80557) | > loss_dur: 2.64094 (2.59663) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 8.75761 (8.71679) | > current_lr: 0.00000 | > step_time: 0.51010 (0.80132) | > loader_time: 1.59220 (1.56598)
 --> STEP: 88/406 -- GLOBAL_STEP: 900 | > loss: 3.29054 (3.38590) | > log_mle: 0.80431 (0.80497) | > loss_dur: 2.48623 (2.58093) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 8.30320 (8.63729) | > current_lr: 0.00000 | > step_time: 0.78830 (0.80160) | > loader_time: 2.12130 (1.68984)
 --> STEP: 113/406 -- GLOBAL_STEP: 925 | > loss: 3.32069 (3.36672) | > log_mle: 0.80243 (0.80420) | > loss_dur: 2.51826 (2.56253) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 8.15448 (8.54417) | > current_lr: 0.00000 | > step_time: 0.47090 (0.74851) | > loader_time: 2.15950 (1.80201)
 --> STEP: 138/406 -- GLOBAL_STEP: 950 | > loss: 3.25842 (3.35393) | > log_mle: 0.79439 (0.80315) | > loss_dur: 2.46403 (2.55078) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.86542 (8.45998) | > current_lr: 0.00000 | > step_time: 0.53800 (0.70738) | > loader_time: 2.58990 (1.88918)
 --> STEP: 163/406 -- GLOBAL_STEP: 975 | > loss: 3.25329 (3.34086) | > log_mle: 0.79624 (0.80204) | > loss_dur: 2.45705 (2.53882) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.75901 (8.37276) | > current_lr: 0.00000 | > step_time: 0.51680 (0.68493) | > loader_time: 2.52300 (1.99339)
 --> STEP: 188/406 -- GLOBAL_STEP: 1000 | > loss: 3.30118 (3.32816) | > log_mle: 0.79379 (0.80102) | > loss_dur: 2.50739 (2.52714) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.66504 (8.28391) | > current_lr: 0.00000 | > step_time: 0.56480 (0.67324) | > loader_time: 2.79130 (2.10796)
 --> STEP: 213/406 -- GLOBAL_STEP: 1025 | > loss: 3.17445 (3.31586) | > log_mle: 0.78797 (0.80000) | > loss_dur: 2.38647 (2.51587) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.41179 (8.19825) | > current_lr: 0.00000 | > step_time: 0.61190 (0.66794) | > loader_time: 2.81980 (2.19283)
 --> STEP: 238/406 -- GLOBAL_STEP: 1050 | > loss: 3.25268 (3.30448) | > log_mle: 0.79221 (0.79915) | > loss_dur: 2.46047 (2.50533) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.41109 (8.11253) | > current_lr: 0.00000 | > step_time: 0.66550 (0.67002) | > loader_time: 2.49960 (2.27166)
 --> STEP: 263/406 -- GLOBAL_STEP: 1075 | > loss: 3.15904 (3.29416) | > log_mle: 0.78633 (0.79799) | > loss_dur: 2.37270 (2.49617) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.08219 (8.02792) | > current_lr: 0.00000 | > step_time: 0.67600 (0.67596) | > loader_time: 2.53420 (2.33123)
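The step counters also line up arithmetically: a row is printed every 25 global steps, and with 406 steps per epoch the within-epoch index of a printed row is

    \mathrm{STEP} = \mathrm{GLOBAL\_STEP} - 406 \cdot \mathrm{EPOCH},
    \qquad \text{e.g.} \quad 825 - 2 \cdot 406 = 13,

which is why the first printed step of each epoch drifts (0, 19, 13, 7, 1, ...): 406 is not a multiple of 25.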
 --> STEP: 288/406 -- GLOBAL_STEP: 1100 | > loss: 3.11665 (3.28435) | > log_mle: 0.78100 (0.79680) | > loss_dur: 2.33564 (2.48755) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.88171 (7.94364) | > current_lr: 0.00000 | > step_time: 0.89500 (0.68578) | > loader_time: 2.95730 (2.38636)
 --> STEP: 313/406 -- GLOBAL_STEP: 1125 | > loss: 3.13315 (3.27499) | > log_mle: 0.78020 (0.79574) | > loss_dur: 2.35295 (2.47926) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.76387 (7.86089) | > current_lr: 0.00000 | > step_time: 0.74600 (0.69217) | > loader_time: 2.83380 (2.42366)
 --> STEP: 338/406 -- GLOBAL_STEP: 1150 | > loss: 3.15742 (3.26641) | > log_mle: 0.77964 (0.79451) | > loss_dur: 2.37778 (2.47190) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.68452 (7.78001) | > current_lr: 0.00000 | > step_time: 0.87090 (0.71013) | > loader_time: 3.16410 (2.48275)
 --> STEP: 363/406 -- GLOBAL_STEP: 1175 | > loss: 3.13071 (3.25907) | > log_mle: 0.77394 (0.79327) | > loss_dur: 2.35676 (2.46580) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.54849 (7.70225) | > current_lr: 0.00000 | > step_time: 1.05840 (0.73340) | > loader_time: 2.99350 (2.53001)
 --> STEP: 388/406 -- GLOBAL_STEP: 1200 | > loss: 3.10237 (3.25019) | > log_mle: 0.77581 (0.79206) | > loss_dur: 2.32656 (2.45813) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.39775 (7.62358) | > current_lr: 0.00000 | > step_time: 1.80780 (0.76340) | > loader_time: 0.81420 (2.56988)

> EVALUATION

 --> EVAL PERFORMANCE | > avg_loader_time: 1.16578 (+0.02333) | > avg_loss: 3.08469 (-0.35279) | > avg_log_mle: 0.76893 (-0.03695) | > avg_loss_dur: 2.31576 (-0.31584)

> BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_1218.pth

> EPOCH: 3/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000

> TRAINING (2023-06-26 06:44:50)

 --> STEP: 7/406 -- GLOBAL_STEP: 1225 | > loss: 3.00657 (3.13080) | > log_mle: 0.77299 (0.77032) | > loss_dur: 2.23359 (2.36048) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.09316 (6.30876) | > current_lr: 0.00000 | > step_time: 0.23340 (0.36592) | > loader_time: 1.24660 (1.62296)
 --> STEP: 32/406 -- GLOBAL_STEP: 1250 | > loss: 3.20895 (3.12256) | > log_mle: 0.76949 (0.77058) | > loss_dur: 2.43945 (2.35198) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.40050 (6.28856) | > current_lr: 0.00000 | > step_time: 0.30770 (0.32807) | > loader_time: 1.71630 (1.79281)
 --> STEP: 57/406 -- GLOBAL_STEP: 1275 | > loss: 3.12532 (3.11343) | > log_mle: 0.76483 (0.76951) | > loss_dur: 2.36049 (2.34391) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.15928 (6.22955) | > current_lr: 0.00000 | > step_time: 0.43540 (0.35620) | > loader_time: 2.23450 (1.96996)
 --> STEP: 82/406 -- GLOBAL_STEP: 1300 | > loss: 3.04734 (3.11453) | > log_mle: 0.76212 (0.76764) | > loss_dur: 2.28522 (2.34689) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.93814 (6.19482) | > current_lr: 0.00000 | > step_time: 0.44260 (0.38278) | > loader_time: 2.55000 (2.02718)
 --> STEP: 107/406 -- GLOBAL_STEP: 1325 | > loss: 3.01948 (3.10480) | > log_mle: 0.75827 (0.76545) | > loss_dur: 2.26121 (2.33935) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.90751 (6.14338) | > current_lr: 0.00000 | > step_time: 0.49910 (0.40926) | > loader_time: 2.48440 (2.06977)
 --> STEP: 132/406 -- GLOBAL_STEP: 1350 | > loss: 3.10430 (3.09711) | > log_mle: 0.74660 (0.76298) | > loss_dur: 2.35770 (2.33413) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.98590 (6.09694) | > current_lr: 0.00000 | > step_time: 0.52950 (0.43270) | > loader_time: 2.43140 (2.12237)
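A performance note: throughout the run, loader_time (typically 2-3 s) dwarfs step_time (mostly under 1 s), so the GPU spends most of each step waiting on data, which is consistent with the "Num. of CPUs: 1" line in the header. The standard PyTorch mitigation is to give the DataLoader worker processes; a generic sketch of that API (placeholder values, not this trainer's config keys):

    from torch.utils.data import DataLoader

    loader = DataLoader(
        dataset,                  # placeholder: the run's training dataset
        batch_size=32,            # placeholder value
        num_workers=4,            # parallel workers for loading/collation
        pin_memory=True,          # faster host-to-GPU copies
        persistent_workers=True,  # keep workers alive across epochs
    )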
 --> STEP: 157/406 -- GLOBAL_STEP: 1375 | > loss: 3.14100 (3.09132) | > log_mle: 0.74779 (0.76067) | > loss_dur: 2.39321 (2.33065) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.94515 (6.05538) | > current_lr: 0.00000 | > step_time: 0.55210 (0.45586) | > loader_time: 2.89750 (2.19223)
 --> STEP: 182/406 -- GLOBAL_STEP: 1400 | > loss: 3.05443 (3.08360) | > log_mle: 0.73520 (0.75825) | > loss_dur: 2.31924 (2.32535) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.76718 (6.01263) | > current_lr: 0.00000 | > step_time: 0.65590 (0.48784) | > loader_time: 2.56450 (2.26756)
 --> STEP: 207/406 -- GLOBAL_STEP: 1425 | > loss: 3.02998 (3.07539) | > log_mle: 0.73455 (0.75598) | > loss_dur: 2.29543 (2.31941) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.66429 (5.97146) | > current_lr: 0.00000 | > step_time: 0.67860 (0.50683) | > loader_time: 2.83310 (2.31920)
 --> STEP: 232/406 -- GLOBAL_STEP: 1450 | > loss: 2.97410 (3.06646) | > log_mle: 0.72626 (0.75371) | > loss_dur: 2.24784 (2.31275) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.57358 (5.93199) | > current_lr: 0.00000 | > step_time: 0.70400 (0.52790) | > loader_time: 2.48750 (2.35931)
 --> STEP: 257/406 -- GLOBAL_STEP: 1475 | > loss: 2.98643 (3.05869) | > log_mle: 0.72606 (0.75142) | > loss_dur: 2.26038 (2.30727) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.56014 (5.89580) | > current_lr: 0.00000 | > step_time: 0.70830 (0.54720) | > loader_time: 2.70140 (2.39842)
 --> STEP: 282/406 -- GLOBAL_STEP: 1500 | > loss: 2.95733 (3.04967) | > log_mle: 0.72684 (0.74906) | > loss_dur: 2.23049 (2.30061) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.50368 (5.86039) | > current_lr: 0.00000 | > step_time: 0.77950 (0.56854) | > loader_time: 3.01980 (2.43420)
 --> STEP: 307/406 -- GLOBAL_STEP: 1525 | > loss: 2.93742 (3.04123) | > log_mle: 0.71872 (0.74675) | > loss_dur: 2.21870 (2.29448) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.44606 (5.82725) | > current_lr: 0.00000 | > step_time: 0.80860 (0.58726) | > loader_time: 2.78570 (2.46268)
 --> STEP: 332/406 -- GLOBAL_STEP: 1550 | > loss: 2.88498 (3.03201) | > log_mle: 0.70957 (0.74435) | > loss_dur: 2.17540 (2.28766) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.30959 (5.79475) | > current_lr: 0.00000 | > step_time: 0.83630 (0.60682) | > loader_time: 2.66760 (2.51126)
 --> STEP: 357/406 -- GLOBAL_STEP: 1575 | > loss: 2.87253 (3.02456) | > log_mle: 0.71189 (0.74192) | > loss_dur: 2.16064 (2.28264) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.29678 (5.76602) | > current_lr: 0.00000 | > step_time: 0.91770 (0.62535) | > loader_time: 3.48580 (2.56067)
 --> STEP: 382/406 -- GLOBAL_STEP: 1600 | > loss: 2.87543 (3.01428) | > log_mle: 0.70275 (0.73953) | > loss_dur: 2.17267 (2.27476) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.26271 (5.73362) | > current_lr: 0.00000 | > step_time: 1.08450 (0.64811) | > loader_time: 3.32250 (2.61155)

> EVALUATION

 --> EVAL PERFORMANCE | > avg_loader_time: 1.21142 (+0.04564) | > avg_loss: 2.80198 (-0.28271) | > avg_log_mle: 0.69711 (-0.07182) | > avg_loss_dur: 2.10487 (-0.21089)

> BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_1624.pth

> EPOCH: 4/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000

> TRAINING (2023-06-26 07:08:06)

 --> STEP: 1/406 -- GLOBAL_STEP: 1625 | > loss: 2.87741 (2.87741) | > log_mle: 0.70139 (0.70139) | > loss_dur: 2.17602 (2.17602) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.25176 (5.25176) | > current_lr: 0.00000 | > step_time: 0.27200 (0.27203) | > loader_time: 1.52990 (1.52985)
 --> STEP: 26/406 -- GLOBAL_STEP: 1650 | > loss: 2.80450 (2.81334) | > log_mle: 0.70088 (0.70594) | > loss_dur: 2.10362 (2.10740) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.16089 (5.16732) | > current_lr: 0.00000 | > step_time: 0.33400 (0.31330) | > loader_time: 1.90390 (1.84449)
 --> STEP: 51/406 -- GLOBAL_STEP: 1675 | > loss: 2.75381 (2.80003) | > log_mle: 0.69207 (0.70356) | > loss_dur: 2.06174 (2.09647) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.97318 (5.14317) | > current_lr: 0.00000 | > step_time: 0.43440 (0.34511) | > loader_time: 1.94680 (1.97950)
 --> STEP: 76/406 -- GLOBAL_STEP: 1700 | > loss: 2.78072 (2.79140) | > log_mle: 0.69540 (0.69977) | > loss_dur: 2.08532 (2.09162) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.09202 (5.11958) | > current_lr: 0.00000 | > step_time: 0.48170 (0.37742) | > loader_time: 2.30280 (2.05244)
 --> STEP: 101/406 -- GLOBAL_STEP: 1725 | > loss: 2.75655 (2.77599) | > log_mle: 0.67863 (0.69598) | > loss_dur: 2.07792 (2.08001) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.98949 (5.08328) | > current_lr: 0.00000 | > step_time: 0.53030 (0.40527) | > loader_time: 2.46830 (2.15380)
 --> STEP: 126/406 -- GLOBAL_STEP: 1750 | > loss: 2.62607 (2.76086) | > log_mle: 0.67366 (0.69207) | > loss_dur: 1.95241 (2.06879) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.77410 (5.04698) | > current_lr: 0.00000 | > step_time: 0.54830 (0.43114) | > loader_time: 2.29170 (2.19794)
 --> STEP: 151/406 -- GLOBAL_STEP: 1775 | > loss: 2.67206 (2.75022) | > log_mle: 0.66885 (0.68824) | > loss_dur: 2.00320 (2.06197) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.79990 (5.01798) | > current_lr: 0.00000 | > step_time: 0.56980 (0.45234) | > loader_time: 2.26750 (2.20793)
 --> STEP: 176/406 -- GLOBAL_STEP: 1800 | > loss: 2.64068 (2.73596) | > log_mle: 0.65875 (0.68452) | > loss_dur: 1.98193 (2.05144) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.75045 (4.98149) | > current_lr: 0.00000 | > step_time: 0.56340 (0.47237) | > loader_time: 2.90420 (2.27342)
 --> STEP: 201/406 -- GLOBAL_STEP: 1825 | > loss: 2.57232 (2.72281) | > log_mle: 0.65837 (0.68091) | > loss_dur: 1.91395 (2.04189) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.60655 (4.94764) | > current_lr: 0.00000 | > step_time: 0.67540 (0.49286) | > loader_time: 2.63220 (2.31817)
 --> STEP: 226/406 -- GLOBAL_STEP: 1850 | > loss: 2.58292 (2.70914) | > log_mle: 0.64114 (0.67736) | > loss_dur: 1.94178 (2.03178) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.59125 (4.91463) | > current_lr: 0.00000 | > step_time: 0.65050 (0.51328) | > loader_time: 2.87230 (2.36121)
 --> STEP: 251/406 -- GLOBAL_STEP: 1875 | > loss: 2.62632 (2.69743) | > log_mle: 0.63875 (0.67391) | > loss_dur: 1.98757 (2.02352) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.69390 (4.88464) | > current_lr: 0.00000 | > step_time: 0.71500 (0.53115) | > loader_time: 2.33360 (2.39324)
 --> STEP: 276/406 -- GLOBAL_STEP: 1900 | > loss: 2.53220 (2.68452) | > log_mle: 0.63297 (0.67031) | > loss_dur: 1.89923 (2.01420) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.51055 (4.85343) | > current_lr: 0.00000 | > step_time: 0.77160 (0.55069) | > loader_time: 2.90480 (2.42575)
 --> STEP: 301/406 -- GLOBAL_STEP: 1925 | > loss: 2.53341 (2.67228) | > log_mle: 0.62215 (0.66689) | > loss_dur: 1.91126 (2.00539) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.43819 (4.82291) | > current_lr: 0.00000 | > step_time: 0.77920 (0.57104) | > loader_time: 2.71130 (2.45730)
 --> STEP: 326/406 -- GLOBAL_STEP: 1950 | > loss: 2.46752 (2.65957) | > log_mle: 0.61535 (0.66345) | > loss_dur: 1.85218 (1.99612) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.35987 (4.79345) | > current_lr: 0.00000 | > step_time: 0.83070 (0.58932) | > loader_time: 2.98860 (2.48715)
 --> STEP: 351/406 -- GLOBAL_STEP: 1975 | > loss: 2.50517 (2.64904) | > log_mle: 0.61377 (0.65999) | > loss_dur: 1.89140 (1.98905) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.41112 (4.76648) | > current_lr: 0.00000 | > step_time: 0.85910 (0.60910) | > loader_time: 2.99990 (2.53192)
 --> STEP: 376/406 -- GLOBAL_STEP: 2000 | > loss: 2.43601 (2.63688) | > log_mle: 0.60993 (0.65655) | > loss_dur: 1.82608 (1.98032) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.26871 (4.73842) | > current_lr: 0.00000 | > step_time: 0.91590 (0.62982) | > loader_time: 2.86120 (2.57925)
 --> STEP: 401/406 -- GLOBAL_STEP: 2025 | > loss: 2.41235 (2.62498) | > log_mle: 0.60259 (0.65324) | > loss_dur: 1.80976 (1.97174) | > amp_scaler: 32768.00000 (16996.86783) | > grad_norm: 4.21382 (4.71053) | > current_lr: 0.00000 | > step_time: 1.16610 (0.66068) | > loader_time: 3.42280 (2.62975)

> EVALUATION

 --> EVAL PERFORMANCE | > avg_loader_time: 1.18578 (-0.02564) | > avg_loss: 2.34633 (-0.45565) | > avg_log_mle: 0.59978 (-0.09732) | > avg_loss_dur: 1.74655 (-0.35832)

> BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_2030.pth
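The amp_scaler column is the dynamic loss scale of mixed-precision training. It sat at 16384.0 until it doubled to 32768.0 just after global step 2000 (visible between the step-376 and step-401 rows of epoch 4), which is how a PyTorch GradScaler behaves: after a fixed run of overflow-free steps (growth_interval, 2000 by default) it doubles the scale. A minimal sketch of the standard torch.cuda.amp loop this implies (generic usage, not this trainer's code; model, optimizer, and loader are stand-ins):

    import torch

    scaler = torch.cuda.amp.GradScaler()      # dynamic loss scaling

    for batch in loader:                      # placeholder training loop
        optimizer.zero_grad()
        with torch.cuda.amp.autocast():       # forward in mixed precision
            loss = model(batch)
        scaler.scale(loss).backward()         # scale up to avoid fp16 underflow
        scaler.step(optimizer)                # unscales grads; skips on inf/nan
        scaler.update()                       # grows or backs off the scale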
> EPOCH: 5/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000

> TRAINING (2023-06-26 07:31:11)

 --> STEP: 20/406 -- GLOBAL_STEP: 2050 | > loss: 2.32632 (2.39752) | > log_mle: 0.62514 (0.61732) | > loss_dur: 1.70118 (1.78020) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.13014 (4.16269) | > current_lr: 0.00000 | > step_time: 0.61280 (0.30739) | > loader_time: 2.20610 (1.74694)
 --> STEP: 45/406 -- GLOBAL_STEP: 2075 | > loss: 2.37442 (2.37738) | > log_mle: 0.61136 (0.61411) | > loss_dur: 1.76306 (1.76327) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.12018 (4.14276) | > current_lr: 0.00000 | > step_time: 0.45660 (0.34062) | > loader_time: 2.55210 (1.96832)
 --> STEP: 70/406 -- GLOBAL_STEP: 2100 | > loss: 2.31845 (2.36988) | > log_mle: 0.58928 (0.60870) | > loss_dur: 1.72917 (1.76118) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.01500 (4.12811) | > current_lr: 0.00000 | > step_time: 0.41150 (0.37417) | > loader_time: 2.44440 (2.02950)
 --> STEP: 95/406 -- GLOBAL_STEP: 2125 | > loss: 2.27078 (2.35859) | > log_mle: 0.58644 (0.60354) | > loss_dur: 1.68433 (1.75505) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.98636 (4.11155) | > current_lr: 0.00000 | > step_time: 0.44350 (0.40353) | > loader_time: 2.15350 (2.08449)
 --> STEP: 120/406 -- GLOBAL_STEP: 2150 | > loss: 2.28870 (2.34508) | > log_mle: 0.57209 (0.59878) | > loss_dur: 1.71661 (1.74630) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.00643 (4.08450) | > current_lr: 0.00000 | > step_time: 0.54960 (0.43083) | > loader_time: 2.14420 (2.09590)
 --> STEP: 145/406 -- GLOBAL_STEP: 2175 | > loss: 2.21042 (2.33611) | > log_mle: 0.57014 (0.59404) | > loss_dur: 1.64029 (1.74207) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.86027 (4.06902) | > current_lr: 0.00000 | > step_time: 0.59390 (0.45372) | > loader_time: 2.34310 (2.12068)
 --> STEP: 170/406 -- GLOBAL_STEP: 2200 | > loss: 2.24571 (2.32535) | > log_mle: 0.55950 (0.58987) | > loss_dur: 1.68621 (1.73548) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.90860 (4.04696) | > current_lr: 0.00000 | > step_time: 0.53940 (0.47304) | > loader_time: 2.58390 (2.18627)
 --> STEP: 195/406 -- GLOBAL_STEP: 2225 | > loss: 2.23841 (2.31468) | > log_mle: 0.56599 (0.58576) | > loss_dur: 1.67242 (1.72891) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.86637 (4.02464) | > current_lr: 0.00000 | > step_time: 0.62200 (0.49047) | > loader_time: 2.60360 (2.26200)
 --> STEP: 220/406 -- GLOBAL_STEP: 2250 | > loss: 2.22180 (2.30328) | > log_mle: 0.53867 (0.58184) | > loss_dur: 1.68313 (1.72145) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.81014 (4.00162) | > current_lr: 0.00000 | > step_time: 0.63750 (0.50699) | > loader_time: 2.61390 (2.32729)
 --> STEP: 245/406 -- GLOBAL_STEP: 2275 | > loss: 2.20323 (2.29394) | > log_mle: 0.54924 (0.57805) | > loss_dur: 1.65400 (1.71589) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.79550 (3.98261) | > current_lr: 0.00000 | > step_time: 0.68220 (0.52334) | > loader_time: 3.00010 (2.38975)
 --> STEP: 270/406 -- GLOBAL_STEP: 2300 | > loss: 2.18591 (2.28371) | > log_mle: 0.53678 (0.57434) | > loss_dur: 1.64914 (1.70937) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.74888 (3.96214) | > current_lr: 0.00000 | > step_time: 0.72950 (0.54111) | > loader_time: 2.66320 (2.42543)
 --> STEP: 295/406 -- GLOBAL_STEP: 2325 | > loss: 2.16205 (2.27391) | > log_mle: 0.53249 (0.57075) | > loss_dur: 1.62956 (1.70315) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.71011 (3.94190) | > current_lr: 0.00000 | > step_time: 0.80610 (0.56175) | > loader_time: 2.86540 (2.44789)
 --> STEP: 320/406 -- GLOBAL_STEP: 2350 | > loss: 2.13325 (2.26440) | > log_mle: 0.53064 (0.56733) | > loss_dur: 1.60261 (1.69707) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.68975 (3.92360) | > current_lr: 0.00000 | > step_time: 0.75920 (0.57878) | > loader_time: 2.99530 (2.47226)
 --> STEP: 345/406 -- GLOBAL_STEP: 2375 | > loss: 2.16305 (2.25604) | > log_mle: 0.51063 (0.56382) | > loss_dur: 1.65242 (1.69222) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.72070 (3.90668) | > current_lr: 0.00000 | > step_time: 0.83260 (0.59704) | > loader_time: 2.99060 (2.51815)
 --> STEP: 370/406 -- GLOBAL_STEP: 2400 | > loss: 2.09848 (2.24708) | > log_mle: 0.50447 (0.56045) | > loss_dur: 1.59401 (1.68663) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.59549 (3.89014) | > current_lr: 0.00000 | > step_time: 1.01260 (0.61554) | > loader_time: 2.78670 (2.56226)
 --> STEP: 395/406 -- GLOBAL_STEP: 2425 | > loss: 2.10066 (2.23806) | > log_mle: 0.50259 (0.55727) | > loss_dur: 1.59806 (1.68080) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.65250 (3.87293) | > current_lr: 0.00000 | > step_time: 1.20210 (0.64160) | > loader_time: 3.23820 (2.60756)

> EVALUATION

 --> EVAL PERFORMANCE | > avg_loader_time: 1.10739 (-0.07839) | > avg_loss: 2.01928 (-0.32706) | > avg_log_mle: 0.50687 (-0.09291) | > avg_loss_dur: 1.51240 (-0.23415)

> BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_2436.pth
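Each epoch ends with a BEST MODEL line, and the checkpoint numbers are the global step at epoch end (multiples of 406: 406, 812, ..., 2436), so a new best checkpoint is being written every time the evaluation loss improves. A generic sketch of that pattern (hypothetical helper, not the trainer's implementation):

    import os
    import torch

    best_eval_loss = float("inf")

    def save_if_best(model, eval_loss, global_step, out_dir):
        # Writes best_model_<step>.pth whenever evaluation loss improves.
        global best_eval_loss
        if eval_loss < best_eval_loss:
            best_eval_loss = eval_loss
            path = os.path.join(out_dir, f"best_model_{global_step}.pth")
            torch.save(model.state_dict(), path)
            return path
        return None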
> EPOCH: 6/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000

> TRAINING (2023-06-26 07:54:00)

 --> STEP: 14/406 -- GLOBAL_STEP: 2450 | > loss: 2.07627 (2.08896) | > log_mle: 0.51377 (0.52882) | > loss_dur: 1.56250 (1.56014) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.59623 (3.55583) | > current_lr: 0.00000 | > step_time: 0.23050 (0.27280) | > loader_time: 1.48280 (1.40424)
 --> STEP: 39/406 -- GLOBAL_STEP: 2475 | > loss: 2.04526 (2.06515) | > log_mle: 0.52449 (0.52664) | > loss_dur: 1.52077 (1.53851) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.42434 (3.52871) | > current_lr: 0.00000 | > step_time: 0.39060 (0.31894) | > loader_time: 2.39020 (1.67910)
 --> STEP: 64/406 -- GLOBAL_STEP: 2500 | > loss: 1.95181 (2.05596) | > log_mle: 0.52202 (0.52228) | > loss_dur: 1.42979 (1.53368) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.33448 (3.52275) | > current_lr: 0.00000 | > step_time: 0.45940 (0.35870) | > loader_time: 1.88890 (1.78787)
 --> STEP: 89/406 -- GLOBAL_STEP: 2525 | > loss: 2.04461 (2.05266) | > log_mle: 0.50531 (0.51754) | > loss_dur: 1.53930 (1.53512) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.49105 (3.52939) | > current_lr: 0.00000 | > step_time: 0.47620 (0.39132) | > loader_time: 2.16490 (1.86330)
 --> STEP: 114/406 -- GLOBAL_STEP: 2550 | > loss: 2.01263 (2.04373) | > log_mle: 0.48527 (0.51295) | > loss_dur: 1.52736 (1.53078) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.46784 (3.52018) | > current_lr: 0.00000 | > step_time: 0.55040 (0.42217) | > loader_time: 1.94480 (1.89878)
 --> STEP: 139/406 -- GLOBAL_STEP: 2575 | > loss: 2.04148 (2.03901) | > log_mle: 0.48638 (0.50869) | > loss_dur: 1.55510 (1.53033) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.52272 (3.51866) | > current_lr: 0.00000 | > step_time: 0.52860 (0.44776) | > loader_time: 2.08100 (1.94636)
 --> STEP: 164/406 -- GLOBAL_STEP: 2600 | > loss: 2.01093 (2.03404) | > log_mle: 0.48942 (0.50497) | > loss_dur: 1.52151 (1.52907) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.47391 (3.51444) | > current_lr: 0.00000 | > step_time: 0.57730 (0.47040) | > loader_time: 2.80420 (2.04028)
 --> STEP: 189/406 -- GLOBAL_STEP: 2625 | > loss: 2.00893 (2.02843) | > log_mle: 0.47288 (0.50142) | > loss_dur: 1.53605 (1.52701) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.45250 (3.50698) | > current_lr: 0.00000 | > step_time: 0.61870 (0.49225) | > loader_time: 2.55990 (2.12734)
 --> STEP: 214/406 -- GLOBAL_STEP: 2650 | > loss: 1.95254 (2.02227) | > log_mle: 0.47246 (0.49824) | > loss_dur: 1.48008 (1.52404) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.41300 (3.49969) | > current_lr: 0.00000 | > step_time: 0.67090 (0.51047) | > loader_time: 2.60850 (2.17820)
 --> STEP: 239/406 -- GLOBAL_STEP: 2675 | > loss: 2.00469 (2.01757) | > log_mle: 0.45472 (0.49510) | > loss_dur: 1.54998 (1.52247) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.51217 (3.49622) | > current_lr: 0.00000 | > step_time: 0.69440 (0.53091) | > loader_time: 2.76690 (2.23221)
 --> STEP: 264/406 -- GLOBAL_STEP: 2700 | > loss: 1.98347 (2.01273) | > log_mle: 0.46317 (0.49212) | > loss_dur: 1.52030 (1.52061) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.46322 (3.49149) | > current_lr: 0.00000 | > step_time: 0.75000 (0.55010) | > loader_time: 2.48280 (2.26869)
 --> STEP: 289/406 -- GLOBAL_STEP: 2725 | > loss: 1.91773 (2.00786) | > log_mle: 0.45712 (0.48919) | > loss_dur: 1.46061 (1.51867) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.39969 (3.48598) | > current_lr: 0.00000 | > step_time: 0.74230 (0.57001) | > loader_time: 2.43200 (2.29983)
 --> STEP: 314/406 -- GLOBAL_STEP: 2750 | > loss: 1.91721 (2.00348) | > log_mle: 0.44756 (0.48651) | > loss_dur: 1.46965 (1.51697) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.39172 (3.48175) | > current_lr: 0.00000 | > step_time: 0.80250 (0.59083) | > loader_time: 3.22490 (2.33684)
 --> STEP: 339/406 -- GLOBAL_STEP: 2775 | > loss: 1.92716 (1.99946) | > log_mle: 0.44777 (0.48377) | > loss_dur: 1.47939 (1.51569) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.41049 (3.47910) | > current_lr: 0.00000 | > step_time: 0.85750 (0.61535) | > loader_time: 3.68520 (2.39520)
 --> STEP: 364/406 -- GLOBAL_STEP: 2800 | > loss: 1.94982 (1.99564) | > log_mle: 0.44866 (0.48105) | > loss_dur: 1.50116 (1.51459) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.45689 (3.47657) | > current_lr: 0.00000 | > step_time: 0.90490 (0.63895) | > loader_time: 2.73300 (2.43614)
 --> STEP: 389/406 -- GLOBAL_STEP: 2825 | > loss: 1.92435 (1.99092) | > log_mle: 0.43361 (0.47854) | > loss_dur: 1.49074 (1.51237) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.44428 (3.47275) | > current_lr: 0.00000 | > step_time: 0.93950 (0.65929) | > loader_time: 2.84510 (2.46847)

> EVALUATION

 --> EVAL PERFORMANCE | > avg_loader_time: 1.05160 (-0.05579) | > avg_loss: 1.84788 (-0.17140) | > avg_log_mle: 0.44069 (-0.06619) | > avg_loss_dur: 1.40719 (-0.10521)

> BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_2842.pth
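One caveat on current_lr: it prints as 0.00000 for the entire run, but that does not by itself prove the learning rate is zero. These columns are formatted to five decimal places, so any rate below 5e-6 (common early in a long warmup schedule) renders identically to a true zero:

    for lr in (0.0, 2e-6, 1e-4):
        print(f"current_lr: {lr:.5f}")   # -> 0.00000, 0.00000, 0.00010

The log alone cannot distinguish a genuinely zero rate (a misconfiguration worth checking) from a tiny-but-nonzero one.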
> EPOCH: 7/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000

> TRAINING (2023-06-26 08:16:03)

 --> STEP: 8/406 -- GLOBAL_STEP: 2850 | > loss: 1.96911 (1.96349) | > log_mle: 0.46093 (0.46795) | > loss_dur: 1.50819 (1.49554) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.48551 (3.47797) | > current_lr: 0.00000 | > step_time: 0.28110 (0.27917) | > loader_time: 1.49990 (1.29088)
 --> STEP: 33/406 -- GLOBAL_STEP: 2875 | > loss: 1.94136 (1.92067) | > log_mle: 0.45958 (0.46323) | > loss_dur: 1.48178 (1.45744) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.47601 (3.42534) | > current_lr: 0.00000 | > step_time: 0.37700 (0.31595) | > loader_time: 2.39950 (1.66749)
 --> STEP: 58/406 -- GLOBAL_STEP: 2900 | > loss: 1.93873 (1.90948) | > log_mle: 0.45079 (0.45991) | > loss_dur: 1.48793 (1.44957) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.46104 (3.40913) | > current_lr: 0.00000 | > step_time: 0.38810 (0.35485) | > loader_time: 2.20780 (1.79926)
 --> STEP: 83/406 -- GLOBAL_STEP: 2925 | > loss: 1.90904 (1.90316) | > log_mle: 0.43886 (0.45571) | > loss_dur: 1.47018 (1.44745) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.47968 (3.40773) | > current_lr: 0.00000 | > step_time: 0.48130 (0.38773) | > loader_time: 2.24530 (1.88041)
 --> STEP: 108/406 -- GLOBAL_STEP: 2950 | > loss: 1.82908 (1.89354) | > log_mle: 0.43417 (0.45163) | > loss_dur: 1.39490 (1.44191) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.30387 (3.39798) | > current_lr: 0.00000 | > step_time: 0.47450 (0.41515) | > loader_time: 2.42770 (1.94447)
 --> STEP: 133/406 -- GLOBAL_STEP: 2975 | > loss: 1.86270 (1.88712) | > log_mle: 0.42606 (0.44763) | > loss_dur: 1.43665 (1.43949) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.42021 (3.39358) | > current_lr: 0.00000 | > step_time: 0.51100 (0.43871) | > loader_time: 1.89930 (1.99278)
 --> STEP: 158/406 -- GLOBAL_STEP: 3000 | > loss: 1.86250 (1.88376) | > log_mle: 0.41751 (0.44437) | > loss_dur: 1.44499 (1.43939) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.43428 (3.39362) | > current_lr: 0.00000 | > step_time: 0.51520 (0.46005) | > loader_time: 2.52260 (2.04280)
 --> STEP: 183/406 -- GLOBAL_STEP: 3025 | > loss: 1.83292 (1.87877) | > log_mle: 0.41893 (0.44134) | > loss_dur: 1.41399 (1.43743) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.36544 (3.38969) | > current_lr: 0.00000 | > step_time: 0.62150 (0.47857) | > loader_time: 2.50240 (2.12756)
 --> STEP: 208/406 -- GLOBAL_STEP: 3050 | > loss: 1.78827 (1.87383) | > log_mle: 0.40539 (0.43844) | > loss_dur: 1.38288 (1.43539) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.32563 (3.38419) | > current_lr: 0.00000 | > step_time: 0.63180 (0.49634) | > loader_time: 2.38040 (2.16925)
 --> STEP: 233/406 -- GLOBAL_STEP: 3075 | > loss: 1.83761 (1.86837) | > log_mle: 0.41203 (0.43569) | > loss_dur: 1.42558 (1.43268) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.33179 (3.37878) | > current_lr: 0.00000 | > step_time: 0.65630 (0.51538) | > loader_time: 2.37380 (2.19834)
 --> STEP: 258/406 -- GLOBAL_STEP: 3100 | > loss: 1.85241 (1.86407) | > log_mle: 0.40924 (0.43311) | > loss_dur: 1.44316 (1.43096) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.39369 (3.37558) | > current_lr: 0.00000 | > step_time: 0.74990 (0.53692) | > loader_time: 2.80250 (2.23514)
 --> STEP: 283/406 -- GLOBAL_STEP: 3125 | > loss: 1.79562 (1.85885) | > log_mle: 0.39832 (0.43056) | > loss_dur: 1.39730 (1.42829) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.30349 (3.36852) | > current_lr: 0.00000 | > step_time: 0.74230 (0.55632) | > loader_time: 2.65480 (2.27261)
 --> STEP: 308/406 -- GLOBAL_STEP: 3150 | > loss: 1.78168 (1.85450) | > log_mle: 0.39840 (0.42827) | > loss_dur: 1.38327 (1.42624) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.29173 (3.36365) | > current_lr: 0.00000 | > step_time: 0.73790 (0.57741) | > loader_time: 3.03090 (2.32942)
 --> STEP: 333/406 -- GLOBAL_STEP: 3175 | > loss: 1.83408 (1.84947) | > log_mle: 0.39156 (0.42598) | > loss_dur: 1.44252 (1.42349) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.37923 (3.35843) | > current_lr: 0.00000 | > step_time: 0.76510 (0.59347) | > loader_time: 3.18340 (2.39377)
 --> STEP: 358/406 -- GLOBAL_STEP: 3200 | > loss: 1.71611 (1.84539) | > log_mle: 0.37494 (0.42362) | > loss_dur: 1.34117 (1.42177) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.19474 (3.35654) | > current_lr: 0.00000 | > step_time: 0.87450 (0.61329) | > loader_time: 3.12700 (2.43973)
 --> STEP: 383/406 -- GLOBAL_STEP: 3225 | > loss: 1.77768 (1.83987) | > log_mle: 0.39020 (0.42149) | > loss_dur: 1.38748 (1.41838) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.25907 (3.34852) | > current_lr: 0.00000 | > step_time: 1.49200 (0.63222) | > loader_time: 3.90140 (2.47648)

> EVALUATION

 --> EVAL PERFORMANCE | > avg_loader_time: 1.14087 (+0.08927) | > avg_loss: 1.68739 (-0.16049) | > avg_log_mle: 0.39035 (-0.05033) | > avg_loss_dur: 1.29703 (-0.11016)

> BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_3248.pth
> EPOCH: 8/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000

> TRAINING (2023-06-26 08:38:02)

 --> STEP: 2/406 -- GLOBAL_STEP: 3250 | > loss: 1.84794 (1.84990) | > log_mle: 0.42129 (0.41605) | > loss_dur: 1.42665 (1.43385) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.44622 (3.49590) | > current_lr: 0.00000 | > step_time: 0.28890 (0.28988) | > loader_time: 1.56430 (1.40669)
 --> STEP: 27/406 -- GLOBAL_STEP: 3275 | > loss: 1.70309 (1.76733) | > log_mle: 0.41906 (0.41654) | > loss_dur: 1.28403 (1.35079) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.18575 (3.28146) | > current_lr: 0.00000 | > step_time: 0.28630 (0.29704) | > loader_time: 1.72530 (1.60817)
 --> STEP: 52/406 -- GLOBAL_STEP: 3300 | > loss: 1.71644 (1.74735) | > log_mle: 0.40328 (0.41352) | > loss_dur: 1.31316 (1.33383) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.11168 (3.22014) | > current_lr: 0.00000 | > step_time: 0.45840 (0.34316) | > loader_time: 1.89590 (1.79496)
 --> STEP: 77/406 -- GLOBAL_STEP: 3325 | > loss: 1.73448 (1.73563) | > log_mle: 0.40530 (0.40922) | > loss_dur: 1.32918 (1.32641) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.16931 (3.19617) | > current_lr: 0.00000 | > step_time: 0.53400 (0.37952) | > loader_time: 2.17930 (1.88825)
 --> STEP: 102/406 -- GLOBAL_STEP: 3350 | > loss: 1.70873 (1.72368) | > log_mle: 0.37598 (0.40487) | > loss_dur: 1.33275 (1.31881) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.18634 (3.17941) | > current_lr: 0.00000 | > step_time: 0.52590 (0.40938) | > loader_time: 2.17620 (1.93153)
 --> STEP: 127/406 -- GLOBAL_STEP: 3375 | > loss: 1.67640 (1.71273) | > log_mle: 0.37993 (0.40098) | > loss_dur: 1.29646 (1.31175) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.16501 (3.15668) | > current_lr: 0.00000 | > step_time: 0.51500 (0.43232) | > loader_time: 2.42380 (1.97267)
 --> STEP: 152/406 -- GLOBAL_STEP: 3400 | > loss: 1.65853 (1.70660) | > log_mle: 0.37716 (0.39754) | > loss_dur: 1.28137 (1.30906) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.01159 (3.14658) | > current_lr: 0.00000 | > step_time: 0.57630 (0.45423) | > loader_time: 2.37160 (2.01706)
 --> STEP: 177/406 -- GLOBAL_STEP: 3425 | > loss: 1.64946 (1.69962) | > log_mle: 0.36582 (0.39452) | > loss_dur: 1.28364 (1.30510) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.07432 (3.13534) | > current_lr: 0.00000 | > step_time: 0.64430 (0.47297) | > loader_time: 2.60340 (2.12425)
 --> STEP: 202/406 -- GLOBAL_STEP: 3450 | > loss: 1.63416 (1.69252) | > log_mle: 0.37142 (0.39168) | > loss_dur: 1.26274 (1.30084) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.97809 (3.11988) | > current_lr: 0.00000 | > step_time: 0.61060 (0.49242) | > loader_time: 2.80140 (2.18575)
 --> STEP: 227/406 -- GLOBAL_STEP: 3475 | > loss: 1.62764 (1.68514) | > log_mle: 0.37852 (0.38900) | > loss_dur: 1.24913 (1.29614) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.92160 (3.10267) | > current_lr: 0.00000 | > step_time: 0.65890 (0.51124) | > loader_time: 2.34810 (2.22160)
 --> STEP: 252/406 -- GLOBAL_STEP: 3500 | > loss: 1.61722 (1.67925) | > log_mle: 0.35895 (0.38643) | > loss_dur: 1.25827 (1.29282) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.89135 (3.08984) | > current_lr: 0.00000 | > step_time: 0.71100 (0.53094) | > loader_time: 2.59120 (2.24913)
 --> STEP: 277/406 -- GLOBAL_STEP: 3525 | > loss: 1.59856 (1.67291) | > log_mle: 0.36055 (0.38393) | > loss_dur: 1.23801 (1.28898) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.92326 (3.07682) | > current_lr: 0.00000 | > step_time: 0.78250 (0.55436) | > loader_time: 2.32180 (2.27453)
 --> STEP: 302/406 -- GLOBAL_STEP: 3550 | > loss: 1.59327 (1.66705) | > log_mle: 0.35343 (0.38167) | > loss_dur: 1.23984 (1.28538) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.92698 (3.06434) | > current_lr: 0.00000 | > step_time: 0.78710 (0.57367) | > loader_time: 2.35590 (2.29975)
 --> STEP: 327/406 -- GLOBAL_STEP: 3575 | > loss: 1.58036 (1.66084) | > log_mle: 0.35204 (0.37953) | > loss_dur: 1.22833 (1.28131) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.97569 (3.05265) | > current_lr: 0.00000 | > step_time: 0.78480 (0.59303) | > loader_time: 3.39530 (2.33705)
 --> STEP: 352/406 -- GLOBAL_STEP: 3600 | > loss: 1.58020 (1.65628) | > log_mle: 0.34938 (0.37730) | > loss_dur: 1.23082 (1.27899) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.89964 (3.04428) | > current_lr: 0.00000 | > step_time: 0.87750 (0.61284) | > loader_time: 2.76590 (2.37554)
 --> STEP: 377/406 -- GLOBAL_STEP: 3625 | > loss: 1.58888 (1.65010) | > log_mle: 0.34446 (0.37511) | > loss_dur: 1.24442 (1.27499) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.95227 (3.03088) | > current_lr: 0.00000 | > step_time: 0.96200 (0.63442) | > loader_time: 3.11010 (2.41044)
 --> STEP: 402/406 -- GLOBAL_STEP: 3650 | > loss: 1.53839 (1.64419) | > log_mle: 0.33520 (0.37307) | > loss_dur: 1.20319 (1.27112) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.84542 (3.01778) | > current_lr: 0.00000 | > step_time: 1.04060 (0.66372) | > loader_time: 3.07020 (2.44366)

> EVALUATION

 --> EVAL PERFORMANCE | > avg_loader_time: 1.07849 (-0.06238) | > avg_loss: 1.49742 (-0.18997) | > avg_log_mle: 0.34547 (-0.04488) | > avg_loss_dur: 1.15194 (-0.14509)

> BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_3654.pth
> EPOCH: 9/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000

> TRAINING (2023-06-26 08:59:42)

 --> STEP: 21/406 -- GLOBAL_STEP: 3675 | > loss: 1.49872 (1.57799) | > log_mle: 0.36793 (0.37336) | > loss_dur: 1.13079 (1.20463) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.73898 (2.91897) | > current_lr: 0.00000 | > step_time: 0.26220 (0.31256) | > loader_time: 1.60250 (1.40686)
 --> STEP: 46/406 -- GLOBAL_STEP: 3700 | > loss: 1.56281 (1.55377) | > log_mle: 0.36506 (0.37138) | > loss_dur: 1.19775 (1.18238) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.76536 (2.83050) | > current_lr: 0.00000 | > step_time: 0.39350 (0.34399) | > loader_time: 1.68410 (1.64451)
 --> STEP: 71/406 -- GLOBAL_STEP: 3725 | > loss: 1.51894 (1.54579) | > log_mle: 0.35790 (0.36630) | > loss_dur: 1.16105 (1.17950) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.99218 (2.82838) | > current_lr: 0.00000 | > step_time: 0.48710 (0.37561) | > loader_time: 1.78850 (1.73678)
 --> STEP: 96/406 -- GLOBAL_STEP: 3750 | > loss: 1.53003 (1.53496) | > log_mle: 0.34705 (0.36160) | > loss_dur: 1.18298 (1.17336) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.92493 (2.80869) | > current_lr: 0.00000 | > step_time: 0.51530 (0.40348) | > loader_time: 2.02150 (1.78785)
 --> STEP: 121/406 -- GLOBAL_STEP: 3775 | > loss: 1.51401 (1.52598) | > log_mle: 0.34361 (0.35769) | > loss_dur: 1.17040 (1.16829) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.78462 (2.78650) | > current_lr: 0.00000 | > step_time: 0.56140 (0.44794) | > loader_time: 1.77920 (1.81979)
 --> STEP: 146/406 -- GLOBAL_STEP: 3800 | > loss: 1.48767 (1.52002) | > log_mle: 0.34445 (0.35402) | > loss_dur: 1.14323 (1.16600) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.71470 (2.78053) | > current_lr: 0.00000 | > step_time: 0.60390 (0.46943) | > loader_time: 2.48690 (1.87265)
 --> STEP: 171/406 -- GLOBAL_STEP: 3825 | > loss: 1.45805 (1.51447) | > log_mle: 0.32992 (0.35096) | > loss_dur: 1.12812 (1.16351) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.66298 (2.76658) | > current_lr: 0.00000 | > step_time: 0.61050 (0.48842) | > loader_time: 2.72310 (1.98629)
 --> STEP: 196/406 -- GLOBAL_STEP: 3850 | > loss: 1.45783 (1.50941) | > log_mle: 0.32256 (0.34799) | > loss_dur: 1.13528 (1.16142) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.65209 (2.75529) | > current_lr: 0.00000 | > step_time: 0.66210 (0.50768) | > loader_time: 2.71610 (2.07140)
 --> STEP: 221/406 -- GLOBAL_STEP: 3875 | > loss: 1.47014 (1.50357) | > log_mle: 0.33055 (0.34530) | > loss_dur: 1.13959 (1.15827) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.65247 (2.74201) | > current_lr: 0.00000 | > step_time: 0.60850 (0.52547) | > loader_time: 2.63390 (2.11945)
 --> STEP: 246/406 -- GLOBAL_STEP: 3900 | > loss: 1.43280 (1.49877) | > log_mle: 0.31848 (0.34271) | > loss_dur: 1.11432 (1.15606) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.69570 (2.73298) | > current_lr: 0.00000 | > step_time: 0.71550 (0.54282) | > loader_time: 2.29480 (2.15506)
 --> STEP: 271/406 -- GLOBAL_STEP: 3925 | > loss: 1.45462 (1.49424) | > log_mle: 0.30196 (0.34024) | > loss_dur: 1.15267 (1.15400) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.92253 (2.72909) | > current_lr: 0.00000 | > step_time: 0.72700 (0.56259) | > loader_time: 2.39850 (2.19385)
 --> STEP: 296/406 -- GLOBAL_STEP: 3950 | > loss: 1.46072 (1.49001) | > log_mle: 0.31431 (0.33795) | > loss_dur: 1.14641 (1.15205) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.61953 (2.72566) | > current_lr: 0.00000 | > step_time: 0.76590 (0.58155) | > loader_time: 2.15570 (2.22230)
 --> STEP: 321/406 -- GLOBAL_STEP: 3975 | > loss: 1.42671 (1.48536) | > log_mle: 0.30693 (0.33581) | > loss_dur: 1.11977 (1.14955) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.61382 (2.72431) | > current_lr: 0.00000 | > step_time: 0.79400 (0.60064) | > loader_time: 3.23400 (2.27174)
 --> STEP: 346/406 -- GLOBAL_STEP: 4000 | > loss: 1.41643 (1.48190) | > log_mle: 0.30933 (0.33356) | > loss_dur: 1.10709 (1.14835) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.54829 (2.71890) | > current_lr: 0.00000 | > step_time: 0.90700 (0.61771) | > loader_time: 3.31090 (2.33149)
 --> STEP: 371/406 -- GLOBAL_STEP: 4025 | > loss: 1.41167 (1.47760) | > log_mle: 0.29983 (0.33137) | > loss_dur: 1.11184 (1.14623) | > amp_scaler: 32768.00000 (32856.32345) | > grad_norm: 2.54190 (2.70327) | > current_lr: 0.00000 | > step_time: 0.88050 (0.63619) | > loader_time: 3.20440 (2.37874)
 --> STEP: 396/406 -- GLOBAL_STEP: 4050 | > loss: 1.40678 (1.47328) | > log_mle: 0.28764 (0.32938) | > loss_dur: 1.11914 (1.14391) | > amp_scaler: 32768.00000 (32850.74747) | > grad_norm: 2.59690 (2.69736) | > current_lr: 0.00000 | > step_time: 1.15280 (0.66131) | > loader_time: 3.57930 (2.41747)

> EVALUATION

 --> EVAL PERFORMANCE | > avg_loader_time: 1.09103 (+0.01254) | > avg_loss: 1.36604 (-0.13137) | > avg_log_mle: 0.30225 (-0.04322) | > avg_loss_dur: 1.06379 (-0.08816)

> BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_4060.pth
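One more amp_scaler detail: in epoch 9 the current scale reads 32768.00000 while its running average is 32856.32345 at step 371 and 32850.74747 at step 396. The excess is exactly one step's worth of 65536:

    32768 + \frac{32768}{371} = 32856.32345\ldots, \qquad
    32768 + \frac{32768}{396} = 32850.74747\ldots

so precisely one step in this epoch ran at a scale of 65536 before being halved back, consistent with a GradScaler growing the scale on schedule and immediately backing off after an overflow.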
(3.39358) | > current_lr: 0.00000 | > step_time: 0.51100 (0.43871) | > loader_time: 1.89930 (1.99278)  --> STEP: 158/406 -- GLOBAL_STEP: 3000 | > loss: 1.86250 (1.88376) | > log_mle: 0.41751 (0.44437) | > loss_dur: 1.44499 (1.43939) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.43428 (3.39362) | > current_lr: 0.00000 | > step_time: 0.51520 (0.46005) | > loader_time: 2.52260 (2.04280)  --> STEP: 183/406 -- GLOBAL_STEP: 3025 | > loss: 1.83292 (1.87877) | > log_mle: 0.41893 (0.44134) | > loss_dur: 1.41399 (1.43743) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.36544 (3.38969) | > current_lr: 0.00000 | > step_time: 0.62150 (0.47857) | > loader_time: 2.50240 (2.12756)  --> STEP: 208/406 -- GLOBAL_STEP: 3050 | > loss: 1.78827 (1.87383) | > log_mle: 0.40539 (0.43844) | > loss_dur: 1.38288 (1.43539) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.32563 (3.38419) | > current_lr: 0.00000 | > step_time: 0.63180 (0.49634) | > loader_time: 2.38040 (2.16925)  --> STEP: 233/406 -- GLOBAL_STEP: 3075 | > loss: 1.83761 (1.86837) | > log_mle: 0.41203 (0.43569) | > loss_dur: 1.42558 (1.43268) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.33179 (3.37878) | > current_lr: 0.00000 | > step_time: 0.65630 (0.51538) | > loader_time: 2.37380 (2.19834)  --> STEP: 258/406 -- GLOBAL_STEP: 3100 | > loss: 1.85241 (1.86407) | > log_mle: 0.40924 (0.43311) | > loss_dur: 1.44316 (1.43096) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.39369 (3.37558) | > current_lr: 0.00000 | > step_time: 0.74990 (0.53692) | > loader_time: 2.80250 (2.23514)  --> STEP: 283/406 -- GLOBAL_STEP: 3125 | > loss: 1.79562 (1.85885) | > log_mle: 0.39832 (0.43056) | > loss_dur: 1.39730 (1.42829) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.30349 (3.36852) | > current_lr: 0.00000 | > step_time: 0.74230 (0.55632) | > loader_time: 2.65480 (2.27261)  --> STEP: 308/406 -- GLOBAL_STEP: 3150 | > loss: 1.78168 (1.85450) | > log_mle: 0.39840 (0.42827) | > loss_dur: 1.38327 (1.42624) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.29173 (3.36365) | > current_lr: 0.00000 | > step_time: 0.73790 (0.57741) | > loader_time: 3.03090 (2.32942)  --> STEP: 333/406 -- GLOBAL_STEP: 3175 | > loss: 1.83408 (1.84947) | > log_mle: 0.39156 (0.42598) | > loss_dur: 1.44252 (1.42349) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.37923 (3.35843) | > current_lr: 0.00000 | > step_time: 0.76510 (0.59347) | > loader_time: 3.18340 (2.39377)  --> STEP: 358/406 -- GLOBAL_STEP: 3200 | > loss: 1.71611 (1.84539) | > log_mle: 0.37494 (0.42362) | > loss_dur: 1.34117 (1.42177) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.19474 (3.35654) | > current_lr: 0.00000 | > step_time: 0.87450 (0.61329) | > loader_time: 3.12700 (2.43973)  --> STEP: 383/406 -- GLOBAL_STEP: 3225 | > loss: 1.77768 (1.83987) | > log_mle: 0.39020 (0.42149) | > loss_dur: 1.38748 (1.41838) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.25907 (3.34852) | > current_lr: 0.00000 | > step_time: 1.49200 (0.63222) | > loader_time: 3.90140 (2.47648)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.14087 (+0.08927) | > avg_loss: 1.68739 (-0.16049) | > avg_log_mle: 0.39035 (-0.05033) | > avg_loss_dur: 1.29703 (-0.11016) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_3248.pth  > EPOCH: 8/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 08:38:02)   --> STEP: 2/406 -- GLOBAL_STEP: 3250 | > loss: 
> EPOCH: 11/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000

> TRAINING (2023-06-26 09:42:36)

 --> STEP: 9/406 -- GLOBAL_STEP: 4475 | > loss: 1.34257 (1.33919) | > log_mle: 0.29600 (0.29549) | > loss_dur: 1.04657 (1.04370) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.53248 (2.58129) | > current_lr: 0.00000 | > step_time: 0.30140 (0.29187) | > loader_time: 1.26050 (1.29966)
 --> STEP: 34/406 -- GLOBAL_STEP: 4500 | > loss: 1.28956 (1.29893) | > log_mle: 0.30031 (0.29030) | > loss_dur: 0.98925 (1.00863) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.38411 (2.47360) | > current_lr: 0.00000 | > step_time: 0.39080 (0.32303) | > loader_time: 1.99910 (1.60834)
 --> STEP: 59/406 -- GLOBAL_STEP: 4525 | > loss: 1.25389 (1.28784) | > log_mle: 0.26517 (0.28617) | > loss_dur: 0.98872 (1.00167) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.72886 (2.45463) | > current_lr: 0.00000 | > step_time: 0.49210 (0.36166) | > loader_time: 1.84130 (1.71455)
 --> STEP: 84/406 -- GLOBAL_STEP: 4550 | > loss: 1.25189 (1.28053) | > log_mle: 0.27123 (0.28208) | > loss_dur: 0.98067 (0.99846) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.35870 (2.45077) | > current_lr: 0.00000 | > step_time: 0.52550 (0.39617) | > loader_time: 1.70420 (1.80977)
 --> STEP: 109/406 -- GLOBAL_STEP: 4575 | > loss: 1.23211 (1.27106) | > log_mle: 0.25285 (0.27778) | > loss_dur: 0.97926 (0.99328) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.57709 (2.43590) | > current_lr: 0.00000 | > step_time: 0.54800 (0.42151) | > loader_time: 2.06720 (1.85622)
 --> STEP: 134/406 -- GLOBAL_STEP: 4600 | > loss: 1.25460 (1.26537) | > log_mle: 0.25480 (0.27382) | > loss_dur: 0.99980 (0.99155) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.38852 (2.44857) | > current_lr: 0.00000 | > step_time: 0.55990 (0.44367) | > loader_time: 2.09420 (1.87612)
 --> STEP: 159/406 -- GLOBAL_STEP: 4625 | > loss: 1.23639 (1.26185) | > log_mle: 0.25376 (0.27070) | > loss_dur: 0.98263 (0.99115) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.26595 (2.44123) | > current_lr: 0.00000 | > step_time: 0.55710 (0.46468) | > loader_time: 2.69590 (1.92138)
 --> STEP: 184/406 -- GLOBAL_STEP: 4650 | > loss: 1.27520 (1.25801) | > log_mle: 0.23324 (0.26770) | > loss_dur: 1.04196 (0.99032) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.44802 (2.43277) | > current_lr: 0.00000 | > step_time: 0.67700 (0.48534) | > loader_time: 2.04390 (1.98034)
 --> STEP: 209/406 -- GLOBAL_STEP: 4675 | > loss: 1.23888 (1.25394) | > log_mle: 0.23535 (0.26500) | > loss_dur: 1.00353 (0.98894) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.33587 (2.43368) | > current_lr: 0.00000 | > step_time: 0.58790 (0.50466) | > loader_time: 2.40440 (2.01515)
 --> STEP: 234/406 -- GLOBAL_STEP: 4700 | > loss: 1.22883 (1.24960) | > log_mle: 0.23179 (0.26233) | > loss_dur: 0.99704 (0.98727) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.42160 (2.42645) | > current_lr: 0.00000 | > step_time: 0.67960 (0.52404) | > loader_time: 2.51970 (2.05319)
 --> STEP: 259/406 -- GLOBAL_STEP: 4725 | > loss: 1.20521 (1.24649) | > log_mle: 0.22357 (0.25988) | > loss_dur: 0.98164 (0.98661) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.34066 (2.41780) | > current_lr: 0.00000 | > step_time: 0.79690 (0.54303) | > loader_time: 3.15780 (2.10299)
 --> STEP: 284/406 -- GLOBAL_STEP: 4750 | > loss: 1.21444 (1.24298) | > log_mle: 0.22858 (0.25755) | > loss_dur: 0.98587 (0.98543) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.41069 (2.41238) | > current_lr: 0.00000 | > step_time: 0.77780 (0.56307) | > loader_time: 2.44820 (2.14532)
 --> STEP: 309/406 -- GLOBAL_STEP: 4775 | > loss: 1.18498 (1.24003) | > log_mle: 0.23236 (0.25556) | > loss_dur: 0.95262 (0.98447) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.46981 (2.41025) | > current_lr: 0.00000 | > step_time: 0.89260 (0.58288) | > loader_time: 2.55140 (2.18204)
 --> STEP: 334/406 -- GLOBAL_STEP: 4800 | > loss: 1.23048 (1.23705) | > log_mle: 0.22302 (0.25355) | > loss_dur: 1.00746 (0.98350) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.51359 (2.40824) | > current_lr: 0.00000 | > step_time: 0.86220 (0.60250) | > loader_time: 2.72580 (2.22189)
 --> STEP: 359/406 -- GLOBAL_STEP: 4825 | > loss: 1.17302 (1.23441) | > log_mle: 0.22890 (0.25148) | > loss_dur: 0.94412 (0.98293) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.30157 (2.41084) | > current_lr: 0.00000 | > step_time: 0.90490 (0.62134) | > loader_time: 2.82770 (2.27640)
 --> STEP: 384/406 -- GLOBAL_STEP: 4850 | > loss: 1.16129 (1.23105) | > log_mle: 0.22923 (0.24960) | > loss_dur: 0.93205 (0.98145) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.29501 (2.40804) | > current_lr: 0.00000 | > step_time: 0.89280 (0.64200) | > loader_time: 2.94830 (2.32658)

> EVALUATION

 --> EVAL PERFORMANCE | > avg_loader_time: 1.09873 (+0.02744) | > avg_loss: 1.14620 (-0.10582) | > avg_log_mle: 0.22456 (-0.03795) | > avg_loss_dur: 0.92164 (-0.06786)

> BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_4872.pth
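Twelve evaluations in, avg_loss has fallen monotonically from 3.65562 to 1.14620. To chart that trend without TensorBoard, the EVAL PERFORMANCE rows can be scraped straight out of a log like this one (a throwaway sketch; the regex targets the exact line format above):

    import re

    EVAL_LOSS = re.compile(r"avg_loss: ([0-9.]+) \(")

    def eval_losses(log_text: str) -> list[float]:
        # One avg_loss value per EVAL PERFORMANCE block, in order.
        return [float(m.group(1)) for m in EVAL_LOSS.finditer(log_text)]

    # eval_losses(open("train.log").read())
    # -> [3.65562, 3.43748, 3.08469, 2.80198, 2.34633, ...]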
GLOBAL_STEP: 3550 | > loss: 1.59327 (1.66705) | > log_mle: 0.35343 (0.38167) | > loss_dur: 1.23984 (1.28538) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.92698 (3.06434) | > current_lr: 0.00000 | > step_time: 0.78710 (0.57367) | > loader_time: 2.35590 (2.29975)  --> STEP: 327/406 -- GLOBAL_STEP: 3575 | > loss: 1.58036 (1.66084) | > log_mle: 0.35204 (0.37953) | > loss_dur: 1.22833 (1.28131) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.97569 (3.05265) | > current_lr: 0.00000 | > step_time: 0.78480 (0.59303) | > loader_time: 3.39530 (2.33705)  --> STEP: 352/406 -- GLOBAL_STEP: 3600 | > loss: 1.58020 (1.65628) | > log_mle: 0.34938 (0.37730) | > loss_dur: 1.23082 (1.27899) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.89964 (3.04428) | > current_lr: 0.00000 | > step_time: 0.87750 (0.61284) | > loader_time: 2.76590 (2.37554)  --> STEP: 377/406 -- GLOBAL_STEP: 3625 | > loss: 1.58888 (1.65010) | > log_mle: 0.34446 (0.37511) | > loss_dur: 1.24442 (1.27499) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.95227 (3.03088) | > current_lr: 0.00000 | > step_time: 0.96200 (0.63442) | > loader_time: 3.11010 (2.41044)  --> STEP: 402/406 -- GLOBAL_STEP: 3650 | > loss: 1.53839 (1.64419) | > log_mle: 0.33520 (0.37307) | > loss_dur: 1.20319 (1.27112) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.84542 (3.01778) | > current_lr: 0.00000 | > step_time: 1.04060 (0.66372) | > loader_time: 3.07020 (2.44366)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.07849 (-0.06238) | > avg_loss: 1.49742 (-0.18997) | > avg_log_mle: 0.34547 (-0.04488) | > avg_loss_dur: 1.15194 (-0.14509) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_3654.pth  > EPOCH: 9/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 08:59:42)   --> STEP: 21/406 -- GLOBAL_STEP: 3675 | > loss: 1.49872 (1.57799) | > log_mle: 0.36793 (0.37336) | > loss_dur: 1.13079 (1.20463) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.73898 (2.91897) | > current_lr: 0.00000 | > step_time: 0.26220 (0.31256) | > loader_time: 1.60250 (1.40686)  --> STEP: 46/406 -- GLOBAL_STEP: 3700 | > loss: 1.56281 (1.55377) | > log_mle: 0.36506 (0.37138) | > loss_dur: 1.19775 (1.18238) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.76536 (2.83050) | > current_lr: 0.00000 | > step_time: 0.39350 (0.34399) | > loader_time: 1.68410 (1.64451)  --> STEP: 71/406 -- GLOBAL_STEP: 3725 | > loss: 1.51894 (1.54579) | > log_mle: 0.35790 (0.36630) | > loss_dur: 1.16105 (1.17950) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.99218 (2.82838) | > current_lr: 0.00000 | > step_time: 0.48710 (0.37561) | > loader_time: 1.78850 (1.73678)  --> STEP: 96/406 -- GLOBAL_STEP: 3750 | > loss: 1.53003 (1.53496) | > log_mle: 0.34705 (0.36160) | > loss_dur: 1.18298 (1.17336) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.92493 (2.80869) | > current_lr: 0.00000 | > step_time: 0.51530 (0.40348) | > loader_time: 2.02150 (1.78785)  --> STEP: 121/406 -- GLOBAL_STEP: 3775 | > loss: 1.51401 (1.52598) | > log_mle: 0.34361 (0.35769) | > loss_dur: 1.17040 (1.16829) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.78462 (2.78650) | > current_lr: 0.00000 | > step_time: 0.56140 (0.44794) | > loader_time: 1.77920 (1.81979)  --> STEP: 146/406 -- GLOBAL_STEP: 3800 | > loss: 1.48767 (1.52002) | > log_mle: 0.34445 (0.35402) | > loss_dur: 1.14323 (1.16600) | > amp_scaler: 32768.00000 (32768.00000) | 
> grad_norm: 2.71470 (2.78053) | > current_lr: 0.00000 | > step_time: 0.60390 (0.46943) | > loader_time: 2.48690 (1.87265)  --> STEP: 171/406 -- GLOBAL_STEP: 3825 | > loss: 1.45805 (1.51447) | > log_mle: 0.32992 (0.35096) | > loss_dur: 1.12812 (1.16351) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.66298 (2.76658) | > current_lr: 0.00000 | > step_time: 0.61050 (0.48842) | > loader_time: 2.72310 (1.98629)  --> STEP: 196/406 -- GLOBAL_STEP: 3850 | > loss: 1.45783 (1.50941) | > log_mle: 0.32256 (0.34799) | > loss_dur: 1.13528 (1.16142) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.65209 (2.75529) | > current_lr: 0.00000 | > step_time: 0.66210 (0.50768) | > loader_time: 2.71610 (2.07140)  --> STEP: 221/406 -- GLOBAL_STEP: 3875 | > loss: 1.47014 (1.50357) | > log_mle: 0.33055 (0.34530) | > loss_dur: 1.13959 (1.15827) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.65247 (2.74201) | > current_lr: 0.00000 | > step_time: 0.60850 (0.52547) | > loader_time: 2.63390 (2.11945)  --> STEP: 246/406 -- GLOBAL_STEP: 3900 | > loss: 1.43280 (1.49877) | > log_mle: 0.31848 (0.34271) | > loss_dur: 1.11432 (1.15606) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.69570 (2.73298) | > current_lr: 0.00000 | > step_time: 0.71550 (0.54282) | > loader_time: 2.29480 (2.15506)  --> STEP: 271/406 -- GLOBAL_STEP: 3925 | > loss: 1.45462 (1.49424) | > log_mle: 0.30196 (0.34024) | > loss_dur: 1.15267 (1.15400) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.92253 (2.72909) | > current_lr: 0.00000 | > step_time: 0.72700 (0.56259) | > loader_time: 2.39850 (2.19385)  --> STEP: 296/406 -- GLOBAL_STEP: 3950 | > loss: 1.46072 (1.49001) | > log_mle: 0.31431 (0.33795) | > loss_dur: 1.14641 (1.15205) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.61953 (2.72566) | > current_lr: 0.00000 | > step_time: 0.76590 (0.58155) | > loader_time: 2.15570 (2.22230)  --> STEP: 321/406 -- GLOBAL_STEP: 3975 | > loss: 1.42671 (1.48536) | > log_mle: 0.30693 (0.33581) | > loss_dur: 1.11977 (1.14955) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.61382 (2.72431) | > current_lr: 0.00000 | > step_time: 0.79400 (0.60064) | > loader_time: 3.23400 (2.27174)  --> STEP: 346/406 -- GLOBAL_STEP: 4000 | > loss: 1.41643 (1.48190) | > log_mle: 0.30933 (0.33356) | > loss_dur: 1.10709 (1.14835) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.54829 (2.71890) | > current_lr: 0.00000 | > step_time: 0.90700 (0.61771) | > loader_time: 3.31090 (2.33149)  --> STEP: 371/406 -- GLOBAL_STEP: 4025 | > loss: 1.41167 (1.47760) | > log_mle: 0.29983 (0.33137) | > loss_dur: 1.11184 (1.14623) | > amp_scaler: 32768.00000 (32856.32345) | > grad_norm: 2.54190 (2.70327) | > current_lr: 0.00000 | > step_time: 0.88050 (0.63619) | > loader_time: 3.20440 (2.37874)  --> STEP: 396/406 -- GLOBAL_STEP: 4050 | > loss: 1.40678 (1.47328) | > log_mle: 0.28764 (0.32938) | > loss_dur: 1.11914 (1.14391) | > amp_scaler: 32768.00000 (32850.74747) | > grad_norm: 2.59690 (2.69736) | > current_lr: 0.00000 | > step_time: 1.15280 (0.66131) | > loader_time: 3.57930 (2.41747)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.09103 (+0.01254) | > avg_loss: 1.36604 (-0.13137) | > avg_log_mle: 0.30225 (-0.04322) | > avg_loss_dur: 1.06379 (-0.08816) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_4060.pth  > EPOCH: 10/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 09:21:17)   --> STEP: 15/406 -- 
GLOBAL_STEP: 4075 | > loss: 1.42459 (1.44612) | > log_mle: 0.32219 (0.33019) | > loss_dur: 1.10240 (1.11593) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.65287 (2.73155) | > current_lr: 0.00000 | > step_time: 0.29900 (0.28946) | > loader_time: 1.53250 (1.39406)  --> STEP: 40/406 -- GLOBAL_STEP: 4100 | > loss: 1.36887 (1.41465) | > log_mle: 0.33738 (0.32948) | > loss_dur: 1.03149 (1.08517) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.42406 (2.60906) | > current_lr: 0.00000 | > step_time: 0.38680 (0.33503) | > loader_time: 1.90710 (1.66163)  --> STEP: 65/406 -- GLOBAL_STEP: 4125 | > loss: 1.41261 (1.40471) | > log_mle: 0.31738 (0.32524) | > loss_dur: 1.09523 (1.07947) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.51945 (2.58182) | > current_lr: 0.00000 | > step_time: 0.50620 (0.37851) | > loader_time: 1.78520 (1.75945)  --> STEP: 90/406 -- GLOBAL_STEP: 4150 | > loss: 1.37216 (1.39692) | > log_mle: 0.30640 (0.32073) | > loss_dur: 1.06575 (1.07619) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.50864 (2.56483) | > current_lr: 0.00000 | > step_time: 0.51750 (0.40811) | > loader_time: 2.37720 (1.85806)  --> STEP: 115/406 -- GLOBAL_STEP: 4175 | > loss: 1.38216 (1.38773) | > log_mle: 0.31031 (0.31631) | > loss_dur: 1.07185 (1.07142) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.46989 (2.55213) | > current_lr: 0.00000 | > step_time: 0.58110 (0.43439) | > loader_time: 2.13700 (1.88739)  --> STEP: 140/406 -- GLOBAL_STEP: 4200 | > loss: 1.34752 (1.38247) | > log_mle: 0.28762 (0.31243) | > loss_dur: 1.05990 (1.07005) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.55189 (2.55990) | > current_lr: 0.00000 | > step_time: 0.56830 (0.45717) | > loader_time: 2.08320 (1.90449)  --> STEP: 165/406 -- GLOBAL_STEP: 4225 | > loss: 1.34131 (1.37790) | > log_mle: 0.28218 (0.30929) | > loss_dur: 1.05912 (1.06861) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.48666 (2.55559) | > current_lr: 0.00000 | > step_time: 0.57980 (0.47895) | > loader_time: 2.76290 (1.97593)  --> STEP: 190/406 -- GLOBAL_STEP: 4250 | > loss: 1.35345 (1.37374) | > log_mle: 0.27890 (0.30625) | > loss_dur: 1.07454 (1.06749) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.69455 (2.56084) | > current_lr: 0.00000 | > step_time: 0.65310 (0.49803) | > loader_time: 2.38080 (2.04050)  --> STEP: 215/406 -- GLOBAL_STEP: 4275 | > loss: 1.32825 (1.36860) | > log_mle: 0.28016 (0.30369) | > loss_dur: 1.04809 (1.06491) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.58756 (2.55561) | > current_lr: 0.00000 | > step_time: 0.61060 (0.51656) | > loader_time: 2.56090 (2.10172)  --> STEP: 240/406 -- GLOBAL_STEP: 4300 | > loss: 1.31147 (1.36449) | > log_mle: 0.27203 (0.30111) | > loss_dur: 1.03944 (1.06338) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.44012 (2.54704) | > current_lr: 0.00000 | > step_time: 0.63150 (0.53437) | > loader_time: 2.68430 (2.15565)  --> STEP: 265/406 -- GLOBAL_STEP: 4325 | > loss: 1.32583 (1.36066) | > log_mle: 0.27015 (0.29873) | > loss_dur: 1.05569 (1.06193) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.50196 (2.53991) | > current_lr: 0.00000 | > step_time: 0.67330 (0.55169) | > loader_time: 2.65600 (2.19620)  --> STEP: 290/406 -- GLOBAL_STEP: 4350 | > loss: 1.30914 (1.35701) | > log_mle: 0.26071 (0.29641) | > loss_dur: 1.04843 (1.06060) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.42991 (2.53274) | > current_lr: 0.00000 | > step_time: 0.78430 (0.57045) | > loader_time: 2.43250 (2.22027)  
--> STEP: 315/406 -- GLOBAL_STEP: 4375 | > loss: 1.28599 (1.35367) | > log_mle: 0.25376 (0.29437) | > loss_dur: 1.03223 (1.05930) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.40107 (2.52525) | > current_lr: 0.00000 | > step_time: 0.77590 (0.59181) | > loader_time: 2.79670 (2.25139)  --> STEP: 340/406 -- GLOBAL_STEP: 4400 | > loss: 1.32872 (1.35074) | > log_mle: 0.26691 (0.29233) | > loss_dur: 1.06181 (1.05841) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.86391 (2.53015) | > current_lr: 0.00000 | > step_time: 0.87200 (0.61023) | > loader_time: 2.70990 (2.29055)  --> STEP: 365/406 -- GLOBAL_STEP: 4425 | > loss: 1.25242 (1.34737) | > log_mle: 0.26373 (0.29020) | > loss_dur: 0.98869 (1.05717) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.29143 (2.53519) | > current_lr: 0.00000 | > step_time: 0.89890 (0.62864) | > loader_time: 2.39630 (2.33699)  --> STEP: 390/406 -- GLOBAL_STEP: 4450 | > loss: 1.29040 (1.34365) | > log_mle: 0.25125 (0.28826) | > loss_dur: 1.03915 (1.05539) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.54572 (2.53367) | > current_lr: 0.00000 | > step_time: 1.07500 (0.65068) | > loader_time: 2.78960 (2.36752)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.07129 (-0.01974) | > avg_loss: 1.25202 (-0.11402) | > avg_log_mle: 0.26251 (-0.03974) | > avg_loss_dur: 0.98951 (-0.07428) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_4466.pth  > EPOCH: 11/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 09:42:36)   --> STEP: 9/406 -- GLOBAL_STEP: 4475 | > loss: 1.34257 (1.33919) | > log_mle: 0.29600 (0.29549) | > loss_dur: 1.04657 (1.04370) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.53248 (2.58129) | > current_lr: 0.00000 | > step_time: 0.30140 (0.29187) | > loader_time: 1.26050 (1.29966)  --> STEP: 34/406 -- GLOBAL_STEP: 4500 | > loss: 1.28956 (1.29893) | > log_mle: 0.30031 (0.29030) | > loss_dur: 0.98925 (1.00863) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.38411 (2.47360) | > current_lr: 0.00000 | > step_time: 0.39080 (0.32303) | > loader_time: 1.99910 (1.60834)  --> STEP: 59/406 -- GLOBAL_STEP: 4525 | > loss: 1.25389 (1.28784) | > log_mle: 0.26517 (0.28617) | > loss_dur: 0.98872 (1.00167) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.72886 (2.45463) | > current_lr: 0.00000 | > step_time: 0.49210 (0.36166) | > loader_time: 1.84130 (1.71455)  --> STEP: 84/406 -- GLOBAL_STEP: 4550 | > loss: 1.25189 (1.28053) | > log_mle: 0.27123 (0.28208) | > loss_dur: 0.98067 (0.99846) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.35870 (2.45077) | > current_lr: 0.00000 | > step_time: 0.52550 (0.39617) | > loader_time: 1.70420 (1.80977)  --> STEP: 109/406 -- GLOBAL_STEP: 4575 | > loss: 1.23211 (1.27106) | > log_mle: 0.25285 (0.27778) | > loss_dur: 0.97926 (0.99328) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.57709 (2.43590) | > current_lr: 0.00000 | > step_time: 0.54800 (0.42151) | > loader_time: 2.06720 (1.85622)  --> STEP: 134/406 -- GLOBAL_STEP: 4600 | > loss: 1.25460 (1.26537) | > log_mle: 0.25480 (0.27382) | > loss_dur: 0.99980 (0.99155) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.38852 (2.44857) | > current_lr: 0.00000 | > step_time: 0.55990 (0.44367) | > loader_time: 2.09420 (1.87612)  --> STEP: 159/406 -- GLOBAL_STEP: 4625 | > loss: 1.23639 (1.26185) | > log_mle: 0.25376 (0.27070) | > loss_dur: 0.98263 (0.99115) | > amp_scaler: 
32768.00000 (32768.00000) | > grad_norm: 2.26595 (2.44123) | > current_lr: 0.00000 | > step_time: 0.55710 (0.46468) | > loader_time: 2.69590 (1.92138)  --> STEP: 184/406 -- GLOBAL_STEP: 4650 | > loss: 1.27520 (1.25801) | > log_mle: 0.23324 (0.26770) | > loss_dur: 1.04196 (0.99032) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.44802 (2.43277) | > current_lr: 0.00000 | > step_time: 0.67700 (0.48534) | > loader_time: 2.04390 (1.98034)  --> STEP: 209/406 -- GLOBAL_STEP: 4675 | > loss: 1.23888 (1.25394) | > log_mle: 0.23535 (0.26500) | > loss_dur: 1.00353 (0.98894) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.33587 (2.43368) | > current_lr: 0.00000 | > step_time: 0.58790 (0.50466) | > loader_time: 2.40440 (2.01515)  --> STEP: 234/406 -- GLOBAL_STEP: 4700 | > loss: 1.22883 (1.24960) | > log_mle: 0.23179 (0.26233) | > loss_dur: 0.99704 (0.98727) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.42160 (2.42645) | > current_lr: 0.00000 | > step_time: 0.67960 (0.52404) | > loader_time: 2.51970 (2.05319)  --> STEP: 259/406 -- GLOBAL_STEP: 4725 | > loss: 1.20521 (1.24649) | > log_mle: 0.22357 (0.25988) | > loss_dur: 0.98164 (0.98661) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.34066 (2.41780) | > current_lr: 0.00000 | > step_time: 0.79690 (0.54303) | > loader_time: 3.15780 (2.10299)  --> STEP: 284/406 -- GLOBAL_STEP: 4750 | > loss: 1.21444 (1.24298) | > log_mle: 0.22858 (0.25755) | > loss_dur: 0.98587 (0.98543) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.41069 (2.41238) | > current_lr: 0.00000 | > step_time: 0.77780 (0.56307) | > loader_time: 2.44820 (2.14532)  --> STEP: 309/406 -- GLOBAL_STEP: 4775 | > loss: 1.18498 (1.24003) | > log_mle: 0.23236 (0.25556) | > loss_dur: 0.95262 (0.98447) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.46981 (2.41025) | > current_lr: 0.00000 | > step_time: 0.89260 (0.58288) | > loader_time: 2.55140 (2.18204)  --> STEP: 334/406 -- GLOBAL_STEP: 4800 | > loss: 1.23048 (1.23705) | > log_mle: 0.22302 (0.25355) | > loss_dur: 1.00746 (0.98350) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.51359 (2.40824) | > current_lr: 0.00000 | > step_time: 0.86220 (0.60250) | > loader_time: 2.72580 (2.22189)  --> STEP: 359/406 -- GLOBAL_STEP: 4825 | > loss: 1.17302 (1.23441) | > log_mle: 0.22890 (0.25148) | > loss_dur: 0.94412 (0.98293) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.30157 (2.41084) | > current_lr: 0.00000 | > step_time: 0.90490 (0.62134) | > loader_time: 2.82770 (2.27640)  --> STEP: 384/406 -- GLOBAL_STEP: 4850 | > loss: 1.16129 (1.23105) | > log_mle: 0.22923 (0.24960) | > loss_dur: 0.93205 (0.98145) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.29501 (2.40804) | > current_lr: 0.00000 | > step_time: 0.89280 (0.64200) | > loader_time: 2.94830 (2.32658)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.09873 (+0.02744) | > avg_loss: 1.14620 (-0.10582) | > avg_log_mle: 0.22456 (-0.03795) | > avg_loss_dur: 0.92164 (-0.06786) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_4872.pth  > EPOCH: 12/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 10:03:30)   --> STEP: 3/406 -- GLOBAL_STEP: 4875 | > loss: 1.25120 (1.24528) | > log_mle: 0.25092 (0.25100) | > loss_dur: 1.00027 (0.99428) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.56650 (2.90351) | > current_lr: 0.00000 | > step_time: 0.30070 (0.30155) | > loader_time: 1.31660 (1.23587) 
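
The amp_scaler column is the dynamic loss scale used by mixed-precision training. It holds at 32768 through this stretch, doubles to 65536 partway through epoch 14 and to 131072 in epoch 19, then backs off to 65536 in epoch 20 after an overflow step; those mid-epoch switches are also why its running average takes non-power-of-two values such as 34113.31378 and 129529.97647 below. This pattern is consistent with PyTorch's GradScaler, which doubles the scale after a long streak of finite gradients and halves it (skipping the optimizer step) when gradients overflow. A sketch of the loop under that assumption; the model call returning the two loss terms is hypothetical:

# Mixed-precision step consistent with the amp_scaler column,
# assuming torch.cuda.amp. The model forward API is hypothetical.
import torch

scaler = torch.cuda.amp.GradScaler()  # dynamic loss scale, a power of two

def train_step(model, batch, optimizer):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        log_mle, loss_dur = model(batch)   # hypothetical forward pass
        loss = log_mle + loss_dur
    scaler.scale(loss).backward()   # backward on the scaled loss
    scaler.step(optimizer)          # skipped if grads are inf/NaN
    scaler.update()                 # halve on overflow; double after a
                                    # long streak of overflow-free steps
    return scaler.get_scale()       # the value logged as amp_scaler
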
 --> STEP: 28/406 -- GLOBAL_STEP: 4900 | > loss: 1.17654 (1.18992) | > log_mle: 0.24867 (0.25250) | > loss_dur: 0.92787 (0.93742) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.17389 (2.50973) | > current_lr: 0.00000 | > step_time: 0.36380 (0.31142) | > loader_time: 1.93740 (1.48728)  --> STEP: 53/406 -- GLOBAL_STEP: 4925 | > loss: 1.15333 (1.18015) | > log_mle: 0.24664 (0.24935) | > loss_dur: 0.90669 (0.93079) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.41848 (2.39002) | > current_lr: 0.00000 | > step_time: 0.45560 (0.35889) | > loader_time: 2.34120 (1.67044)  --> STEP: 78/406 -- GLOBAL_STEP: 4950 | > loss: 1.12611 (1.17304) | > log_mle: 0.23645 (0.24497) | > loss_dur: 0.88966 (0.92807) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.14772 (2.35734) | > current_lr: 0.00000 | > step_time: 0.38000 (0.39158) | > loader_time: 1.65000 (1.72278)  --> STEP: 103/406 -- GLOBAL_STEP: 4975 | > loss: 1.13303 (1.16578) | > log_mle: 0.20101 (0.24031) | > loss_dur: 0.93202 (0.92547) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.96942 (2.33709) | > current_lr: 0.00000 | > step_time: 0.40750 (0.41898) | > loader_time: 1.57430 (1.74268)  --> STEP: 128/406 -- GLOBAL_STEP: 5000 | > loss: 1.17277 (1.15911) | > log_mle: 0.21362 (0.23656) | > loss_dur: 0.95915 (0.92256) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.28186 (2.36087) | > current_lr: 0.00000 | > step_time: 0.58060 (0.44209) | > loader_time: 1.85400 (1.77511)  --> STEP: 153/406 -- GLOBAL_STEP: 5025 | > loss: 1.13765 (1.15570) | > log_mle: 0.22049 (0.23332) | > loss_dur: 0.91716 (0.92238) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.63199 (2.35428) | > current_lr: 0.00000 | > step_time: 0.52450 (0.46280) | > loader_time: 2.55750 (1.85892)  --> STEP: 178/406 -- GLOBAL_STEP: 5050 | > loss: 1.12247 (1.15198) | > log_mle: 0.22145 (0.23040) | > loss_dur: 0.90103 (0.92158) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.72802 (2.36214) | > current_lr: 0.00000 | > step_time: 0.55690 (0.48195) | > loader_time: 2.03050 (1.93703)  --> STEP: 203/406 -- GLOBAL_STEP: 5075 | > loss: 1.12946 (1.14824) | > log_mle: 0.21736 (0.22768) | > loss_dur: 0.91210 (0.92056) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.96276 (2.43284) | > current_lr: 0.00000 | > step_time: 0.60150 (0.50045) | > loader_time: 2.27760 (1.99759)  --> STEP: 228/406 -- GLOBAL_STEP: 5100 | > loss: 1.13766 (1.14430) | > log_mle: 0.19594 (0.22500) | > loss_dur: 0.94172 (0.91930) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.57340 (2.44162) | > current_lr: 0.00000 | > step_time: 0.69190 (0.52080) | > loader_time: 2.16150 (2.04930)  --> STEP: 253/406 -- GLOBAL_STEP: 5125 | > loss: 1.12917 (1.14129) | > log_mle: 0.18708 (0.22253) | > loss_dur: 0.94209 (0.91876) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.26057 (2.43592) | > current_lr: 0.00000 | > step_time: 0.67690 (0.54006) | > loader_time: 2.27300 (2.09187)  --> STEP: 278/406 -- GLOBAL_STEP: 5150 | > loss: 1.09002 (1.13790) | > log_mle: 0.21044 (0.22030) | > loss_dur: 0.87958 (0.91760) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.08396 (2.42829) | > current_lr: 0.00000 | > step_time: 0.78320 (0.56019) | > loader_time: 2.29870 (2.11545)  --> STEP: 303/406 -- GLOBAL_STEP: 5175 | > loss: 1.11645 (1.13511) | > log_mle: 0.20075 (0.21823) | > loss_dur: 0.91570 (0.91687) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.23935 (2.41150) | > current_lr: 0.00000 | > step_time: 0.77750 (0.58203) | > loader_time: 
2.19950 (2.13815)  --> STEP: 328/406 -- GLOBAL_STEP: 5200 | > loss: 1.13125 (1.13186) | > log_mle: 0.19120 (0.21629) | > loss_dur: 0.94005 (0.91558) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.18570 (2.39925) | > current_lr: 0.00000 | > step_time: 0.83390 (0.60036) | > loader_time: 2.76730 (2.17399)  --> STEP: 353/406 -- GLOBAL_STEP: 5225 | > loss: 1.08862 (1.12975) | > log_mle: 0.17697 (0.21424) | > loss_dur: 0.91165 (0.91551) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.25039 (2.40600) | > current_lr: 0.00000 | > step_time: 0.93980 (0.62179) | > loader_time: 2.76200 (2.21695)  --> STEP: 378/406 -- GLOBAL_STEP: 5250 | > loss: 1.08523 (1.12648) | > log_mle: 0.19200 (0.21225) | > loss_dur: 0.89322 (0.91424) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.18938 (2.39856) | > current_lr: 0.00000 | > step_time: 0.85410 (0.64245) | > loader_time: 3.08040 (2.25452)  --> STEP: 403/406 -- GLOBAL_STEP: 5275 | > loss: 1.04455 (1.12316) | > log_mle: 0.18261 (0.21035) | > loss_dur: 0.86194 (0.91281) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.10067 (2.38826) | > current_lr: 0.00000 | > step_time: 0.95120 (0.66131) | > loader_time: 3.06690 (2.28946)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.10477 (+0.00604) | > avg_loss: 1.04789 (-0.09831) | > avg_log_mle: 0.18673 (-0.03783) | > avg_loss_dur: 0.86116 (-0.06048) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_5278.pth  > EPOCH: 13/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 10:24:02)   --> STEP: 22/406 -- GLOBAL_STEP: 5300 | > loss: 1.02650 (1.08775) | > log_mle: 0.21650 (0.21637) | > loss_dur: 0.81000 (0.87138) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.22766 (2.27365) | > current_lr: 0.00000 | > step_time: 0.37660 (0.30410) | > loader_time: 1.80980 (1.45334)  --> STEP: 47/406 -- GLOBAL_STEP: 5325 | > loss: 1.09366 (1.07700) | > log_mle: 0.19734 (0.21395) | > loss_dur: 0.89632 (0.86305) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.46328 (2.26078) | > current_lr: 0.00000 | > step_time: 0.35240 (0.34568) | > loader_time: 1.96100 (1.67364)  --> STEP: 72/406 -- GLOBAL_STEP: 5350 | > loss: 1.04907 (1.07063) | > log_mle: 0.20828 (0.20932) | > loss_dur: 0.84079 (0.86130) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.35592 (2.28033) | > current_lr: 0.00000 | > step_time: 0.45040 (0.37802) | > loader_time: 1.60750 (1.71310)  --> STEP: 97/406 -- GLOBAL_STEP: 5375 | > loss: 1.02715 (1.06330) | > log_mle: 0.19248 (0.20456) | > loss_dur: 0.83467 (0.85875) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.53288 (2.31411) | > current_lr: 0.00000 | > step_time: 0.53980 (0.40724) | > loader_time: 1.81930 (1.75946)  --> STEP: 122/406 -- GLOBAL_STEP: 5400 | > loss: 1.00207 (1.05670) | > log_mle: 0.19442 (0.20072) | > loss_dur: 0.80765 (0.85598) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.15784 (2.30115) | > current_lr: 0.00000 | > step_time: 0.50270 (0.43074) | > loader_time: 1.84370 (1.76714)  --> STEP: 147/406 -- GLOBAL_STEP: 5425 | > loss: 1.04106 (1.05313) | > log_mle: 0.18111 (0.19718) | > loss_dur: 0.85995 (0.85596) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.17534 (2.28178) | > current_lr: 0.00000 | > step_time: 0.53130 (0.45351) | > loader_time: 2.01570 (1.80956)  --> STEP: 172/406 -- GLOBAL_STEP: 5450 | > loss: 1.01768 (1.04924) | > log_mle: 0.17793 (0.19426) | > loss_dur: 0.83975 (0.85498) | > 
amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 1.94584 (2.27083) | > current_lr: 0.00000 | > step_time: 0.59570 (0.47222) | > loader_time: 2.20620 (1.89277)  --> STEP: 197/406 -- GLOBAL_STEP: 5475 | > loss: 1.02426 (1.04599) | > log_mle: 0.17513 (0.19150) | > loss_dur: 0.84913 (0.85450) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.39347 (2.30118) | > current_lr: 0.00000 | > step_time: 0.56600 (0.49419) | > loader_time: 2.32850 (1.94858)  --> STEP: 222/406 -- GLOBAL_STEP: 5500 | > loss: 0.99649 (1.04196) | > log_mle: 0.16359 (0.18896) | > loss_dur: 0.83290 (0.85299) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.07585 (2.32388) | > current_lr: 0.00000 | > step_time: 0.69410 (0.51076) | > loader_time: 2.37010 (2.00060)  --> STEP: 247/406 -- GLOBAL_STEP: 5525 | > loss: 0.98053 (1.03852) | > log_mle: 0.18364 (0.18658) | > loss_dur: 0.79689 (0.85194) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.20051 (2.30746) | > current_lr: 0.00000 | > step_time: 0.78100 (0.52999) | > loader_time: 2.40530 (2.04282)  --> STEP: 272/406 -- GLOBAL_STEP: 5550 | > loss: 0.98957 (1.03533) | > log_mle: 0.15604 (0.18419) | > loss_dur: 0.83354 (0.85113) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.01190 (2.29004) | > current_lr: 0.00000 | > step_time: 0.74140 (0.54761) | > loader_time: 2.65060 (2.07874)  --> STEP: 297/406 -- GLOBAL_STEP: 5575 | > loss: 0.99033 (1.03238) | > log_mle: 0.16163 (0.18217) | > loss_dur: 0.82869 (0.85021) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.01919 (2.29530) | > current_lr: 0.00000 | > step_time: 0.72900 (0.56645) | > loader_time: 2.70180 (2.10757)  --> STEP: 322/406 -- GLOBAL_STEP: 5600 | > loss: 1.02396 (1.02908) | > log_mle: 0.15174 (0.18024) | > loss_dur: 0.87222 (0.84884) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.08260 (2.33987) | > current_lr: 0.00000 | > step_time: 0.77800 (0.58459) | > loader_time: 2.85200 (2.13871)  --> STEP: 347/406 -- GLOBAL_STEP: 5625 | > loss: 1.00185 (1.02673) | > log_mle: 0.14444 (0.17826) | > loss_dur: 0.85741 (0.84847) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.83399 (2.35129) | > current_lr: 0.00000 | > step_time: 0.83290 (0.60329) | > loader_time: 2.83170 (2.18575)  --> STEP: 372/406 -- GLOBAL_STEP: 5650 | > loss: 0.97082 (1.02360) | > log_mle: 0.15735 (0.17634) | > loss_dur: 0.81347 (0.84725) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.07717 (2.33641) | > current_lr: 0.00000 | > step_time: 0.90580 (0.62265) | > loader_time: 2.82190 (2.22383)  --> STEP: 397/406 -- GLOBAL_STEP: 5675 | > loss: 0.97134 (1.02044) | > log_mle: 0.14968 (0.17451) | > loss_dur: 0.82166 (0.84593) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.71902 (2.32580) | > current_lr: 0.00000 | > step_time: 1.06880 (0.64658) | > loader_time: 2.44430 (2.25423)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.07332 (-0.03146) | > avg_loss: 0.94557 (-0.10233) | > avg_log_mle: 0.15062 (-0.03611) | > avg_loss_dur: 0.79495 (-0.06621) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_5684.pth  > EPOCH: 14/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 10:44:16)   --> STEP: 16/406 -- GLOBAL_STEP: 5700 | > loss: 1.00190 (0.99342) | > log_mle: 0.17435 (0.17936) | > loss_dur: 0.82755 (0.81406) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.65827 (2.28969) | > current_lr: 0.00000 | > step_time: 0.23920 (0.30457) | > loader_time: 
1.29470 (1.20192)  --> STEP: 41/406 -- GLOBAL_STEP: 5725 | > loss: 0.95316 (0.97528) | > log_mle: 0.18530 (0.17942) | > loss_dur: 0.76786 (0.79586) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.19472 (2.17793) | > current_lr: 0.00000 | > step_time: 0.30130 (0.34044) | > loader_time: 1.42940 (1.49483)  --> STEP: 66/406 -- GLOBAL_STEP: 5750 | > loss: 0.97173 (0.96810) | > log_mle: 0.16291 (0.17498) | > loss_dur: 0.80882 (0.79313) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 1.99384 (2.15365) | > current_lr: 0.00000 | > step_time: 0.39430 (0.36878) | > loader_time: 1.85950 (1.57305)  --> STEP: 91/406 -- GLOBAL_STEP: 5775 | > loss: 0.92456 (0.96212) | > log_mle: 0.14024 (0.17048) | > loss_dur: 0.78431 (0.79164) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.01462 (2.18665) | > current_lr: 0.00000 | > step_time: 0.42660 (0.40129) | > loader_time: 1.70580 (1.63475)  --> STEP: 116/406 -- GLOBAL_STEP: 5800 | > loss: 0.93057 (0.95528) | > log_mle: 0.15790 (0.16647) | > loss_dur: 0.77267 (0.78881) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.09909 (2.16073) | > current_lr: 0.00000 | > step_time: 0.42500 (0.42816) | > loader_time: 1.56950 (1.66000)  --> STEP: 141/406 -- GLOBAL_STEP: 5825 | > loss: 0.94767 (0.95162) | > log_mle: 0.14481 (0.16288) | > loss_dur: 0.80286 (0.78875) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 1.99329 (2.19661) | > current_lr: 0.00000 | > step_time: 0.57490 (0.45115) | > loader_time: 1.83820 (1.68420)  --> STEP: 166/406 -- GLOBAL_STEP: 5850 | > loss: 0.92328 (0.94796) | > log_mle: 0.13067 (0.15995) | > loss_dur: 0.79261 (0.78801) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.43192 (2.19069) | > current_lr: 0.00000 | > step_time: 0.62530 (0.47144) | > loader_time: 2.39350 (1.74462)  --> STEP: 191/406 -- GLOBAL_STEP: 5875 | > loss: 0.90414 (0.94467) | > log_mle: 0.13559 (0.15717) | > loss_dur: 0.76855 (0.78750) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.30896 (2.19571) | > current_lr: 0.00000 | > step_time: 0.67770 (0.49045) | > loader_time: 2.53360 (1.81872)  --> STEP: 216/406 -- GLOBAL_STEP: 5900 | > loss: 0.90236 (0.94085) | > log_mle: 0.12707 (0.15477) | > loss_dur: 0.77529 (0.78608) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.22816 (2.17132) | > current_lr: 0.00000 | > step_time: 0.68690 (0.50939) | > loader_time: 2.16130 (1.86900)  --> STEP: 241/406 -- GLOBAL_STEP: 5925 | > loss: 0.89977 (0.93770) | > log_mle: 0.14124 (0.15239) | > loss_dur: 0.75853 (0.78531) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.68819 (2.16703) | > current_lr: 0.00000 | > step_time: 0.75280 (0.52830) | > loader_time: 2.39030 (1.91200)  --> STEP: 266/406 -- GLOBAL_STEP: 5950 | > loss: 0.91375 (0.93460) | > log_mle: 0.13438 (0.15013) | > loss_dur: 0.77937 (0.78448) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 1.80741 (2.17355) | > current_lr: 0.00000 | > step_time: 0.76090 (0.54783) | > loader_time: 2.51310 (1.95816)  --> STEP: 291/406 -- GLOBAL_STEP: 5975 | > loss: 0.92023 (0.93155) | > log_mle: 0.13882 (0.14801) | > loss_dur: 0.78141 (0.78354) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 1.84725 (2.16132) | > current_lr: 0.00000 | > step_time: 0.73150 (0.56708) | > loader_time: 2.43020 (2.00390)  --> STEP: 316/406 -- GLOBAL_STEP: 6000 | > loss: 0.88624 (0.92860) | > log_mle: 0.12356 (0.14609) | > loss_dur: 0.76269 (0.78251) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 1.88216 (2.16313) | > current_lr: 0.00000 | > step_time: 0.98110 
(0.58724) | > loader_time: 2.62660 (2.04164)  --> STEP: 341/406 -- GLOBAL_STEP: 6025 | > loss: 0.88454 (0.92622) | > log_mle: 0.10591 (0.14425) | > loss_dur: 0.77863 (0.78197) | > amp_scaler: 65536.00000 (34113.31378) | > grad_norm: 5.70181 (2.18458) | > current_lr: 0.00000 | > step_time: 0.83290 (0.61379) | > loader_time: 2.90490 (2.09965)  --> STEP: 366/406 -- GLOBAL_STEP: 6050 | > loss: 0.87949 (0.92343) | > log_mle: 0.11251 (0.14233) | > loss_dur: 0.76699 (0.78110) | > amp_scaler: 65536.00000 (36259.67213) | > grad_norm: 2.09164 (2.20260) | > current_lr: 0.00000 | > step_time: 0.84170 (0.63250) | > loader_time: 3.00110 (2.16151)  --> STEP: 391/406 -- GLOBAL_STEP: 6075 | > loss: 0.87505 (0.92047) | > log_mle: 0.10405 (0.14057) | > loss_dur: 0.77100 (0.77990) | > amp_scaler: 65536.00000 (38131.56010) | > grad_norm: 2.09093 (2.20270) | > current_lr: 0.00000 | > step_time: 0.88790 (0.65049) | > loader_time: 2.75030 (2.21116)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.08898 (+0.01566) | > avg_loss: 0.84867 (-0.09689) | > avg_log_mle: 0.11730 (-0.03332) | > avg_loss_dur: 0.73138 (-0.06357) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_6090.pth  > EPOCH: 15/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 11:04:24)   --> STEP: 10/406 -- GLOBAL_STEP: 6100 | > loss: 0.88356 (0.90483) | > log_mle: 0.14603 (0.15052) | > loss_dur: 0.73753 (0.75431) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.04802 (2.38087) | > current_lr: 0.00000 | > step_time: 0.31670 (0.30248) | > loader_time: 1.67380 (1.21728)  --> STEP: 35/406 -- GLOBAL_STEP: 6125 | > loss: 0.85852 (0.87719) | > log_mle: 0.12792 (0.14565) | > loss_dur: 0.73061 (0.73154) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.77435 (2.14139) | > current_lr: 0.00000 | > step_time: 0.28180 (0.33797) | > loader_time: 1.58960 (1.50426)  --> STEP: 60/406 -- GLOBAL_STEP: 6150 | > loss: 0.83465 (0.87068) | > log_mle: 0.13209 (0.14217) | > loss_dur: 0.70255 (0.72851) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.79910 (2.03058) | > current_lr: 0.00000 | > step_time: 0.33460 (0.37218) | > loader_time: 1.63880 (1.63301)  --> STEP: 85/406 -- GLOBAL_STEP: 6175 | > loss: 0.81600 (0.86602) | > log_mle: 0.10938 (0.13816) | > loss_dur: 0.70662 (0.72786) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.76565 (2.04513) | > current_lr: 0.00000 | > step_time: 0.38260 (0.40036) | > loader_time: 1.57370 (1.67453)  --> STEP: 110/406 -- GLOBAL_STEP: 6200 | > loss: 0.85372 (0.85952) | > log_mle: 0.10825 (0.13420) | > loss_dur: 0.74546 (0.72532) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.81643 (2.03321) | > current_lr: 0.00000 | > step_time: 0.51630 (0.42559) | > loader_time: 1.67230 (1.69237)  --> STEP: 135/406 -- GLOBAL_STEP: 6225 | > loss: 0.86273 (0.85547) | > log_mle: 0.11569 (0.13065) | > loss_dur: 0.74704 (0.72482) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.19288 (2.04049) | > current_lr: 0.00000 | > step_time: 0.63130 (0.44694) | > loader_time: 2.16290 (1.73840)  --> STEP: 160/406 -- GLOBAL_STEP: 6250 | > loss: 0.82481 (0.85233) | > log_mle: 0.11221 (0.12782) | > loss_dur: 0.71260 (0.72451) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.99797 (2.04038) | > current_lr: 0.00000 | > step_time: 0.63850 (0.46965) | > loader_time: 2.55570 (1.81283)  --> STEP: 185/406 -- GLOBAL_STEP: 6275 | > loss: 0.81827 (0.84992) | > log_mle: 0.11121 (0.12515) | > 
loss_dur: 0.70706 (0.72477) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.59162 (2.05876) | > current_lr: 0.00000 | > step_time: 0.61430 (0.48937) | > loader_time: 2.89050 (1.91495)  --> STEP: 210/406 -- GLOBAL_STEP: 6300 | > loss: 0.80603 (0.84680) | > log_mle: 0.10976 (0.12274) | > loss_dur: 0.69627 (0.72406) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.02422 (2.08904) | > current_lr: 0.00000 | > step_time: 0.62310 (0.50876) | > loader_time: 2.48990 (1.99623)  --> STEP: 235/406 -- GLOBAL_STEP: 6325 | > loss: 0.82942 (0.84340) | > log_mle: 0.09985 (0.12023) | > loss_dur: 0.72956 (0.72317) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.84982 (2.08347) | > current_lr: 0.00000 | > step_time: 0.69420 (0.53567) | > loader_time: 2.64490 (2.04457)  --> STEP: 260/406 -- GLOBAL_STEP: 6350 | > loss: 0.77415 (0.84069) | > log_mle: 0.08921 (0.11799) | > loss_dur: 0.68494 (0.72270) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.10852 (2.07371) | > current_lr: 0.00000 | > step_time: 0.69180 (0.55327) | > loader_time: 2.30000 (2.08789)  --> STEP: 285/406 -- GLOBAL_STEP: 6375 | > loss: 0.79432 (0.83785) | > log_mle: 0.09805 (0.11598) | > loss_dur: 0.69627 (0.72186) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.87403 (2.08823) | > current_lr: 0.00000 | > step_time: 0.74010 (0.57177) | > loader_time: 2.56100 (2.13342)  --> STEP: 310/406 -- GLOBAL_STEP: 6400 | > loss: 0.77572 (0.83541) | > log_mle: 0.10057 (0.11425) | > loss_dur: 0.67514 (0.72116) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.71324 (2.09712) | > current_lr: 0.00000 | > step_time: 0.77790 (0.59108) | > loader_time: 2.56350 (2.16368)  --> STEP: 335/406 -- GLOBAL_STEP: 6425 | > loss: 0.82341 (0.83299) | > log_mle: 0.08130 (0.11246) | > loss_dur: 0.74212 (0.72053) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.21265 (2.11747) | > current_lr: 0.00000 | > step_time: 0.83680 (0.61075) | > loader_time: 3.14040 (2.22320)  --> STEP: 360/406 -- GLOBAL_STEP: 6450 | > loss: 0.79910 (0.83068) | > log_mle: 0.08271 (0.11064) | > loss_dur: 0.71639 (0.72004) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.66862 (2.14137) | > current_lr: 0.00000 | > step_time: 0.81090 (0.62895) | > loader_time: 2.96480 (2.27350)  --> STEP: 385/406 -- GLOBAL_STEP: 6475 | > loss: 0.78327 (0.82792) | > log_mle: 0.08866 (0.10897) | > loss_dur: 0.69461 (0.71895) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.96199 (2.13289) | > current_lr: 0.00000 | > step_time: 1.06950 (0.64796) | > loader_time: 2.87730 (2.32215)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.17881 (+0.08984) | > avg_loss: 0.75686 (-0.09181) | > avg_log_mle: 0.08445 (-0.03285) | > avg_loss_dur: 0.67242 (-0.05896) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_6496.pth  > EPOCH: 16/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 11:25:31)   --> STEP: 4/406 -- GLOBAL_STEP: 6500 | > loss: 0.87785 (0.83530) | > log_mle: 0.13780 (0.11983) | > loss_dur: 0.74005 (0.71548) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.98770 (2.32815) | > current_lr: 0.00000 | > step_time: 0.31970 (0.31894) | > loader_time: 1.56830 (1.42207)  --> STEP: 29/406 -- GLOBAL_STEP: 6525 | > loss: 0.78056 (0.78767) | > log_mle: 0.11443 (0.11520) | > loss_dur: 0.66613 (0.67247) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.87768 (1.93910) | > current_lr: 0.00000 | > step_time: 0.34050 
(0.34190) | > loader_time: 1.41210 (1.55573)  --> STEP: 54/406 -- GLOBAL_STEP: 6550 | > loss: 0.77674 (0.78266) | > log_mle: 0.10479 (0.11223) | > loss_dur: 0.67195 (0.67043) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.85235 (2.01726) | > current_lr: 0.00000 | > step_time: 0.36580 (0.38226) | > loader_time: 2.12140 (1.69044)  --> STEP: 79/406 -- GLOBAL_STEP: 6575 | > loss: 0.76706 (0.77787) | > log_mle: 0.09122 (0.10814) | > loss_dur: 0.67584 (0.66973) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 3.15325 (2.03767) | > current_lr: 0.00000 | > step_time: 0.49610 (0.41900) | > loader_time: 1.91060 (1.76630)  --> STEP: 104/406 -- GLOBAL_STEP: 6600 | > loss: 0.73074 (0.77290) | > log_mle: 0.10001 (0.10409) | > loss_dur: 0.63074 (0.66881) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.80878 (2.34393) | > current_lr: 0.00000 | > step_time: 0.54880 (0.44265) | > loader_time: 2.08630 (1.80504)  --> STEP: 129/406 -- GLOBAL_STEP: 6625 | > loss: 0.75989 (0.76845) | > log_mle: 0.09058 (0.10060) | > loss_dur: 0.66931 (0.66785) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.88958 (2.28491) | > current_lr: 0.00000 | > step_time: 0.48940 (0.46405) | > loader_time: 2.12410 (1.84989)  --> STEP: 154/406 -- GLOBAL_STEP: 6650 | > loss: 0.73025 (0.76597) | > log_mle: 0.08493 (0.09769) | > loss_dur: 0.64533 (0.66828) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.72769 (2.27288) | > current_lr: 0.00000 | > step_time: 0.59740 (0.48275) | > loader_time: 2.83590 (1.93054)  --> STEP: 179/406 -- GLOBAL_STEP: 6675 | > loss: 0.74534 (0.76370) | > log_mle: 0.07594 (0.09509) | > loss_dur: 0.66940 (0.66861) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.75924 (2.19808) | > current_lr: 0.00000 | > step_time: 0.65250 (0.50124) | > loader_time: 2.93130 (2.04968)  --> STEP: 204/406 -- GLOBAL_STEP: 6700 | > loss: 0.73245 (0.76125) | > log_mle: 0.07699 (0.09270) | > loss_dur: 0.65546 (0.66856) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.37404 (2.14524) | > current_lr: 0.00000 | > step_time: 0.63340 (0.51811) | > loader_time: 2.44330 (2.12738)  --> STEP: 229/406 -- GLOBAL_STEP: 6725 | > loss: 0.72765 (0.75810) | > log_mle: 0.07821 (0.09033) | > loss_dur: 0.64943 (0.66777) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 3.40260 (2.13140) | > current_lr: 0.00000 | > step_time: 0.67750 (0.53570) | > loader_time: 2.59920 (2.18043)  --> STEP: 254/406 -- GLOBAL_STEP: 6750 | > loss: 0.73265 (0.75592) | > log_mle: 0.07578 (0.08814) | > loss_dur: 0.65687 (0.66778) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.83874 (2.20232) | > current_lr: 0.00000 | > step_time: 0.72420 (0.55353) | > loader_time: 3.10140 (2.22796)  --> STEP: 279/406 -- GLOBAL_STEP: 6775 | > loss: 0.74165 (0.75340) | > log_mle: 0.07022 (0.08620) | > loss_dur: 0.67143 (0.66720) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.55920 (2.16462) | > current_lr: 0.00000 | > step_time: 0.68050 (0.57138) | > loader_time: 2.51000 (2.25971)  --> STEP: 304/406 -- GLOBAL_STEP: 6800 | > loss: 0.70784 (0.75124) | > log_mle: 0.06112 (0.08440) | > loss_dur: 0.64672 (0.66684) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.87951 (2.13850) | > current_lr: 0.00000 | > step_time: 0.81100 (0.59237) | > loader_time: 2.81840 (2.28761)  --> STEP: 329/406 -- GLOBAL_STEP: 6825 | > loss: 0.74032 (0.74885) | > log_mle: 0.07267 (0.08281) | > loss_dur: 0.66766 (0.66603) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.45145 (2.11407) | > current_lr: 0.00000 | > 
step_time: 0.87860 (0.61045) | > loader_time: 3.09190 (2.34143)  --> STEP: 354/406 -- GLOBAL_STEP: 6850 | > loss: 0.73188 (0.74728) | > log_mle: 0.06169 (0.08109) | > loss_dur: 0.67019 (0.66619) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.62111 (2.16132) | > current_lr: 0.00000 | > step_time: 0.83370 (0.63011) | > loader_time: 3.22000 (2.40898)  --> STEP: 379/406 -- GLOBAL_STEP: 6875 | > loss: 0.70260 (0.74508) | > log_mle: 0.05484 (0.07941) | > loss_dur: 0.64776 (0.66568) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.61433 (2.18605) | > current_lr: 0.00000 | > step_time: 0.97280 (0.65190) | > loader_time: 3.66410 (2.46086)  --> STEP: 404/406 -- GLOBAL_STEP: 6900 | > loss: 0.72052 (0.74279) | > log_mle: 0.05505 (0.07779) | > loss_dur: 0.66547 (0.66501) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.64656 (2.16262) | > current_lr: 0.00000 | > step_time: 1.47790 (0.67909) | > loader_time: 2.80880 (2.49844)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.09973 (-0.07909) | > avg_loss: 0.68373 (-0.07313) | > avg_log_mle: 0.05649 (-0.02796) | > avg_loss_dur: 0.62725 (-0.04517) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_6902.pth  > EPOCH: 17/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 11:47:41)   --> STEP: 23/406 -- GLOBAL_STEP: 6925 | > loss: 0.71056 (0.70971) | > log_mle: 0.08638 (0.08723) | > loss_dur: 0.62418 (0.62248) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.51052 (1.84014) | > current_lr: 0.00000 | > step_time: 0.35220 (0.33722) | > loader_time: 1.78860 (1.51293)  --> STEP: 48/406 -- GLOBAL_STEP: 6950 | > loss: 0.70648 (0.70483) | > log_mle: 0.07952 (0.08516) | > loss_dur: 0.62696 (0.61967) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.83981 (1.77796) | > current_lr: 0.00000 | > step_time: 0.41700 (0.36757) | > loader_time: 1.83590 (1.74833)  --> STEP: 73/406 -- GLOBAL_STEP: 6975 | > loss: 0.65369 (0.70146) | > log_mle: 0.05432 (0.08080) | > loss_dur: 0.59938 (0.62066) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.62392 (1.76317) | > current_lr: 0.00000 | > step_time: 0.52030 (0.40221) | > loader_time: 1.92010 (1.83229)  --> STEP: 98/406 -- GLOBAL_STEP: 7000 | > loss: 0.69573 (0.69851) | > log_mle: 0.07162 (0.07685) | > loss_dur: 0.62411 (0.62166) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.63760 (1.97515) | > current_lr: 0.00000 | > step_time: 0.53940 (0.42564) | > loader_time: 1.94040 (1.87679)  --> STEP: 123/406 -- GLOBAL_STEP: 7025 | > loss: 0.69146 (0.69416) | > log_mle: 0.05086 (0.07329) | > loss_dur: 0.64060 (0.62087) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.28379 (1.96829) | > current_lr: 0.00000 | > step_time: 0.50340 (0.44822) | > loader_time: 2.02610 (1.91093)  --> STEP: 148/406 -- GLOBAL_STEP: 7050 | > loss: 0.68895 (0.69172) | > log_mle: 0.04897 (0.07026) | > loss_dur: 0.63998 (0.62145) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.73259 (2.00058) | > current_lr: 0.00000 | > step_time: 0.51050 (0.47037) | > loader_time: 2.36490 (1.95597)  --> STEP: 173/406 -- GLOBAL_STEP: 7075 | > loss: 0.70867 (0.68947) | > log_mle: 0.05488 (0.06780) | > loss_dur: 0.65379 (0.62167) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.75028 (1.95688) | > current_lr: 0.00000 | > step_time: 0.65330 (0.48941) | > loader_time: 2.46120 (2.03509)  --> STEP: 198/406 -- GLOBAL_STEP: 7100 | > loss: 0.66233 (0.68771) | > log_mle: 0.04541 
(0.06533) | > loss_dur: 0.61692 (0.62238) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 3.03263 (1.96591) | > current_lr: 0.00000 | > step_time: 0.59340 (0.51013) | > loader_time: 2.48670 (2.09235)  --> STEP: 223/406 -- GLOBAL_STEP: 7125 | > loss: 0.65232 (0.68508) | > log_mle: 0.04797 (0.06315) | > loss_dur: 0.60435 (0.62193) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.88420 (1.94327) | > current_lr: 0.00000 | > step_time: 0.70150 (0.52743) | > loader_time: 2.60140 (2.14472)  --> STEP: 248/406 -- GLOBAL_STEP: 7150 | > loss: 0.66399 (0.68333) | > log_mle: 0.04633 (0.06108) | > loss_dur: 0.61766 (0.62224) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.14434 (1.92488) | > current_lr: 0.00000 | > step_time: 0.67440 (0.54646) | > loader_time: 2.19150 (2.17671)  --> STEP: 273/406 -- GLOBAL_STEP: 7175 | > loss: 0.64944 (0.68150) | > log_mle: 0.03404 (0.05903) | > loss_dur: 0.61539 (0.62247) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.53517 (1.92908) | > current_lr: 0.00000 | > step_time: 0.69360 (0.56628) | > loader_time: 2.56480 (2.20391)  --> STEP: 298/406 -- GLOBAL_STEP: 7200 | > loss: 0.65263 (0.67981) | > log_mle: 0.04824 (0.05744) | > loss_dur: 0.60439 (0.62237) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.38549 (1.91233) | > current_lr: 0.00000 | > step_time: 0.89090 (0.58763) | > loader_time: 2.56240 (2.22214)  --> STEP: 323/406 -- GLOBAL_STEP: 7225 | > loss: 0.64757 (0.67777) | > log_mle: 0.03365 (0.05581) | > loss_dur: 0.61392 (0.62196) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.68997 (1.89856) | > current_lr: 0.00000 | > step_time: 0.86480 (0.60658) | > loader_time: 2.62690 (2.25353)  --> STEP: 348/406 -- GLOBAL_STEP: 7250 | > loss: 0.67494 (0.67659) | > log_mle: 0.04247 (0.05422) | > loss_dur: 0.63247 (0.62238) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.36015 (1.92321) | > current_lr: 0.00000 | > step_time: 0.92970 (0.62435) | > loader_time: 2.78830 (2.30251)  --> STEP: 373/406 -- GLOBAL_STEP: 7275 | > loss: 0.66912 (0.67484) | > log_mle: 0.02665 (0.05261) | > loss_dur: 0.64248 (0.62223) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.43369 (1.92862) | > current_lr: 0.00000 | > step_time: 0.95130 (0.64709) | > loader_time: 3.23020 (2.33729)  --> STEP: 398/406 -- GLOBAL_STEP: 7300 | > loss: 0.64225 (0.67301) | > log_mle: 0.02269 (0.05110) | > loss_dur: 0.61956 (0.62192) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.60807 (1.93023) | > current_lr: 0.00000 | > step_time: 0.96850 (0.66719) | > loader_time: 2.15170 (2.36134)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.08366 (-0.01606) | > avg_loss: 0.61616 (-0.06757) | > avg_log_mle: 0.02942 (-0.02706) | > avg_loss_dur: 0.58674 (-0.04051) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_7308.pth  > EPOCH: 18/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 12:08:44)   --> STEP: 17/406 -- GLOBAL_STEP: 7325 | > loss: 0.63872 (0.64835) | > log_mle: 0.05992 (0.05981) | > loss_dur: 0.57881 (0.58854) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.30209 (1.92295) | > current_lr: 0.00000 | > step_time: 0.32280 (0.33716) | > loader_time: 1.29910 (1.18344)  --> STEP: 42/406 -- GLOBAL_STEP: 7350 | > loss: 0.64193 (0.64025) | > log_mle: 0.04849 (0.05973) | > loss_dur: 0.59344 (0.58052) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.44911 (1.66454) | > current_lr: 0.00000 | > 
step_time: 0.57020 (0.34879) | > loader_time: 2.04360 (1.47474)  --> STEP: 67/406 -- GLOBAL_STEP: 7375 | > loss: 0.61392 (0.63857) | > log_mle: 0.04265 (0.05602) | > loss_dur: 0.57127 (0.58255) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.42700 (1.62901) | > current_lr: 0.00000 | > step_time: 0.47850 (0.37316) | > loader_time: 2.15310 (1.57884)  --> STEP: 92/406 -- GLOBAL_STEP: 7400 | > loss: 0.61065 (0.63646) | > log_mle: 0.02913 (0.05198) | > loss_dur: 0.58152 (0.58448) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.03155 (1.67857) | > current_lr: 0.00000 | > step_time: 0.54800 (0.40569) | > loader_time: 2.16040 (1.65677)  --> STEP: 117/406 -- GLOBAL_STEP: 7425 | > loss: 0.62464 (0.63298) | > log_mle: 0.03355 (0.04850) | > loss_dur: 0.59109 (0.58447) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.87521 (1.73615) | > current_lr: 0.00000 | > step_time: 0.56290 (0.43430) | > loader_time: 1.96560 (1.70832)  --> STEP: 142/406 -- GLOBAL_STEP: 7450 | > loss: 0.60952 (0.63092) | > log_mle: 0.02627 (0.04533) | > loss_dur: 0.58324 (0.58559) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.47522 (1.84581) | > current_lr: 0.00000 | > step_time: 0.55820 (0.45551) | > loader_time: 2.15820 (1.74474)  --> STEP: 167/406 -- GLOBAL_STEP: 7475 | > loss: 0.61104 (0.62908) | > log_mle: 0.02405 (0.04281) | > loss_dur: 0.58699 (0.58627) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.17686 (1.90762) | > current_lr: 0.00000 | > step_time: 0.65590 (0.47627) | > loader_time: 1.73090 (1.79367)  --> STEP: 192/406 -- GLOBAL_STEP: 7500 | > loss: 0.58775 (0.62772) | > log_mle: 0.02374 (0.04045) | > loss_dur: 0.56401 (0.58726) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 4.05972 (1.93778) | > current_lr: 0.00000 | > step_time: 0.58240 (0.49603) | > loader_time: 2.13860 (1.83047)  --> STEP: 217/406 -- GLOBAL_STEP: 7525 | > loss: 0.59858 (0.62560) | > log_mle: 0.02135 (0.03844) | > loss_dur: 0.57723 (0.58716) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.90632 (2.02591) | > current_lr: 0.00000 | > step_time: 0.69230 (0.51456) | > loader_time: 1.95780 (1.86553)  --> STEP: 242/406 -- GLOBAL_STEP: 7550 | > loss: 0.60832 (0.62402) | > log_mle: 0.00271 (0.03634) | > loss_dur: 0.60562 (0.58768) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 3.98438 (2.01341) | > current_lr: 0.00000 | > step_time: 0.76270 (0.53452) | > loader_time: 2.24230 (1.90874)  --> STEP: 267/406 -- GLOBAL_STEP: 7575 | > loss: 0.61049 (0.62257) | > log_mle: 0.01358 (0.03451) | > loss_dur: 0.59691 (0.58806) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.74262 (1.99373) | > current_lr: 0.00000 | > step_time: 0.66940 (0.55415) | > loader_time: 2.50390 (1.95696)  --> STEP: 292/406 -- GLOBAL_STEP: 7600 | > loss: 0.62413 (0.62122) | > log_mle: 0.02137 (0.03283) | > loss_dur: 0.60275 (0.58840) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 4.67019 (2.02127) | > current_lr: 0.00000 | > step_time: 0.94210 (0.57847) | > loader_time: 2.71760 (2.00176)  --> STEP: 317/406 -- GLOBAL_STEP: 7625 | > loss: 0.60477 (0.61976) | > log_mle: 0.01892 (0.03124) | > loss_dur: 0.58585 (0.58851) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.54323 (2.11433) | > current_lr: 0.00000 | > step_time: 0.83910 (0.59961) | > loader_time: 2.54460 (2.05020)  --> STEP: 342/406 -- GLOBAL_STEP: 7650 | > loss: 0.59575 (0.61862) | > log_mle: 0.00370 (0.02976) | > loss_dur: 0.59205 (0.58886) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.33530 (2.12415) | > 
current_lr: 0.00000 | > step_time: 0.87640 (0.61630) | > loader_time: 3.11590 (2.10629)  --> STEP: 367/406 -- GLOBAL_STEP: 7675 | > loss: 0.58436 (0.61738) | > log_mle: 0.00987 (0.02821) | > loss_dur: 0.57449 (0.58916) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.84325 (2.12046) | > current_lr: 0.00000 | > step_time: 0.93670 (0.63533) | > loader_time: 2.76440 (2.15070)  --> STEP: 392/406 -- GLOBAL_STEP: 7700 | > loss: 0.59386 (0.61596) | > log_mle: 0.00449 (0.02675) | > loss_dur: 0.58937 (0.58921) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.70919 (2.10098) | > current_lr: 0.00000 | > step_time: 0.88090 (0.65878) | > loader_time: 2.63470 (2.18224)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.06107 (-0.02259) | > avg_loss: 0.56329 (-0.05288) | > avg_log_mle: 0.00567 (-0.02375) | > avg_loss_dur: 0.55761 (-0.02912) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_7714.pth  > EPOCH: 19/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 12:28:42)   --> STEP: 11/406 -- GLOBAL_STEP: 7725 | > loss: 0.58882 (0.59666) | > log_mle: 0.03454 (0.03932) | > loss_dur: 0.55428 (0.55734) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.32118 (1.61813) | > current_lr: 0.00000 | > step_time: 0.52920 (0.33579) | > loader_time: 1.52020 (1.25481)  --> STEP: 36/406 -- GLOBAL_STEP: 7750 | > loss: 0.56091 (0.58552) | > log_mle: 0.04186 (0.03611) | > loss_dur: 0.51906 (0.54940) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.27929 (1.63259) | > current_lr: 0.00000 | > step_time: 0.45030 (0.35026) | > loader_time: 1.90240 (1.46218)  --> STEP: 61/406 -- GLOBAL_STEP: 7775 | > loss: 0.59122 (0.58571) | > log_mle: 0.02340 (0.03284) | > loss_dur: 0.56782 (0.55287) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.24682 (1.71721) | > current_lr: 0.00000 | > step_time: 0.47650 (0.38474) | > loader_time: 1.96000 (1.60891)  --> STEP: 86/406 -- GLOBAL_STEP: 7800 | > loss: 0.57559 (0.58399) | > log_mle: 0.02182 (0.02911) | > loss_dur: 0.55377 (0.55488) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.84747 (1.75610) | > current_lr: 0.00000 | > step_time: 0.53370 (0.41648) | > loader_time: 1.90640 (1.69727)  --> STEP: 111/406 -- GLOBAL_STEP: 7825 | > loss: 0.55674 (0.58076) | > log_mle: 0.00221 (0.02536) | > loss_dur: 0.55453 (0.55541) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.60324 (1.83055) | > current_lr: 0.00000 | > step_time: 0.51040 (0.44547) | > loader_time: 2.01750 (1.75981)  --> STEP: 136/406 -- GLOBAL_STEP: 7850 | > loss: 0.59540 (0.57885) | > log_mle: 0.00325 (0.02220) | > loss_dur: 0.59216 (0.55666) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 3.25412 (1.92249) | > current_lr: 0.00000 | > step_time: 0.59360 (0.46709) | > loader_time: 2.10570 (1.82538)  --> STEP: 161/406 -- GLOBAL_STEP: 7875 | > loss: 0.56041 (0.57736) | > log_mle: 0.01209 (0.01982) | > loss_dur: 0.54832 (0.55754) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.94852 (1.88740) | > current_lr: 0.00000 | > step_time: 0.61980 (0.49023) | > loader_time: 2.58430 (1.90035)  --> STEP: 186/406 -- GLOBAL_STEP: 7900 | > loss: 0.56540 (0.57658) | > log_mle: -0.00345 (0.01745) | > loss_dur: 0.56884 (0.55912) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.65653 (1.91100) | > current_lr: 0.00000 | > step_time: 0.66350 (0.50831) | > loader_time: 2.64980 (1.98152)  --> STEP: 211/406 -- GLOBAL_STEP: 7925 | > loss: 0.54561 
(0.57497) | > log_mle: -0.01409 (0.01545) | > loss_dur: 0.55970 (0.55952) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.80544 (1.93103) | > current_lr: 0.00000 | > step_time: 0.71230 (0.52639) | > loader_time: 2.51740 (2.03852)  --> STEP: 236/406 -- GLOBAL_STEP: 7950 | > loss: 0.56089 (0.57349) | > log_mle: 0.01113 (0.01347) | > loss_dur: 0.54976 (0.56002) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.48231 (1.92610) | > current_lr: 0.00000 | > step_time: 0.65790 (0.54610) | > loader_time: 2.40020 (2.09883)  --> STEP: 261/406 -- GLOBAL_STEP: 7975 | > loss: 0.55260 (0.57209) | > log_mle: -0.00323 (0.01155) | > loss_dur: 0.55583 (0.56053) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.19003 (1.91823) | > current_lr: 0.00000 | > step_time: 0.73340 (0.56303) | > loader_time: 2.45730 (2.13701)  --> STEP: 286/406 -- GLOBAL_STEP: 8000 | > loss: 0.55032 (0.57093) | > log_mle: -0.00925 (0.00989) | > loss_dur: 0.55957 (0.56104) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.28757 (1.97478) | > current_lr: 0.00000 | > step_time: 0.68550 (0.57921) | > loader_time: 2.23800 (2.16275)  --> STEP: 311/406 -- GLOBAL_STEP: 8025 | > loss: 0.54205 (0.56976) | > log_mle: -0.01420 (0.00846) | > loss_dur: 0.55624 (0.56130) | > amp_scaler: 131072.00000 (68486.17363) | > grad_norm: 1.03420 (1.93226) | > current_lr: 0.00000 | > step_time: 0.97000 (0.60080) | > loader_time: 2.07070 (2.17714)  --> STEP: 336/406 -- GLOBAL_STEP: 8050 | > loss: 0.56463 (0.56869) | > log_mle: -0.00178 (0.00707) | > loss_dur: 0.56640 (0.56162) | > amp_scaler: 131072.00000 (73142.85714) | > grad_norm: 2.09438 (1.91971) | > current_lr: 0.00000 | > step_time: 0.90150 (0.61978) | > loader_time: 2.64830 (2.21436)  --> STEP: 361/406 -- GLOBAL_STEP: 8075 | > loss: 0.55958 (0.56762) | > log_mle: -0.00656 (0.00563) | > loss_dur: 0.56614 (0.56199) | > amp_scaler: 131072.00000 (77154.57064) | > grad_norm: 1.29841 (1.96584) | > current_lr: 0.00000 | > step_time: 1.02780 (0.64132) | > loader_time: 2.66280 (2.24640)  --> STEP: 386/406 -- GLOBAL_STEP: 8100 | > loss: 0.55573 (0.56636) | > log_mle: -0.01164 (0.00429) | > loss_dur: 0.56737 (0.56207) | > amp_scaler: 131072.00000 (80646.63212) | > grad_norm: 2.72421 (1.99370) | > current_lr: 0.00000 | > step_time: 1.03130 (0.66340) | > loader_time: 2.53260 (2.27874)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.15752 (+0.09645) | > avg_loss: 0.51247 (-0.05082) | > avg_log_mle: -0.01619 (-0.02187) | > avg_loss_dur: 0.52866 (-0.02895) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_8120.pth  > EPOCH: 20/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 12:49:26)   --> STEP: 5/406 -- GLOBAL_STEP: 8125 | > loss: 0.54112 (0.56025) | > log_mle: 0.02811 (0.02038) | > loss_dur: 0.51301 (0.53987) | > amp_scaler: 131072.00000 (131072.00000) | > grad_norm: 1.35217 (1.75753) | > current_lr: 0.00000 | > step_time: 0.31870 (0.33056) | > loader_time: 0.98650 (1.12710)  --> STEP: 30/406 -- GLOBAL_STEP: 8150 | > loss: 0.52730 (0.53469) | > log_mle: 0.00650 (0.01467) | > loss_dur: 0.52080 (0.52002) | > amp_scaler: 131072.00000 (131072.00000) | > grad_norm: 1.33410 (1.63881) | > current_lr: 0.00000 | > step_time: 0.31310 (0.33610) | > loader_time: 1.86560 (1.46628)  --> STEP: 55/406 -- GLOBAL_STEP: 8175 | > loss: 0.52488 (0.53496) | > log_mle: -0.00521 (0.01205) | > loss_dur: 0.53009 (0.52290) | > amp_scaler: 131072.00000 (131072.00000) | > grad_norm: 
1.94975 (1.68310) | > current_lr: 0.00000 | > step_time: 0.41270 (0.37420) | > loader_time: 1.73030 (1.59201)  --> STEP: 80/406 -- GLOBAL_STEP: 8200 | > loss: 0.53786 (0.53242) | > log_mle: 0.00958 (0.00873) | > loss_dur: 0.52828 (0.52369) | > amp_scaler: 131072.00000 (131072.00000) | > grad_norm: 1.64994 (1.72548) | > current_lr: 0.00000 | > step_time: 0.50570 (0.40678) | > loader_time: 1.72620 (1.67181)  --> STEP: 105/406 -- GLOBAL_STEP: 8225 | > loss: 0.50475 (0.52990) | > log_mle: 0.00193 (0.00503) | > loss_dur: 0.50282 (0.52487) | > amp_scaler: 131072.00000 (131072.00000) | > grad_norm: 2.52956 (1.79567) | > current_lr: 0.00000 | > step_time: 0.58330 (0.43586) | > loader_time: 1.82480 (1.71991)  --> STEP: 130/406 -- GLOBAL_STEP: 8250 | > loss: 0.50816 (0.52745) | > log_mle: -0.01206 (0.00190) | > loss_dur: 0.52022 (0.52555) | > amp_scaler: 131072.00000 (131072.00000) | > grad_norm: 2.89429 (1.92243) | > current_lr: 0.00000 | > step_time: 0.60610 (0.46230) | > loader_time: 1.83340 (1.73046)  --> STEP: 155/406 -- GLOBAL_STEP: 8275 | > loss: 0.50928 (0.52627) | > log_mle: -0.00994 (-0.00057) | > loss_dur: 0.51923 (0.52684) | > amp_scaler: 131072.00000 (131072.00000) | > grad_norm: 1.72901 (1.99829) | > current_lr: 0.00000 | > step_time: 0.66550 (0.48411) | > loader_time: 2.10200 (1.76054)  --> STEP: 180/406 -- GLOBAL_STEP: 8300 | > loss: 0.49535 (0.52522) | > log_mle: -0.01972 (-0.00279) | > loss_dur: 0.51507 (0.52802) | > amp_scaler: 131072.00000 (131072.00000) | > grad_norm: 1.96062 (2.15771) | > current_lr: 0.00000 | > step_time: 0.66740 (0.50356) | > loader_time: 1.99280 (1.80756)  --> STEP: 205/406 -- GLOBAL_STEP: 8325 | > loss: 0.51911 (0.52434) | > log_mle: -0.02778 (-0.00485) | > loss_dur: 0.54689 (0.52919) | > amp_scaler: 131072.00000 (131072.00000) | > grad_norm: 3.27878 (2.24755) | > current_lr: 0.00000 | > step_time: 0.72940 (0.52313) | > loader_time: 2.19860 (1.84532)  --> STEP: 230/406 -- GLOBAL_STEP: 8350 | > loss: 0.50891 (0.52255) | > log_mle: -0.02062 (-0.00682) | > loss_dur: 0.52953 (0.52937) | > amp_scaler: 131072.00000 (131072.00000) | > grad_norm: 1.51235 (2.25042) | > current_lr: 0.00000 | > step_time: 0.62670 (0.54067) | > loader_time: 2.27200 (1.88650)  --> STEP: 255/406 -- GLOBAL_STEP: 8375 | > loss: 0.49785 (0.52128) | > log_mle: -0.02267 (-0.00869) | > loss_dur: 0.52052 (0.52997) | > amp_scaler: 65536.00000 (129529.97647) | > grad_norm: 5.30793 (2.25012) | > current_lr: 0.00000 | > step_time: 0.67000 (0.56041) | > loader_time: 2.33160 (1.93510)  --> STEP: 280/406 -- GLOBAL_STEP: 8400 | > loss: 0.50150 (0.51989) | > log_mle: -0.03442 (-0.01031) | > loss_dur: 0.53592 (0.53020) | > amp_scaler: 65536.00000 (123816.22857) | > grad_norm: 1.54717 (2.19474) | > current_lr: 0.00000 | > step_time: 0.71210 (0.57721) | > loader_time: 2.40450 (1.97214)  --> STEP: 305/406 -- GLOBAL_STEP: 8425 | > loss: 0.51990 (0.51873) | > log_mle: -0.02659 (-0.01178) | > loss_dur: 0.54649 (0.53051) | > amp_scaler: 65536.00000 (119039.16066) | > grad_norm: 3.05272 (2.20973) | > current_lr: 0.00000 | > step_time: 0.86470 (0.59574) | > loader_time: 2.23510 (2.00074)  --> STEP: 330/406 -- GLOBAL_STEP: 8450 | > loss: 0.51872 (0.51738) | > log_mle: -0.04099 (-0.01309) | > loss_dur: 0.55972 (0.53047) | > amp_scaler: 65536.00000 (114985.89091) | > grad_norm: 1.87560 (2.23172) | > current_lr: 0.00000 | > step_time: 0.76450 (0.61426) | > loader_time: 2.25090 (2.04283)  --> STEP: 355/406 -- GLOBAL_STEP: 8475 | > loss: 0.48814 (0.51634) | > log_mle: -0.04023 (-0.01450) | > loss_dur: 0.52837 (0.53084) 
| > amp_scaler: 65536.00000 (111503.50423) | > grad_norm: 1.71652 (2.26772) | > current_lr: 0.00000 | > step_time: 0.84790 (0.63389) | > loader_time: 2.66870 (2.08504)  --> STEP: 380/406 -- GLOBAL_STEP: 8500 | > loss: 0.50377 (0.51510) | > log_mle: -0.03459 (-0.01588) | > loss_dur: 0.53836 (0.53098) | > amp_scaler: 65536.00000 (108479.32632) | > grad_norm: 2.43367 (2.27153) | > current_lr: 0.00000 | > step_time: 0.94090 (0.65358) | > loader_time: 3.05420 (2.12025)  --> STEP: 405/406 -- GLOBAL_STEP: 8525 | > loss: 0.47969 (0.51386) | > log_mle: -0.03269 (-0.01717) | > loss_dur: 0.51238 (0.53103) | > amp_scaler: 65536.00000 (105828.50370) | > grad_norm: 1.51056 (2.27091) | > current_lr: 0.00000 | > step_time: 0.44990 (0.68305) | > loader_time: 0.14410 (2.13752)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.00041 (-0.15710) | > avg_loss: 0.46370 (-0.04877) | > avg_log_mle: -0.03485 (-0.01866) | > avg_loss_dur: 0.49855 (-0.03012) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_8526.pth  > EPOCH: 21/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 13:09:14)   --> STEP: 24/406 -- GLOBAL_STEP: 8550 | > loss: 0.48894 (0.48691) | > log_mle: -0.01141 (-0.00448) | > loss_dur: 0.50035 (0.49139) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.58710 (1.70015) | > current_lr: 0.00001 | > step_time: 0.40710 (0.31567) | > loader_time: 1.76360 (1.24635)  --> STEP: 49/406 -- GLOBAL_STEP: 8575 | > loss: 0.49617 (0.48580) | > log_mle: -0.00939 (-0.00594) | > loss_dur: 0.50556 (0.49174) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.40099 (1.68370) | > current_lr: 0.00001 | > step_time: 0.44000 (0.35663) | > loader_time: 1.80320 (1.45924)  --> STEP: 74/406 -- GLOBAL_STEP: 8600 | > loss: 0.49183 (0.48349) | > log_mle: -0.03804 (-0.01015) | > loss_dur: 0.52987 (0.49364) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 3.56506 (1.95312) | > current_lr: 0.00001 | > step_time: 0.35720 (0.40209) | > loader_time: 1.58770 (1.53181)  --> STEP: 99/406 -- GLOBAL_STEP: 8625 | > loss: 0.47969 (0.48211) | > log_mle: -0.02114 (-0.01338) | > loss_dur: 0.50083 (0.49550) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.01473 (2.16549) | > current_lr: 0.00001 | > step_time: 0.45480 (0.42850) | > loader_time: 1.89590 (1.58305)  --> STEP: 124/406 -- GLOBAL_STEP: 8650 | > loss: 0.45893 (0.47971) | > log_mle: -0.03054 (-0.01647) | > loss_dur: 0.48947 (0.49618) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 3.59767 (2.37554) | > current_lr: 0.00001 | > step_time: 0.60870 (0.44866) | > loader_time: 1.76950 (1.61090)  --> STEP: 149/406 -- GLOBAL_STEP: 8675 | > loss: 0.48400 (0.47860) | > log_mle: -0.03577 (-0.01908) | > loss_dur: 0.51977 (0.49768) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.97928 (2.43723) | > current_lr: 0.00001 | > step_time: 0.65940 (0.47159) | > loader_time: 1.84830 (1.66450)  --> STEP: 174/406 -- GLOBAL_STEP: 8700 | > loss: 0.47599 (0.47764) | > log_mle: -0.03409 (-0.02108) | > loss_dur: 0.51009 (0.49871) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.70520 (2.39137) | > current_lr: 0.00001 | > step_time: 0.65090 (0.49230) | > loader_time: 2.28000 (1.75465)  --> STEP: 199/406 -- GLOBAL_STEP: 8725 | > loss: 0.48088 (0.47660) | > log_mle: -0.02838 (-0.02311) | > loss_dur: 0.50927 (0.49971) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.21901 (2.38723) | > current_lr: 0.00001 | > step_time: 1.24090 
(0.51502) | > loader_time: 2.96930 (1.82421)  --> STEP: 224/406 -- GLOBAL_STEP: 8750 | > loss: 0.46393 (0.47502) | > log_mle: -0.03686 (-0.02494) | > loss_dur: 0.50079 (0.49996) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.96758 (2.34225) | > current_lr: 0.00001 | > step_time: 0.63390 (0.53239) | > loader_time: 2.02210 (1.85865)  --> STEP: 249/406 -- GLOBAL_STEP: 8775 | > loss: 0.47546 (0.47393) | > log_mle: -0.03558 (-0.02664) | > loss_dur: 0.51104 (0.50057) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.07203 (2.31047) | > current_lr: 0.00001 | > step_time: 0.68800 (0.55181) | > loader_time: 1.90280 (1.87904)  --> STEP: 274/406 -- GLOBAL_STEP: 8800 | > loss: 0.47242 (0.47263) | > log_mle: -0.04635 (-0.02835) | > loss_dur: 0.51876 (0.50098) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.68935 (2.40262) | > current_lr: 0.00001 | > step_time: 0.70930 (0.57015) | > loader_time: 1.72160 (1.88755)  --> STEP: 299/406 -- GLOBAL_STEP: 8825 | > loss: 0.44551 (0.47150) | > log_mle: -0.06188 (-0.02965) | > loss_dur: 0.50739 (0.50114) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.35420 (2.38334) | > current_lr: 0.00001 | > step_time: 0.76510 (0.59012) | > loader_time: 2.12890 (1.89523)  --> STEP: 324/406 -- GLOBAL_STEP: 8850 | > loss: 0.42839 (0.47048) | > log_mle: -0.04799 (-0.03090) | > loss_dur: 0.47638 (0.50138) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.42410 (2.35382) | > current_lr: 0.00001 | > step_time: 0.78850 (0.61021) | > loader_time: 2.21430 (1.91420)  --> STEP: 349/406 -- GLOBAL_STEP: 8875 | > loss: 0.47783 (0.46987) | > log_mle: -0.04596 (-0.03214) | > loss_dur: 0.52378 (0.50201) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 3.43248 (2.41983) | > current_lr: 0.00001 | > step_time: 0.80640 (0.63160) | > loader_time: 2.22780 (1.95226)  --> STEP: 374/406 -- GLOBAL_STEP: 8900 | > loss: 0.43511 (0.46859) | > log_mle: -0.04974 (-0.03345) | > loss_dur: 0.48485 (0.50203) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 3.81903 (2.50179) | > current_lr: 0.00001 | > step_time: 1.07360 (0.65356) | > loader_time: 2.77610 (1.98897)  --> STEP: 399/406 -- GLOBAL_STEP: 8925 | > loss: 0.43970 (0.46732) | > log_mle: -0.05321 (-0.03468) | > loss_dur: 0.49291 (0.50200) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.23116 (2.52165) | > current_lr: 0.00001 | > step_time: 1.17980 (0.68166) | > loader_time: 2.56120 (2.02084)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.00212 (+0.00170) | > avg_loss: 0.41871 (-0.04498) | > avg_log_mle: -0.05128 (-0.01643) | > avg_loss_dur: 0.46999 (-0.02855) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_8932.pth  > EPOCH: 22/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 13:28:16)   --> STEP: 18/406 -- GLOBAL_STEP: 8950 | > loss: 0.44240 (0.44247) | > log_mle: -0.01600 (-0.02190) | > loss_dur: 0.45840 (0.46437) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.71502 (1.97383) | > current_lr: 0.00001 | > step_time: 0.25090 (0.31114) | > loader_time: 1.12030 (1.13777)  --> STEP: 43/406 -- GLOBAL_STEP: 8975 | > loss: 0.41444 (0.43994) | > log_mle: -0.01950 (-0.02192) | > loss_dur: 0.43395 (0.46186) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.57268 (1.78726) | > current_lr: 0.00001 | > step_time: 0.39240 (0.33734) | > loader_time: 1.70940 (1.33818)  --> STEP: 68/406 -- GLOBAL_STEP: 9000 | > loss: 0.44090 (0.43888) | > log_mle: 
-0.03775 (-0.02555) | > loss_dur: 0.47865 (0.46443) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.38417 (1.95818) | > current_lr: 0.00001 | > step_time: 0.44870 (0.37483) | > loader_time: 1.53250 (1.45698)  --> STEP: 93/406 -- GLOBAL_STEP: 9025 | > loss: 0.40230 (0.43716) | > log_mle: -0.05484 (-0.02939) | > loss_dur: 0.45714 (0.46655) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 4.21489 (2.07380) | > current_lr: 0.00001 | > step_time: 0.50630 (0.40192) | > loader_time: 1.47310 (1.51128)  --> STEP: 118/406 -- GLOBAL_STEP: 9050 | > loss: 0.42291 (0.43512) | > log_mle: -0.04610 (-0.03232) | > loss_dur: 0.46901 (0.46744) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.71564 (2.24132) | > current_lr: 0.00001 | > step_time: 0.43780 (0.42596) | > loader_time: 1.56450 (1.53057)  --> STEP: 143/406 -- GLOBAL_STEP: 9075 | > loss: 0.41060 (0.43378) | > log_mle: -0.05656 (-0.03517) | > loss_dur: 0.46716 (0.46896) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.36880 (2.28439) | > current_lr: 0.00001 | > step_time: 0.62560 (0.44735) | > loader_time: 1.74930 (1.54928)  --> STEP: 168/406 -- GLOBAL_STEP: 9100 | > loss: 0.44352 (0.43312) | > log_mle: -0.03752 (-0.03711) | > loss_dur: 0.48104 (0.47023) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.42391 (2.35159) | > current_lr: 0.00001 | > step_time: 0.94950 (0.46844) | > loader_time: 2.04350 (1.57618)  --> STEP: 193/406 -- GLOBAL_STEP: 9125 | > loss: 0.43542 (0.43226) | > log_mle: -0.05460 (-0.03917) | > loss_dur: 0.49002 (0.47143) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.85163 (2.30182) | > current_lr: 0.00001 | > step_time: 1.26710 (0.49063) | > loader_time: 2.45420 (1.60772)  --> STEP: 218/406 -- GLOBAL_STEP: 9150 | > loss: 0.42857 (0.43096) | > log_mle: -0.05211 (-0.04082) | > loss_dur: 0.48068 (0.47178) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 3.05389 (2.35978) | > current_lr: 0.00001 | > step_time: 0.58120 (0.51123) | > loader_time: 1.56980 (1.63353)  --> STEP: 243/406 -- GLOBAL_STEP: 9175 | > loss: 0.43464 (0.43006) | > log_mle: -0.06977 (-0.04265) | > loss_dur: 0.50441 (0.47271) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 3.54497 (2.36709) | > current_lr: 0.00001 | > step_time: 0.61630 (0.53159) | > loader_time: 1.73780 (1.66797)  --> STEP: 268/406 -- GLOBAL_STEP: 9200 | > loss: 0.41441 (0.42882) | > log_mle: -0.06332 (-0.04411) | > loss_dur: 0.47773 (0.47292) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.89973 (2.36712) | > current_lr: 0.00001 | > step_time: 0.80450 (0.55197) | > loader_time: 1.92410 (1.69369)  --> STEP: 293/406 -- GLOBAL_STEP: 9225 | > loss: 0.40959 (0.42765) | > log_mle: -0.06629 (-0.04548) | > loss_dur: 0.47587 (0.47313) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.43670 (2.49035) | > current_lr: 0.00001 | > step_time: 0.85970 (0.57266) | > loader_time: 2.22040 (1.71912)  --> STEP: 318/406 -- GLOBAL_STEP: 9250 | > loss: 0.40956 (0.42686) | > log_mle: -0.05902 (-0.04678) | > loss_dur: 0.46858 (0.47364) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.93656 (2.49432) | > current_lr: 0.00001 | > step_time: 0.79330 (0.59440) | > loader_time: 1.83800 (1.74383)  --> STEP: 343/406 -- GLOBAL_STEP: 9275 | > loss: 0.41702 (0.42589) | > log_mle: -0.07229 (-0.04801) | > loss_dur: 0.48931 (0.47390) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 3.48275 (2.47523) | > current_lr: 0.00001 | > step_time: 0.81610 (0.61567) | > loader_time: 2.79710 (1.78776)  --> STEP: 368/406 -- GLOBAL_STEP: 
9300 | > loss: 0.41625 (0.42495) | > log_mle: -0.07497 (-0.04927) | > loss_dur: 0.49122 (0.47422) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 3.46743 (2.50782) | > current_lr: 0.00001 | > step_time: 0.98540 (0.63526) | > loader_time: 2.36790 (1.82862)  --> STEP: 393/406 -- GLOBAL_STEP: 9325 | > loss: 0.39614 (0.42387) | > log_mle: -0.06734 (-0.05043) | > loss_dur: 0.46349 (0.47430) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.72832 (2.46536) | > current_lr: 0.00001 | > step_time: 1.11520 (0.65767) | > loader_time: 2.42140 (1.85823)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 0.93518 (-0.06694) | > avg_loss: 0.37130 (-0.04741) | > avg_log_mle: -0.06830 (-0.01702) | > avg_loss_dur: 0.43961 (-0.03039) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_9338.pth  > EPOCH: 23/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 13:45:59)   --> STEP: 12/406 -- GLOBAL_STEP: 9350 | > loss: 0.38791 (0.39473) | > log_mle: -0.04485 (-0.03542) | > loss_dur: 0.43276 (0.43015) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.19087 (1.78454) | > current_lr: 0.00001 | > step_time: 0.32640 (0.38599) | > loader_time: 1.15020 (0.99471)  --> STEP: 37/406 -- GLOBAL_STEP: 9375 | > loss: 0.39531 (0.39706) | > log_mle: -0.03403 (-0.03723) | > loss_dur: 0.42934 (0.43428) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.63264 (1.81385) | > current_lr: 0.00001 | > step_time: 0.37070 (0.34712) | > loader_time: 1.29320 (1.20758)  --> STEP: 62/406 -- GLOBAL_STEP: 9400 | > loss: 0.38496 (0.39591) | > log_mle: -0.03214 (-0.04004) | > loss_dur: 0.41710 (0.43594) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.61394 (1.89919) | > current_lr: 0.00001 | > step_time: 0.43940 (0.37503) | > loader_time: 1.36590 (1.30459)  --> STEP: 87/406 -- GLOBAL_STEP: 9425 | > loss: 0.38425 (0.39573) | > log_mle: -0.06857 (-0.04363) | > loss_dur: 0.45282 (0.43936) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.75198 (2.05405) | > current_lr: 0.00001 | > step_time: 0.39400 (0.39614) | > loader_time: 1.27700 (1.36192)  --> STEP: 112/406 -- GLOBAL_STEP: 9450 | > loss: 0.37497 (0.39397) | > log_mle: -0.06556 (-0.04684) | > loss_dur: 0.44053 (0.44081) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.85195 (2.26179) | > current_lr: 0.00001 | > step_time: 0.59090 (0.41997) | > loader_time: 1.84970 (1.42143)  --> STEP: 137/406 -- GLOBAL_STEP: 9475 | > loss: 0.39241 (0.39312) | > log_mle: -0.05889 (-0.04951) | > loss_dur: 0.45130 (0.44264) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.52189 (2.31795) | > current_lr: 0.00001 | > step_time: 0.46170 (0.44204) | > loader_time: 1.62780 (1.44944)  --> STEP: 162/406 -- GLOBAL_STEP: 9500 | > loss: 0.37089 (0.39199) | > log_mle: -0.08145 (-0.05166) | > loss_dur: 0.45234 (0.44365) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.44864 (2.37980) | > current_lr: 0.00001 | > step_time: 0.64280 (0.46324) | > loader_time: 1.90580 (1.49154)  --> STEP: 187/406 -- GLOBAL_STEP: 9525 | > loss: 0.38590 (0.39128) | > log_mle: -0.06725 (-0.05366) | > loss_dur: 0.45315 (0.44494) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 4.61434 (2.37950) | > current_lr: 0.00001 | > step_time: 0.51320 (0.48373) | > loader_time: 1.71440 (1.54315)  --> STEP: 212/406 -- GLOBAL_STEP: 9550 | > loss: 0.38170 (0.39004) | > log_mle: -0.07341 (-0.05544) | > loss_dur: 0.45511 (0.44547) | > amp_scaler: 65536.00000 
(65536.00000) | > grad_norm: 2.47850 (2.40552) | > current_lr: 0.00001 | > step_time: 0.67130 (0.50330) | > loader_time: 1.67250 (1.58955)  --> STEP: 237/406 -- GLOBAL_STEP: 9575 | > loss: 0.38200 (0.38884) | > log_mle: -0.07860 (-0.05717) | > loss_dur: 0.46059 (0.44601) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.57387 (2.38282) | > current_lr: 0.00001 | > step_time: 0.73300 (0.52405) | > loader_time: 1.85090 (1.62784)  --> STEP: 262/406 -- GLOBAL_STEP: 9600 | > loss: 0.38771 (0.38763) | > log_mle: -0.06133 (-0.05874) | > loss_dur: 0.44904 (0.44637) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.09067 (2.49723) | > current_lr: 0.00001 | > step_time: 0.77150 (0.54436) | > loader_time: 2.16070 (1.65933)  --> STEP: 287/406 -- GLOBAL_STEP: 9625 | > loss: 0.40202 (0.38646) | > log_mle: -0.06810 (-0.06014) | > loss_dur: 0.47012 (0.44659) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.81088 (2.51863) | > current_lr: 0.00001 | > step_time: 0.82620 (0.56432) | > loader_time: 2.11710 (1.69279)  --> STEP: 312/406 -- GLOBAL_STEP: 9650 | > loss: 0.38367 (0.38579) | > log_mle: -0.07072 (-0.06130) | > loss_dur: 0.45439 (0.44709) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.66743 (2.58789) | > current_lr: 0.00001 | > step_time: 1.23790 (0.58890) | > loader_time: 2.50300 (1.71889)  --> STEP: 337/406 -- GLOBAL_STEP: 9675 | > loss: 0.36348 (0.38491) | > log_mle: -0.07947 (-0.06243) | > loss_dur: 0.44295 (0.44734) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 3.73454 (2.59658) | > current_lr: 0.00001 | > step_time: 0.78640 (0.60991) | > loader_time: 2.12140 (1.75585)  --> STEP: 362/406 -- GLOBAL_STEP: 9700 | > loss: 0.36710 (0.38401) | > log_mle: -0.08624 (-0.06365) | > loss_dur: 0.45333 (0.44766) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.18580 (2.55949) | > current_lr: 0.00001 | > step_time: 0.93950 (0.62921) | > loader_time: 2.45100 (1.79307)  --> STEP: 387/406 -- GLOBAL_STEP: 9725 | > loss: 0.35423 (0.38302) | > log_mle: -0.08144 (-0.06476) | > loss_dur: 0.43567 (0.44778) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.62336 (2.56641) | > current_lr: 0.00001 | > step_time: 1.27540 (0.65105) | > loader_time: 3.08360 (1.83285)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 0.92831 (-0.00686) | > avg_loss: 0.32833 (-0.04298) | > avg_log_mle: -0.08256 (-0.01426) | > avg_loss_dur: 0.41089 (-0.02871) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_9744.pth  > EPOCH: 24/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 14:03:38)   --> STEP: 6/406 -- GLOBAL_STEP: 9750 | > loss: 0.33191 (0.36378) | > log_mle: -0.05005 (-0.04733) | > loss_dur: 0.38196 (0.41111) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 1.50832 (1.80549) | > current_lr: 0.00001 | > step_time: 0.35650 (0.34220) | > loader_time: 0.88390 (0.84133)  --> STEP: 31/406 -- GLOBAL_STEP: 9775 | > loss: 0.36799 (0.35880) | > log_mle: -0.04952 (-0.05113) | > loss_dur: 0.41750 (0.40994) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.46473 (2.03499) | > current_lr: 0.00001 | > step_time: 0.37760 (0.34920) | > loader_time: 1.51190 (1.10560)  --> STEP: 56/406 -- GLOBAL_STEP: 9800 | > loss: 0.33785 (0.35927) | > log_mle: -0.06201 (-0.05351) | > loss_dur: 0.39985 (0.41278) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 3.17897 (2.09753) | > current_lr: 0.00001 | > step_time: 0.41270 (0.37794) | > loader_time: 1.46680 
(1.26402)  --> STEP: 81/406 -- GLOBAL_STEP: 9825 | > loss: 0.35844 (0.35732) | > log_mle: -0.07053 (-0.05649) | > loss_dur: 0.42897 (0.41381) | > amp_scaler: 65536.00000 (65536.00000) | > grad_norm: 2.83049 (2.35064) | > current_lr: 0.00001 | > step_time: 0.49360 (0.39425) | > loader_time: 1.59840 (1.31740)  --> STEP: 106/406 -- GLOBAL_STEP: 9850 | > loss: 0.34818 (0.35655) | > log_mle: -0.06362 (-0.05974) | > loss_dur: 0.41181 (0.41629) | > amp_scaler: 32768.00000 (60899.01887) | > grad_norm: 3.48686 (2.86929) | > current_lr: 0.00001 | > step_time: 0.49440 (0.41798) | > loader_time: 1.54340 (1.35995)  --> STEP: 131/406 -- GLOBAL_STEP: 9875 | > loss: 0.31918 (0.35569) | > log_mle: -0.07275 (-0.06261) | > loss_dur: 0.39193 (0.41830) | > amp_scaler: 32768.00000 (55530.50382) | > grad_norm: 3.00620 (2.91054) | > current_lr: 0.00001 | > step_time: 0.43620 (0.43700) | > loader_time: 1.51380 (1.40150)  --> STEP: 156/406 -- GLOBAL_STEP: 9900 | > loss: 0.33350 (0.35500) | > log_mle: -0.07969 (-0.06480) | > loss_dur: 0.41320 (0.41979) | > amp_scaler: 32768.00000 (51882.66667) | > grad_norm: 2.61029 (2.88036) | > current_lr: 0.00001 | > step_time: 0.55390 (0.46004) | > loader_time: 1.84800 (1.44713)  --> STEP: 181/406 -- GLOBAL_STEP: 9925 | > loss: 0.34724 (0.35438) | > log_mle: -0.07706 (-0.06668) | > loss_dur: 0.42430 (0.42106) | > amp_scaler: 32768.00000 (49242.51934) | > grad_norm: 4.78213 (2.95219) | > current_lr: 0.00001 | > step_time: 0.54730 (0.48138) | > loader_time: 1.82450 (1.50200)  --> STEP: 206/406 -- GLOBAL_STEP: 9950 | > loss: 0.36308 (0.35363) | > log_mle: -0.07920 (-0.06841) | > loss_dur: 0.44227 (0.42204) | > amp_scaler: 32768.00000 (47243.18447) | > grad_norm: 2.10551 (3.02059) | > current_lr: 0.00001 | > step_time: 0.69500 (0.50209) | > loader_time: 1.74930 (1.54559)  --> STEP: 231/406 -- GLOBAL_STEP: 9975 | > loss: 0.34605 (0.35272) | > log_mle: -0.08828 (-0.07012) | > loss_dur: 0.43433 (0.42283) | > amp_scaler: 32768.00000 (45676.60606) | > grad_norm: 2.46758 (3.06158) | > current_lr: 0.00001 | > step_time: 0.73770 (0.52130) | > loader_time: 2.01620 (1.58745)  --> STEP: 256/406 -- GLOBAL_STEP: 10000 | > loss: 0.33344 (0.35172) | > log_mle: -0.08687 (-0.07168) | > loss_dur: 0.42031 (0.42340) | > amp_scaler: 32768.00000 (44416.00000) | > grad_norm: 3.29178 (3.04318) | > current_lr: 0.00001 | > step_time: 0.83360 (0.54332) | > loader_time: 1.76890 (1.61219) > CHECKPOINT : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/checkpoint_10000.pth  --> STEP: 281/406 -- GLOBAL_STEP: 10025 | > loss: 0.34688 (0.35072) | > log_mle: -0.09505 (-0.07304) | > loss_dur: 0.44193 (0.42376) | > amp_scaler: 32768.00000 (43379.70107) | > grad_norm: 2.75201 (3.04290) | > current_lr: 0.00001 | > step_time: 0.83200 (0.56567) | > loader_time: 2.06650 (1.62756)  --> STEP: 306/406 -- GLOBAL_STEP: 10050 | > loss: 0.36296 (0.35010) | > log_mle: -0.08582 (-0.07420) | > loss_dur: 0.44878 (0.42431) | > amp_scaler: 32768.00000 (42512.73203) | > grad_norm: 2.21842 (3.07589) | > current_lr: 0.00001 | > step_time: 0.73400 (0.58619) | > loader_time: 2.21880 (1.65308)  --> STEP: 331/406 -- GLOBAL_STEP: 10075 | > loss: 0.34699 (0.34948) | > log_mle: -0.08820 (-0.07525) | > loss_dur: 0.43519 (0.42473) | > amp_scaler: 32768.00000 (41776.72508) | > grad_norm: 2.17465 (3.02362) | > current_lr: 0.00001 | > step_time: 0.91210 (0.60542) | > loader_time: 2.25270 (1.69077)  --> STEP: 356/406 -- GLOBAL_STEP: 10100 | > loss: 0.35376 (0.34882) | > log_mle: -0.09512 (-0.07641) | > loss_dur: 0.44887 
(0.42522) | > amp_scaler: 32768.00000 (41144.08989) | > grad_norm: 3.12636 (2.99082) | > current_lr: 0.00001 | > step_time: 0.91860 (0.62452) | > loader_time: 2.54590 (1.72876)  --> STEP: 381/406 -- GLOBAL_STEP: 10125 | > loss: 0.33595 (0.34776) | > log_mle: -0.08653 (-0.07751) | > loss_dur: 0.42248 (0.42527) | > amp_scaler: 32768.00000 (40594.47769) | > grad_norm: 2.27133 (3.04745) | > current_lr: 0.00001 | > step_time: 0.84760 (0.64428) | > loader_time: 2.09060 (1.76689)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 0.93304 (+0.00473) | > avg_loss: 0.29703 (-0.03130) | > avg_log_mle: -0.09486 (-0.01230) | > avg_loss_dur: 0.39189 (-0.01900) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_10150.pth  > EPOCH: 25/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 14:21:01)   --> STEP: 0/406 -- GLOBAL_STEP: 10150 | > loss: 0.36791 (0.36791) | > log_mle: -0.04815 (-0.04815) | > loss_dur: 0.41606 (0.41606) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 1.83520 (1.83520) | > current_lr: 0.00001 | > step_time: 0.95990 (0.95987) | > loader_time: 2.10810 (2.10811)  --> STEP: 25/406 -- GLOBAL_STEP: 10175 | > loss: 0.34707 (0.32220) | > log_mle: -0.06176 (-0.06302) | > loss_dur: 0.40883 (0.38522) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 1.86322 (2.12800) | > current_lr: 0.00001 | > step_time: 0.25680 (0.34955) | > loader_time: 1.43490 (1.04003)  --> STEP: 50/406 -- GLOBAL_STEP: 10200 | > loss: 0.36008 (0.32337) | > log_mle: -0.08598 (-0.06481) | > loss_dur: 0.44606 (0.38818) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.66169 (2.15511) | > current_lr: 0.00001 | > step_time: 0.42250 (0.36954) | > loader_time: 1.38110 (1.24011)  --> STEP: 75/406 -- GLOBAL_STEP: 10225 | > loss: 0.32980 (0.32397) | > log_mle: -0.07962 (-0.06847) | > loss_dur: 0.40942 (0.39243) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.62078 (2.63454) | > current_lr: 0.00001 | > step_time: 0.52770 (0.39641) | > loader_time: 1.69910 (1.32480)  --> STEP: 100/406 -- GLOBAL_STEP: 10250 | > loss: 0.31785 (0.32260) | > log_mle: -0.07358 (-0.07141) | > loss_dur: 0.39143 (0.39401) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.21757 (3.12863) | > current_lr: 0.00001 | > step_time: 0.96130 (0.42854) | > loader_time: 1.77380 (1.37966)  --> STEP: 125/406 -- GLOBAL_STEP: 10275 | > loss: 0.29476 (0.32113) | > log_mle: -0.10105 (-0.07454) | > loss_dur: 0.39582 (0.39567) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.03556 (3.13855) | > current_lr: 0.00001 | > step_time: 0.55550 (0.44597) | > loader_time: 1.45420 (1.41426)  --> STEP: 150/406 -- GLOBAL_STEP: 10300 | > loss: 0.31675 (0.32077) | > log_mle: -0.08453 (-0.07681) | > loss_dur: 0.40128 (0.39758) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 1.66820 (3.14097) | > current_lr: 0.00001 | > step_time: 0.67580 (0.46705) | > loader_time: 1.66530 (1.46104)  --> STEP: 175/406 -- GLOBAL_STEP: 10325 | > loss: 0.31513 (0.32040) | > log_mle: -0.08665 (-0.07857) | > loss_dur: 0.40178 (0.39896) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 5.97584 (3.26017) | > current_lr: 0.00001 | > step_time: 0.60720 (0.48876) | > loader_time: 1.88050 (1.54341)  --> STEP: 200/406 -- GLOBAL_STEP: 10350 | > loss: 0.31346 (0.32003) | > log_mle: -0.09315 (-0.08039) | > loss_dur: 0.40661 (0.40043) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 5.46800 (3.32630) | > current_lr: 0.00001 | > 
step_time: 0.59530 (0.51058) | > loader_time: 2.14180 (1.60371)  --> STEP: 225/406 -- GLOBAL_STEP: 10375 | > loss: 0.33735 (0.31950) | > log_mle: -0.09175 (-0.08200) | > loss_dur: 0.42911 (0.40149) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.14477 (3.42000) | > current_lr: 0.00001 | > step_time: 0.75030 (0.52950) | > loader_time: 2.30530 (1.66011)  --> STEP: 250/406 -- GLOBAL_STEP: 10400 | > loss: 0.31736 (0.31887) | > log_mle: -0.11224 (-0.08354) | > loss_dur: 0.42959 (0.40241) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.76417 (3.42508) | > current_lr: 0.00001 | > step_time: 0.75690 (0.54962) | > loader_time: 2.30640 (1.72168)  --> STEP: 275/406 -- GLOBAL_STEP: 10425 | > loss: 0.31471 (0.31801) | > log_mle: -0.08803 (-0.08492) | > loss_dur: 0.40274 (0.40292) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.78913 (3.43352) | > current_lr: 0.00001 | > step_time: 0.82710 (0.56858) | > loader_time: 2.43750 (1.78036)  --> STEP: 300/406 -- GLOBAL_STEP: 10450 | > loss: 0.32317 (0.31742) | > log_mle: -0.08734 (-0.08599) | > loss_dur: 0.41051 (0.40341) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.55488 (3.42827) | > current_lr: 0.00001 | > step_time: 0.83550 (0.59015) | > loader_time: 2.48410 (1.83080)  --> STEP: 325/406 -- GLOBAL_STEP: 10475 | > loss: 0.32069 (0.31708) | > log_mle: -0.09187 (-0.08703) | > loss_dur: 0.41256 (0.40412) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.13710 (3.35333) | > current_lr: 0.00001 | > step_time: 0.90860 (0.61018) | > loader_time: 2.82340 (1.88937)  --> STEP: 350/406 -- GLOBAL_STEP: 10500 | > loss: 0.28665 (0.31654) | > log_mle: -0.10594 (-0.08809) | > loss_dur: 0.39259 (0.40463) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.03045 (3.40664) | > current_lr: 0.00001 | > step_time: 0.85680 (0.63554) | > loader_time: 2.75960 (1.94982)  --> STEP: 375/406 -- GLOBAL_STEP: 10525 | > loss: 0.29872 (0.31579) | > log_mle: -0.11507 (-0.08921) | > loss_dur: 0.41380 (0.40500) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.25818 (3.43185) | > current_lr: 0.00001 | > step_time: 0.83920 (0.65555) | > loader_time: 2.67370 (2.01398)  --> STEP: 400/406 -- GLOBAL_STEP: 10550 | > loss: 0.31888 (0.31510) | > log_mle: -0.11022 (-0.09023) | > loss_dur: 0.42910 (0.40533) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.88292 (3.41859) | > current_lr: 0.00001 | > step_time: 1.05960 (0.67872) | > loader_time: 2.67980 (2.07001)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.13241 (+0.19938) | > avg_loss: 0.27015 (-0.02688) | > avg_log_mle: -0.10556 (-0.01069) | > avg_loss_dur: 0.37570 (-0.01619) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_10556.pth  > EPOCH: 26/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 14:40:18)   --> STEP: 19/406 -- GLOBAL_STEP: 10575 | > loss: 0.31905 (0.29570) | > log_mle: -0.06453 (-0.07463) | > loss_dur: 0.38358 (0.37033) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 1.95285 (2.08358) | > current_lr: 0.00001 | > step_time: 0.38230 (0.32271) | > loader_time: 1.72870 (1.41896)  --> STEP: 44/406 -- GLOBAL_STEP: 10600 | > loss: 0.29485 (0.29653) | > log_mle: -0.08148 (-0.07508) | > loss_dur: 0.37633 (0.37162) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.60284 (1.97887) | > current_lr: 0.00001 | > step_time: 0.35380 (0.38022) | > loader_time: 1.95720 (1.66818)  --> STEP: 69/406 -- GLOBAL_STEP: 10625 | > loss: 
0.29900 (0.29633) | > log_mle: -0.08687 (-0.07837) | > loss_dur: 0.38587 (0.37470) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.68065 (2.36066) | > current_lr: 0.00001 | > step_time: 0.51550 (0.41248) | > loader_time: 1.84280 (1.78511)  --> STEP: 94/406 -- GLOBAL_STEP: 10650 | > loss: 0.28810 (0.29502) | > log_mle: -0.09557 (-0.08195) | > loss_dur: 0.38367 (0.37697) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 5.68335 (2.65214) | > current_lr: 0.00001 | > step_time: 0.55950 (0.44215) | > loader_time: 1.71670 (1.82218)  --> STEP: 119/406 -- GLOBAL_STEP: 10675 | > loss: 0.28052 (0.29354) | > log_mle: -0.10613 (-0.08468) | > loss_dur: 0.38665 (0.37822) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.36627 (2.79194) | > current_lr: 0.00001 | > step_time: 0.60470 (0.46592) | > loader_time: 2.22370 (1.84523)  --> STEP: 144/406 -- GLOBAL_STEP: 10700 | > loss: 0.29097 (0.29301) | > log_mle: -0.09276 (-0.08713) | > loss_dur: 0.38373 (0.38014) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.84522 (3.20076) | > current_lr: 0.00001 | > step_time: 1.14280 (0.49092) | > loader_time: 2.36260 (1.88266)  --> STEP: 169/406 -- GLOBAL_STEP: 10725 | > loss: 0.30650 (0.29271) | > log_mle: -0.09801 (-0.08890) | > loss_dur: 0.40451 (0.38161) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.34032 (3.12170) | > current_lr: 0.00001 | > step_time: 0.65830 (0.50896) | > loader_time: 2.50200 (1.94883)  --> STEP: 194/406 -- GLOBAL_STEP: 10750 | > loss: 0.28301 (0.29239) | > log_mle: -0.10233 (-0.09078) | > loss_dur: 0.38534 (0.38317) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.88190 (3.12262) | > current_lr: 0.00001 | > step_time: 0.63190 (0.52616) | > loader_time: 2.28460 (2.01504)  --> STEP: 219/406 -- GLOBAL_STEP: 10775 | > loss: 0.28306 (0.29158) | > log_mle: -0.10657 (-0.09227) | > loss_dur: 0.38963 (0.38384) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.65077 (3.17638) | > current_lr: 0.00001 | > step_time: 0.80500 (0.54407) | > loader_time: 2.55860 (2.06934)  --> STEP: 244/406 -- GLOBAL_STEP: 10800 | > loss: 0.26848 (0.29107) | > log_mle: -0.10769 (-0.09393) | > loss_dur: 0.37617 (0.38500) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 1.69881 (3.24113) | > current_lr: 0.00001 | > step_time: 0.63010 (0.56236) | > loader_time: 2.40450 (2.11080)  --> STEP: 269/406 -- GLOBAL_STEP: 10825 | > loss: 0.27135 (0.29015) | > log_mle: -0.11310 (-0.09524) | > loss_dur: 0.38444 (0.38538) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.41463 (3.31364) | > current_lr: 0.00001 | > step_time: 0.70470 (0.58189) | > loader_time: 2.31720 (2.14751)  --> STEP: 294/406 -- GLOBAL_STEP: 10850 | > loss: 0.27688 (0.28927) | > log_mle: -0.11561 (-0.09644) | > loss_dur: 0.39249 (0.38571) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.25505 (3.32884) | > current_lr: 0.00001 | > step_time: 0.73150 (0.60137) | > loader_time: 2.52920 (2.17966)  --> STEP: 319/406 -- GLOBAL_STEP: 10875 | > loss: 0.27698 (0.28899) | > log_mle: -0.11379 (-0.09755) | > loss_dur: 0.39076 (0.38654) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.22433 (3.33609) | > current_lr: 0.00001 | > step_time: 0.89220 (0.61948) | > loader_time: 2.86190 (2.21529)  --> STEP: 344/406 -- GLOBAL_STEP: 10900 | > loss: 0.29553 (0.28841) | > log_mle: -0.10609 (-0.09856) | > loss_dur: 0.40162 (0.38697) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 1.94456 (3.32810) | > current_lr: 0.00001 | > step_time: 1.06930 (0.63793) | > loader_time: 2.72140 
(2.26242)  --> STEP: 369/406 -- GLOBAL_STEP: 10925 | > loss: 0.27682 (0.28770) | > log_mle: -0.10608 (-0.09964) | > loss_dur: 0.38290 (0.38734) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.09411 (3.39881) | > current_lr: 0.00001 | > step_time: 1.06060 (0.65995) | > loader_time: 2.55610 (2.30859)  --> STEP: 394/406 -- GLOBAL_STEP: 10950 | > loss: 0.27825 (0.28705) | > log_mle: -0.11287 (-0.10064) | > loss_dur: 0.39112 (0.38769) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.23576 (3.45716) | > current_lr: 0.00001 | > step_time: 0.99690 (0.67680) | > loader_time: 2.93270 (2.34434)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.15634 (+0.02392) | > avg_loss: 0.24522 (-0.02492) | > avg_log_mle: -0.11552 (-0.00996) | > avg_loss_dur: 0.36074 (-0.01496) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_10962.pth  > EPOCH: 27/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 15:01:26)   --> STEP: 13/406 -- GLOBAL_STEP: 10975 | > loss: 0.25450 (0.27252) | > log_mle: -0.09341 (-0.08467) | > loss_dur: 0.34792 (0.35720) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.12503 (2.20860) | > current_lr: 0.00001 | > step_time: 0.40790 (0.33176) | > loader_time: 1.89600 (1.43379)  --> STEP: 38/406 -- GLOBAL_STEP: 11000 | > loss: 0.28087 (0.27329) | > log_mle: -0.09488 (-0.08562) | > loss_dur: 0.37575 (0.35891) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.06309 (2.19596) | > current_lr: 0.00001 | > step_time: 0.38650 (0.35656) | > loader_time: 1.62400 (1.69670)  --> STEP: 63/406 -- GLOBAL_STEP: 11025 | > loss: 0.28427 (0.27141) | > log_mle: -0.09951 (-0.08805) | > loss_dur: 0.38377 (0.35946) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.59792 (2.63140) | > current_lr: 0.00001 | > step_time: 0.39550 (0.39487) | > loader_time: 1.85290 (1.79351)  --> STEP: 88/406 -- GLOBAL_STEP: 11050 | > loss: 0.28895 (0.27024) | > log_mle: -0.08771 (-0.09121) | > loss_dur: 0.37666 (0.36146) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 1.26100 (2.64407) | > current_lr: 0.00001 | > step_time: 0.53570 (0.42663) | > loader_time: 1.82170 (1.83881)  --> STEP: 113/406 -- GLOBAL_STEP: 11075 | > loss: 0.28028 (0.26794) | > log_mle: -0.10793 (-0.09433) | > loss_dur: 0.38821 (0.36227) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 7.54213 (2.81994) | > current_lr: 0.00001 | > step_time: 0.41110 (0.45591) | > loader_time: 1.69300 (1.86757)  --> STEP: 138/406 -- GLOBAL_STEP: 11100 | > loss: 0.28809 (0.26816) | > log_mle: -0.11248 (-0.09672) | > loss_dur: 0.40058 (0.36488) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 5.01520 (2.89930) | > current_lr: 0.00001 | > step_time: 0.64380 (0.47816) | > loader_time: 1.99430 (1.89708)  --> STEP: 163/406 -- GLOBAL_STEP: 11125 | > loss: 0.29171 (0.26742) | > log_mle: -0.11642 (-0.09858) | > loss_dur: 0.40813 (0.36600) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.59138 (2.94171) | > current_lr: 0.00001 | > step_time: 0.53680 (0.50101) | > loader_time: 2.35170 (1.94871)  --> STEP: 188/406 -- GLOBAL_STEP: 11150 | > loss: 0.27765 (0.26724) | > log_mle: -0.12752 (-0.10039) | > loss_dur: 0.40516 (0.36763) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 13.96418 (3.35335) | > current_lr: 0.00001 | > step_time: 0.59420 (0.51827) | > loader_time: 2.63030 (2.02814)  --> STEP: 213/406 -- GLOBAL_STEP: 11175 | > loss: 0.25370 (0.26697) | > log_mle: -0.11926 (-0.10183) | > 
loss_dur: 0.37295 (0.36880) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 7.61691 (3.91675) | > current_lr: 0.00001 | > step_time: 0.62910 (0.53636) | > loader_time: 2.42150 (2.08316)  --> STEP: 238/406 -- GLOBAL_STEP: 11200 | > loss: 0.25704 (0.26656) | > log_mle: -0.11957 (-0.10335) | > loss_dur: 0.37660 (0.36991) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.41119 (4.23871) | > current_lr: 0.00001 | > step_time: 0.61440 (0.55636) | > loader_time: 2.43040 (2.13226)  --> STEP: 263/406 -- GLOBAL_STEP: 11225 | > loss: 0.25484 (0.26568) | > log_mle: -0.12070 (-0.10471) | > loss_dur: 0.37554 (0.37039) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 1.84671 (4.28862) | > current_lr: 0.00001 | > step_time: 0.77020 (0.57541) | > loader_time: 2.55390 (2.17513)  --> STEP: 288/406 -- GLOBAL_STEP: 11250 | > loss: 0.24486 (0.26486) | > log_mle: -0.11440 (-0.10589) | > loss_dur: 0.35926 (0.37075) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.05309 (4.20038) | > current_lr: 0.00001 | > step_time: 1.04440 (0.59586) | > loader_time: 2.44920 (2.21128)  --> STEP: 313/406 -- GLOBAL_STEP: 11275 | > loss: 0.27056 (0.26453) | > log_mle: -0.11867 (-0.10692) | > loss_dur: 0.38923 (0.37145) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.36317 (4.16680) | > current_lr: 0.00001 | > step_time: 0.86780 (0.61630) | > loader_time: 2.60630 (2.23402)  --> STEP: 338/406 -- GLOBAL_STEP: 11300 | > loss: 0.24359 (0.26398) | > log_mle: -0.12069 (-0.10788) | > loss_dur: 0.36428 (0.37186) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 8.10564 (4.07323) | > current_lr: 0.00001 | > step_time: 0.79310 (0.63234) | > loader_time: 2.93840 (2.28031)  --> STEP: 363/406 -- GLOBAL_STEP: 11325 | > loss: 0.26628 (0.26342) | > log_mle: -0.13004 (-0.10894) | > loss_dur: 0.39632 (0.37236) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.56009 (4.12299) | > current_lr: 0.00001 | > step_time: 0.83040 (0.65106) | > loader_time: 3.07310 (2.33037)  --> STEP: 388/406 -- GLOBAL_STEP: 11350 | > loss: 0.25700 (0.26289) | > log_mle: -0.12098 (-0.10985) | > loss_dur: 0.37798 (0.37274) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 5.16298 (4.16924) | > current_lr: 0.00001 | > step_time: 0.96230 (0.67047) | > loader_time: 3.19480 (2.37481)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.11481 (-0.04153) | > avg_loss: 0.22513 (-0.02009) | > avg_log_mle: -0.12299 (-0.00748) | > avg_loss_dur: 0.34813 (-0.01261) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_11368.pth  > EPOCH: 28/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 15:22:52)   --> STEP: 7/406 -- GLOBAL_STEP: 11375 | > loss: 0.24754 (0.24895) | > log_mle: -0.08381 (-0.08936) | > loss_dur: 0.33136 (0.33830) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 1.85162 (2.00496) | > current_lr: 0.00001 | > step_time: 0.25760 (0.40619) | > loader_time: 1.72620 (1.44538)  --> STEP: 32/406 -- GLOBAL_STEP: 11400 | > loss: 0.26184 (0.25077) | > log_mle: -0.09838 (-0.09410) | > loss_dur: 0.36022 (0.34487) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.66730 (2.14867) | > current_lr: 0.00001 | > step_time: 0.31660 (0.36407) | > loader_time: 1.85530 (1.65188)  --> STEP: 57/406 -- GLOBAL_STEP: 11425 | > loss: 0.26797 (0.24835) | > log_mle: -0.09775 (-0.09613) | > loss_dur: 0.36572 (0.34448) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.30505 (2.52244) | > current_lr: 
0.00001 | > step_time: 0.49980 (0.40266) | > loader_time: 2.03750 (1.77590)  --> STEP: 82/406 -- GLOBAL_STEP: 11450 | > loss: 0.26705 (0.24775) | > log_mle: -0.11287 (-0.09912) | > loss_dur: 0.37992 (0.34687) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.62870 (2.48962) | > current_lr: 0.00001 | > step_time: 0.66840 (0.43656) | > loader_time: 2.31600 (1.86147)  --> STEP: 107/406 -- GLOBAL_STEP: 11475 | > loss: 0.24044 (0.24708) | > log_mle: -0.11161 (-0.10220) | > loss_dur: 0.35205 (0.34928) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.18449 (2.92253) | > current_lr: 0.00001 | > step_time: 0.47190 (0.46425) | > loader_time: 1.90730 (1.89461)  --> STEP: 132/406 -- GLOBAL_STEP: 11500 | > loss: 0.23223 (0.24616) | > log_mle: -0.12862 (-0.10499) | > loss_dur: 0.36085 (0.35115) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 6.69415 (3.05425) | > current_lr: 0.00001 | > step_time: 0.48160 (0.48247) | > loader_time: 2.16880 (1.91470)  --> STEP: 157/406 -- GLOBAL_STEP: 11525 | > loss: 0.24312 (0.24625) | > log_mle: -0.11761 (-0.10686) | > loss_dur: 0.36073 (0.35311) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.22078 (3.22693) | > current_lr: 0.00001 | > step_time: 0.64330 (0.50350) | > loader_time: 2.64580 (1.96773)  --> STEP: 182/406 -- GLOBAL_STEP: 11550 | > loss: 0.23910 (0.24580) | > log_mle: -0.12347 (-0.10862) | > loss_dur: 0.36256 (0.35442) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.97826 (3.30809) | > current_lr: 0.00001 | > step_time: 0.57360 (0.52134) | > loader_time: 2.44190 (2.05219)  --> STEP: 207/406 -- GLOBAL_STEP: 11575 | > loss: 0.24730 (0.24568) | > log_mle: -0.11261 (-0.11016) | > loss_dur: 0.35991 (0.35584) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.36016 (3.86091) | > current_lr: 0.00001 | > step_time: 0.71770 (0.53819) | > loader_time: 2.85260 (2.11001)  --> STEP: 232/406 -- GLOBAL_STEP: 11600 | > loss: 0.23810 (0.24515) | > log_mle: -0.13104 (-0.11178) | > loss_dur: 0.36914 (0.35693) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.31728 (4.17088) | > current_lr: 0.00001 | > step_time: 0.67090 (0.55551) | > loader_time: 2.18010 (2.16037)  --> STEP: 257/406 -- GLOBAL_STEP: 11625 | > loss: 0.23360 (0.24449) | > log_mle: -0.12605 (-0.11315) | > loss_dur: 0.35964 (0.35764) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.53223 (4.15985) | > current_lr: 0.00001 | > step_time: 0.71480 (0.57525) | > loader_time: 2.11940 (2.20111)  --> STEP: 282/406 -- GLOBAL_STEP: 11650 | > loss: 0.22450 (0.24364) | > log_mle: -0.13085 (-0.11437) | > loss_dur: 0.35535 (0.35801) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 9.06947 (4.32209) | > current_lr: 0.00001 | > step_time: 0.91790 (0.59604) | > loader_time: 2.63340 (2.23064)  --> STEP: 307/406 -- GLOBAL_STEP: 11675 | > loss: 0.23451 (0.24330) | > log_mle: -0.12334 (-0.11538) | > loss_dur: 0.35786 (0.35868) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.04611 (4.32227) | > current_lr: 0.00001 | > step_time: 0.88530 (0.61691) | > loader_time: 2.89010 (2.25881)  --> STEP: 332/406 -- GLOBAL_STEP: 11700 | > loss: 0.23318 (0.24298) | > log_mle: -0.12302 (-0.11628) | > loss_dur: 0.35620 (0.35926) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.00700 (4.29802) | > current_lr: 0.00001 | > step_time: 0.79380 (0.63422) | > loader_time: 2.90880 (2.30298)  --> STEP: 357/406 -- GLOBAL_STEP: 11725 | > loss: 0.24122 (0.24251) | > log_mle: -0.12114 (-0.11729) | > loss_dur: 0.36236 (0.35981) | > amp_scaler: 32768.00000 
(32768.00000) | > grad_norm: 1.77495 (4.30505) | > current_lr: 0.00001 | > step_time: 0.95580 (0.65304) | > loader_time: 3.09350 (2.35214)  --> STEP: 382/406 -- GLOBAL_STEP: 11750 | > loss: 0.23372 (0.24182) | > log_mle: -0.12874 (-0.11829) | > loss_dur: 0.36246 (0.36010) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.99866 (4.32335) | > current_lr: 0.00001 | > step_time: 0.89600 (0.67464) | > loader_time: 3.28690 (2.40182)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.16440 (+0.04959) | > avg_loss: 0.20483 (-0.02031) | > avg_log_mle: -0.13320 (-0.01021) | > avg_loss_dur: 0.33803 (-0.01010) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_11774.pth  > EPOCH: 29/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 15:44:39)   --> STEP: 1/406 -- GLOBAL_STEP: 11775 | > loss: 0.21087 (0.21087) | > log_mle: -0.10073 (-0.10073) | > loss_dur: 0.31160 (0.31160) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.08018 (3.08018) | > current_lr: 0.00001 | > step_time: 0.35140 (0.35143) | > loader_time: 1.32570 (1.32574)  --> STEP: 26/406 -- GLOBAL_STEP: 11800 | > loss: 0.24168 (0.22689) | > log_mle: -0.10708 (-0.10202) | > loss_dur: 0.34876 (0.32891) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.10323 (2.84690) | > current_lr: 0.00001 | > step_time: 0.42380 (0.37359) | > loader_time: 1.92050 (1.71067)  --> STEP: 51/406 -- GLOBAL_STEP: 11825 | > loss: 0.22614 (0.22983) | > log_mle: -0.12822 (-0.10369) | > loss_dur: 0.35437 (0.33352) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 11.64828 (3.19642) | > current_lr: 0.00001 | > step_time: 0.43750 (0.40118) | > loader_time: 1.48960 (1.81637)  --> STEP: 76/406 -- GLOBAL_STEP: 11850 | > loss: 0.22871 (0.22930) | > log_mle: -0.10567 (-0.10646) | > loss_dur: 0.33438 (0.33576) | > amp_scaler: 65536.00000 (39235.36842) | > grad_norm: 4.86194 (3.66272) | > current_lr: 0.00001 | > step_time: 0.55780 (0.43194) | > loader_time: 2.12800 (1.86057)  --> STEP: 101/406 -- GLOBAL_STEP: 11875 | > loss: 0.25688 (0.22926) | > log_mle: -0.12246 (-0.10931) | > loss_dur: 0.37934 (0.33857) | > amp_scaler: 32768.00000 (39256.71287) | > grad_norm: 6.65345 (3.72442) | > current_lr: 0.00001 | > step_time: 0.57660 (0.45852) | > loader_time: 1.88840 (1.92446)  --> STEP: 126/406 -- GLOBAL_STEP: 11900 | > loss: 0.24779 (0.22742) | > log_mle: -0.11488 (-0.11218) | > loss_dur: 0.36268 (0.33960) | > amp_scaler: 32768.00000 (37969.26984) | > grad_norm: 3.86605 (3.65566) | > current_lr: 0.00001 | > step_time: 0.61210 (0.47867) | > loader_time: 1.82150 (1.96366)  --> STEP: 151/406 -- GLOBAL_STEP: 11925 | > loss: 0.23473 (0.22738) | > log_mle: -0.12094 (-0.11434) | > loss_dur: 0.35567 (0.34172) | > amp_scaler: 32768.00000 (37108.13245) | > grad_norm: 3.45270 (3.78111) | > current_lr: 0.00001 | > step_time: 0.75090 (0.50099) | > loader_time: 2.12960 (2.00384)  --> STEP: 176/406 -- GLOBAL_STEP: 11950 | > loss: 0.21988 (0.22693) | > log_mle: -0.12217 (-0.11596) | > loss_dur: 0.34204 (0.34289) | > amp_scaler: 32768.00000 (36491.63636) | > grad_norm: 2.73817 (3.76767) | > current_lr: 0.00001 | > step_time: 0.60160 (0.51757) | > loader_time: 2.55870 (2.10309)  --> STEP: 201/406 -- GLOBAL_STEP: 11975 | > loss: 0.21297 (0.22650) | > log_mle: -0.12407 (-0.11764) | > loss_dur: 0.33704 (0.34414) | > amp_scaler: 32768.00000 (36028.49751) | > grad_norm: 3.07779 (3.86291) | > current_lr: 0.00001 | > step_time: 0.70480 (0.53324) | > loader_time: 
2.74380 (2.17137)  --> STEP: 226/406 -- GLOBAL_STEP: 12000 | > loss: 0.21680 (0.22619) | > log_mle: -0.13852 (-0.11920) | > loss_dur: 0.35533 (0.34539) | > amp_scaler: 32768.00000 (35667.82301) | > grad_norm: 7.90049 (3.83669) | > current_lr: 0.00001 | > step_time: 0.62560 (0.54991) | > loader_time: 2.74420 (2.23400)  --> STEP: 251/406 -- GLOBAL_STEP: 12025 | > loss: 0.22428 (0.22589) | > log_mle: -0.12953 (-0.12061) | > loss_dur: 0.35381 (0.34651) | > amp_scaler: 32768.00000 (35378.99602) | > grad_norm: 5.24565 (3.93303) | > current_lr: 0.00001 | > step_time: 0.65520 (0.56497) | > loader_time: 2.67890 (2.28620)  --> STEP: 276/406 -- GLOBAL_STEP: 12050 | > loss: 0.21249 (0.22504) | > log_mle: -0.13507 (-0.12193) | > loss_dur: 0.34755 (0.34697) | > amp_scaler: 32768.00000 (35142.49275) | > grad_norm: 6.33063 (3.91549) | > current_lr: 0.00001 | > step_time: 0.79560 (0.58351) | > loader_time: 2.40900 (2.32888)  --> STEP: 301/406 -- GLOBAL_STEP: 12075 | > loss: 0.22373 (0.22454) | > log_mle: -0.14812 (-0.12299) | > loss_dur: 0.37185 (0.34753) | > amp_scaler: 32768.00000 (34945.27575) | > grad_norm: 2.45454 (3.95192) | > current_lr: 0.00001 | > step_time: 0.86970 (0.60159) | > loader_time: 2.42700 (2.36487)  --> STEP: 326/406 -- GLOBAL_STEP: 12100 | > loss: 0.20375 (0.22416) | > log_mle: -0.13071 (-0.12393) | > loss_dur: 0.33447 (0.34810) | > amp_scaler: 32768.00000 (34778.30675) | > grad_norm: 5.85059 (3.98087) | > current_lr: 0.00001 | > step_time: 0.81990 (0.62018) | > loader_time: 3.19840 (2.41327)  --> STEP: 351/406 -- GLOBAL_STEP: 12125 | > loss: 0.23237 (0.22380) | > log_mle: -0.13275 (-0.12494) | > loss_dur: 0.36512 (0.34873) | > amp_scaler: 32768.00000 (34635.12251) | > grad_norm: 4.26761 (4.19877) | > current_lr: 0.00001 | > step_time: 0.89280 (0.64048) | > loader_time: 2.72040 (2.46228)  --> STEP: 376/406 -- GLOBAL_STEP: 12150 | > loss: 0.22911 (0.22313) | > log_mle: -0.13616 (-0.12599) | > loss_dur: 0.36528 (0.34912) | > amp_scaler: 32768.00000 (34510.97872) | > grad_norm: 3.21636 (4.15879) | > current_lr: 0.00001 | > step_time: 0.92120 (0.65994) | > loader_time: 3.39680 (2.50673)  --> STEP: 401/406 -- GLOBAL_STEP: 12175 | > loss: 0.20776 (0.22261) | > log_mle: -0.14106 (-0.12691) | > loss_dur: 0.34882 (0.34952) | > amp_scaler: 16384.00000 (33993.73566) | > grad_norm: 7.96923 (4.46150) | > current_lr: 0.00001 | > step_time: 1.30740 (0.68263) | > loader_time: 3.02960 (2.55268)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.19300 (+0.02860) | > avg_loss: 0.19561 (-0.00921) | > avg_log_mle: -0.13555 (-0.00235) | > avg_loss_dur: 0.33117 (-0.00686) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_12180.pth  > EPOCH: 30/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 16:07:17)   --> STEP: 20/406 -- GLOBAL_STEP: 12200 | > loss: 0.24063 (0.21234) | > log_mle: -0.09650 (-0.10937) | > loss_dur: 0.33713 (0.32170) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 1.53536 (2.56651) | > current_lr: 0.00001 | > step_time: 0.37070 (0.33811) | > loader_time: 1.59070 (1.63577)  --> STEP: 45/406 -- GLOBAL_STEP: 12225 | > loss: 0.21135 (0.21420) | > log_mle: -0.10661 (-0.11005) | > loss_dur: 0.31796 (0.32425) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 2.00610 (2.88577) | > current_lr: 0.00001 | > step_time: 0.85630 (0.38509) | > loader_time: 2.18210 (1.93081)  --> STEP: 70/406 -- GLOBAL_STEP: 12250 | > loss: 0.19239 (0.21423) | > log_mle: -0.12921 (-0.11348) 
| > loss_dur: 0.32160 (0.32771) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.45864 (3.11269) | > current_lr: 0.00001 | > step_time: 0.36440 (0.41884) | > loader_time: 1.70100 (2.00526)  --> STEP: 95/406 -- GLOBAL_STEP: 12275 | > loss: 0.21566 (0.21326) | > log_mle: -0.12279 (-0.11679) | > loss_dur: 0.33845 (0.33005) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 2.90941 (3.48385) | > current_lr: 0.00001 | > step_time: 0.44470 (0.44729) | > loader_time: 1.86720 (2.02928)  --> STEP: 120/406 -- GLOBAL_STEP: 12300 | > loss: 0.20166 (0.21191) | > log_mle: -0.13519 (-0.11944) | > loss_dur: 0.33686 (0.33135) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 16.41538 (4.38916) | > current_lr: 0.00001 | > step_time: 0.60850 (0.47586) | > loader_time: 2.16030 (2.05834)  --> STEP: 145/406 -- GLOBAL_STEP: 12325 | > loss: 0.21107 (0.21131) | > log_mle: -0.12597 (-0.12161) | > loss_dur: 0.33704 (0.33292) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 2.42298 (5.20432) | > current_lr: 0.00001 | > step_time: 0.57510 (0.49503) | > loader_time: 2.08790 (2.10371)  --> STEP: 170/406 -- GLOBAL_STEP: 12350 | > loss: 0.20886 (0.21106) | > log_mle: -0.13400 (-0.12331) | > loss_dur: 0.34286 (0.33437) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.15673 (4.89153) | > current_lr: 0.00001 | > step_time: 0.83930 (0.51361) | > loader_time: 3.16450 (2.20842)  --> STEP: 195/406 -- GLOBAL_STEP: 12375 | > loss: 0.21318 (0.21076) | > log_mle: -0.12141 (-0.12500) | > loss_dur: 0.33459 (0.33575) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.08684 (4.86627) | > current_lr: 0.00001 | > step_time: 0.71680 (0.53065) | > loader_time: 2.78090 (2.28370)  --> STEP: 220/406 -- GLOBAL_STEP: 12400 | > loss: 0.20313 (0.20995) | > log_mle: -0.15888 (-0.12655) | > loss_dur: 0.36201 (0.33649) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.51397 (4.93426) | > current_lr: 0.00001 | > step_time: 0.60100 (0.54462) | > loader_time: 2.91230 (2.34998)  --> STEP: 245/406 -- GLOBAL_STEP: 12425 | > loss: 0.19267 (0.20958) | > log_mle: -0.13610 (-0.12798) | > loss_dur: 0.32877 (0.33755) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.36821 (5.10768) | > current_lr: 0.00001 | > step_time: 0.76250 (0.56158) | > loader_time: 3.14360 (2.40010)  --> STEP: 270/406 -- GLOBAL_STEP: 12450 | > loss: 0.20557 (0.20877) | > log_mle: -0.14316 (-0.12917) | > loss_dur: 0.34873 (0.33794) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.03006 (5.22299) | > current_lr: 0.00001 | > step_time: 0.85610 (0.57932) | > loader_time: 3.17680 (2.44365)  --> STEP: 295/406 -- GLOBAL_STEP: 12475 | > loss: 0.20959 (0.20806) | > log_mle: -0.14400 (-0.13027) | > loss_dur: 0.35359 (0.33833) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 11.46003 (5.27169) | > current_lr: 0.00001 | > step_time: 0.84390 (0.59489) | > loader_time: 2.83510 (2.48626)  --> STEP: 320/406 -- GLOBAL_STEP: 12500 | > loss: 0.21836 (0.20771) | > log_mle: -0.13369 (-0.13124) | > loss_dur: 0.35205 (0.33895) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.00327 (5.29213) | > current_lr: 0.00001 | > step_time: 0.79130 (0.61103) | > loader_time: 3.28960 (2.52698)  --> STEP: 345/406 -- GLOBAL_STEP: 12525 | > loss: 0.19616 (0.20733) | > log_mle: -0.15363 (-0.13221) | > loss_dur: 0.34978 (0.33954) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 2.91110 (5.24789) | > current_lr: 0.00001 | > step_time: 0.90970 (0.62863) | > loader_time: 3.17490 (2.58442)  --> STEP: 370/406 -- GLOBAL_STEP: 12550 | 
> loss: 0.17943 (0.20682) | > log_mle: -0.16094 (-0.13321) | > loss_dur: 0.34037 (0.34003) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.45980 (5.18804) | > current_lr: 0.00001 | > step_time: 0.99500 (0.65249) | > loader_time: 2.56270 (2.62560)  --> STEP: 395/406 -- GLOBAL_STEP: 12575 | > loss: 0.20235 (0.20628) | > log_mle: -0.15379 (-0.13410) | > loss_dur: 0.35614 (0.34038) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.14794 (5.12375) | > current_lr: 0.00001 | > step_time: 1.04700 (0.67220) | > loader_time: 3.66350 (2.66844)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.24956 (+0.05656) | > avg_loss: 0.16998 (-0.02563) | > avg_log_mle: -0.14871 (-0.01316) | > avg_loss_dur: 0.31869 (-0.01248) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_12586.pth  > EPOCH: 31/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 16:30:39)   --> STEP: 14/406 -- GLOBAL_STEP: 12600 | > loss: 0.18341 (0.19296) | > log_mle: -0.13178 (-0.11798) | > loss_dur: 0.31518 (0.31094) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.08665 (3.14351) | > current_lr: 0.00001 | > step_time: 0.27500 (0.37941) | > loader_time: 1.91290 (1.74267)  --> STEP: 39/406 -- GLOBAL_STEP: 12625 | > loss: 0.20587 (0.19332) | > log_mle: -0.11557 (-0.11720) | > loss_dur: 0.32144 (0.31052) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 2.52464 (3.21819) | > current_lr: 0.00001 | > step_time: 0.43290 (0.39147) | > loader_time: 2.37910 (2.02480)  --> STEP: 64/406 -- GLOBAL_STEP: 12650 | > loss: 0.19400 (0.19558) | > log_mle: -0.11872 (-0.11958) | > loss_dur: 0.31272 (0.31516) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 1.90432 (3.89284) | > current_lr: 0.00001 | > step_time: 0.53140 (0.42303) | > loader_time: 2.61750 (2.14506)  --> STEP: 89/406 -- GLOBAL_STEP: 12675 | > loss: 0.21451 (0.19522) | > log_mle: -0.12472 (-0.12275) | > loss_dur: 0.33923 (0.31797) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.00377 (4.36636) | > current_lr: 0.00001 | > step_time: 0.57270 (0.45128) | > loader_time: 2.34230 (2.21652)  --> STEP: 114/406 -- GLOBAL_STEP: 12700 | > loss: 0.17691 (0.19339) | > log_mle: -0.14972 (-0.12608) | > loss_dur: 0.32662 (0.31947) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 12.02988 (4.52387) | > current_lr: 0.00001 | > step_time: 0.58680 (0.47520) | > loader_time: 2.67840 (2.26995)  --> STEP: 139/406 -- GLOBAL_STEP: 12725 | > loss: 0.19898 (0.19324) | > log_mle: -0.13935 (-0.12832) | > loss_dur: 0.33833 (0.32156) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.36887 (4.62356) | > current_lr: 0.00001 | > step_time: 0.50750 (0.49324) | > loader_time: 2.86310 (2.36551)  --> STEP: 164/406 -- GLOBAL_STEP: 12750 | > loss: 0.19809 (0.19288) | > log_mle: -0.13143 (-0.13011) | > loss_dur: 0.32953 (0.32299) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.52580 (4.77355) | > current_lr: 0.00001 | > step_time: 0.53630 (0.50859) | > loader_time: 2.92090 (2.47831)  --> STEP: 189/406 -- GLOBAL_STEP: 12775 | > loss: 0.18926 (0.19298) | > log_mle: -0.14329 (-0.13188) | > loss_dur: 0.33255 (0.32486) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.93592 (4.89201) | > current_lr: 0.00001 | > step_time: 0.64060 (0.52547) | > loader_time: 3.07120 (2.58146)  --> STEP: 214/406 -- GLOBAL_STEP: 12800 | > loss: 0.17481 (0.19256) | > log_mle: -0.14668 (-0.13326) | > loss_dur: 0.32149 (0.32583) | > amp_scaler: 16384.00000 
(16384.00000) | > grad_norm: 4.04469 (4.93098) | > current_lr: 0.00001 | > step_time: 0.71570 (0.54136) | > loader_time: 2.95970 (2.64904)  --> STEP: 239/406 -- GLOBAL_STEP: 12825 | > loss: 0.18783 (0.19266) | > log_mle: -0.15175 (-0.13470) | > loss_dur: 0.33958 (0.32737) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.71076 (5.08928) | > current_lr: 0.00001 | > step_time: 0.74230 (0.55798) | > loader_time: 3.77300 (2.69542)  --> STEP: 264/406 -- GLOBAL_STEP: 12850 | > loss: 0.19316 (0.19209) | > log_mle: -0.14092 (-0.13590) | > loss_dur: 0.33407 (0.32798) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 20.52954 (5.60037) | > current_lr: 0.00001 | > step_time: 0.81990 (0.57727) | > loader_time: 2.91720 (2.73508)  --> STEP: 289/406 -- GLOBAL_STEP: 12875 | > loss: 0.17969 (0.19143) | > log_mle: -0.14235 (-0.13694) | > loss_dur: 0.32203 (0.32837) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.95224 (6.18036) | > current_lr: 0.00001 | > step_time: 0.76200 (0.59683) | > loader_time: 3.15210 (2.76290)  --> STEP: 314/406 -- GLOBAL_STEP: 12900 | > loss: 0.19715 (0.19142) | > log_mle: -0.15424 (-0.13792) | > loss_dur: 0.35139 (0.32933) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.31879 (6.21103) | > current_lr: 0.00001 | > step_time: 0.71110 (0.61926) | > loader_time: 3.36470 (2.77901)  --> STEP: 339/406 -- GLOBAL_STEP: 12925 | > loss: 0.18654 (0.19112) | > log_mle: -0.15159 (-0.13877) | > loss_dur: 0.33814 (0.32989) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 1.99726 (6.25118) | > current_lr: 0.00001 | > step_time: 0.90410 (0.63671) | > loader_time: 3.48850 (2.81899)  --> STEP: 364/406 -- GLOBAL_STEP: 12950 | > loss: 0.18875 (0.19084) | > log_mle: -0.15247 (-0.13973) | > loss_dur: 0.34122 (0.33057) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.87665 (6.32742) | > current_lr: 0.00001 | > step_time: 1.26810 (0.65419) | > loader_time: 3.58740 (2.85707)  --> STEP: 389/406 -- GLOBAL_STEP: 12975 | > loss: 0.17468 (0.19052) | > log_mle: -0.16490 (-0.14060) | > loss_dur: 0.33958 (0.33112) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 2.26780 (6.36125) | > current_lr: 0.00001 | > step_time: 0.98850 (0.67255) | > loader_time: 3.48030 (2.88126)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.33579 (+0.08622) | > avg_loss: 0.15445 (-0.01553) | > avg_log_mle: -0.15587 (-0.00716) | > avg_loss_dur: 0.31032 (-0.00837) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_12992.pth  > EPOCH: 32/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 16:55:28)   --> STEP: 8/406 -- GLOBAL_STEP: 13000 | > loss: 0.13669 (0.17554) | > log_mle: -0.13630 (-0.12161) | > loss_dur: 0.27299 (0.29715) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 2.33196 (2.19341) | > current_lr: 0.00001 | > step_time: 0.36660 (0.34471) | > loader_time: 2.02150 (1.54843)  --> STEP: 33/406 -- GLOBAL_STEP: 13025 | > loss: 0.15852 (0.18029) | > log_mle: -0.12109 (-0.12371) | > loss_dur: 0.27960 (0.30399) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 2.34610 (2.63329) | > current_lr: 0.00001 | > step_time: 0.44380 (0.36456) | > loader_time: 2.14720 (1.79350)  --> STEP: 58/406 -- GLOBAL_STEP: 13050 | > loss: 0.19404 (0.18227) | > log_mle: -0.12847 (-0.12564) | > loss_dur: 0.32251 (0.30791) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.16764 (3.92413) | > current_lr: 0.00001 | > step_time: 0.36270 (0.40367) | > 
loader_time: 1.80330 (1.86608)  --> STEP: 83/406 -- GLOBAL_STEP: 13075 | > loss: 0.19554 (0.18215) | > log_mle: -0.13713 (-0.12838) | > loss_dur: 0.33267 (0.31053) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.74251 (4.96493) | > current_lr: 0.00001 | > step_time: 0.56990 (0.44110) | > loader_time: 2.19750 (1.93517)  --> STEP: 108/406 -- GLOBAL_STEP: 13100 | > loss: 0.16166 (0.18118) | > log_mle: -0.13998 (-0.13121) | > loss_dur: 0.30164 (0.31239) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.52586 (5.47559) | > current_lr: 0.00001 | > step_time: 0.48100 (0.46757) | > loader_time: 2.27380 (1.99692)  --> STEP: 133/406 -- GLOBAL_STEP: 13125 | > loss: 0.18084 (0.18046) | > log_mle: -0.14585 (-0.13386) | > loss_dur: 0.32669 (0.31432) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.36860 (5.50774) | > current_lr: 0.00001 | > step_time: 0.51810 (0.48794) | > loader_time: 2.54380 (2.03052)  --> STEP: 158/406 -- GLOBAL_STEP: 13150 | > loss: 0.15650 (0.18070) | > log_mle: -0.15374 (-0.13569) | > loss_dur: 0.31024 (0.31639) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.40974 (5.41481) | > current_lr: 0.00001 | > step_time: 0.50940 (0.50662) | > loader_time: 2.71780 (2.08883)  --> STEP: 183/406 -- GLOBAL_STEP: 13175 | > loss: 0.16200 (0.18055) | > log_mle: -0.15306 (-0.13733) | > loss_dur: 0.31506 (0.31788) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.97983 (5.36274) | > current_lr: 0.00001 | > step_time: 0.67620 (0.52355) | > loader_time: 2.69980 (2.18702)  --> STEP: 208/406 -- GLOBAL_STEP: 13200 | > loss: 0.15741 (0.18067) | > log_mle: -0.15755 (-0.13880) | > loss_dur: 0.31496 (0.31947) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.94445 (5.70781) | > current_lr: 0.00001 | > step_time: 0.61200 (0.53967) | > loader_time: 2.96060 (2.26265)  --> STEP: 233/406 -- GLOBAL_STEP: 13225 | > loss: 0.18234 (0.18011) | > log_mle: -0.15554 (-0.14036) | > loss_dur: 0.33787 (0.32047) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.29415 (6.22818) | > current_lr: 0.00001 | > step_time: 0.66840 (0.55409) | > loader_time: 3.08410 (2.32579)  --> STEP: 258/406 -- GLOBAL_STEP: 13250 | > loss: 0.17168 (0.17956) | > log_mle: -0.15204 (-0.14165) | > loss_dur: 0.32372 (0.32121) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.45658 (6.33978) | > current_lr: 0.00001 | > step_time: 0.77690 (0.57168) | > loader_time: 2.91780 (2.38969)  --> STEP: 283/406 -- GLOBAL_STEP: 13275 | > loss: 0.16943 (0.17874) | > log_mle: -0.15390 (-0.14280) | > loss_dur: 0.32332 (0.32154) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.06266 (6.39208) | > current_lr: 0.00001 | > step_time: 0.69750 (0.58710) | > loader_time: 2.74610 (2.44262)  --> STEP: 308/406 -- GLOBAL_STEP: 13300 | > loss: 0.16864 (0.17839) | > log_mle: -0.15691 (-0.14375) | > loss_dur: 0.32555 (0.32215) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 8.31416 (6.55689) | > current_lr: 0.00001 | > step_time: 0.76240 (0.60437) | > loader_time: 2.96400 (2.48552)  --> STEP: 333/406 -- GLOBAL_STEP: 13325 | > loss: 0.16390 (0.17816) | > log_mle: -0.16132 (-0.14456) | > loss_dur: 0.32522 (0.32272) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 17.79581 (6.65337) | > current_lr: 0.00001 | > step_time: 0.76350 (0.62224) | > loader_time: 3.64910 (2.54031)  --> STEP: 358/406 -- GLOBAL_STEP: 13350 | > loss: 0.14139 (0.17800) | > log_mle: -0.17371 (-0.14543) | > loss_dur: 0.31509 (0.32343) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.99503 (7.03956) | > 
current_lr: 0.00001 | > step_time: 0.83560 (0.64021) | > loader_time: 3.22550 (2.59841)  --> STEP: 383/406 -- GLOBAL_STEP: 13375 | > loss: 0.18562 (0.17774) | > log_mle: -0.15762 (-0.14631) | > loss_dur: 0.34324 (0.32405) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.54304 (7.06729) | > current_lr: 0.00001 | > step_time: 0.92400 (0.65858) | > loader_time: 3.74200 (2.65277)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.09921 (-0.23658) | > avg_loss: 0.14342 (-0.01104) | > avg_log_mle: -0.16242 (-0.00655) | > avg_loss_dur: 0.30584 (-0.00449) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_13398.pth  > EPOCH: 33/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 17:18:51)   --> STEP: 2/406 -- GLOBAL_STEP: 13400 | > loss: 0.16169 (0.15551) | > log_mle: -0.13966 (-0.13418) | > loss_dur: 0.30135 (0.28969) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 2.44563 (2.21555) | > current_lr: 0.00001 | > step_time: 0.36110 (0.36334) | > loader_time: 1.67240 (1.67648)  --> STEP: 27/406 -- GLOBAL_STEP: 13425 | > loss: 0.16092 (0.16693) | > log_mle: -0.12969 (-0.12955) | > loss_dur: 0.29061 (0.29648) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 2.50272 (3.53519) | > current_lr: 0.00001 | > step_time: 0.29620 (0.38705) | > loader_time: 2.13220 (1.94045)  --> STEP: 52/406 -- GLOBAL_STEP: 13450 | > loss: 0.18015 (0.17065) | > log_mle: -0.13214 (-0.13107) | > loss_dur: 0.31229 (0.30172) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 2.49070 (4.01541) | > current_lr: 0.00001 | > step_time: 0.76520 (0.42149) | > loader_time: 2.53700 (2.08032)  --> STEP: 77/406 -- GLOBAL_STEP: 13475 | > loss: 0.17626 (0.17042) | > log_mle: -0.13364 (-0.13363) | > loss_dur: 0.30990 (0.30405) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.52486 (4.37503) | > current_lr: 0.00001 | > step_time: 0.57750 (0.45824) | > loader_time: 2.21720 (2.13191)  --> STEP: 102/406 -- GLOBAL_STEP: 13500 | > loss: 0.14250 (0.17001) | > log_mle: -0.15620 (-0.13653) | > loss_dur: 0.29870 (0.30654) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 13.33667 (4.93345) | > current_lr: 0.00001 | > step_time: 0.60210 (0.48203) | > loader_time: 2.31240 (2.16000)  --> STEP: 127/406 -- GLOBAL_STEP: 13525 | > loss: 0.17377 (0.16872) | > log_mle: -0.14956 (-0.13922) | > loss_dur: 0.32332 (0.30794) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.96259 (5.44303) | > current_lr: 0.00001 | > step_time: 0.50420 (0.50068) | > loader_time: 2.28780 (2.19667)  --> STEP: 152/406 -- GLOBAL_STEP: 13550 | > loss: 0.17253 (0.16863) | > log_mle: -0.14854 (-0.14127) | > loss_dur: 0.32107 (0.30990) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.85801 (5.41579) | > current_lr: 0.00001 | > step_time: 0.53910 (0.51692) | > loader_time: 2.66870 (2.23398)  --> STEP: 177/406 -- GLOBAL_STEP: 13575 | > loss: 0.15706 (0.16827) | > log_mle: -0.16459 (-0.14290) | > loss_dur: 0.32165 (0.31117) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.01996 (5.69098) | > current_lr: 0.00001 | > step_time: 0.75390 (0.53589) | > loader_time: 2.94600 (2.32174)  --> STEP: 202/406 -- GLOBAL_STEP: 13600 | > loss: 0.20279 (0.16804) | > log_mle: -0.14700 (-0.14439) | > loss_dur: 0.34978 (0.31243) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.82672 (6.31438) | > current_lr: 0.00001 | > step_time: 0.61350 (0.55236) | > loader_time: 2.72140 (2.39036)  --> STEP: 227/406 -- 
GLOBAL_STEP: 13625 | > loss: 0.18066 (0.16741) | > log_mle: -0.14746 (-0.14589) | > loss_dur: 0.32812 (0.31330) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.04587 (6.51060) | > current_lr: 0.00001 | > step_time: 0.64710 (0.57158) | > loader_time: 3.15280 (2.45068)  --> STEP: 252/406 -- GLOBAL_STEP: 13650 | > loss: 0.15714 (0.16690) | > log_mle: -0.16079 (-0.14730) | > loss_dur: 0.31793 (0.31420) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 12.56037 (6.66382) | > current_lr: 0.00001 | > step_time: 0.77420 (0.58992) | > loader_time: 3.22410 (2.49066)  --> STEP: 277/406 -- GLOBAL_STEP: 13675 | > loss: 0.16076 (0.16617) | > log_mle: -0.15792 (-0.14851) | > loss_dur: 0.31868 (0.31468) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.86737 (6.98557) | > current_lr: 0.00001 | > step_time: 0.90010 (0.60710) | > loader_time: 2.64220 (2.52904)  --> STEP: 302/406 -- GLOBAL_STEP: 13700 | > loss: 0.16814 (0.16570) | > log_mle: -0.16713 (-0.14952) | > loss_dur: 0.33527 (0.31522) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.11344 (7.03789) | > current_lr: 0.00001 | > step_time: 1.15160 (0.62551) | > loader_time: 3.13720 (2.57642)  --> STEP: 327/406 -- GLOBAL_STEP: 13725 | > loss: 0.15923 (0.16539) | > log_mle: -0.16283 (-0.15037) | > loss_dur: 0.32207 (0.31575) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.17010 (7.06292) | > current_lr: 0.00001 | > step_time: 0.78610 (0.64057) | > loader_time: 4.05280 (2.64352)  --> STEP: 352/406 -- GLOBAL_STEP: 13750 | > loss: 0.16208 (0.16513) | > log_mle: -0.15863 (-0.15127) | > loss_dur: 0.32071 (0.31640) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.20759 (7.15633) | > current_lr: 0.00001 | > step_time: 0.93270 (0.65839) | > loader_time: 3.83200 (2.72378)  --> STEP: 377/406 -- GLOBAL_STEP: 13775 | > loss: 0.15815 (0.16478) | > log_mle: -0.16205 (-0.15226) | > loss_dur: 0.32020 (0.31704) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.60328 (7.26080) | > current_lr: 0.00001 | > step_time: 0.99030 (0.67495) | > loader_time: 3.68860 (2.79143)  --> STEP: 402/406 -- GLOBAL_STEP: 13800 | > loss: 0.14873 (0.16439) | > log_mle: -0.17112 (-0.15314) | > loss_dur: 0.31985 (0.31753) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.18298 (7.34873) | > current_lr: 0.00001 | > step_time: 0.87800 (0.69529) | > loader_time: 3.94870 (2.84973)  > EVALUATION  --> EVAL PERFORMANCE | > avg_loader_time: 1.29380 (+0.19459) | > avg_loss: 0.12892 (-0.01450) | > avg_log_mle: -0.16840 (-0.00599) | > avg_loss_dur: 0.29732 (-0.00852) > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_13804.pth  > EPOCH: 34/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000  > TRAINING (2023-06-26 17:43:28)   --> STEP: 21/406 -- GLOBAL_STEP: 13825 | > loss: 0.12717 (0.15391) | > log_mle: -0.14020 (-0.13489) | > loss_dur: 0.26737 (0.28879) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.24051 (3.06028) | > current_lr: 0.00001 | > step_time: 0.29210 (0.36585) | > loader_time: 2.46680 (1.87932)  --> STEP: 46/406 -- GLOBAL_STEP: 13850 | > loss: 0.14422 (0.15637) | > log_mle: -0.14046 (-0.13522) | > loss_dur: 0.28468 (0.29159) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.12352 (3.52957) | > current_lr: 0.00001 | > step_time: 0.45190 (0.39905) | > loader_time: 2.14770 (2.11392)  --> STEP: 71/406 -- GLOBAL_STEP: 13875 | > loss: 0.15777 (0.15683) | > log_mle: -0.14567 (-0.13863) | > loss_dur: 0.30344 (0.29546) | > 
 > EPOCH: 34/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000
 > TRAINING (2023-06-26 17:43:28)
 --> STEP: 21/406 -- GLOBAL_STEP: 13825 | > loss: 0.12717 (0.15391) | > log_mle: -0.14020 (-0.13489) | > loss_dur: 0.26737 (0.28879) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.24051 (3.06028) | > current_lr: 0.00001 | > step_time: 0.29210 (0.36585) | > loader_time: 2.46680 (1.87932)
 --> STEP: 46/406 -- GLOBAL_STEP: 13850 | > loss: 0.14422 (0.15637) | > log_mle: -0.14046 (-0.13522) | > loss_dur: 0.28468 (0.29159) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.12352 (3.52957) | > current_lr: 0.00001 | > step_time: 0.45190 (0.39905) | > loader_time: 2.14770 (2.11392)
 --> STEP: 71/406 -- GLOBAL_STEP: 13875 | > loss: 0.15777 (0.15683) | > log_mle: -0.14567 (-0.13863) | > loss_dur: 0.30344 (0.29546) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.82487 (3.87185) | > current_lr: 0.00001 | > step_time: 0.53900 (0.42897) | > loader_time: 2.46570 (2.22887)
 --> STEP: 96/406 -- GLOBAL_STEP: 13900 | > loss: 0.14410 (0.15646) | > log_mle: -0.15256 (-0.14199) | > loss_dur: 0.29665 (0.29845) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.85645 (4.16965) | > current_lr: 0.00001 | > step_time: 0.57060 (0.45522) | > loader_time: 2.58080 (2.28142)
 --> STEP: 121/406 -- GLOBAL_STEP: 13925 | > loss: 0.15738 (0.15580) | > log_mle: -0.15288 (-0.14465) | > loss_dur: 0.31026 (0.30045) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.57585 (4.49256) | > current_lr: 0.00001 | > step_time: 0.61800 (0.48038) | > loader_time: 2.58500 (2.31909)
 --> STEP: 146/406 -- GLOBAL_STEP: 13950 | > loss: 0.15027 (0.15578) | > log_mle: -0.14814 (-0.14675) | > loss_dur: 0.29841 (0.30254) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.47496 (4.96594) | > current_lr: 0.00001 | > step_time: 0.65780 (0.49801) | > loader_time: 2.34270 (2.36360)
 --> STEP: 171/406 -- GLOBAL_STEP: 13975 | > loss: 0.14249 (0.15583) | > log_mle: -0.15985 (-0.14842) | > loss_dur: 0.30234 (0.30425) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.45747 (5.38958) | > current_lr: 0.00001 | > step_time: 1.03940 (0.51962) | > loader_time: 3.14800 (2.47346)
 --> STEP: 196/406 -- GLOBAL_STEP: 14000 | > loss: 0.15437 (0.15557) | > log_mle: -0.16447 (-0.15003) | > loss_dur: 0.31884 (0.30560) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.90697 (5.75309) | > current_lr: 0.00001 | > step_time: 0.74740 (0.53745) | > loader_time: 2.90710 (2.53994)
 --> STEP: 221/406 -- GLOBAL_STEP: 14025 | > loss: 0.16028 (0.15506) | > log_mle: -0.15434 (-0.15144) | > loss_dur: 0.31462 (0.30650) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.40505 (6.13303) | > current_lr: 0.00001 | > step_time: 0.65480 (0.55637) | > loader_time: 3.25050 (2.58732)
 --> STEP: 246/406 -- GLOBAL_STEP: 14050 | > loss: 0.13726 (0.15468) | > log_mle: -0.16193 (-0.15283) | > loss_dur: 0.29920 (0.30752) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 8.56177 (6.35898) | > current_lr: 0.00001 | > step_time: 0.82440 (0.57529) | > loader_time: 2.71280 (2.61289)
 --> STEP: 271/406 -- GLOBAL_STEP: 14075 | > loss: 0.12639 (0.15407) | > log_mle: -0.17662 (-0.15399) | > loss_dur: 0.30301 (0.30806) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.62354 (6.66081) | > current_lr: 0.00001 | > step_time: 0.74390 (0.59503) | > loader_time: 2.62210 (2.63650)
 --> STEP: 296/406 -- GLOBAL_STEP: 14100 | > loss: 0.16659 (0.15373) | > log_mle: -0.16463 (-0.15491) | > loss_dur: 0.33121 (0.30864) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.71441 (6.74830) | > current_lr: 0.00001 | > step_time: 0.79760 (0.61414) | > loader_time: 2.72390 (2.65603)
 --> STEP: 321/406 -- GLOBAL_STEP: 14125 | > loss: 0.14048 (0.15362) | > log_mle: -0.16084 (-0.15574) | > loss_dur: 0.30132 (0.30936) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 16.19803 (7.01508) | > current_lr: 0.00001 | > step_time: 0.90950 (0.63884) | > loader_time: 3.66990 (2.68446)
 --> STEP: 346/406 -- GLOBAL_STEP: 14150 | > loss: 0.15569 (0.15351) | > log_mle: -0.16347 (-0.15650) | > loss_dur: 0.31916 (0.31001) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.53304 (7.54974) | > current_lr: 0.00001 | > step_time: 1.00190 (0.65855) | > loader_time: 2.91510 (2.73274)
 --> STEP: 371/406 -- GLOBAL_STEP: 14175 | > loss: 0.14090 (0.15296) | > log_mle: -0.17311 (-0.15743) | > loss_dur: 0.31401 (0.31039) | > amp_scaler: 32768.00000 (16825.61725) | > grad_norm: 15.07241 (7.71799) | > current_lr: 0.00001 | > step_time: 0.93380 (0.67658) | > loader_time: 3.31350 (2.77497)
 --> STEP: 396/406 -- GLOBAL_STEP: 14200 | > loss: 0.13467 (0.15270) | > log_mle: -0.18215 (-0.15824) | > loss_dur: 0.31682 (0.31094) | > amp_scaler: 32768.00000 (17832.08081) | > grad_norm: 9.69981 (7.80128) | > current_lr: 0.00001 | > step_time: 0.94770 (0.69710) | > loader_time: 3.09220 (2.81994)

 > EVALUATION
 --> EVAL PERFORMANCE | > avg_loader_time: 1.19214 (-0.10166) | > avg_loss: 0.11754 (-0.01138) | > avg_log_mle: -0.17346 (-0.00506) | > avg_loss_dur: 0.29100 (-0.00632)
 > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_14210.pth

 > EPOCH: 35/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000
 > TRAINING (2023-06-26 18:08:00)
 --> STEP: 15/406 -- GLOBAL_STEP: 14225 | > loss: 0.12265 (0.13595) | > log_mle: -0.14356 (-0.14145) | > loss_dur: 0.26621 (0.27740) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 6.26809 (2.80936) | > current_lr: 0.00001 | > step_time: 0.40590 (0.35811) | > loader_time: 1.83620 (1.74523)
 --> STEP: 40/406 -- GLOBAL_STEP: 14250 | > loss: 0.14248 (0.14412) | > log_mle: -0.13346 (-0.14024) | > loss_dur: 0.27594 (0.28436) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 3.98937 (3.77495) | > current_lr: 0.00001 | > step_time: 0.47340 (0.38989) | > loader_time: 2.32480 (2.04181)
 --> STEP: 65/406 -- GLOBAL_STEP: 14275 | > loss: 0.14575 (0.14496) | > log_mle: -0.15200 (-0.14283) | > loss_dur: 0.29776 (0.28779) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 13.38681 (3.93266) | > current_lr: 0.00001 | > step_time: 0.53960 (0.42450) | > loader_time: 2.59750 (2.15437)
 --> STEP: 90/406 -- GLOBAL_STEP: 14300 | > loss: 0.15150 (0.14503) | > log_mle: -0.15545 (-0.14590) | > loss_dur: 0.30695 (0.29093) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.28900 (4.57765) | > current_lr: 0.00001 | > step_time: 0.99720 (0.45935) | > loader_time: 2.82000 (2.20186)
 --> STEP: 115/406 -- GLOBAL_STEP: 14325 | > loss: 0.15731 (0.14387) | > log_mle: -0.14788 (-0.14902) | > loss_dur: 0.30519 (0.29288) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 6.74098 (5.22979) | > current_lr: 0.00001 | > step_time: 0.59480 (0.48168) | > loader_time: 2.48760 (2.22738)
 --> STEP: 140/406 -- GLOBAL_STEP: 14350 | > loss: 0.13695 (0.14428) | > log_mle: -0.16437 (-0.15134) | > loss_dur: 0.30131 (0.29562) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 18.38086 (5.74387) | > current_lr: 0.00001 | > step_time: 0.65990 (0.50057) | > loader_time: 2.25640 (2.25424)
 --> STEP: 165/406 -- GLOBAL_STEP: 14375 | > loss: 0.13732 (0.14420) | > log_mle: -0.17291 (-0.15302) | > loss_dur: 0.31022 (0.29722) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 17.74237 (5.93860) | > current_lr: 0.00001 | > step_time: 0.51570 (0.51820) | > loader_time: 2.90730 (2.32249)
 --> STEP: 190/406 -- GLOBAL_STEP: 14400 | > loss: 0.13703 (0.14410) | > log_mle: -0.17047 (-0.15474) | > loss_dur: 0.30750 (0.29884) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 11.83859 (6.40987) | > current_lr: 0.00001 | > step_time: 0.73140 (0.53403) | > loader_time: 2.68630 (2.42397)
 --> STEP: 215/406 -- GLOBAL_STEP: 14425 | > loss: 0.13395 (0.14381) | > log_mle: -0.17088 (-0.15613) | > loss_dur: 0.30483 (0.29994) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 6.68230 (6.67241) | > current_lr: 0.00001 | > step_time: 0.85690 (0.55074) | > loader_time: 3.01690 (2.50097)
 --> STEP: 240/406 -- GLOBAL_STEP: 14450 | > loss: 0.11912 (0.14360) | > log_mle: -0.17802 (-0.15761) | > loss_dur: 0.29714 (0.30121) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 10.02797 (6.97138) | > current_lr: 0.00001 | > step_time: 0.67520 (0.56577) | > loader_time: 3.45940 (2.56741)
 --> STEP: 265/406 -- GLOBAL_STEP: 14475 | > loss: 0.14162 (0.14304) | > log_mle: -0.17358 (-0.15884) | > loss_dur: 0.31520 (0.30187) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 10.98019 (7.18588) | > current_lr: 0.00001 | > step_time: 0.67540 (0.58317) | > loader_time: 3.30310 (2.61260)
 --> STEP: 290/406 -- GLOBAL_STEP: 14500 | > loss: 0.13207 (0.14248) | > log_mle: -0.18224 (-0.15997) | > loss_dur: 0.31431 (0.30245) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 8.04802 (7.34394) | > current_lr: 0.00001 | > step_time: 0.83550 (0.59963) | > loader_time: 2.83120 (2.64148)
 --> STEP: 315/406 -- GLOBAL_STEP: 14525 | > loss: 0.12122 (0.14225) | > log_mle: -0.18349 (-0.16094) | > loss_dur: 0.30471 (0.30319) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.74674 (7.47730) | > current_lr: 0.00001 | > step_time: 0.79310 (0.61959) | > loader_time: 3.32070 (2.67425)
 --> STEP: 340/406 -- GLOBAL_STEP: 14550 | > loss: 0.15332 (0.14218) | > log_mle: -0.16101 (-0.16168) | > loss_dur: 0.31433 (0.30386) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 4.79514 (7.44385) | > current_lr: 0.00001 | > step_time: 0.97970 (0.64097) | > loader_time: 3.05620 (2.71911)
 --> STEP: 365/406 -- GLOBAL_STEP: 14575 | > loss: 0.13448 (0.14177) | > log_mle: -0.17529 (-0.16265) | > loss_dur: 0.30978 (0.30442) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 9.08377 (7.58155) | > current_lr: 0.00001 | > step_time: 0.87850 (0.66034) | > loader_time: 3.32860 (2.75724)
 --> STEP: 390/406 -- GLOBAL_STEP: 14600 | > loss: 0.13414 (0.14147) | > log_mle: -0.18415 (-0.16351) | > loss_dur: 0.31829 (0.30498) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 10.24231 (7.77287) | > current_lr: 0.00001 | > step_time: 1.00900 (0.68333) | > loader_time: 3.55670 (2.79647)

 > EVALUATION
 --> EVAL PERFORMANCE | > avg_loader_time: 1.25816 (+0.06601) | > avg_loss: 0.10521 (-0.01233) | > avg_log_mle: -0.17872 (-0.00526) | > avg_loss_dur: 0.28393 (-0.00707)
 > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_14616.pth

 > EPOCH: 36/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000
 > TRAINING (2023-06-26 18:32:23)
 --> STEP: 9/406 -- GLOBAL_STEP: 14625 | > loss: 0.16649 (0.13133) | > log_mle: -0.14451 (-0.14423) | > loss_dur: 0.31100 (0.27556) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 2.43564 (2.68859) | > current_lr: 0.00001 | > step_time: 0.39200 (0.36422) | > loader_time: 1.92070 (1.75801)
 --> STEP: 34/406 -- GLOBAL_STEP: 14650 | > loss: 0.16049 (0.13352) | > log_mle: -0.13527 (-0.14545) | > loss_dur: 0.29576 (0.27898) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 5.73710 (3.94991) | > current_lr: 0.00001 | > step_time: 0.45360 (0.38615) | > loader_time: 1.84600 (1.98232)
 --> STEP: 59/406 -- GLOBAL_STEP: 14675 | > loss: 0.14232 (0.13438) | > log_mle: -0.16165 (-0.14788) | > loss_dur: 0.30397 (0.28225) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 7.61499 (3.93101) | > current_lr: 0.00001 | > step_time: 0.53620 (0.42132) | > loader_time: 2.43530 (2.05056)
 --> STEP: 84/406 -- GLOBAL_STEP: 14700 | > loss: 0.14084 (0.13436) | > log_mle: -0.16429 (-0.15061) | > loss_dur: 0.30513 (0.28497) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 6.17457 (4.80597) | > current_lr: 0.00001 | > step_time: 0.92830 (0.45753) | > loader_time: 2.48550 (2.12086)
 --> STEP: 109/406 -- GLOBAL_STEP: 14725 | > loss: 0.13461 (0.13334) | > log_mle: -0.17156 (-0.15346) | > loss_dur: 0.30617 (0.28680) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 8.59391 (5.39283) | > current_lr: 0.00001 | > step_time: 0.60560 (0.48183) | > loader_time: 2.36830 (2.15147)
 --> STEP: 134/406 -- GLOBAL_STEP: 14750 | > loss: 0.13511 (0.13257) | > log_mle: -0.16346 (-0.15604) | > loss_dur: 0.29857 (0.28861) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 7.07278 (5.96386) | > current_lr: 0.00001 | > step_time: 0.67750 (0.50240) | > loader_time: 2.50840 (2.17662)
 --> STEP: 159/406 -- GLOBAL_STEP: 14775 | > loss: 0.13380 (0.13332) | > log_mle: -0.16575 (-0.15778) | > loss_dur: 0.29955 (0.29110) | > amp_scaler: 32768.00000 (32768.00000) | > grad_norm: 13.41621 (6.47404) | > current_lr: 0.00001 | > step_time: 0.52820 (0.52261) | > loader_time: 3.30750 (2.24350)
 --> STEP: 184/406 -- GLOBAL_STEP: 14800 | > loss: 0.14589 (0.13370) | > log_mle: -0.18302 (-0.15931) | > loss_dur: 0.32891 (0.29301) | > amp_scaler: 16384.00000 (30720.00000) | > grad_norm: 16.99070 (7.42676) | > current_lr: 0.00001 | > step_time: 0.69880 (0.54204) | > loader_time: 2.90750 (2.32989)
 --> STEP: 209/406 -- GLOBAL_STEP: 14825 | > loss: 0.11795 (0.13339) | > log_mle: -0.17955 (-0.16065) | > loss_dur: 0.29750 (0.29404) | > amp_scaler: 16384.00000 (29005.16746) | > grad_norm: 5.06593 (7.53055) | > current_lr: 0.00001 | > step_time: 0.58160 (0.55841) | > loader_time: 3.25720 (2.40968)
 --> STEP: 234/406 -- GLOBAL_STEP: 14850 | > loss: 0.11928 (0.13314) | > log_mle: -0.18296 (-0.16218) | > loss_dur: 0.30224 (0.29532) | > amp_scaler: 16384.00000 (27656.75214) | > grad_norm: 11.44762 (7.80780) | > current_lr: 0.00001 | > step_time: 0.73180 (0.57576) | > loader_time: 3.11540 (2.47430)
 --> STEP: 259/406 -- GLOBAL_STEP: 14875 | > loss: 0.13620 (0.13269) | > log_mle: -0.18627 (-0.16347) | > loss_dur: 0.32247 (0.29616) | > amp_scaler: 16384.00000 (26568.64865) | > grad_norm: 10.60219 (7.87879) | > current_lr: 0.00001 | > step_time: 0.79890 (0.59127) | > loader_time: 3.03590 (2.53834)
 --> STEP: 284/406 -- GLOBAL_STEP: 14900 | > loss: 0.14784 (0.13221) | > log_mle: -0.18183 (-0.16457) | > loss_dur: 0.32968 (0.29678) | > amp_scaler: 16384.00000 (25672.11268) | > grad_norm: 7.67395 (7.83206) | > current_lr: 0.00001 | > step_time: 0.87530 (0.60871) | > loader_time: 3.35710 (2.59128)
 --> STEP: 309/406 -- GLOBAL_STEP: 14925 | > loss: 0.12172 (0.13200) | > log_mle: -0.17826 (-0.16548) | > loss_dur: 0.29997 (0.29748) | > amp_scaler: 16384.00000 (24920.64725) | > grad_norm: 11.20879 (7.85180) | > current_lr: 0.00001 | > step_time: 0.90920 (0.62632) | > loader_time: 3.19560 (2.63049)
 --> STEP: 334/406 -- GLOBAL_STEP: 14950 | > loss: 0.11909 (0.13184) | > log_mle: -0.18602 (-0.16632) | > loss_dur: 0.30510 (0.29816) | > amp_scaler: 16384.00000 (24281.67665) | > grad_norm: 15.70514 (8.14293) | > current_lr: 0.00001 | > step_time: 1.58510 (0.64555) | > loader_time: 3.72820 (2.69193)
 --> STEP: 359/406 -- GLOBAL_STEP: 14975 | > loss: 0.11910 (0.13149) | > log_mle: -0.17578 (-0.16725) | > loss_dur: 0.29488 (0.29874) | > amp_scaler: 16384.00000 (23731.69916) | > grad_norm: 9.81887 (8.30485) | > current_lr: 0.00001 | > step_time: 0.87080 (0.66354) | > loader_time: 3.69300 (2.75291)
 --> STEP: 384/406 -- GLOBAL_STEP: 15000 | > loss: 0.12119 (0.13117) | > log_mle: -0.17607 (-0.16813) | > loss_dur: 0.29726 (0.29930) | > amp_scaler: 16384.00000 (23253.33333) | > grad_norm: 8.90840 (8.49163) | > current_lr: 0.00001 | > step_time: 0.97770 (0.68203) | > loader_time: 3.90920 (2.80259)

 > EVALUATION
 --> EVAL PERFORMANCE | > avg_loader_time: 1.37821 (+0.12005) | > avg_loss: 0.09218 (-0.01303) | > avg_log_mle: -0.18295 (-0.00423) | > avg_loss_dur: 0.27513 (-0.00880)
 > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_15022.pth
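The amp_scaler column behaves like dynamic loss scaling in the style of torch.cuda.amp.GradScaler: the scale holds at 16384, doubles to 32768 near the end of epoch 34 (steps 371-396), and is halved back to 16384 by step 184 of epoch 36 after an overflow, with the parenthesized running average decaying from 30720 toward 16384 over the rest of that epoch. A minimal sketch of such a mixed-precision step, assuming PyTorch; model, optimizer, batch, and max_norm are placeholders, and the scaler settings shown are GradScaler's documented knobs rather than values read from this run:

import torch

scaler = torch.cuda.amp.GradScaler(
    init_scale=2**14,      # 16384, the steady-state amp_scaler seen above
    growth_factor=2.0,     # 16384 -> 32768 after a long overflow-free stretch
    backoff_factor=0.5,    # 32768 -> 16384 when a step overflows
    growth_interval=2000,  # library default; this run's actual interval is unknown
)

def train_step(model, optimizer, batch, max_norm=5.0):
    # model, optimizer, batch, and max_norm are hypothetical placeholders.
    optimizer.zero_grad(set_to_none=True)
    with torch.cuda.amp.autocast():
        loss = model(batch)                # scalar training loss
    scaler.scale(loss).backward()          # backprop through the scaled loss
    scaler.unscale_(optimizer)             # so grad_norm is measured in true units
    grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm)
    scaler.step(optimizer)                 # skipped automatically on overflow
    scaler.update()                        # grows or backs off the scale (amp_scaler)
    return loss.item(), float(grad_norm), scaler.get_scale()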
 > EPOCH: 37/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000
 > TRAINING (2023-06-26 18:57:02)
 --> STEP: 3/406 -- GLOBAL_STEP: 15025 | > loss: 0.15197 (0.12544) | > log_mle: -0.15510 (-0.15613) | > loss_dur: 0.30706 (0.28157) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.61063 (3.96927) | > current_lr: 0.00001 | > step_time: 0.37550 (0.37620) | > loader_time: 2.27450 (2.19049)
 --> STEP: 28/406 -- GLOBAL_STEP: 15050 | > loss: 0.15990 (0.12323) | > log_mle: -0.14996 (-0.15081) | > loss_dur: 0.30986 (0.27403) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.84760 (3.90552) | > current_lr: 0.00001 | > step_time: 0.44130 (0.38089) | > loader_time: 2.50360 (2.31033)
 --> STEP: 53/406 -- GLOBAL_STEP: 15075 | > loss: 0.13820 (0.12534) | > log_mle: -0.15004 (-0.15216) | > loss_dur: 0.28824 (0.27750) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.87013 (5.12030) | > current_lr: 0.00001 | > step_time: 0.53280 (0.41772) | > loader_time: 2.33320 (2.35288)
 --> STEP: 78/406 -- GLOBAL_STEP: 15100 | > loss: 0.10577 (0.12440) | > log_mle: -0.15965 (-0.15487) | > loss_dur: 0.26542 (0.27927) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.97347 (5.39695) | > current_lr: 0.00001 | > step_time: 0.42940 (0.44610) | > loader_time: 2.12140 (2.33319)
 --> STEP: 103/406 -- GLOBAL_STEP: 15125 | > loss: 0.09619 (0.12421) | > log_mle: -0.18638 (-0.15799) | > loss_dur: 0.28257 (0.28219) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 24.76799 (6.08791) | > current_lr: 0.00001 | > step_time: 0.46100 (0.47058) | > loader_time: 2.24990 (2.31683)
 --> STEP: 128/406 -- GLOBAL_STEP: 15150 | > loss: 0.13764 (0.12347) | > log_mle: -0.17667 (-0.16046) | > loss_dur: 0.31432 (0.28393) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.99037 (6.79325) | > current_lr: 0.00001 | > step_time: 0.63440 (0.49736) | > loader_time: 1.82440 (2.30666)
 --> STEP: 153/406 -- GLOBAL_STEP: 15175 | > loss: 0.13703 (0.12390) | > log_mle: -0.16727 (-0.16243) | > loss_dur: 0.30430 (0.28633) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 8.46086 (7.14615) | > current_lr: 0.00001 | > step_time: 0.52990 (0.51656) | > loader_time: 3.19790 (2.32082)
 --> STEP: 178/406 -- GLOBAL_STEP: 15200 | > loss: 0.13427 (0.12363) | > log_mle: -0.16159 (-0.16399) | > loss_dur: 0.29586 (0.28761) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.47292 (7.27390) | > current_lr: 0.00001 | > step_time: 0.55220 (0.53392) | > loader_time: 2.83190 (2.39306)
 --> STEP: 203/406 -- GLOBAL_STEP: 15225 | > loss: 0.14076 (0.12364) | > log_mle: -0.16551 (-0.16548) | > loss_dur: 0.30627 (0.28912) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 8.00383 (7.62051) | > current_lr: 0.00001 | > step_time: 0.64830 (0.55639) | > loader_time: 2.80190 (2.44052)
 --> STEP: 228/406 -- GLOBAL_STEP: 15250 | > loss: 0.11629 (0.12331) | > log_mle: -0.18520 (-0.16698) | > loss_dur: 0.30149 (0.29029) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.31268 (8.09164) | > current_lr: 0.00001 | > step_time: 0.75640 (0.57254) | > loader_time: 2.49710 (2.47699)
 --> STEP: 253/406 -- GLOBAL_STEP: 15275 | > loss: 0.13417 (0.12334) | > log_mle: -0.18877 (-0.16831) | > loss_dur: 0.32294 (0.29166) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 20.71135 (8.40731) | > current_lr: 0.00001 | > step_time: 0.67940 (0.59015) | > loader_time: 2.90130 (2.51037)
 --> STEP: 278/406 -- GLOBAL_STEP: 15300 | > loss: 0.11284 (0.12260) | > log_mle: -0.16676 (-0.16933) | > loss_dur: 0.27960 (0.29192) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.49358 (8.62222) | > current_lr: 0.00001 | > step_time: 0.73280 (0.60694) | > loader_time: 2.92480 (2.53675)
 --> STEP: 303/406 -- GLOBAL_STEP: 15325 | > loss: 0.14625 (0.12229) | > log_mle: -0.17829 (-0.17034) | > loss_dur: 0.32454 (0.29262) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 11.56898 (8.80498) | > current_lr: 0.00001 | > step_time: 0.71350 (0.62399) | > loader_time: 2.91400 (2.57200)
 --> STEP: 328/406 -- GLOBAL_STEP: 15350 | > loss: 0.14260 (0.12219) | > log_mle: -0.18416 (-0.17113) | > loss_dur: 0.32676 (0.29333) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.97766 (8.91714) | > current_lr: 0.00001 | > step_time: 0.89270 (0.64078) | > loader_time: 3.95100 (2.64332)
 --> STEP: 353/406 -- GLOBAL_STEP: 15375 | > loss: 0.10931 (0.12200) | > log_mle: -0.19313 (-0.17200) | > loss_dur: 0.30244 (0.29401) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 15.21723 (9.04221) | > current_lr: 0.00001 | > step_time: 0.95870 (0.65822) | > loader_time: 3.51450 (2.70893)
 --> STEP: 378/406 -- GLOBAL_STEP: 15400 | > loss: 0.11962 (0.12154) | > log_mle: -0.18013 (-0.17291) | > loss_dur: 0.29975 (0.29446) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 12.15584 (9.19546) | > current_lr: 0.00001 | > step_time: 0.91080 (0.67923) | > loader_time: 3.52370 (2.75553)
 --> STEP: 403/406 -- GLOBAL_STEP: 15425 | > loss: 0.09559 (0.12114) | > log_mle: -0.18133 (-0.17377) | > loss_dur: 0.27692 (0.29491) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.23599 (9.33006) | > current_lr: 0.00001 | > step_time: 1.18870 (0.70184) | > loader_time: 3.69220 (2.80048)

 > EVALUATION
 --> EVAL PERFORMANCE | > avg_loader_time: 1.32717 (-0.05104) | > avg_loss: 0.07936 (-0.01282) | > avg_log_mle: -0.18939 (-0.00644) | > avg_loss_dur: 0.26875 (-0.00638)
 > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_15428.pth

 > EPOCH: 38/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000
 > TRAINING (2023-06-26 19:21:23)
 --> STEP: 22/406 -- GLOBAL_STEP: 15450 | > loss: 0.10927 (0.11232) | > log_mle: -0.15090 (-0.15499) | > loss_dur: 0.26017 (0.26731) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.29305 (4.88956) | > current_lr: 0.00001 | > step_time: 0.43060 (0.38723) | > loader_time: 3.09490 (2.05922)
 --> STEP: 47/406 -- GLOBAL_STEP: 15475 | > loss: 0.12521 (0.11666) | > log_mle: -0.16709 (-0.15546) | > loss_dur: 0.29230 (0.27212) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.56020 (4.52774) | > current_lr: 0.00001 | > step_time: 0.33800 (0.41200) | > loader_time: 2.54580 (2.27425)
 --> STEP: 72/406 -- GLOBAL_STEP: 15500 | > loss: 0.12259 (0.11686) | > log_mle: -0.15524 (-0.15840) | > loss_dur: 0.27783 (0.27526) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.20997 (5.32188) | > current_lr: 0.00001 | > step_time: 0.57500 (0.44853) | > loader_time: 2.33560 (2.34219)
 --> STEP: 97/406 -- GLOBAL_STEP: 15525 | > loss: 0.12234 (0.11620) | > log_mle: -0.16575 (-0.16172) | > loss_dur: 0.28809 (0.27792) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.51257 (6.18131) | > current_lr: 0.00001 | > step_time: 1.02200 (0.48117) | > loader_time: 2.52020 (2.37099)
 --> STEP: 122/406 -- GLOBAL_STEP: 15550 | > loss: 0.09708 (0.11540) | > log_mle: -0.16338 (-0.16415) | > loss_dur: 0.26046 (0.27955) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.05086 (6.75713) | > current_lr: 0.00001 | > step_time: 0.50350 (0.50353) | > loader_time: 2.22090 (2.38090)
 --> STEP: 147/406 -- GLOBAL_STEP: 15575 | > loss: 0.12527 (0.11553) | > log_mle: -0.17606 (-0.16629) | > loss_dur: 0.30133 (0.28182) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.47762 (7.21610) | > current_lr: 0.00001 | > step_time: 0.55940 (0.52298) | > loader_time: 2.26910 (2.38510)
 --> STEP: 172/406 -- GLOBAL_STEP: 15600 | > loss: 0.10754 (0.11537) | > log_mle: -0.18012 (-0.16796) | > loss_dur: 0.28766 (0.28332) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.50214 (7.57399) | > current_lr: 0.00001 | > step_time: 0.54830 (0.54330) | > loader_time: 2.82760 (2.47063)
 --> STEP: 197/406 -- GLOBAL_STEP: 15625 | > loss: 0.09436 (0.11492) | > log_mle: -0.17875 (-0.16956) | > loss_dur: 0.27311 (0.28448) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.62914 (7.90230) | > current_lr: 0.00001 | > step_time: 0.59150 (0.55927) | > loader_time: 3.25130 (2.56771)
 --> STEP: 222/406 -- GLOBAL_STEP: 15650 | > loss: 0.10549 (0.11451) | > log_mle: -0.18595 (-0.17104) | > loss_dur: 0.29144 (0.28556) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 8.94097 (8.20647) | > current_lr: 0.00001 | > step_time: 0.77190 (0.57340) | > loader_time: 3.54540 (2.65642)
 --> STEP: 247/406 -- GLOBAL_STEP: 15675 | > loss: 0.11813 (0.11420) | > log_mle: -0.17057 (-0.17238) | > loss_dur: 0.28870 (0.28658) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.40159 (8.43353) | > current_lr: 0.00001 | > step_time: 1.48270 (0.59173) | > loader_time: 3.41120 (2.70599)
 --> STEP: 272/406 -- GLOBAL_STEP: 15700 | > loss: 0.09501 (0.11342) | > log_mle: -0.19000 (-0.17361) | > loss_dur: 0.28500 (0.28703) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.84535 (8.62859) | > current_lr: 0.00001 | > step_time: 0.88380 (0.60847) | > loader_time: 2.96760 (2.73327)
 --> STEP: 297/406 -- GLOBAL_STEP: 15725 | > loss: 0.10477 (0.11300) | > log_mle: -0.18295 (-0.17452) | > loss_dur: 0.28772 (0.28752) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 8.53754 (8.77253) | > current_lr: 0.00001 | > step_time: 0.75840 (0.62441) | > loader_time: 2.99090 (2.76734)
 --> STEP: 322/406 -- GLOBAL_STEP: 15750 | > loss: 0.11243 (0.11282) | > log_mle: -0.19002 (-0.17541) | > loss_dur: 0.30245 (0.28823) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 11.41290 (8.88195) | > current_lr: 0.00001 | > step_time: 1.47120 (0.64267) | > loader_time: 4.39220 (2.82056)
 --> STEP: 347/406 -- GLOBAL_STEP: 15775 | > loss: 0.09181 (0.11257) | > log_mle: -0.19676 (-0.17628) | > loss_dur: 0.28857 (0.28885) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 15.15130 (9.07154) | > current_lr: 0.00001 | > step_time: 0.85660 (0.66199) | > loader_time: 3.57660 (2.87634)
 --> STEP: 372/406 -- GLOBAL_STEP: 15800 | > loss: 0.12307 (0.11228) | > log_mle: -0.17959 (-0.17713) | > loss_dur: 0.30266 (0.28941) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 4.50627 (9.18688) | > current_lr: 0.00001 | > step_time: 1.00200 (0.68314) | > loader_time: 3.55520 (2.91659)
 --> STEP: 397/406 -- GLOBAL_STEP: 15825 | > loss: 0.09786 (0.11188) | > log_mle: -0.19069 (-0.17794) | > loss_dur: 0.28855 (0.28982) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 11.37954 (9.44082) | > current_lr: 0.00001 | > step_time: 1.08760 (0.71191) | > loader_time: 3.66470 (2.94222)

 > EVALUATION
 --> EVAL PERFORMANCE | > avg_loader_time: 1.44630 (+0.11913) | > avg_loss: 0.07102 (-0.00834) | > avg_log_mle: -0.19164 (-0.00225) | > avg_loss_dur: 0.26266 (-0.00609)
 > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_15834.pth

 > EPOCH: 39/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000
 > TRAINING (2023-06-26 19:46:58)
 --> STEP: 16/406 -- GLOBAL_STEP: 15850 | > loss: 0.10503 (0.09667) | > log_mle: -0.16476 (-0.16174) | > loss_dur: 0.26978 (0.25840) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 2.08885 (4.49030) | > current_lr: 0.00001 | > step_time: 0.25090 (0.38562) | > loader_time: 1.59710 (1.86109)
 --> STEP: 41/406 -- GLOBAL_STEP: 15875 | > loss: 0.10712 (0.10572) | > log_mle: -0.15210 (-0.15956) | > loss_dur: 0.25921 (0.26528) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 8.39793 (5.75471) | > current_lr: 0.00001 | > step_time: 0.36220 (0.40293) | > loader_time: 2.46650 (2.12042)
 --> STEP: 66/406 -- GLOBAL_STEP: 15900 | > loss: 0.13577 (0.10750) | > log_mle: -0.16784 (-0.16222) | > loss_dur: 0.30361 (0.26972) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.67607 (6.35388) | > current_lr: 0.00001 | > step_time: 0.39350 (0.43282) | > loader_time: 2.68490 (2.21250)
 --> STEP: 91/406 -- GLOBAL_STEP: 15925 | > loss: 0.10957 (0.10707) | > log_mle: -0.18952 (-0.16552) | > loss_dur: 0.29909 (0.27259) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 15.04740 (6.89232) | > current_lr: 0.00001 | > step_time: 0.42850 (0.47177) | > loader_time: 2.33420 (2.23743)
 --> STEP: 116/406 -- GLOBAL_STEP: 15950 | > loss: 0.12826 (0.10601) | > log_mle: -0.16750 (-0.16842) | > loss_dur: 0.29576 (0.27442) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.99428 (7.65427) | > current_lr: 0.00001 | > step_time: 0.49590 (0.49661) | > loader_time: 2.79400 (2.24574)
 --> STEP: 141/406 -- GLOBAL_STEP: 15975 | > loss: 0.10047 (0.10578) | > log_mle: -0.18467 (-0.17077) | > loss_dur: 0.28514 (0.27655) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 16.05843 (8.00487) | > current_lr: 0.00001 | > step_time: 0.65910 (0.51471) | > loader_time: 2.53980 (2.25984)
 --> STEP: 166/406 -- GLOBAL_STEP: 16000 | > loss: 0.10159 (0.10561) | > log_mle: -0.19322 (-0.17249) | > loss_dur: 0.29481 (0.27810) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 8.09475 (8.25569) | > current_lr: 0.00001 | > step_time: 0.67400 (0.52971) | > loader_time: 3.21300 (2.31534)
 --> STEP: 191/406 -- GLOBAL_STEP: 16025 | > loss: 0.08485 (0.10515) | > log_mle: -0.18837 (-0.17415) | > loss_dur: 0.27323 (0.27930) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 11.02240 (8.58935) | > current_lr: 0.00001 | > step_time: 0.74950 (0.54844) | > loader_time: 2.50530 (2.37541)
 --> STEP: 216/406 -- GLOBAL_STEP: 16050 | > loss: 0.09161 (0.10458) | > log_mle: -0.19340 (-0.17550) | > loss_dur: 0.28501 (0.28008) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 14.37985 (8.92875) | > current_lr: 0.00001 | > step_time: 0.78280 (0.56772) | > loader_time: 2.99930 (2.42387)
 --> STEP: 241/406 -- GLOBAL_STEP: 16075 | > loss: 0.11023 (0.10446) | > log_mle: -0.18041 (-0.17688) | > loss_dur: 0.29063 (0.28134) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 11.57902 (9.15183) | > current_lr: 0.00001 | > step_time: 0.78150 (0.58829) | > loader_time: 2.69570 (2.47618)
 --> STEP: 266/406 -- GLOBAL_STEP: 16100 | > loss: 0.11085 (0.10403) | > log_mle: -0.18469 (-0.17805) | > loss_dur: 0.29554 (0.28207) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.72145 (9.28998) | > current_lr: 0.00001 | > step_time: 1.15860 (0.66741) | > loader_time: 2.92390 (2.56491)
 --> STEP: 291/406 -- GLOBAL_STEP: 16125 | > loss: 0.11540 (0.10326) | > log_mle: -0.17754 (-0.17908) | > loss_dur: 0.29293 (0.28234) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.16918 (9.46266) | > current_lr: 0.00001 | > step_time: 1.06540 (0.69442) | > loader_time: 2.86100 (2.59221)
 --> STEP: 316/406 -- GLOBAL_STEP: 16150 | > loss: 0.10089 (0.10311) | > log_mle: -0.19050 (-0.18005) | > loss_dur: 0.29139 (0.28316) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.97837 (9.60224) | > current_lr: 0.00001 | > step_time: 1.28320 (0.72292) | > loader_time: 3.45010 (2.62391)
 --> STEP: 341/406 -- GLOBAL_STEP: 16175 | > loss: 0.08478 (0.10303) | > log_mle: -0.20796 (-0.18082) | > loss_dur: 0.29274 (0.28385) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 12.77769 (9.69013) | > current_lr: 0.00001 | > step_time: 1.03250 (0.74768) | > loader_time: 3.20780 (2.67925)
 --> STEP: 366/406 -- GLOBAL_STEP: 16200 | > loss: 0.09378 (0.10260) | > log_mle: -0.19970 (-0.18175) | > loss_dur: 0.29348 (0.28435) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 16.19172 (9.85402) | > current_lr: 0.00001 | > step_time: 0.81550 (0.76589) | > loader_time: 3.59540 (2.72768)
 --> STEP: 391/406 -- GLOBAL_STEP: 16225 | > loss: 0.08415 (0.10228) | > log_mle: -0.20295 (-0.18257) | > loss_dur: 0.28710 (0.28486) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 19.04012 (9.98774) | > current_lr: 0.00001 | > step_time: 0.85280 (0.78284) | > loader_time: 2.92280 (2.76881)

 > EVALUATION
 --> EVAL PERFORMANCE | > avg_loader_time: 1.13499 (-0.31132) | > avg_loss: 0.06085 (-0.01017) | > avg_log_mle: -0.19705 (-0.00541) | > avg_loss_dur: 0.25790 (-0.00475)
 > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_16240.pth

 > EPOCH: 40/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000
 > TRAINING (2023-06-26 20:12:10)
 --> STEP: 10/406 -- GLOBAL_STEP: 16250 | > loss: 0.09178 (0.08895) | > log_mle: -0.16327 (-0.16353) | > loss_dur: 0.25505 (0.25247) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 2.53258 (2.70416) | > current_lr: 0.00001 | > step_time: 0.71430 (0.61828) | > loader_time: 1.55040 (1.53884)
 --> STEP: 35/406 -- GLOBAL_STEP: 16275 | > loss: 0.08183 (0.09314) | > log_mle: -0.18129 (-0.16460) | > loss_dur: 0.26312 (0.25774) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 6.47728 (5.84182) | > current_lr: 0.00001 | > step_time: 0.73160 (0.65208) | > loader_time: 2.01460 (1.64849)
 --> STEP: 60/406 -- GLOBAL_STEP: 16300 | > loss: 0.08587 (0.09622) | > log_mle: -0.16950 (-0.16608) | > loss_dur: 0.25538 (0.26230) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 8.14916 (7.61407) | > current_lr: 0.00001 | > step_time: 1.04010 (0.72273) | > loader_time: 2.16760 (1.74275)
 --> STEP: 85/406 -- GLOBAL_STEP: 16325 | > loss: 0.08031 (0.09680) | > log_mle: -0.19077 (-0.16888) | > loss_dur: 0.27108 (0.26568) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 5.95813 (7.56177) | > current_lr: 0.00001 | > step_time: 0.62350 (0.74553) | > loader_time: 1.65040 (1.78914)
 --> STEP: 110/406 -- GLOBAL_STEP: 16350 | > loss: 0.10078 (0.09584) | > log_mle: -0.19607 (-0.17177) | > loss_dur: 0.29685 (0.26762) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 14.15659 (8.05553) | > current_lr: 0.00001 | > step_time: 0.79740 (0.76031) | > loader_time: 2.13230 (1.83350)
 --> STEP: 135/406 -- GLOBAL_STEP: 16375 | > loss: 0.10650 (0.09535) | > log_mle: -0.18865 (-0.17430) | > loss_dur: 0.29515 (0.26965) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.08076 (8.32791) | > current_lr: 0.00001 | > step_time: 0.75340 (0.75011) | > loader_time: 2.43970 (1.88851)
 --> STEP: 160/406 -- GLOBAL_STEP: 16400 | > loss: 0.09813 (0.09567) | > log_mle: -0.18793 (-0.17605) | > loss_dur: 0.28605 (0.27173) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.23996 (8.60956) | > current_lr: 0.00001 | > step_time: 0.83590 (0.75196) | > loader_time: 2.85880 (1.99355)
 --> STEP: 185/406 -- GLOBAL_STEP: 16425 | > loss: 0.11812 (0.09578) | > log_mle: -0.18866 (-0.17768) | > loss_dur: 0.30678 (0.27346) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.45463 (8.63014) | > current_lr: 0.00001 | > step_time: 0.60290 (0.74782) | > loader_time: 3.22560 (2.11532)
 --> STEP: 210/406 -- GLOBAL_STEP: 16450 | > loss: 0.08163 (0.09541) | > log_mle: -0.18669 (-0.17902) | > loss_dur: 0.26832 (0.27443) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 12.79550 (8.87239) | > current_lr: 0.00001 | > step_time: 0.60810 (0.75266) | > loader_time: 2.88840 (2.20153)
 --> STEP: 235/406 -- GLOBAL_STEP: 16475 | > loss: 0.08805 (0.09521) | > log_mle: -0.19096 (-0.18054) | > loss_dur: 0.27901 (0.27575) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 12.27228 (9.37461) | > current_lr: 0.00001 | > step_time: 0.84780 (0.76565) | > loader_time: 2.85490 (2.27941)
 --> STEP: 260/406 -- GLOBAL_STEP: 16500 | > loss: 0.08022 (0.09471) | > log_mle: -0.19887 (-0.18183) | > loss_dur: 0.27909 (0.27654) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 13.37421 (9.70845) | > current_lr: 0.00001 | > step_time: 0.67360 (0.76490) | > loader_time: 2.91000 (2.34208)
 --> STEP: 285/406 -- GLOBAL_STEP: 16525 | > loss: 0.09566 (0.09407) | > log_mle: -0.18904 (-0.18290) | > loss_dur: 0.28471 (0.27697) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 11.36318 (9.92837) | > current_lr: 0.00001 | > step_time: 0.70390 (0.76731) | > loader_time: 3.56050 (2.41242)
 --> STEP: 310/406 -- GLOBAL_STEP: 16550 | > loss: 0.08919 (0.09393) | > log_mle: -0.18590 (-0.18377) | > loss_dur: 0.27510 (0.27770) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 10.47760 (10.11414) | > current_lr: 0.00001 | > step_time: 0.74930 (0.77009) | > loader_time: 3.14000 (2.46686)
 --> STEP: 335/406 -- GLOBAL_STEP: 16575 | > loss: 0.11346 (0.09390) | > log_mle: -0.19725 (-0.18453) | > loss_dur: 0.31071 (0.27844) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 14.00020 (10.20950) | > current_lr: 0.00001 | > step_time: 0.89710 (0.77668) | > loader_time: 3.41880 (2.54396)
 --> STEP: 360/406 -- GLOBAL_STEP: 16600 | > loss: 0.08793 (0.09356) | > log_mle: -0.20019 (-0.18542) | > loss_dur: 0.28811 (0.27898) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 8.18499 (10.42254) | > current_lr: 0.00001 | > step_time: 0.83020 (0.78781) | > loader_time: 3.69610 (2.62321)
 --> STEP: 385/406 -- GLOBAL_STEP: 16625 | > loss: 0.09869 (0.09319) | > log_mle: -0.19444 (-0.18623) | > loss_dur: 0.29313 (0.27942) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 15.89072 (10.82015) | > current_lr: 0.00001 | > step_time: 1.02070 (0.79918) | > loader_time: 3.19100 (2.67896)

 > EVALUATION
 --> EVAL PERFORMANCE | > avg_loader_time: 1.16428 (+0.02929) | > avg_loss: 0.04992 (-0.01093) | > avg_log_mle: -0.20285 (-0.00580) | > avg_loss_dur: 0.25278 (-0.00513)
 > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_16646.pth

 > EPOCH: 41/1000 --> /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000
 > TRAINING (2023-06-26 20:36:32)
 --> STEP: 4/406 -- GLOBAL_STEP: 16650 | > loss: 0.13945 (0.09262) | > log_mle: -0.15013 (-0.16871) | > loss_dur: 0.28958 (0.26133) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 2.40418 (4.10325) | > current_lr: 0.00001 | > step_time: 0.39830 (0.40094) | > loader_time: 1.28740 (1.39615)
 --> STEP: 29/406 -- GLOBAL_STEP: 16675 | > loss: 0.10748 (0.08463) | > log_mle: -0.16943 (-0.16870) | > loss_dur: 0.27691 (0.25333) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 3.37976 (4.95611) | > current_lr: 0.00001 | > step_time: 0.47330 (0.39918) | > loader_time: 1.76320 (1.73037)
 --> STEP: 54/406 -- GLOBAL_STEP: 16700 | > loss: 0.09280 (0.08909) | > log_mle: -0.17373 (-0.16972) | > loss_dur: 0.26653 (0.25881) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 7.23956 (5.42100) | > current_lr: 0.00001 | > step_time: 0.39300 (0.43575) | > loader_time: 2.12180 (1.94563)
 --> STEP: 79/406 -- GLOBAL_STEP: 16725 | > loss: 0.08361 (0.08873) | > log_mle: -0.18492 (-0.17244) | > loss_dur: 0.26853 (0.26117) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 11.65090 (6.64751) | > current_lr: 0.00001 | > step_time: 0.58030 (0.46831) | > loader_time: 2.47510 (2.02826)
 --> STEP: 104/406 -- GLOBAL_STEP: 16750 | > loss: 0.10629 (0.08893) | > log_mle: -0.17745 (-0.17541) | > loss_dur: 0.28374 (0.26434) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 8.04149 (7.56235) | > current_lr: 0.00001 | > step_time: 0.66870 (0.49701) | > loader_time: 2.29580 (2.09893)
 --> STEP: 129/406 -- GLOBAL_STEP: 16775 | > loss: 0.08635 (0.08821) | > log_mle: -0.18047 (-0.17796) | > loss_dur: 0.26682 (0.26616) | > amp_scaler: 16384.00000 (16384.00000) | > grad_norm: 9.52951 (8.10047) | > current_lr: 0.00001 | > step_time: 0.52650 (0.51767) | > loader_time: 1.99570 (2.13108)
 --> STEP: 154/406 -- GLOBAL_STEP: 16800 | > loss: 0.08854 (0.08808) | > log_mle: -0.18379 (-0.17991) | > loss_dur: 0.27233 (0.26799) | > amp_scaler: 16384.00000 (18192.62338) | > grad_norm: 10.93746 (8.50206) | > current_lr: 0.00001 | > step_time: 0.71720 (0.53639) | > loader_time: 3.01690 (2.15897)
 --> STEP: 179/406 -- GLOBAL_STEP: 16825 | > loss: 0.09546 (0.08792) | > log_mle: -0.19532 (-0.18146) | > loss_dur: 0.29078 (0.26938) | > amp_scaler: 16384.00000 (17940.02235) | > grad_norm: 20.21898 (9.10620) | > current_lr: 0.00001 | > step_time: 0.77660 (0.55009) | > loader_time: 3.16390 (2.29214)
 --> STEP: 204/406 -- GLOBAL_STEP: 16850 | > loss: 0.09146 (0.08802) | > log_mle: -0.18955 (-0.18276) | > loss_dur: 0.28102 (0.27078) | > amp_scaler: 16384.00000 (17749.33333) | > grad_norm: 34.91249 (9.78030) | > current_lr: 0.00001 | > step_time: 0.61180 (0.56546) | > loader_time: 2.97970 (2.36939)
 --> STEP: 229/406 -- GLOBAL_STEP: 16875 | > loss: 0.07900 (0.08745) | > log_mle: -0.18914 (-0.18422) | > loss_dur: 0.26814 (0.27167) | > amp_scaler: 16384.00000 (17600.27948) | > grad_norm: 15.85990 (10.28329) | > current_lr: 0.00001 | > step_time: 1.31500 (0.58497) | > loader_time: 3.12020 (2.41538)
 --> STEP: 254/406 -- GLOBAL_STEP: 16900 | > loss: 0.08431 (0.08752) | > log_mle: -0.18849 (-0.18559) | > loss_dur: 0.27280 (0.27311) | > amp_scaler: 16384.00000 (17480.56693) | > grad_norm: 13.44719 (10.72456) | > current_lr: 0.00001 | > step_time: 0.81680 (0.60200) | > loader_time: 2.85410 (2.44882)
 --> STEP: 279/406 -- GLOBAL_STEP: 16925 | > loss: 0.09877 (0.08679) | > log_mle: -0.19039 (-0.18670) | > loss_dur: 0.28916 (0.27349) | > amp_scaler: 16384.00000 (17382.30824) | > grad_norm: 9.18960 (10.98046) | > current_lr: 0.00001 | > step_time: 1.05230 (0.61940) | > loader_time: 3.17810 (2.48499)
 --> STEP: 304/406 -- GLOBAL_STEP: 16950 | > loss: 0.07333 (0.08670) | > log_mle: -0.20255 (-0.18768) | > loss_dur: 0.27587 (0.27438) | > amp_scaler: 16384.00000 (17300.21053) | > grad_norm: 12.33768 (11.39028) | > current_lr: 0.00001 | > step_time: 0.90890 (0.63667) | > loader_time: 2.86150 (2.51235)
 --> STEP: 329/406 -- GLOBAL_STEP: 16975 | > loss: 0.08461 (0.08662) | > log_mle: -0.19008 (-0.18839) | > loss_dur: 0.27469 (0.27502) | > amp_scaler: 16384.00000 (17230.58967) | > grad_norm: 9.55473 (11.31876) | > current_lr: 0.00001 | > step_time: 0.94700 (0.65462) | > loader_time: 3.00190 (2.56082)
 --> STEP: 354/406 -- GLOBAL_STEP: 17000 | > loss: 0.08634 (0.08648) | > log_mle: -0.19731 (-0.18927) | > loss_dur: 0.28365 (0.27575) | > amp_scaler: 16384.00000 (17170.80226) | > grad_norm: 18.33617 (11.36661) | > current_lr: 0.00001 | > step_time: 1.00170 (0.72011) | > loader_time: 4.91030 (2.60755)
 --> STEP: 379/406 -- GLOBAL_STEP: 17025 | > loss: 0.07347 (0.08591) | > log_mle: -0.20559 (-0.19016) | > loss_dur: 0.27906 (0.27606) | > amp_scaler: 16384.00000 (17118.90237) | > grad_norm: 8.27077 (11.30550) | > current_lr: 0.00001 | > step_time: 1.31270 (0.76448) | > loader_time: 3.62220 (2.67447)
 --> STEP: 404/406 -- GLOBAL_STEP: 17050 | > loss: 0.09158 (0.08553) | > log_mle: -0.20016 (-0.19093) | > loss_dur: 0.29174 (0.27646) | > amp_scaler: 16384.00000 (17073.42574) | > grad_norm: 20.25908 (11.46174) | > current_lr: 0.00001 | > step_time: 2.02400 (0.81222) | > loader_time: 3.15630 (2.75480)

 > EVALUATION
 --> EVAL PERFORMANCE | > avg_loader_time: 1.46697 (+0.30269) | > avg_loss: 0.04040 (-0.00952) | > avg_log_mle: -0.20743 (-0.00458) | > avg_loss_dur: 0.24783 (-0.00494)
 > BEST MODEL : /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000/best_model_17052.pth
 ! Run is kept in /home/cryptogoth/src/tts-training/tacotron/run-June-26-2023_05+28AM-0000000
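One relationship that holds in the records above: the reported loss equals log_mle + loss_dur, so the likelihood term and the duration term can be tracked independently. A quick spot-check in Python, with values taken directly from the log:

# loss = log_mle + loss_dur, checked against two records from this section
for log_mle, loss_dur, loss in [
    (-0.14746, 0.32812, 0.18066),  # GLOBAL_STEP 13625
    (-0.20016, 0.29174, 0.09158),  # GLOBAL_STEP 17050
]:
    assert abs((log_mle + loss_dur) - loss) < 1e-5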