commit 67a9d2c5a9
Author: wudong
Date:   2022-11-18 15:28:50 +08:00


@@ -400,6 +400,7 @@ def train(hyp, opt, device, data_list,id,callbacks):  # hyp is path/to/hyp.yaml
 # Backward
 scaler.scale(loss).backward()
+report_cellback(epoch, epochs, float(loss))
 # Optimize - https://pytorch.org/docs/master/notes/amp_examples.html
 if ni - last_opt_step >= accumulate:
@@ -484,7 +485,7 @@ def train(hyp, opt, device, data_list,id,callbacks):  # hyp is path/to/hyp.yaml
 # break # must break all DDP ranks
 ###### report training-accuracy metrics in real time #############
 #gain_train_report(int(epoch + 1), float(results[0]), pro, version, epochs)
-report_cellback(epoch, epochs, float(results[0]))
+# report_cellback(epoch, epochs, float(results[0]))
 # end epoch ----------------------------------------------------------------------------------------------------
 ######## training-sample count and model saving ###########
 #save_train_report_result(pro, train_num, best)
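
Taken together, the two hunks shift progress reporting from once per epoch on results[0] to once per batch on the running loss. Below is a minimal sketch of how a callback with the report_cellback(epoch, epochs, value) signature can be hooked into a mixed-precision training loop with gradient accumulation; apart from that signature and the comment anchors copied from the diff, the model, loader, optimizer, and helper names are assumptions, not the repository's actual code.

import torch
from torch.cuda.amp import GradScaler, autocast

def report_cellback(epoch, epochs, value):
    # Stand-in reporter: the project presumably forwards this value to an
    # external progress channel; here it simply prints.
    print(f"epoch {epoch + 1}/{epochs}  loss {value:.4f}")

def train_sketch(model, loader, optimizer, epochs, accumulate=1,
                 device="cuda" if torch.cuda.is_available() else "cpu"):
    amp = device == "cuda"
    scaler = GradScaler(enabled=amp)
    model.to(device).train()
    last_opt_step = -1
    for epoch in range(epochs):
        for i, (imgs, targets) in enumerate(loader):
            ni = i + len(loader) * epoch  # batches seen since training started
            imgs, targets = imgs.to(device), targets.to(device)
            with autocast(enabled=amp):
                loss = model(imgs, targets)  # assumes the model returns a scalar loss
            # Backward
            scaler.scale(loss).backward()
            # Per-batch progress report, mirroring the line added in the first hunk
            report_cellback(epoch, epochs, float(loss))
            # Optimize - https://pytorch.org/docs/master/notes/amp_examples.html
            if ni - last_opt_step >= accumulate:
                scaler.step(optimizer)
                scaler.update()
                optimizer.zero_grad()
                last_opt_step = ni

Reporting float(loss) on every iteration keeps the progress channel responsive during long epochs, at the cost of a noisier signal than the per-epoch results[0] metric that the second hunk comments out.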