# --- Batch-level confusion-matrix counts ------------------------------------
# `labels`/`pred` are element-wise compared against the positive (`one`) and
# negative (`zero`) class tensors; the boolean products count matches.
tp = ((labels == one) * (pred == one)).sum()
fp = ((labels == zero) * (pred == one)).sum()
fn = ((labels == one) * (pred == zero)).sum()
# NOTE(review): `tn` is accumulated below but never computed in this view —
# presumably `tn = ((labels == zero) * (pred == zero)).sum()` appears
# earlier; confirm upstream.
train_sum_fn += fn.item()
train_sum_fp += fp.item()
train_sum_tn += tn.item()
train_sum_tp += tp.item()
train_sum_loss += loss.item()
train_sum_correct += acc.item()
# --- Epoch-level aggregates -------------------------------------------------
# Loss is averaged per batch; accuracy per sample (batches / batch_size).
train_loss = train_sum_loss / len(trainDataLoader)
train_correct = train_sum_correct / len(trainDataLoader) / batch_size
# Guard the denominators: early in training the model may predict no
# positives (tp + fp == 0) or the split may contain none (tp + fn == 0);
# report 0.0 instead of raising ZeroDivisionError (sklearn convention).
_pred_pos = train_sum_fp + train_sum_tp
_real_pos = train_sum_fn + train_sum_tp
train_precision = train_sum_tp / _pred_pos if _pred_pos else 0.0
train_recall = train_sum_tp / _real_pos if _real_pos else 0.0
# Fixed: the original used U+201C curly quotes as string delimiters, which
# is a SyntaxError in Python. Tag text is unchanged.
writer.add_scalar("train loss", train_loss, global_step=epoch)
writer.add_scalar("train correct", train_correct, global_step=epoch)
writer.add_scalar("train precision", train_precision, global_step=epoch)
writer.add_scalar("train recall", train_recall, global_step=epoch)
if not os.path.exists
# (web-scrape artifact removed from code path: "本章未完,请点击下一页继续阅读!"
#  = "chapter not finished, click next page to continue" — pagination text
#  from the novel site this snippet was scraped from, not part of the program)