tensorboard
import os
from tensorboardX import SummaryWriter

# Set the directory where log files are saved
# (path, current_time, and comment are assumed to be defined earlier)
logger_path = os.path.join(path, current_time)
logger = SummaryWriter(log_dir=logger_path, comment=comment)
# Record a scalar value under the tag "value_loss"
logger.add_scalar("value_loss", value_loss, global_step=(step + 1))
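To make the snippet above runnable end to end, here is a minimal self-contained sketch; the "runs" directory, the timestamp format, the comment string, and the dummy loss are illustrative assumptions, not part of the original code:

import os
import time
from tensorboardX import SummaryWriter

logger_path = os.path.join("runs", time.strftime("%Y%m%d-%H%M%S"))
logger = SummaryWriter(log_dir=logger_path, comment="demo")
for step in range(100):
    value_loss = 1.0 / (step + 1)  # dummy loss for illustration
    logger.add_scalar("value_loss", value_loss, global_step=step + 1)
logger.close()

You can then view the curves with tensorboard --logdir runs.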
wandb
import wandb
# start a new wandb run to track this script
wandb.init(
    # project name under which this run will be logged
    project="my-awesome-project",
    # hyperparameter configuration for this run
    config={
        "learning_rate": 0.02,
        "architecture": "CNN",
        "dataset": "CIFAR-100",
        "epochs": 10,
    },
)
Then, wherever you compute the metrics you want to record, add:
# log metrics to wandb
wandb.log({"acc": acc, "loss": loss})
Finally, close the run:
wandb.finish()
A complete example:
import wandb
import random
# start a new wandb run to track this script
wandb.init(
    # set the wandb project where this run will be logged
    project="my-awesome-project",
    # track hyperparameters and run metadata
    config={
        "learning_rate": 0.02,
        "architecture": "CNN",
        "dataset": "CIFAR-100",
        "epochs": 10,
    },
)

# simulate training
epochs = 10
offset = random.random() / 5
for epoch in range(2, epochs):
    acc = 1 - 2**-epoch - random.random() / epoch - offset
    loss = 2**-epoch + random.random() / epoch + offset
    # log metrics to wandb
    wandb.log({"acc": acc, "loss": loss})

# [optional] finish the wandb run, necessary in notebooks
wandb.finish()
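The config passed to wandb.init is stored on the run and can be read back via wandb.config, which keeps hyperparameters in one place. A minimal sketch of that pattern; the loop body and the logged metric are illustrative assumptions:

import wandb

wandb.init(project="my-awesome-project",
           config={"learning_rate": 0.02, "epochs": 10})
config = wandb.config
for epoch in range(config.epochs):
    # e.g. build the optimizer from config.learning_rate here
    wandb.log({"epoch": epoch})
wandb.finish()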
Converting between NumPy arrays and PyTorch tensors:
import numpy as np
import torch

# NumPy array -> torch tensor
ratio = torch.from_numpy(np.array([1.6, 0.5, 1, 0.6]))
# torch tensor -> NumPy array (x is assumed to be a CPU tensor)
x = x.numpy()
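Note that both conversions share the underlying memory rather than copying it, so modifying one side changes the other. A small sketch illustrating this; the values are illustrative:

import numpy as np
import torch

arr = np.array([1.6, 0.5, 1.0, 0.6])
ratio = torch.from_numpy(arr)   # shares memory with arr
arr[0] = 9.9
print(ratio[0])                 # tensor(9.9000, dtype=torch.float64)

x = torch.ones(3)
x_np = x.numpy()                # also shares memory (CPU tensors only)
x.add_(1)                       # in-place add on the tensor
print(x_np)                     # [2. 2. 2.]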