用两个优化器独立训练神经网络两个模块

  • Post author:
  • Post category:其他


# Instantiate the ResNet model (class and args are defined elsewhere in this file).
resnet = ResNet(num_classes, args)

# Two objectives: cross-entropy for the classification output,
# MSE for the second output branch.
loss_fn1 = nn.CrossEntropyLoss()
loss_fn2 = nn.MSELoss()

# Optimizer 1 updates the convolution / cross-attention front-end modules.
front_modules = [resnet.conv1, resnet.conv2, resnet.cross_attention]
optimizer1 = optim.Adam(
    [{'params': m.parameters()} for m in front_modules],
    lr=0.001,  # adjust the learning rate as needed
)

# Optimizer 2 updates the backbone and both fully-connected heads.
back_modules = [resnet.backbone, resnet.fc, resnet.fc2]
optimizer2 = optim.Adam(
    [{'params': m.parameters()} for m in back_modules],
    lr=0.001,
)

# Training loop: one forward pass feeds both losses; each optimizer then
# updates only its own parameter groups.
for epoch in range(num_epochs):
    total_loss1 = 0.0
    total_loss2 = 0.0

    for inputs, targets in dataloader:
        # Clear gradients on both parameter sets before the backward pass.
        optimizer1.zero_grad()
        optimizer2.zero_grad()

        outputs1, outputs2 = resnet(inputs)
        loss1 = loss_fn1(outputs1, targets)
        # NOTE(review): MSE against `targets` assumes outputs2 matches the
        # targets' shape/dtype — confirm against the model definition.
        loss2 = loss_fn2(outputs2, targets)

        # BUG FIX: the original called loss1.backward() then loss2.backward().
        # Both losses come from the same forward call, so they share a graph;
        # the first backward frees it and the second raises "Trying to backward
        # through the graph a second time" (unless retain_graph=True). Summing
        # the losses yields the same accumulated gradients in one backward pass.
        (loss1 + loss2).backward()

        optimizer1.step()
        optimizer2.step()

        # BUG FIX: the epoch totals were initialized but never accumulated.
        # .item() detaches the scalar so no graph is kept alive across batches.
        total_loss1 += loss1.item()
        total_loss2 += loss2.item()



版权声明:本文为A2321161581原创文章,遵循 CC 4.0 BY-SA 版权协议,转载请附上原文出处链接和本声明。