Move optimizer to grad sub.
This commit is contained in:
		
							parent
							
								
									ce64a2f7aa
								
							
						
					
					
						commit
						39cecd1146
					
				| 
						 | 
				
			
			@ -108,7 +108,6 @@ epochs = 10
 | 
			
		|||
# Unsupervised pre-training setup: train only conv1, freeze conv2 and fc1.
model.conv1.weight.requires_grad = True
model.conv2.weight.requires_grad = False
model.fc1.weight.requires_grad = False
# NOTE(review): all parameters (including frozen ones) are handed to the
# optimizer. SGD skips params whose .grad stays None, so this works, but
# filtering to trainable params would make the intent explicit — confirm.
optimizer_unsuper = torch.optim.SGD(model.parameters(), lr=0.1)
# Number of mini-batches per epoch, used for progress printing below.
n_total_steps = len(train_loader)
 | 
			
		||||
# Unsupervised pre-training loop: iterate `epochs` passes over the loader.
# (Body continues outside this diff hunk.)
for epoch in range(epochs):
    for i, (images, labels) in enumerate(train_loader):
 | 
			
		||||
| 
						 | 
				
			
			@ -121,9 +120,11 @@ for epoch in range(epochs):
 | 
			
		|||
        # Mean of squared diff_ratio per sample (diff_ratio computed above,
        # outside this hunk — shape assumed (batch, features); TODO confirm).
        diff_ratio_mean = torch.mean(diff_ratio * diff_ratio, dim=1)
        # NOTE(review): the "target" is derived from the prediction and is NOT
        # detached, so gradients flow through both arguments of the L1 loss —
        # confirm this is intentional (loss reduces to 0.5*|diff_ratio_mean|).
        label = diff_ratio_mean * 0.5
        loss = F.l1_loss(diff_ratio_mean, label)
        optimizer_unsuper.zero_grad()
        # Ensure conv1 has a grad tensor so the scaling line below cannot fail
        # on the first iteration.
        if model.conv1.weight.grad is None:
            # NOTE(review): this briefly aliases .grad to the weight's own
            # storage; the next line replaces it with a fresh zero tensor, so
            # torch.zeros_like(...) would express the intent more safely.
            model.conv1.weight.grad = model.conv1.weight.data
        # Zero conv1's gradient (creates a new tensor, breaking the alias).
        model.conv1.weight.grad = model.conv1.weight.grad * 0.0
        loss.backward()
        # NOTE(review): conv1 is updated twice per step — once here by SGD
        # (lr=0.1) and again by the manual update below (an extra -0.2*grad,
        # effective lr 0.3 on conv1). Verify this double update is intended.
        optimizer_unsuper.step()
        model.conv1.weight.data = model.conv1.weight.data - model.conv1.weight.grad * 0.2
        # Periodic progress report every 100 mini-batches.
        if (i + 1) % 100 == 0:
            print(f"Epoch [{epoch+1}/{epochs}], Step [{i+1}/{n_total_steps}], Loss: {loss.item():.8f}")
 | 
			
		||||
 | 
			
		||||
| 
						 | 
				
			
			
 | 
			
		|||
		Loading…
	
		Reference in New Issue