'''
Author: 365JHWZGo
Description: 8.复习保存和提取
Date: 2021-10-25 16:34:53
FilePath: pytorchpytorchday07-1.py
LastEditTime: 2021-10-25 17:00:28
LastEditors: 365JHWZGo
'''
# 导包
import torch
import matplotlib.pyplot as plt
from torch.autograd import Variable
# Create the training data: 100 points of y = x^2 plus uniform noise in [0, 0.2).
# Keep the x range small ([-1, 1]); with a wide range the noise offset is invisible.
x = torch.unsqueeze(torch.linspace(-1, 1, 100), dim=1)  # shape (100, 1)
y = x.pow(2) + 0.2*torch.rand(x.size())
# NOTE: the deprecated torch.autograd.Variable wrapper (and requires_grad=True on
# plain data tensors) was removed — only the network parameters need gradients.
# 搭建神经层
def save():
    """Build and train a small regression net, plot its fit, then persist it.

    Trains a 1-10-1 MLP on the module-level (x, y) data for 100 SGD steps,
    draws the fit into subplot 131 of figure 1, and saves two files:

    * ``net1.pkl``        -- the entire module (architecture + weights)
    * ``net1_params.pkl`` -- only the state_dict (weights)
    """
    net1 = torch.nn.Sequential(
        torch.nn.Linear(1, 10),
        torch.nn.ReLU(),
        torch.nn.Linear(10, 1),
    )
    # Optimizer and loss
    optimizer = torch.optim.SGD(net1.parameters(), lr=0.5)
    loss_func = torch.nn.MSELoss()  # mean-squared-error loss

    # Plain SGD training loop.
    for _ in range(100):
        prediction = net1(x)
        loss = loss_func(prediction, y)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

    # Left panel: the trained net's fit over the noisy data.
    plt.figure(1, figsize=(10, 3))
    plt.subplot(131)
    plt.scatter(x.data.numpy(), y.data.numpy())
    plt.title('net1')
    plt.plot(x.data.numpy(), prediction.data.numpy(), 'r-', lw=5)

    # Two ways to persist the trained model.
    torch.save(net1, 'net1.pkl')
    torch.save(net1.state_dict(), 'net1_params.pkl')
def restore_net():
    """Reload the whole pickled module from ``net1.pkl`` and plot its fit.

    Expects ``save()`` to have been called first so the file exists.
    """
    # NOTE(review): on torch >= 2.6 loading a full pickled module requires
    # ``weights_only=False``; kept as-is for the older torch this file targets.
    net2 = torch.load('net1.pkl')
    prediction = net2(x)
    # Middle panel: the restored net reproduces net1's fit exactly.
    plt.subplot(132)
    plt.scatter(x.data.numpy(), y.data.numpy())
    plt.title('net2')
    plt.plot(x.data.numpy(), prediction.data.numpy(), 'r-', lw=5)
def restore_params():
    """Rebuild the architecture, load weights from ``net1_params.pkl``, and plot.

    Expects ``save()`` to have been called first so the file exists.
    """
    # The architecture must match the one used in save() exactly, or
    # load_state_dict() will raise on mismatched parameter shapes/names.
    net3 = torch.nn.Sequential(
        torch.nn.Linear(1, 10),
        torch.nn.ReLU(),
        torch.nn.Linear(10, 1),
    )
    net3.load_state_dict(torch.load('net1_params.pkl'))
    prediction = net3(x)
    # Right panel: weights-only restore also reproduces net1's fit.
    plt.subplot(133)
    plt.scatter(x.data.numpy(), y.data.numpy())
    plt.title('net3')
    plt.plot(x.data.numpy(), prediction.data.numpy(), 'r-', lw=5)
    plt.show()  # show all three panels once the last one is drawn
# Run the demo only when executed as a script, not on import.
if __name__ == '__main__':
    save()
    restore_net()
    restore_params()