- 1.问题:无法查看网络输出层情况
- 2.输出
- 3.总结
import torch
from torch import nn
# AlexNet-style CNN for 1-channel 224x224 input (e.g. Fashion-MNIST resized),
# ending in a 10-class linear head. Built layer-by-layer so each stage's
# output shape can be printed below.
net = nn.Sequential(
    nn.Conv2d(1, 96, kernel_size=11, stride=4, padding=1), nn.ReLU(),
    nn.MaxPool2d(kernel_size=3, stride=2),
    nn.Conv2d(96, 256, kernel_size=5, padding=2), nn.ReLU(),
    nn.MaxPool2d(kernel_size=3, stride=2),
    nn.Conv2d(256, 384, kernel_size=3, padding=1), nn.ReLU(),
    nn.Conv2d(384, 384, kernel_size=3, padding=1), nn.ReLU(),
    nn.Conv2d(384, 256, kernel_size=3, padding=1), nn.ReLU(),
    nn.MaxPool2d(kernel_size=3, stride=2),
    nn.Flatten(),
    # 256 channels * 5 * 5 spatial = 6400 features after the last pool.
    nn.Linear(6400, 4096), nn.ReLU(),
    nn.Dropout(p=0.5),
    nn.Linear(4096, 4096), nn.ReLU(),
    nn.Dropout(p=0.5),
    nn.Linear(4096, 10),
)
# Dummy input: batch of 1, single channel, 224x224.
x = torch.randn(1, 1, 224, 224)
# Feed the tensor through one layer at a time and report each output shape.
for layer in net:
    x = layer(x)
    # BUG FIX: the separator was the literal 't;out_shape:t' — the
    # backslashes of the intended '\t' tab escapes were lost (the recorded
    # output below shows real tabs were used in the original run).
    print(layer.__class__.__name__, '\t;out_shape:\t', x.shape)
2.输出
Conv2d ;out_shape: torch.Size([1, 96, 54, 54]) ReLU ;out_shape: torch.Size([1, 96, 54, 54]) MaxPool2d ;out_shape: torch.Size([1, 96, 26, 26]) Conv2d ;out_shape: torch.Size([1, 256, 26, 26]) ReLU ;out_shape: torch.Size([1, 256, 26, 26]) MaxPool2d ;out_shape: torch.Size([1, 256, 12, 12]) Conv2d ;out_shape: torch.Size([1, 384, 12, 12]) ReLU ;out_shape: torch.Size([1, 384, 12, 12]) Conv2d ;out_shape: torch.Size([1, 384, 12, 12]) ReLU ;out_shape: torch.Size([1, 384, 12, 12]) Conv2d ;out_shape: torch.Size([1, 256, 12, 12]) ReLU ;out_shape: torch.Size([1, 256, 12, 12]) MaxPool2d ;out_shape: torch.Size([1, 256, 5, 5]) Flatten ;out_shape: torch.Size([1, 6400]) Linear ;out_shape: torch.Size([1, 4096]) ReLU ;out_shape: torch.Size([1, 4096]) Dropout ;out_shape: torch.Size([1, 4096]) Linear ;out_shape: torch.Size([1, 4096]) ReLU ;out_shape: torch.Size([1, 4096]) Dropout ;out_shape: torch.Size([1, 4096]) Linear ;out_shape: torch.Size([1, 10])3.总结
通过循环逐层前向传播,可以方便地观察每一层输出数据的形状大小,有助于理解网络结构及后续调试。



