栏目分类:
子分类:
返回
名师互学网用户登录
快速导航关闭
当前搜索
当前分类
子分类
实用工具
热门搜索
名师互学网 > IT > 软件开发 > 后端开发 > Python

pytorch

Python 更新时间: 发布时间: IT归档 最新发布 模块sitemap 名妆网 法律咨询 聚返吧 英语巴士网 伯小乐 网商动力

pytorch

def __init__(self, block, layers, num_classes=1000, zero_init_residual=False,
             groups=1, width_per_group=64, replace_stride_with_dilation=None,
             norm_layer=None, dropout_p=0.5):
    """Build a ResNet backbone with a custom 4-way classification head.

    Args:
        block: residual block class (e.g. BasicBlock/Bottleneck); must expose
            an ``expansion`` class attribute.
        layers: list of 4 ints — number of blocks in each of the 4 stages.
        num_classes: accepted for interface compatibility but NOT used here —
            ``self.fc`` is hard-wired to 1000 outputs and the real head
            (``last_linear``) is hard-wired to 4 classes.
        zero_init_residual: accepted but unused in the visible code.
            NOTE(review): upstream torchvision uses it to zero-init the last
            BN of each block — confirm whether that init loop was lost.
        groups, width_per_group: grouped-convolution configuration passed
            through to the blocks.
        replace_stride_with_dilation: None or a 3-element sequence; each entry
            replaces the stride-2 downsampling of stages 2-4 with dilation.
        norm_layer: normalization layer class; defaults to ``nn.BatchNorm2d``.
        dropout_p: dropout probability before the final head, or None to
            disable dropout entirely.

    Raises:
        ValueError: if ``replace_stride_with_dilation`` is not None and not
            3 elements long.
    """
    super(ResNet, self).__init__()
    if norm_layer is None:
        norm_layer = nn.BatchNorm2d
    self._norm_layer = norm_layer
    self.inplanes = 64
    self.dilation = 1
    if replace_stride_with_dilation is None:
        # Each element in the tuple indicates if we should replace
        # the 2x2 stride with a dilated convolution instead.
        replace_stride_with_dilation = [False, False, False]
    if len(replace_stride_with_dilation) != 3:
        raise ValueError(
            "replace_stride_with_dilation should be None "
            "or a 3-element tuple, got {}".format(replace_stride_with_dilation))
    self.groups = groups
    self.base_width = width_per_group
    # Stem: 7x7 stride-2 conv, BN, ReLU, 3x3 stride-2 max-pool (4x downsample).
    self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=7, stride=2,
                           padding=3, bias=False)
    self.bn1 = norm_layer(self.inplanes)
    self.relu = nn.ReLU(inplace=True)
    self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
    # Four residual stages; stages 2-4 either stride or dilate per config.
    self.layer1 = self._make_layer(block, 64, layers[0])
    self.layer2 = self._make_layer(block, 128, layers[1], stride=2,
                                   dilate=replace_stride_with_dilation[0])
    self.layer3 = self._make_layer(block, 256, layers[2], stride=2,
                                   dilate=replace_stride_with_dilation[1])
    self.layer4 = self._make_layer(block, 512, layers[3], stride=2,
                                   dilate=replace_stride_with_dilation[2])
    # self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
    # Head: 1000-d intermediate layer, then the 4-class output layer.
    self.fc = nn.Linear(512 * block.expansion, 1000)
    self.softmax = nn.Softmax(dim=-1)  # defined but not applied in forward
    self.dropout = nn.Dropout(dropout_p) if dropout_p is not None else None
    self.last_linear = nn.Linear(1000, 4)
    self.last_linear.weight.data.normal_(0, 0.01)
    self.last_linear.bias.data.fill_(0.0)

def _make_layer(self, block, planes, blocks, stride=1, dilate=False):
    """Build one residual stage of ``blocks`` blocks.

    The first block may downsample (stride) or, when ``dilate`` is True,
    keep resolution and grow the dilation instead; the remaining blocks
    keep the spatial size.
    """
    norm_layer = self._norm_layer
    downsample = None
    previous_dilation = self.dilation
    if dilate:
        # Trade stride for dilation: same receptive-field growth,
        # no spatial downsampling.
        self.dilation *= stride
        stride = 1
    if stride != 1 or self.inplanes != planes * block.expansion:
        # Projection shortcut when shape changes (stride or channel count).
        downsample = nn.Sequential(
            conv1x1(self.inplanes, planes * block.expansion, stride),
            norm_layer(planes * block.expansion),
        )
    layers = []
    layers.append(block(self.inplanes, planes, stride, downsample,
                        self.groups, self.base_width, previous_dilation,
                        norm_layer))
    self.inplanes = planes * block.expansion
    for _ in range(1, blocks):
        layers.append(block(self.inplanes, planes, groups=self.groups,
                            base_width=self.base_width,
                            dilation=self.dilation,
                            norm_layer=norm_layer))
    return nn.Sequential(*layers)

def _forward_impl(self, x):
    """Run the backbone and head; returns raw 4-class logits (no softmax)."""
    x = self.conv1(x)
    x = self.bn1(x)
    x = self.relu(x)
    x = self.maxpool(x)
    x = self.layer1(x)
    x = self.layer2(x)
    x = self.layer3(x)
    x = self.layer4(x)
    # Global average pool over the spatial dims (replaces AdaptiveAvgPool2d).
    x = x.mean([2, 3])
    x = self.fc(x)
    # BUG FIX: __init__ sets self.dropout to None when dropout_p is None;
    # calling it unconditionally would raise TypeError. Guard it.
    if self.dropout is not None:
        x = self.dropout(x)
    x = self.last_linear(x)
    return x

def forward(self, x):
    """Standard nn.Module entry point; delegates to ``_forward_impl``."""
    return self._forward_impl(x)
转载请注明:文章转载自 www.mshxw.com
本文地址:https://www.mshxw.com/it/267712.html
我们一直用心在做
关于我们 文章归档 网站地图 联系我们

版权所有 (c)2021-2022 MSHXW.COM

ICP备案号:晋ICP备2021003244-6号