# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""LeNet-5 network definition for MindSpore."""

import mindspore.nn as nn
from mindspore.common.initializer import TruncatedNormal


def conv(in_channels, out_channels, kernel_size, stride=1, padding=0):
    """Build a bias-free 2D convolution layer with truncated-normal weights.

    Args:
        in_channels (int): Number of input channels.
        out_channels (int): Number of output channels.
        kernel_size (int): Side length of the square convolution kernel.
        stride (int): Convolution stride. Default: 1.
        padding (int): Implicit zero padding. Default: 0 (combined with
            pad_mode="valid", no padding is applied).

    Returns:
        nn.Conv2d: The configured convolution cell.
    """
    weight = weight_variable()
    return nn.Conv2d(
        in_channels,
        out_channels,
        kernel_size=kernel_size,
        stride=stride,
        padding=padding,
        weight_init=weight,
        has_bias=False,
        pad_mode="valid",
    )


def fc_with_initialize(input_channels, out_channels):
    """Build a fully connected layer with truncated-normal weight and bias.

    Args:
        input_channels (int): Size of each input sample.
        out_channels (int): Size of each output sample.

    Returns:
        nn.Dense: The configured dense (fully connected) cell.
    """
    weight = weight_variable()
    bias = weight_variable()
    return nn.Dense(input_channels, out_channels, weight, bias)


def weight_variable():
    """Return the TruncatedNormal(sigma=0.02) initializer shared by all layers."""
    return TruncatedNormal(0.02)


class LeNet5(nn.Cell):
    """Classic LeNet-5 convolutional network.

    Two conv+ReLU+maxpool stages followed by three fully connected layers.
    With the default 'valid' padding, the fc1 input size (16 * 5 * 5)
    assumes 32x32 spatial input images — confirm against the caller.

    Args:
        num_class (int): Number of output classes. Default: 10.
        channel (int): Number of input image channels. Default: 3.
    """

    def __init__(self, num_class=10, channel=3):
        super(LeNet5, self).__init__()
        self.num_class = num_class
        self.conv1 = conv(channel, 6, 5)
        self.conv2 = conv(6, 16, 5)
        self.fc1 = fc_with_initialize(16 * 5 * 5, 120)
        self.fc2 = fc_with_initialize(120, 84)
        self.fc3 = fc_with_initialize(84, self.num_class)
        self.relu = nn.ReLU()
        self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
        self.flatten = nn.Flatten()

    def construct(self, x):
        """Run the forward pass.

        Args:
            x (Tensor): Input image batch, shape (N, channel, H, W).

        Returns:
            Tensor: Class logits of shape (N, num_class).
        """
        # Stage 1: conv -> ReLU -> 2x2 max-pool
        x = self.conv1(x)
        x = self.relu(x)
        x = self.max_pool2d(x)
        # Stage 2: conv -> ReLU -> 2x2 max-pool
        x = self.conv2(x)
        x = self.relu(x)
        x = self.max_pool2d(x)
        # Classifier head: flatten then three dense layers
        x = self.flatten(x)
        x = self.fc1(x)
        x = self.relu(x)
        x = self.fc2(x)
        x = self.relu(x)
        x = self.fc3(x)
        return x