# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
@File  : test_adapter.py
@Author:
@Date  : 2019-03-20
@Desc  : test mindspore compile method
"""
import logging
import numpy as np

import mindspore.nn as nn
from mindspore import Tensor, Parameter
from mindspore.ops import operations as P

log = logging.getLogger("test")
log.setLevel(level=logging.ERROR)


def conv3x3(in_channels, out_channels, stride=1, padding=1):
    """3x3 convolution with an all-ones weight init"""
    weight = Tensor(np.ones([out_channels, in_channels, 3, 3]).astype(np.float32))
    return nn.Conv2d(in_channels, out_channels,
                     kernel_size=3, stride=stride,
                     padding=padding, weight_init=weight)


def conv1x1(in_channels, out_channels, stride=1, padding=0):
    """1x1 convolution with an all-ones weight init"""
    weight = Tensor(np.ones([out_channels, in_channels, 1, 1]).astype(np.float32))
    return nn.Conv2d(in_channels, out_channels,
                     kernel_size=1, stride=stride,
                     padding=padding, weight_init=weight)


class ResidualBlock(nn.Cell):
    """
    Residual block (bottleneck): 1x1 -> 3x3 -> 1x1 convolutions plus a skip connection.
    """
    expansion = 4

    def __init__(self,
                 in_channels,
                 out_channels,
                 stride=1,
                 down_sample=False):
        super(ResidualBlock, self).__init__()

        out_chls = out_channels // self.expansion
        self.conv1 = conv1x1(in_channels, out_chls, stride=1, padding=0)
        self.bn1 = nn.BatchNorm2d(out_chls)

        self.conv2 = conv3x3(out_chls, out_chls, stride=stride, padding=1)
        self.bn2 = nn.BatchNorm2d(out_chls)

        self.conv3 = conv1x1(out_chls, out_channels, stride=1, padding=0)
        self.bn3 = nn.BatchNorm2d(out_channels)

        self.relu = nn.ReLU()
        self.downsample = down_sample

        if self.downsample:
            # Project the identity branch so its shape matches the main branch.
            self.conv_down_sample = conv1x1(in_channels, out_channels,
                                            stride=stride, padding=0)
            self.bn_down_sample = nn.BatchNorm2d(out_channels)
        self.add = P.Add()

    def construct(self, x):
        identity = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)

        out = self.conv3(out)
        out = self.bn3(out)

        if self.downsample:
            identity = self.conv_down_sample(identity)
            identity = self.bn_down_sample(identity)

        out = self.add(out, identity)
        out = self.relu(out)

        return out


class ResNet(nn.Cell):
    """ ResNet definition (stem only: initial conv + batch norm) """

    def __init__(self, tensor):
        super(ResNet, self).__init__()
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3)
        self.bn1 = nn.BatchNorm2d(64)
        self.weight = Parameter(tensor, name='w')

    def construct(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        return x


class LeNet(nn.Cell):
    """ LeNet definition """

    def __init__(self):
        super(LeNet, self).__init__()
        self.relu = nn.ReLU()
        weight1 = Tensor(np.ones([6, 1, 5, 5]).astype(np.float32) * 0.01)
        weight2 = Tensor(np.ones([16, 6, 5, 5]).astype(np.float32) * 0.01)
        self.conv1 = nn.Conv2d(1, 6, (5, 5), weight_init=weight1, stride=1, padding=0, pad_mode='valid')
        self.conv2 = nn.Conv2d(6, 16, (5, 5), weight_init=weight2, pad_mode='valid')
        # Stride-2 pooling halves each feature map, which is what produces the
        # 16 * 5 * 5 flattened size that fc1 expects for a 1x32x32 input.
        self.pool = nn.MaxPool2d(kernel_size=2, stride=2)
        self.flatten = nn.Flatten()
        fcweight1 = Tensor(np.ones([120, 16 * 5 * 5]).astype(np.float32) * 0.01)
        fcweight2 = Tensor(np.ones([84, 120]).astype(np.float32) * 0.01)
        fcweight3 = Tensor(np.ones([10, 84]).astype(np.float32) * 0.01)
        self.fc1 = nn.Dense(16 * 5 * 5, 120, weight_init=fcweight1)
        self.fc2 = nn.Dense(120, 84, weight_init=fcweight2)
        self.fc3 = nn.Dense(84, 10, weight_init=fcweight3)

    def construct(self, input_x):
        output = self.conv1(input_x)
        output = self.relu(output)
        output = self.pool(output)
        output = self.conv2(output)
        output = self.relu(output)
        output = self.pool(output)
        output = self.flatten(output)
        output = self.fc1(output)
        output = self.fc2(output)
        output = self.fc3(output)
        return output


def loss_func(x):
    """ identity placeholder standing in for a loss function """
    return x


def optimizer(x):
    """ identity placeholder standing in for an optimizer """
    return x


class Net(nn.Cell):
    """ Net definition: a single softmax over the given axis """

    def __init__(self, dim):
        super(Net, self).__init__()
        self.softmax = nn.Softmax(dim)

    def construct(self, input_x):
        return self.softmax(input_x)
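

# --- Usage sketch (illustrative addition, not part of the original file) ---
# A minimal smoke test showing how the cells above can be exercised. The
# 1x1x32x32 LeNet input shape is an assumption implied by the 16 * 5 * 5
# flatten, and the helper names `smoke_test_lenet` / `smoke_test_softmax`
# are hypothetical.
def smoke_test_lenet():
    """Forward a dummy all-ones batch through LeNet and check the logit shape."""
    net = LeNet()
    input_x = Tensor(np.ones([1, 1, 32, 32]).astype(np.float32))
    output = net(input_x)
    assert output.shape == (1, 10)


def smoke_test_softmax():
    """Apply Net's softmax over axis 0 of a small vector; outputs sum to 1."""
    net = Net(0)
    input_x = Tensor(np.array([1.0, 2.0, 3.0]).astype(np.float32))
    output = net(input_x)
    assert np.allclose(output.asnumpy().sum(), 1.0)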