# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
15""" test_pynative_model """
16import numpy as np
17
18import mindspore.nn as nn
19from mindspore import Parameter, ParameterTuple, Tensor
20from mindspore import context
21from mindspore.nn.optim import Momentum
22from mindspore.ops import composite as C
23from mindspore.ops import operations as P
24from ..ut_filter import non_graph_engine
25
26
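# grad_by_list: GradOperation with get_by_list=True differentiates a network with
# respect to a ParameterTuple of weights instead of with respect to its inputs.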
grad_by_list = C.GradOperation(get_by_list=True)

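# pytest hook: run every test in this module in PyNative (eager) execution mode.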
def setup_module(module):
    context.set_context(mode=context.PYNATIVE_MODE)

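# Usage sketch (assuming a hypothetical loss cell `net_with_loss`):
#     grads = GradWrap(net_with_loss)(x, label)
# GradWrap returns one gradient tensor per parameter in network.get_parameters().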
class GradWrap(nn.Cell):
    """ GradWrap definition """

    def __init__(self, network):
        super(GradWrap, self).__init__()
        self.network = network
        self.weights = ParameterTuple(network.get_parameters())

    def construct(self, x, label):
        weights = self.weights
        return grad_by_list(self.network, weights)(x, label)

@non_graph_engine
def test_softmaxloss_grad():
    """ test_softmaxloss_grad """

    class NetWithLossClass(nn.Cell):
        """ NetWithLossClass definition """

        def __init__(self, network):
            super(NetWithLossClass, self).__init__()
            self.loss = nn.SoftmaxCrossEntropyWithLogits()
            self.network = network

        def construct(self, x, label):
            predict = self.network(x)
            return self.loss(predict, label)

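    # Net is a single dense layer built from raw primitives: y = x @ weight + bias.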
    class Net(nn.Cell):
        """ Net definition """

        def __init__(self):
            super(Net, self).__init__()
            self.weight = Parameter(Tensor(np.ones([64, 10]).astype(np.float32)), name="weight")
            self.bias = Parameter(Tensor(np.ones([10]).astype(np.float32)), name="bias")
            self.fc = P.MatMul()
            self.biasAdd = P.BiasAdd()

        def construct(self, x):
            x = self.biasAdd(self.fc(x, self.weight), self.bias)
            return x

    net = GradWrap(NetWithLossClass(Net()))

    predict = Tensor(np.ones([1, 64]).astype(np.float32))
    label = Tensor(np.zeros([1, 10]).astype(np.float32))
    print("pynative run")
    # Call the cell itself rather than construct(); Cell.__call__ is what drives
    # PyNative execution.
    out = net(predict, label)
    print("out:", out)
    print(out[0], (out[0]).asnumpy(), ":result")

@non_graph_engine
def test_lenet_grad():
    """ test_lenet_grad """

    class NetWithLossClass(nn.Cell):
        """ NetWithLossClass definition """

        def __init__(self, network):
            super(NetWithLossClass, self).__init__()
            self.loss = nn.SoftmaxCrossEntropyWithLogits()
            self.network = network

        def construct(self, x, label):
            predict = self.network(x)
            return self.loss(predict, label)

    class LeNet5(nn.Cell):
        """ LeNet5 definition """

        def __init__(self):
            super(LeNet5, self).__init__()
            self.conv1 = nn.Conv2d(1, 6, 5, pad_mode='valid')
            self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')
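            # With a 32x32 input: conv1 (5x5, valid) -> 28x28, pool/2 -> 14x14,
            # conv2 -> 10x10, pool/2 -> 5x5, hence the 16 * 5 * 5 flattened features.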
            self.fc1 = nn.Dense(16 * 5 * 5, 120)
            self.fc2 = nn.Dense(120, 84)
            self.fc3 = nn.Dense(84, 10)
            self.relu = nn.ReLU()
            self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
            self.flatten = P.Flatten()

        def construct(self, x):
            x = self.max_pool2d(self.relu(self.conv1(x)))
            x = self.max_pool2d(self.relu(self.conv2(x)))
            x = self.flatten(x)
            x = self.relu(self.fc1(x))
            x = self.relu(self.fc2(x))
            x = self.fc3(x)
            return x

    input_data = Tensor(np.ones([1, 1, 32, 32]).astype(np.float32) * 0.01)
    label = Tensor(np.ones([1, 10]).astype(np.float32))
    iteration_num = 1
    verification_step = 0

    net = LeNet5()
    loss = nn.SoftmaxCrossEntropyWithLogits()
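    # Momentum SGD over the trainable parameters (standard momentum update:
    # accum = momentum * accum + grad; param -= lr * accum).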
    momen_opti = Momentum(net.trainable_params(), learning_rate=0.1, momentum=0.9)
    train_net = GradWrap(NetWithLossClass(net))
    train_net.set_train()

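    # One manual training step: GradWrap produces the gradients (ordered to match
    # train_net.weights) and the optimizer applies them to the parameters in place.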
    for i in range(iteration_num):
        # get the gradients
        grads = train_net(input_data, label)
        # update parameters
        success = momen_opti(grads)
        if success is False:
            print("failed to run optimizer")
        # verification
        if i == verification_step:
            fw_output = net(input_data)
            loss_output = loss(fw_output, label)
            print("The loss at iteration %s is %s" % (i, loss_output.asnumpy()))