1# Copyright 2021 Huawei Technologies Co., Ltd
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14# ============================================================================
15
16import mindspore.nn as nn
17from mindspore.ops import operations as P
18from mindspore.common.initializer import TruncatedNormal
19
def conv(in_channels, out_channels, kernel_size, stride=1, padding=0):
    """Build a bias-free Conv2d layer with truncated-normal weight init."""
    return nn.Conv2d(
        in_channels,
        out_channels,
        kernel_size=kernel_size,
        stride=stride,
        padding=padding,
        weight_init=weight_variable(),
        has_bias=False,
        pad_mode="valid",
    )
33
34
def fc_with_initialize(input_channels, out_channels):
    """Build a Dense layer whose weight and bias use truncated-normal init."""
    init_weight = weight_variable()
    init_bias = weight_variable()
    return nn.Dense(input_channels, out_channels, init_weight, init_bias)
40
41
def weight_variable():
    """Return the shared TruncatedNormal(0.02) parameter initializer."""
    initializer = TruncatedNormal(0.02)
    return initializer
45
46
class LeNet5(nn.Cell):
    """Classic LeNet-5 classifier: two conv/pool stages and three FC layers.

    Args:
        num_class: number of output classes (width of the final Dense layer).
        channel: number of input image channels.
    """

    def __init__(self, num_class=10, channel=3):
        super(LeNet5, self).__init__()
        self.num_class = num_class
        # Feature extractor: conv -> relu -> pool, twice.
        self.conv1 = conv(channel, 6, 5)
        self.conv2 = conv(6, 16, 5)
        self.relu = nn.ReLU()
        self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
        self.flatten = nn.Flatten()
        # Classifier head; 16 * 5 * 5 is the flattened feature size
        # produced by the conv stack (assumes 32x32 input — confirm).
        self.fc1 = fc_with_initialize(16 * 5 * 5, 120)
        self.fc2 = fc_with_initialize(120, 84)
        self.fc3 = fc_with_initialize(84, self.num_class)

    def construct(self, x):
        """Run a forward pass and return the per-class logits."""
        out = self.max_pool2d(self.relu(self.conv1(x)))
        out = self.max_pool2d(self.relu(self.conv2(out)))
        out = self.flatten(out)
        out = self.relu(self.fc1(out))
        out = self.relu(self.fc2(out))
        return self.fc3(out)
74
75
class StartFLJob(nn.Cell):
    """Cell wrapping the federated-learning StartFLJob primitive.

    Args:
        data_size: local dataset size reported when starting the FL job.
    """

    def __init__(self, data_size):
        super(StartFLJob, self).__init__()
        self.start_fl_job = P.StartFLJob(data_size)

    def construct(self):
        """Invoke the StartFLJob op and return its result."""
        result = self.start_fl_job()
        return result
83
84
class UpdateAndGetModel(nn.Cell):
    """Push local weights via UpdateModel, then fetch the aggregated model.

    Args:
        weights: the parameter list uploaded to and refreshed from the server.
    """

    def __init__(self, weights):
        super(UpdateAndGetModel, self).__init__()
        self.update_model = P.UpdateModel()
        self.get_model = P.GetModel()
        self.weights = weights

    def construct(self):
        """Upload ``self.weights`` and return the GetModel op's output."""
        self.update_model(self.weights)
        return self.get_model(self.weights)
96