# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
15"""MobileNetV2 utils"""
16
17import time
18import numpy as np
19
20from mindspore.train.callback import Callback
21from mindspore import Tensor
22from mindspore import nn
23from mindspore.nn.loss.loss import LossBase
24from mindspore.ops import operations as P
25from mindspore.ops import functional as F
26from mindspore.common import dtype as mstype
27
28
29class Monitor(Callback):
30    """
31    Monitor loss and time.
32
33    Args:
34        lr_init (numpy array): train lr
35
36    Returns:
37        None
38
39    Examples:
40        >>> Monitor(100,lr_init=Tensor([0.05]*100).asnumpy())
41    """
42
43    def __init__(self, lr_init=None, step_threshold=10):
44        super(Monitor, self).__init__()
45        self.lr_init = lr_init
46        self.lr_init_len = len(lr_init)
47        self.step_threshold = step_threshold
48        self.step_mseconds = 50000
49
50    def epoch_begin(self, run_context):
51        self.losses = []
52        self.epoch_time = time.time()
53
54    def epoch_end(self, run_context):
55        cb_params = run_context.original_args()
56
57        epoch_mseconds = (time.time() - self.epoch_time) * 1000
58        per_step_mseconds = epoch_mseconds / cb_params.batch_num
59        print("epoch time: {:5.3f}, per step time: {:5.3f}, avg loss: {:8.6f}".format(epoch_mseconds,
60                                                                                      per_step_mseconds,
61                                                                                      np.mean(self.losses)))
62        self.epoch_mseconds = epoch_mseconds
63
64    def step_begin(self, run_context):
65        self.step_time = time.time()
66
67    def step_end(self, run_context):
68        cb_params = run_context.original_args()
69        step_mseconds = (time.time() - self.step_time) * 1000
70        self.step_mseconds = min(self.step_mseconds, step_mseconds)
71        step_loss = cb_params.net_outputs
72
73        if isinstance(step_loss, (tuple, list)) and isinstance(step_loss[0], Tensor):
74            step_loss = step_loss[0]
75        if isinstance(step_loss, Tensor):
76            step_loss = np.mean(step_loss.asnumpy())
77
78        self.losses.append(step_loss)
79        cur_step_in_epoch = (cb_params.cur_step_num - 1) % cb_params.batch_num
80
81        print("epoch: [{:3d}/{:3d}], step:[{:5d}/{:5d}], loss:[{:8.6f}/{:5.3f}], time:[{:5.3f}], lr:[{:5.5f}]".format(
82            cb_params.cur_epoch_num, cb_params.epoch_num, cur_step_in_epoch +
83            1, cb_params.batch_num, step_loss,
84            np.mean(self.losses), self.step_mseconds, self.lr_init[cb_params.cur_step_num - 1]))
85
86        if cb_params.cur_step_num == self.step_threshold:
87            run_context.request_stop()
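
# A minimal usage sketch for Monitor (assumed wiring, not from the original
# training scripts; `model`, `train_dataset`, `epoch_size`, and `total_steps`
# are hypothetical). lr_init must hold the learning rate for every global
# step, since step_end indexes it with cur_step_num - 1:
#
#     lr_schedule = np.linspace(0.05, 0.0, num=total_steps).astype(np.float32)
#     monitor = Monitor(lr_init=lr_schedule, step_threshold=total_steps)
#     model.train(epoch_size, train_dataset, callbacks=[monitor])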


class CrossEntropyWithLabelSmooth(LossBase):
    """
    Cross-entropy loss with label smoothing.

    Args:
        smooth_factor (float): Smoothing factor. Default: 0.
        num_classes (int): Number of classes. Default: 1000.

    Returns:
        Tensor, the mean softmax cross-entropy loss over the batch.

    Examples:
        >>> CrossEntropyWithLabelSmooth(smooth_factor=0., num_classes=1000)
    """

    def __init__(self, smooth_factor=0., num_classes=1000):
        super(CrossEntropyWithLabelSmooth, self).__init__()
        self.onehot = P.OneHot()
        # The target class keeps 1 - smooth_factor of the probability mass; the
        # rest is spread evenly over the remaining num_classes - 1 classes.
        self.on_value = Tensor(1.0 - smooth_factor, mstype.float32)
        self.off_value = Tensor(1.0 * smooth_factor / (num_classes - 1), mstype.float32)
        self.ce = nn.SoftmaxCrossEntropyWithLogits()
        self.mean = P.ReduceMean(False)
        self.cast = P.Cast()

    def construct(self, logit, label):
        one_hot_label = self.onehot(self.cast(label, mstype.int32), F.shape(logit)[1],
                                    self.on_value, self.off_value)
        out_loss = self.ce(logit, one_hot_label)
        out_loss = self.mean(out_loss, 0)
        return out_loss
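

if __name__ == "__main__":
    # Minimal smoke test for the smoothed loss (an illustrative assumption,
    # not part of the original MobileNetV2 scripts). With smooth_factor=0.1
    # and 10 classes, the one-hot target is 0.9 for the true class and
    # 0.1 / 9 ≈ 0.0111 for each of the other classes.
    loss_fn = CrossEntropyWithLabelSmooth(smooth_factor=0.1, num_classes=10)
    dummy_logits = Tensor(np.random.randn(4, 10).astype(np.float32))
    dummy_labels = Tensor(np.array([1, 3, 5, 7], dtype=np.int32))
    print("smoothed loss:", loss_fn(dummy_logits, dummy_labels))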