/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_INCLUDE_API_CFG_H
#define MINDSPORE_INCLUDE_API_CFG_H

#include <cstddef>
#include <string>
#include <vector>
#include <memory>
#include "include/api/data_type.h"
#include "include/api/dual_abi_helper.h"

namespace mindspore {

class MixPrecisionCfg {
 public:
  MixPrecisionCfg() {
    this->dynamic_loss_scale_ = false;
    this->loss_scale_ = 128.0f;
    this->num_of_not_nan_iter_th_ = 1000;
  }

  bool dynamic_loss_scale_ = false;   /**< Enable/disable dynamic loss scale during mix precision training */
  float loss_scale_;                  /**< Initial loss scale factor */
  uint32_t num_of_not_nan_iter_th_;   /**< Threshold of consecutive non-NaN iterations for adjusting the loss scale when dynamic loss scale is enabled */
  bool is_raw_mix_precision_ = false; /**< Whether the mix precision model was exported from MindSpore */
};

class TrainCfg {
 public:
  TrainCfg() { this->loss_name_ = "_loss_fn"; }

  OptimizationLevel optimization_level_ = kO0; /**< Optimization level applied during training */
  std::string loss_name_;             /**< Substring of the kernel name that identifies a loss kernel */
  MixPrecisionCfg mix_precision_cfg_; /**< Mix precision configuration */
  bool accumulate_gradients_ = false; /**< Accumulate gradients across steps instead of applying them every step */
};

}  // namespace mindspore
#endif  // MINDSPORE_INCLUDE_API_CFG_H
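
A minimal usage sketch for the configuration classes declared above. It only touches the public fields defined in this header; handing the populated TrainCfg to a model-building or training-session call is an assumption about the wider MindSpore API and may differ between versions.

#include "include/api/cfg.h"

int main() {
  // Configure mixed-precision training with dynamic loss scaling
  // (field names taken directly from the header above).
  mindspore::MixPrecisionCfg mix_cfg;
  mix_cfg.dynamic_loss_scale_ = true;     // let the loss scale adapt during training
  mix_cfg.loss_scale_ = 1024.0f;          // start from a larger initial loss scale
  mix_cfg.num_of_not_nan_iter_th_ = 500;  // adjust the scale after 500 non-NaN iterations

  mindspore::TrainCfg train_cfg;
  train_cfg.mix_precision_cfg_ = mix_cfg;
  train_cfg.accumulate_gradients_ = true; // accumulate gradients across steps
  // train_cfg.loss_name_ keeps its default "_loss_fn", so kernels whose names
  // contain that substring are treated as loss kernels.

  // The populated train_cfg would then be handed to the training API
  // (an assumption here, not part of this header).
  return 0;
}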