/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_LITE_SRC_RUNTIME_KERNEL_CPU_FP32_GRAD_ADAM_H_
#define MINDSPORE_LITE_SRC_RUNTIME_KERNEL_CPU_FP32_GRAD_ADAM_H_

#include <vector>
#include "src/train/optimizer_kernel.h"
#include "nnacl/fp32_grad/optimizer.h"

24 namespace mindspore::kernel {
25 constexpr int kAdamLrIndex = 5;
26 constexpr int kAdamGradIndex = 9;
27 
28 class AdamCPUKernel : public OptimizerKernel {
29  public:
AdamCPUKernel(OpParameter * parameter,const std::vector<lite::Tensor * > & inputs,const std::vector<lite::Tensor * > & outputs,const lite::InnerContext * ctx)30   explicit AdamCPUKernel(OpParameter *parameter, const std::vector<lite::Tensor *> &inputs,
31                          const std::vector<lite::Tensor *> &outputs, const lite::InnerContext *ctx)
32       : OptimizerKernel(parameter, inputs, outputs, ctx, kAdamLrIndex, kAdamGradIndex),
33         thread_count_(ctx->thread_num_) {
34     adam_param_ = reinterpret_cast<AdamParameter *>(parameter);
35   }
~AdamCPUKernel()36   ~AdamCPUKernel() override {
37     if (grad_sum_ != nullptr) {
38       ms_context_->allocator->Free(grad_sum_);
39       grad_sum_ = nullptr;
40     }
41   }
42   int Prepare() override;
43   int ReSize() override;
44   int Run() override;
45   int DoExecute(int task_id);
46   int OptimizerStep() override;
47   std::vector<int> GetOptimizerParamsIdxs() const override;
48   std::vector<int> GetTrainableParamsIdxs() const override;
49 
50  private:
51   int thread_count_;
52   AdamParameter *adam_param_;
53 };
54 }  // namespace mindspore::kernel

#endif  // MINDSPORE_LITE_SRC_RUNTIME_KERNEL_CPU_FP32_GRAD_ADAM_H_