/**
 * Copyright 2020-2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_CORE_OPS_CONV2D_BACKPROP_FILTER_FUSION_H_
#define MINDSPORE_CORE_OPS_CONV2D_BACKPROP_FILTER_FUSION_H_
#include <memory>
#include <vector>

#include "mindapi/base/types.h"
#include "ops/grad/conv2d_backprop_filter.h"

namespace mindspore {
namespace ops {
constexpr auto kNameConv2DBackpropFilterFusion = "Conv2DBackpropFilterFusion";
/// \brief Conv2DBackpropFilterFusion defines the Conv2DBackpropFilter operator prototype for lite.
class MIND_API Conv2DBackpropFilterFusion : public Conv2DBackpropFilter {
 public:
  MIND_API_BASE_MEMBER(Conv2DBackpropFilterFusion);
  /// \brief Constructor.
  Conv2DBackpropFilterFusion() : Conv2DBackpropFilter(kNameConv2DBackpropFilterFusion) {
    InitIOName({"out_backprop", "input", "filter_sizes"}, {"output"});
  }

  /// \brief Method to init the op's attributes.
  ///
  /// \param[in] out_channel Define the number of output channels.
  /// \param[in] kernel_size Define the size of the filter kernel.
  /// \param[in] pad_mode Define the padding method.
  /// \param[in] pad_list Define the concrete padding values on the H and W dimensions.
  /// \param[in] mode Define the category of convolution, which is unused on lite.
  /// \param[in] stride Define the moving stride of the filter kernel.
  /// \param[in] dilation Define the dilation coefficient of the filter kernel, which is useful for dilated
  /// convolution.
  /// \param[in] group Define the number of groups.
  /// \param[in] format Define the format of the input tensor.
  /// \param[in] activation_type Define the activation type.
  void Init(const int64_t out_channel, const std::vector<int64_t> &kernel_size, const PadMode &pad_mode = VALID,
            const std::vector<int64_t> &pad_list = {0, 0, 0, 0}, const int64_t mode = 1,
            const std::vector<int64_t> &stride = {1, 1}, const std::vector<int64_t> &dilation = {1, 1, 1, 1},
            const int64_t group = 1, const Format &format = NCHW,
            const ActivationType activation_type = NO_ACTIVATION);

  /// \brief Method to set the activation type.
  ///
  /// \param[in] activation_type Define the activation type.
  void set_activation_type(const ActivationType activation_type);

  /// \brief Method to set the in_channel attribute.
  ///
  /// \param[in] in_channel Define the number of input channels.
  void set_in_channel(const int64_t in_channel);

  /// \brief Method to get the activation type.
  ///
  /// \return the activation type.
  ActivationType get_activation_type() const;

  /// \brief Method to get the in_channel attribute.
  ///
  /// \return the number of input channels.
  int64_t get_in_channel() const;
};
}  // namespace ops
}  // namespace mindspore

#endif  // MINDSPORE_CORE_OPS_CONV2D_BACKPROP_FILTER_FUSION_H_
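
// A minimal usage sketch of the Init() API declared above. The concrete attribute
// values (64 output channels, 3x3 kernel, PAD mode with padding 1, RELU fusion,
// in_channel of 32) are illustrative assumptions, and the enum spellings follow the
// unqualified constants used in the default arguments (VALID, NCHW, NO_ACTIVATION).
//
//   mindspore::ops::Conv2DBackpropFilterFusion op;
//   op.Init(/*out_channel=*/64, /*kernel_size=*/{3, 3}, /*pad_mode=*/mindspore::PadMode::PAD,
//           /*pad_list=*/{1, 1, 1, 1}, /*mode=*/1, /*stride=*/{1, 1}, /*dilation=*/{1, 1, 1, 1},
//           /*group=*/1, /*format=*/mindspore::Format::NCHW,
//           /*activation_type=*/mindspore::ActivationType::RELU);
//   op.set_in_channel(32);  // record the input channel count alongside the fused attributes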