/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_CORE_OPS_CONV2D_TRANSPOSE_H_
#define MINDSPORE_CORE_OPS_CONV2D_TRANSPOSE_H_
#include <map>
#include <memory>
#include <string>
#include <vector>

#include "mindapi/base/format.h"
#include "mindapi/base/types.h"
#include "ops/base_operator.h"

namespace mindspore {
namespace ops {
constexpr auto kNameConv2DTranspose = "Conv2DTranspose";
/// \brief 2D transposed convolution layer. Refer to Python API @ref mindspore.nn.Conv2dTranspose for more details.
class MIND_API Conv2DTranspose : public BaseOperator {
 public:
  MIND_API_BASE_MEMBER(Conv2DTranspose);
  /// \brief Constructor.
  Conv2DTranspose() : BaseOperator(kNameConv2DTranspose) {
    InitIOName({"out_backprop", "filter", "input_sizes"}, {"output"});
  }
  /// \brief Constructor with the specified operator name.
  explicit Conv2DTranspose(const std::string k_name) : BaseOperator(k_name) {
    InitIOName({"out_backprop", "filter", "input_sizes"}, {"output"});
  }
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.nn.Conv2dTranspose for the inputs.
  void Init(int64_t in_channel, int64_t out_channel, const std::vector<int64_t> &kernel_size, int64_t mode = 1,
            const PadMode &pad_mode = VALID, const std::vector<int64_t> &pad = {0, 0, 0, 0},
            const std::vector<int64_t> &stride = {1, 1}, const std::vector<int64_t> &dilation = {1, 1},
            int64_t group = 1, const Format &format = NCHW, const std::vector<int64_t> &pad_list = {0, 0, 0, 0});
  /// \brief Set in_channel.
  void set_in_channel(int64_t in_channel);
  /// \brief Set out_channel.
  void set_out_channel(int64_t out_channel);
  /// \brief Set kernel_size.
  virtual void set_kernel_size(const std::vector<int64_t> &kernel_size);
  /// \brief Set stride.
  void set_stride(const std::vector<int64_t> &stride);
  /// \brief Set dilation.
  virtual void set_dilation(const std::vector<int64_t> &dilation);
  /// \brief Set pad_mode.
  void set_pad_mode(const PadMode &pad_mode);
  /// \brief Set pad.
  void set_pad(const std::vector<int64_t> &pad);
  /// \brief Set mode.
  void set_mode(int64_t mode);
  /// \brief Set group.
  void set_group(int64_t group);
  /// \brief Set format.
  void set_format(const Format &format);
  /// \brief Set pad_list.
  void set_pad_list(const std::vector<int64_t> &pad_list);

  /// \brief Get in_channel.
  ///
  /// \return in_channel.
  int64_t get_in_channel() const;
  /// \brief Get out_channel.
  ///
  /// \return out_channel.
  int64_t get_out_channel() const;
  /// \brief Get kernel_size.
  ///
  /// \return kernel_size.
  std::vector<int64_t> get_kernel_size() const;
  /// \brief Get stride.
  ///
  /// \return stride.
  std::vector<int64_t> get_stride() const;
  /// \brief Get dilation.
  ///
  /// \return dilation.
  std::vector<int64_t> get_dilation() const;
  /// \brief Get pad_mode.
  ///
  /// \return pad_mode.
  PadMode get_pad_mode() const;
  /// \brief Get pad.
  ///
  /// \return pad.
  std::vector<int64_t> get_pad() const;
  /// \brief Get mode.
  ///
  /// \return mode.
  int64_t get_mode() const;
  /// \brief Get group.
  ///
  /// \return group.
  int64_t get_group() const;
  /// \brief Get format.
  ///
  /// \return format.
  Format get_format() const;
  /// \brief Get pad_list.
  ///
  /// \return pad_list.
  std::vector<int64_t> get_pad_list() const;
};
}  // namespace ops
}  // namespace mindspore
#endif  // MINDSPORE_CORE_OPS_CONV2D_TRANSPOSE_H_
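
// Example: an illustrative sketch of how Conv2DTranspose::Init might be called to
// configure the operator's attributes, with the matching getters reading them back.
// The concrete values (64 input channels, 32 output channels, a 3x3 kernel, stride 2)
// and the wrapper function BuildConv2DTranspose are assumptions chosen only for
// illustration; SAME and NCHW are the same unqualified enum constants used by the
// Init defaults above.
//
//   #include "ops/conv2d_transpose.h"
//
//   namespace mindspore {
//   namespace ops {
//   void BuildConv2DTranspose() {
//     Conv2DTranspose conv2d_transpose;
//     conv2d_transpose.Init(/* in_channel  */ 64,
//                           /* out_channel */ 32,
//                           /* kernel_size */ {3, 3},
//                           /* mode        */ 1,
//                           /* pad_mode    */ SAME,
//                           /* pad         */ {0, 0, 0, 0},
//                           /* stride      */ {2, 2},
//                           /* dilation    */ {1, 1},
//                           /* group       */ 1,
//                           /* format      */ NCHW);
//     // Attributes set through Init can then be read back through the getters,
//     // e.g. get_kernel_size() returns {3, 3} and get_stride() returns {2, 2}.
//   }
//   }  // namespace ops
//   }  // namespace mindspore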