/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_CORE_OPS_SPARSE_SOFTMAX_CROSS_ENTROPY_WITH_LOGITS_H_
#define MINDSPORE_CORE_OPS_SPARSE_SOFTMAX_CROSS_ENTROPY_WITH_LOGITS_H_
#include <memory>
#include <vector>
#include "ops/primitive_c.h"
#include "abstract/abstract_value.h"
#include "utils/check_convert_utils.h"

namespace mindspore {
namespace ops {
constexpr auto kNameSparseSoftmaxCrossEntropyWithLogits = "SparseSoftmaxCrossEntropyWithLogits";
/// \brief Computes the softmax cross-entropy value between logits and sparsely encoded labels.
/// Refer to Python API @ref mindspore.ops.SparseSoftmaxCrossEntropyWithLogits for more details.
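/// \par Example
/// A minimal usage sketch, assuming Init(false) records the attribute through set_is_grad:
/// \code
/// auto prim = std::make_shared<SparseSoftmaxCrossEntropyWithLogits>();
/// prim->Init(false);                   // forward op: compute the loss, not its gradient
/// bool is_grad = prim->get_is_grad();  // false, under the assumption above
/// \endcode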
class MS_CORE_API SparseSoftmaxCrossEntropyWithLogits : public PrimitiveC {
 public:
  /// \brief Constructor.
  SparseSoftmaxCrossEntropyWithLogits() : PrimitiveC(kNameSparseSoftmaxCrossEntropyWithLogits) {}
  /// \brief Destructor.
  ~SparseSoftmaxCrossEntropyWithLogits() = default;
  MS_DECLARE_PARENT(SparseSoftmaxCrossEntropyWithLogits, PrimitiveC);
  /// \brief Init.
  /// Refer to the parameters of Python API @ref mindspore.ops.SparseSoftmaxCrossEntropyWithLogits for the inputs.
  void Init(const bool is_grad = false);
  /// \brief Set is_grad.
  void set_is_grad(const bool is_grad);
  /// \brief Get is_grad.
  ///
  /// \return is_grad.
  bool get_is_grad() const;
};
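/// \brief Infer the output abstract (shape and data type) from the primitive and its input arguments.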
AbstractBasePtr SparseSoftmaxCrossEntropyWithLogitsInfer(const abstract::AnalysisEnginePtr &,
                                                         const PrimitivePtr &primitive,
                                                         const std::vector<AbstractBasePtr> &input_args);
using PrimSparseSoftmaxCrossEntropyWithLogitsPtr = std::shared_ptr<SparseSoftmaxCrossEntropyWithLogits>;
}  // namespace ops
}  // namespace mindspore

#endif  // MINDSPORE_CORE_OPS_SPARSE_SOFTMAX_CROSS_ENTROPY_WITH_LOGITS_H_