/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_CORE_OPS_SOFTMAX_CROSS_ENTROPY_WITH_LOGITS_H_
#define MINDSPORE_CORE_OPS_SOFTMAX_CROSS_ENTROPY_WITH_LOGITS_H_
#include <map>
#include <memory>
#include <string>
#include <vector>

#include "mindapi/base/types.h"
#include "ops/base_operator.h"

27 namespace mindspore {
28 namespace ops {
29 constexpr auto kNameSoftmaxCrossEntropyWithLogits = "SoftmaxCrossEntropyWithLogits";
30 /// \brief Gets the softmax cross-entropy value between logits and labels with one-hot encoding.
31 /// Refer to Python API @ref mindspore.ops.SoftmaxCrossEntropyWithLogits for more details.
32 class MIND_API SoftmaxCrossEntropyWithLogits : public BaseOperator {
33  public:
34   MIND_API_BASE_MEMBER(SoftmaxCrossEntropyWithLogits);
35   /// \brief Constructor.
SoftmaxCrossEntropyWithLogits()36   SoftmaxCrossEntropyWithLogits() : BaseOperator(kNameSoftmaxCrossEntropyWithLogits) {
37     InitIOName({"features", "labels"}, {"loss", "backprop"});
38   }
39   /// \brief Init.
Init()40   void Init() const {}
41 };
42 using kPrimSoftmaxCrossEntropyWithLogitsPtr = std::shared_ptr<SoftmaxCrossEntropyWithLogits>;
43 }  // namespace ops
44 }  // namespace mindspore

#endif  // MINDSPORE_CORE_OPS_SOFTMAX_CROSS_ENTROPY_WITH_LOGITS_H_