/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_CORE_OPS_DETECTION_POST_PROCESS_H_
#define MINDSPORE_CORE_OPS_DETECTION_POST_PROCESS_H_

#include <map>
#include <memory>
#include <string>
#include <vector>
#include "mindapi/base/format.h"
#include "mindapi/base/types.h"
#include "ops/base_operator.h"

namespace mindspore {
namespace ops {
constexpr auto kNameDetectionPostProcess = "DetectionPostProcess";
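/// \brief DetectionPostProcess defines the DetectionPostProcess operator prototype. Judging from its
/// attributes, it performs SSD-style detection post-processing: decoding box predictions, filtering
/// candidates by score, and applying non-maximum suppression (NMS) to produce the final detections.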
class MIND_API DetectionPostProcess : public BaseOperator {
 public:
  MIND_API_BASE_MEMBER(DetectionPostProcess);
  DetectionPostProcess() : BaseOperator(kNameDetectionPostProcess) {}
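  /// \brief Init sets the operator's attributes in one call. The parameter descriptions below are
  /// inferred from the parameter names and the usual SSD detection post-processing convention; see
  /// the operator implementation for the authoritative semantics.
  ///
  /// \param[in] inputSize Size of the input the boxes are decoded against.
  /// \param[in] scale Box decoding scale factors, in the order (h, w, x, y).
  /// \param[in] NmsIouThreshold IoU threshold used to suppress overlapping boxes during NMS.
  /// \param[in] NmsScoreThreshold Minimum score a candidate box must have to be kept.
  /// \param[in] MaxDetections Maximum number of detections returned.
  /// \param[in] DetectionsPerClass Maximum number of detections kept per class.
  /// \param[in] MaxClassesPerDetection Maximum number of classes reported per detection.
  /// \param[in] NumClasses Number of object classes.
  /// \param[in] UseRegularNms Whether to use regular (per-class) NMS instead of the faster variant.
  /// \param[in] OutQuantized Whether the outputs are quantized.
  /// \param[in] format Data format of the inputs; defaults to NCHW.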
  void Init(const int64_t inputSize, const std::vector<float> &scale, const float NmsIouThreshold,
            const float NmsScoreThreshold, const int64_t MaxDetections, const int64_t DetectionsPerClass,
            const int64_t MaxClassesPerDetection, const int64_t NumClasses, const bool UseRegularNms,
            const bool OutQuantized, const Format &format = NCHW);
  //  scale:(h,w,x,y)
  void set_input_size(const int64_t inputSize);
  void set_scale(const std::vector<float> &scale);
  void set_nms_iou_threshold(const float NmsIouThreshold);
  void set_nms_score_threshold(const float NmsScoreThreshold);
  void set_max_detections(const int64_t MaxDetections);
  void set_detections_per_class(const int64_t DetectionsPerClass);
  void set_max_classes_per_detection(const int64_t MaxClassesPerDetection);
  void set_num_classes(const int64_t NumClasses);
  void set_use_regular_nms(const bool UseRegularNms);
  void set_out_quantized(const bool OutQuantized);
  void set_format(const Format &format);

  int64_t get_input_size() const;
  std::vector<float> get_scale() const;
  float get_nms_iou_threshold() const;
  float get_nms_score_threshold() const;
  int64_t get_max_detections() const;
  int64_t get_detections_per_class() const;
  int64_t get_max_classes_per_detection() const;
  int64_t get_num_classes() const;

  bool get_use_regular_nms() const;
  bool get_out_quantized() const;
  Format get_format() const;
};
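/// A minimal usage sketch (hypothetical attribute values; the individual setters above can be used
/// instead of Init):
///
///   ops::DetectionPostProcess op;
///   op.Init(/*inputSize=*/300, /*scale=*/{10.0f, 10.0f, 5.0f, 5.0f},
///           /*NmsIouThreshold=*/0.6f, /*NmsScoreThreshold=*/0.3f,
///           /*MaxDetections=*/100, /*DetectionsPerClass=*/100,
///           /*MaxClassesPerDetection=*/1, /*NumClasses=*/90,
///           /*UseRegularNms=*/false, /*OutQuantized=*/false);

/// \brief Infer function for DetectionPostProcess. Presumably derives the output abstract (type and
/// shape) from the primitive's attributes and the input abstracts; see the corresponding
/// implementation file for the exact output layout.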
MIND_API abstract::AbstractBasePtr DetectionPostProcessInfer(const abstract::AnalysisEnginePtr &,
                                                             const PrimitivePtr &primitive,
                                                             const std::vector<abstract::AbstractBasePtr> &input_args);
}  // namespace ops
}  // namespace mindspore

#endif  // MINDSPORE_CORE_OPS_DETECTION_POST_PROCESS_H_