/**
 * Copyright 2022 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_CCSRC_PIPELINE_PYNATIVE_FORWARD_DO_INFER_H_
#define MINDSPORE_CCSRC_PIPELINE_PYNATIVE_FORWARD_DO_INFER_H_

#include <vector>
#include <string>
#include <memory>
#include <set>
#include <shared_mutex>
#include "pipeline/pynative/base.h"
#include "pipeline/pynative/pynative_cache.h"

namespace mindspore {
namespace pynative {
class InferOperation {
 public:
  InferOperation() {
    node_abs_cache_.reserve(kDefaultContainerSize);
    prim_abs_list_.reserve(kDefaultContainerSize);
  }
  ~InferOperation() = default;
  void DoInfer(const FrontendOpRunInfoPtr &op_run_info);
  // Manage node abs cache.
  inline void ClearNodeAbsCache() { node_abs_cache_.clear(); }
  void SetNodeAbsCacheByValue(const FrontendOpRunInfoPtr &op_run_info);
  void SetNodeAbsCacheById(const std::string &id, const abstract::AbstractBasePtr &abs);
  void UpdateNodeAbsCacheById(const std::string &id, const abstract::AbstractBasePtr &abs);
  AbstractBasePtr GetNodeAbsById(const std::string &id) const;
  // Manage primitive output abstract cache.
  inline void ClearPrimAbsList() { prim_abs_list_.clear(); }
  // Manage constant flag primitive cache.
  inline void ClearConstFlagPrimCache() { no_const_flag_prims_.clear(); }
  py::object CallConstantFolding(const py::args &args) const;

 private:
  void PynativeInfer(const FrontendOpRunInfoPtr &op_run_info) const;
  void SetNodeAbsById(const std::string &id, const abstract::AbstractBasePtr &abs);
  // Set abstract for each input value.
  void SetInputAbstract(const FrontendOpRunInfoPtr &op_run_info);
  AbstractBasePtr GetInputValueAbs(const FrontendOpRunInfoPtr &op_run_info, const ValuePtr &input_value,
                                   size_t input_index);
  AbstractBasePtr GetInputTupleValueAbstract(const FrontendOpRunInfoPtr &op_run_info,
                                             const ValueSequencePtr &tuple_value, size_t input_index);
  AbstractBasePtr GetAbstractByValue(const ValuePtr &value, size_t input_index, const std::string &input_id);
  // Infer output abstract.
  void InferOutputAbstract(const FrontendOpRunInfoPtr &op_run_info);
  bool GetOutputAbstractByCache(const FrontendOpRunInfoPtr &op_run_info) const;
  void SaveOutputAbstractToCache(const FrontendOpRunInfoPtr &op_run_info);
  void SaveSpecifiedOutputToCache(const std::string &op_name, const ValuePtrList &value_list,
                                  const AbstractBasePtrList &abs_list);

  // Primitives that have no constant flag (const prim or const input) are saved in this set.
  mindspore::HashSet<std::string> no_const_flag_prims_;
  // This map is used to get the abstract of an input value from the cache.
  // It is populated while the top cell forward run is in progress and is cleared when the run ends.
  NodeAbsCache node_abs_cache_;
  mutable std::shared_mutex abs_mutex_;
  // This map is used to cache op output abstract.
  PrimAbsCache prim_abs_list_;
};
using InferOperationPtr = std::shared_ptr<InferOperation>;
}  // namespace pynative
}  // namespace mindspore

#endif  // MINDSPORE_CCSRC_PIPELINE_PYNATIVE_FORWARD_DO_INFER_H_
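
// Usage sketch (illustrative only, kept as a comment so it is not part of the build): a minimal
// view of how a forward executor might drive InferOperation, assuming the caller has already
// built a FrontendOpRunInfoPtr (op name, primitive, input values). The op_run_info construction
// is hypothetical; only the InferOperation calls shown here are declared in this header.
//
//   auto infer = std::make_shared<mindspore::pynative::InferOperation>();
//   // op_run_info is assumed to be prepared by the caller before each op is run.
//   infer->DoInfer(op_run_info);   // set input abstracts, then infer the output abstract
//   // ... repeat for the remaining ops of the top cell ...
//   infer->ClearNodeAbsCache();    // node abs cache is cleared when the top cell forward run ends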