/**
 * Copyright 2021-2022 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_LITE_SRC_EXTENDRT_MINDIR_LOADER_ABSTRACT_BASE_MODEL_H_
#define MINDSPORE_LITE_SRC_EXTENDRT_MINDIR_LOADER_ABSTRACT_BASE_MODEL_H_

#include <string>
#include <vector>

#include "include/model.h"
#include "src/tensor.h"
#include "src/executor/kernel_exec.h"

using Model = mindspore::lite::Model;
using LiteGraph = mindspore::lite::LiteGraph;

namespace mindspore::infer {
// Abstract interface that extends lite::Model with the hooks the extendrt
// runtime expects from a model produced by the MindIR loader.
class AbstractBaseModel : public Model {
 public:
  // Check that the loaded model is well formed.
  virtual bool ModelVerify() const = 0;

  // Convert the model's tensors into lite::Tensor objects and store them in *lite_tensors.
  virtual int ConvertTensors(std::vector<mindspore::lite::Tensor *> *lite_tensors) = 0;

  // Return the path of the file the model was loaded from.
  virtual std::string GetModelPath() const = 0;

  // Select a backend kernel for the given graph node, using the provided
  // input/output tensors, inference context, and preferred data type.
  virtual mindspore::kernel::KernelExec *FindBackendKernel(const std::vector<mindspore::lite::Tensor *> &in_tensors,
                                                           const std::vector<mindspore::lite::Tensor *> &out_tensors,
                                                           const LiteGraph::Node *node, lite::InnerContext *context,
                                                           TypeId prefer_data_type) = 0;
};
}  // namespace mindspore::infer

#endif  // MINDSPORE_LITE_SRC_EXTENDRT_MINDIR_LOADER_ABSTRACT_BASE_MODEL_H_
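
// --- Illustrative sketch only, not part of this header ---
// A minimal example of how a concrete loader might derive from AbstractBaseModel,
// assuming this header and its dependencies are available. The class name
// SketchModel, its trivial method bodies, and the model_path_ member are all
// hypothetical; they only show which methods declared above must be overridden.
// Note that mindspore::lite::Model (include/model.h) may declare further virtual
// methods that a real subclass would also need to implement.
namespace mindspore::infer {
class SketchModel : public AbstractBaseModel {
 public:
  // Report whether the loaded model is structurally valid.
  bool ModelVerify() const override { return true; }

  // Populate *lite_tensors with tensors converted from the model; a real
  // implementation would create lite::Tensor objects here.
  int ConvertTensors(std::vector<mindspore::lite::Tensor *> *lite_tensors) override {
    return lite_tensors == nullptr ? -1 : 0;
  }

  // Return the path the model was loaded from.
  std::string GetModelPath() const override { return model_path_; }

  // Select a backend kernel for the given node; returning nullptr here simply
  // signals that no suitable kernel was found.
  mindspore::kernel::KernelExec *FindBackendKernel(const std::vector<mindspore::lite::Tensor *> &in_tensors,
                                                   const std::vector<mindspore::lite::Tensor *> &out_tensors,
                                                   const LiteGraph::Node *node, lite::InnerContext *context,
                                                   TypeId prefer_data_type) override {
    return nullptr;
  }

 private:
  std::string model_path_;  // Hypothetical member, kept only for illustration.
};
}  // namespace mindspore::infer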