/*
 * Copyright (C) 2021 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#pragma once

#include <memory>
#include <utility>
#include <vector>

#include <aidl/android/hardware/neuralnetworks/BnPreparedModel.h>
#include <android-base/logging.h>
#include "ShimDevice.h"
#include "SupportLibrary.h"
#include "SupportLibraryWrapper.h"

namespace aidl::android::hardware::neuralnetworks {

// AIDL IPreparedModel implementation backed by an NNAPI Support Library compilation.
// The prepared model keeps the compilation, its main and referenced models, the memory
// pools, and any copied operand values alive, and serves incoming execution requests
// against the stored compilation.
class ShimPreparedModel : public BnPreparedModel {
   public:
    ShimPreparedModel(std::shared_ptr<const NnApiSupportLibrary> nnapi,
                      std::shared_ptr<ShimBufferTracker> bufferTracker,
                      ::android::nn::sl_wrapper::Compilation compilation,
                      std::vector<::android::nn::sl_wrapper::Model> mainAndReferencedModels,
                      std::vector<std::unique_ptr<::android::nn::sl_wrapper::Memory>> memoryPools,
                      std::vector<uint8_t> copiedOperandValues)
        : mNnapi(nnapi),
          mBufferTracker(bufferTracker),
          mCompilation(std::move(compilation)),
          mMainAndReferencedModels(std::move(mainAndReferencedModels)),
          mMemoryPools(std::move(memoryPools)),
          mCopiedOperandValues(std::move(copiedOperandValues)) {
        CHECK(mMainAndReferencedModels.size() > 0);
    }

    ::ndk::ScopedAStatus executeSynchronously(const Request& request, bool measureTiming,
                                              int64_t deadlineNs, int64_t loopTimeoutDurationNs,
                                              ExecutionResult* executionResults) override;
    ::ndk::ScopedAStatus executeFenced(const Request& request,
                                       const std::vector<::ndk::ScopedFileDescriptor>& waitFor,
                                       bool measureTiming, int64_t deadlineNs,
                                       int64_t loopTimeoutDurationNs, int64_t durationNs,
                                       FencedExecutionResult* fencedExecutionResult) override;
    ndk::ScopedAStatus configureExecutionBurst(std::shared_ptr<IBurst>* burst) override;

    const ::android::nn::sl_wrapper::Compilation& getCompilation() const { return mCompilation; }
    // The main model is always stored first; any remaining entries are models it references.
    const ::android::nn::sl_wrapper::Model& getMainModel() const {
        return mMainAndReferencedModels[0];
    }

   private:
    // Applies the arguments of an AIDL Request to the given Support Library execution and
    // collects the memory pools the request references.
    ErrorStatus parseInputs(
            const Request& request, bool measure, int64_t deadlineNs, int64_t loopTimeoutDurationNs,
            ::android::nn::sl_wrapper::Execution* execution,
            std::vector<std::shared_ptr<::android::nn::sl_wrapper::Memory>>* requestMemoryPools);

    std::shared_ptr<const NnApiSupportLibrary> mNnapi;
    std::shared_ptr<ShimBufferTracker> mBufferTracker;

    ::android::nn::sl_wrapper::Compilation mCompilation;
    std::vector<::android::nn::sl_wrapper::Model> mMainAndReferencedModels;
    std::vector<std::unique_ptr<::android::nn::sl_wrapper::Memory>> mMemoryPools;
    std::vector<uint8_t> mCopiedOperandValues;
};

}  // namespace aidl::android::hardware::neuralnetworks