/**
 * Copyright 2022 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "nnacl/cxx_utils.h"
#include "src/litert/pack_weight_manager.h"
#include "src/litert/thread_cost_model.h"
#include "thread/threadpool.h"
#include "src/litert/inner_allocator.h"
#include "src/common/log_adapter.h"
#include "src/common/log_util.h"
#include "include/errorcode.h"

namespace mindspore::nnacl {
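// Allocates sz bytes via the opaque allocator handle, which must point to a
// mindspore::DefaultAllocator; returns nullptr if the handle is null.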
void *DefaultAllocatorMalloc(void *allocator, size_t sz) {
  if (allocator == nullptr) {
    MS_LOG(ERROR) << "input allocator is nullptr";
    return nullptr;
  }
  auto default_allocator = static_cast<mindspore::DefaultAllocator *>(allocator);
  return default_allocator->Malloc(sz);
}

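// Returns ptr to the DefaultAllocator behind the opaque handle; both
// arguments must be non-null.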
void DefaultAllocatorFree(void *allocator, void *ptr) {
  if (allocator == nullptr || ptr == nullptr) {
    MS_LOG(ERROR) << "input allocator or ptr is nullptr";
    return;
  }
  auto default_allocator = static_cast<mindspore::DefaultAllocator *>(allocator);
  default_allocator->Free(ptr);
}

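// Launches taskNr parallel invocations of the C-style task callback on the
// ThreadPool; each invocation receives its task_id plus the float
// workload-scale pair supplied by ParallelLaunch.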
int DefaultThreadPoolParallelLunch(void *threadPool, void *task, void *param, int taskNr) {
  using TaskFunc = int (*)(void *param, int task_id, float l, float r);
  auto *pool = static_cast<ThreadPool *>(threadPool);
  if (pool == nullptr || task == nullptr) {
    MS_LOG(ERROR) << "thread pool or task is nullptr";
    return lite::RET_NULL_PTR;
  }
  TaskFunc task_func = reinterpret_cast<TaskFunc>(task);
  return pool->ParallelLaunch(task_func, param, taskNr);
}

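// Fetches a shared packed-weight buffer for tensor_data from the
// PackWeightManager; *is_packed tells the caller whether the returned buffer
// already holds packed data.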
void *DefaultGetSharingPackData(void *manager, const void *tensor_data, const size_t size, bool *is_packed) {
  if (manager == nullptr) {
    MS_LOG(ERROR) << "input manager is nullptr";
    return nullptr;
  }
  auto weight_manager = static_cast<mindspore::lite::PackWeightManager *>(manager);
  return weight_manager->GetPackData(tensor_data, size, is_packed);
}

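// Gives a shared packed-weight buffer back to the PackWeightManager.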
void DefaultFreeSharingPackData(void *manager, void *tensor_data) {
  if (manager == nullptr) {
    MS_LOG(ERROR) << "input manager is nullptr";
    return;
  }
  auto weight_manager = static_cast<mindspore::lite::PackWeightManager *>(manager);
  weight_manager->Free(tensor_data);
}

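// Decides how many threads a kernel should actually use: with
// DYNAMIC_THREAD_DISTRIBUTE the thread cost model (lite::UpdateThreadNum)
// weighs the per-unit load/store volume against unit_num; otherwise
// thread_num is used as-is, clamped to at least 1.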
int DefaultUpdateThreadNumPass(int32_t kernel_type, int64_t per_unit_load_num, int64_t per_unit_store_num,
                               int64_t unit_num, int thread_num) {
#ifdef DYNAMIC_THREAD_DISTRIBUTE
  int update_thread = lite::UpdateThreadNum(kernel_type, per_unit_load_num, per_unit_store_num, unit_num, thread_num);
#else
  int update_thread = thread_num > 0 ? thread_num : 1;
#endif
  return update_thread;
}
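
// Usage sketch (hypothetical, for illustration only): these functions are
// C-compatible shims that let plain-C nnacl kernels call back into the C++
// runtime through a table of function pointers, along the lines of:
//
//   struct CallbackTableSketch {  // hypothetical name, not the real nnacl type
//     void *allocator;            // a mindspore::DefaultAllocator *
//     void *thread_pool;          // a ThreadPool *
//     void *(*malloc_fn)(void *allocator, size_t sz);
//     void (*free_fn)(void *allocator, void *ptr);
//     int (*parallel_launch)(void *pool, void *task, void *param, int task_nr);
//   };
//
//   CallbackTableSketch table = {allocator_ptr, pool_ptr, DefaultAllocatorMalloc,
//                                DefaultAllocatorFree, DefaultThreadPoolParallelLunch};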
}  // namespace mindspore::nnacl