/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "core/common/task_executor_impl.h"

#include <cerrno>
#include <functional>
#if !defined(PREVIEW)
#ifdef OHOS_STANDARD_SYSTEM
#include <sys/prctl.h>
#endif
#include <sys/resource.h>
#endif
#include <sys/time.h>
#include <unistd.h>

#include "base/log/log.h"
#include "base/log/trace_id.h"
#include "base/thread/background_task_executor.h"
#include "base/utils/utils.h"
#include "core/common/container.h"
#include "core/common/container_scope.h"
#include "core/common/task_runner_adapter_factory.h"

namespace OHOS::Ace {
namespace {
constexpr int32_t GPU_THREAD_PRIORITY = -10;
constexpr int32_t UI_THREAD_PRIORITY = -15;

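// Generates a unique name for each newly created JS thread.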
inline std::string GenJsThreadName()
{
    static std::atomic<uint32_t> instanceCount { 1 };
    return std::string("jsThread-") + std::to_string(instanceCount.fetch_add(1, std::memory_order_relaxed));
}
} // namespace

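// Wraps a task so that it runs inside the posting container's scope with the caller's
// trace id set around execution; traceIdFunc runs afterwards regardless.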
TaskExecutor::Task TaskExecutorImpl::WrapTaskWithContainer(
    TaskExecutor::Task&& task, int32_t id, std::function<void()>&& traceIdFunc) const
{
    auto wrappedTask = [originTask = std::move(task), id, traceId = TraceId::CreateTraceId(),
                           traceIdFunc = std::move(traceIdFunc)]() {
        ContainerScope scope(id);
        std::unique_ptr<TraceId> traceIdPtr(traceId);
        if (originTask && traceIdPtr) {
            traceIdPtr->SetTraceId();
            originTask();
            traceIdPtr->ClearTraceId();
        } else {
            LOGW("WrapTaskWithContainer: originTask or traceIdPtr is null.");
        }
        if (traceIdFunc) {
            traceIdFunc();
        }
    };
    return wrappedTask;
}

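// Same as WrapTaskWithContainer, but dispatches the task through the registered
// custom task wrapper instead of invoking it directly.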
TaskExecutor::Task TaskExecutorImpl::WrapTaskWithCustomWrapper(
    TaskExecutor::Task&& task, int32_t id, std::function<void()>&& traceIdFunc) const
{
    auto wrappedTask = [taskWrapper = taskWrapper_, originTask = std::move(task), id,
                           traceId = TraceId::CreateTraceId(), traceIdFunc = std::move(traceIdFunc)]() {
        ContainerScope scope(id);
        std::unique_ptr<TraceId> traceIdPtr(traceId);
        if (originTask && traceIdPtr) {
            traceIdPtr->SetTraceId();
            taskWrapper->Call(originTask);
            traceIdPtr->ClearTraceId();
        } else {
            LOGW("WrapTaskWithCustomWrapper: originTask or traceIdPtr is null.");
        }
        if (traceIdFunc) {
            traceIdFunc();
        }
    };
    return wrappedTask;
}

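// Posts a task to the given runner, using the delayed variant when delayTime > 0.
// Returns false if either the runner or the task is null.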
bool TaskExecutorImpl::PostTaskToTaskRunner(const RefPtr<TaskRunnerAdapter>& taskRunner, TaskExecutor::Task&& task,
    uint32_t delayTime, const std::string& callerInfo) const
{
    CHECK_NULL_RETURN(taskRunner, false);
    CHECK_NULL_RETURN(task, false);

    if (delayTime > 0) {
        taskRunner->PostDelayedTask(std::move(task), delayTime, callerInfo);
    } else {
        taskRunner->PostTask(std::move(task), callerInfo);
    }
    return true;
}

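// Adjusts the scheduling priority of the calling thread via setpriority();
// compiled out on the previewer and on iOS.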
void TaskExecutorImpl::SetThreadPriority(int32_t priority) const
{
#if !defined(PREVIEW) and !defined(IOS_PLATFORM)
    if (setpriority(PRIO_PROCESS, gettid(), priority) < 0) {
        LOGW("Failed to set thread priority, errno = %{private}d", errno);
    }
#endif
}

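// Builds a new executor that shares the platform/UI/IO/GPU runners of an existing
// executor but owns a freshly created JS runner.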
TaskExecutorImpl::TaskExecutorImpl(const RefPtr<TaskExecutorImpl>& taskExecutor)
{
    jsRunner_ = TaskRunnerAdapterFactory::Create(false, GenJsThreadName());
    platformRunner_ = taskExecutor->platformRunner_;
    uiRunner_ = taskExecutor->uiRunner_;
    ioRunner_ = taskExecutor->ioRunner_;
    gpuRunner_ = taskExecutor->gpuRunner_;
}

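// Builds an executor from a prepared set of task runners, again creating its own JS runner.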
TaskExecutorImpl::TaskExecutorImpl(const OHOS::Ace::TaskRunners& taskRunners)
{
    jsRunner_ = TaskRunnerAdapterFactory::Create(false, GenJsThreadName());

    platformRunner_ = taskRunners.GetPlatformTaskRunner();
    uiRunner_ = taskRunners.GetUITaskRunner();
    ioRunner_ = taskRunners.GetIOTaskRunner();
    gpuRunner_ = taskRunners.GetGPUTaskRunner();
}

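// Creates the platform runner (optionally bound to the current event runner) and
// records the current thread as the PLATFORM thread.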
void TaskExecutorImpl::InitPlatformThread(bool useCurrentEventRunner, bool isStageModel)
{
    platformRunner_ = TaskRunnerAdapterFactory::Create(useCurrentEventRunner, "");
    FillTaskTypeTable(TaskType::PLATFORM);
}

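// Either spawns a dedicated JS thread or reuses the UI runner, then registers the
// JS task type on that thread.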
void TaskExecutorImpl::InitJsThread(bool newThread)
{
    if (newThread) {
        jsRunner_ = TaskRunnerAdapterFactory::Create(false, GenJsThreadName());
    } else {
        jsRunner_ = uiRunner_;
    }

    PostTaskToTaskRunner(
        jsRunner_, [weak = AceType::WeakClaim(this)] { FillTaskTypeTable(weak, TaskType::JS); }, 0);
}

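// Convenience overload that pulls the task runners out of a thread model, if one is provided.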
void TaskExecutorImpl::InitOtherThreads(ThreadModelImpl* threadModel)
{
    if (threadModel) {
        InitOtherThreads(threadModel->GetTaskRunners());
    }
}

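// Adopts the UI/IO/GPU runners, raises the priority of the UI and GPU threads, and
// registers each thread's task type asynchronously on its own runner.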
void TaskExecutorImpl::InitOtherThreads(const OHOS::Ace::TaskRunners& taskRunners)
{
    uiRunner_ = taskRunners.GetUITaskRunner();
    ioRunner_ = taskRunners.GetIOTaskRunner();
    gpuRunner_ = taskRunners.GetGPUTaskRunner();

    PostTaskToTaskRunner(
        uiRunner_, [this] { SetThreadPriority(UI_THREAD_PRIORITY); }, 0);
    PostTaskToTaskRunner(
        gpuRunner_, [this] { SetThreadPriority(GPU_THREAD_PRIORITY); }, 0);

    PostTaskToTaskRunner(
        uiRunner_, [weak = AceType::WeakClaim(this)] { FillTaskTypeTable(weak, TaskType::UI); }, 0);
    PostTaskToTaskRunner(
        ioRunner_, [weak = AceType::WeakClaim(this)] { FillTaskTypeTable(weak, TaskType::IO); }, 0);
    PostTaskToTaskRunner(
        gpuRunner_, [weak = AceType::WeakClaim(this)] { FillTaskTypeTable(weak, TaskType::GPU); }, 0);
}

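// Wraps the task with container/trace-id bookkeeping (or with the custom task wrapper for
// PLATFORM/UI/JS tasks when one is registered) and dispatches it to the runner matching the
// requested task type. Background tasks go to the shared background executor and ignore the delay.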
bool TaskExecutorImpl::OnPostTask(Task&& task, TaskType type, uint32_t delayTime, const std::string& callerInfo) const
{
    int32_t currentId = Container::CurrentId();
    auto traceIdFunc = [weak = WeakClaim(const_cast<TaskExecutorImpl*>(this)), type]() {
        auto sp = weak.Upgrade();
        if (sp) {
            sp->taskIdTable_[static_cast<uint32_t>(type)]++;
        }
    };

    TaskExecutor::Task wrappedTask;
    if (taskWrapper_ != nullptr) {
        switch (type) {
            case TaskType::PLATFORM:
            case TaskType::UI:
            case TaskType::JS:
                LOGD("wrap napi task, currentId = %{public}d", currentId);
                wrappedTask = WrapTaskWithCustomWrapper(std::move(task), currentId, std::move(traceIdFunc));
                break;
            case TaskType::IO:
            case TaskType::GPU:
            case TaskType::BACKGROUND:
                wrappedTask = currentId >= 0 ? WrapTaskWithContainer(std::move(task), currentId, std::move(traceIdFunc))
                                             : std::move(task);
                break;
            default:
                return false;
        }
    } else {
        wrappedTask = currentId >= 0 ? WrapTaskWithContainer(std::move(task), currentId, std::move(traceIdFunc))
                                     : std::move(task);
    }

    switch (type) {
        case TaskType::PLATFORM:
            return PostTaskToTaskRunner(platformRunner_, std::move(wrappedTask), delayTime);
        case TaskType::UI:
            return PostTaskToTaskRunner(uiRunner_, std::move(wrappedTask), delayTime);
        case TaskType::IO:
            return PostTaskToTaskRunner(ioRunner_, std::move(wrappedTask), delayTime);
        case TaskType::GPU:
            return PostTaskToTaskRunner(gpuRunner_, std::move(wrappedTask), delayTime);
        case TaskType::JS:
            return PostTaskToTaskRunner(jsRunner_, std::move(wrappedTask), delayTime);
        case TaskType::BACKGROUND:
            // Ignore delay time
            return BackgroundTaskExecutor::GetInstance().PostTask(std::move(wrappedTask));
        default:
            return false;
    }
}

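// Trace-id wrapping reuses the container wrapper, which already saves and restores the trace id.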
TaskExecutor::Task TaskExecutorImpl::WrapTaskWithTraceId(Task&& task, int32_t id) const
{
    return WrapTaskWithContainer(std::move(task), id);
}

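// Reports whether a task of the given type would execute on the calling thread.
// PLATFORM/UI/JS defer to the custom task wrapper when one is registered; BACKGROUND
// tasks always run on the thread pool, so the answer is always false.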
bool TaskExecutorImpl::WillRunOnCurrentThread(TaskType type) const
{
    switch (type) {
        case TaskType::PLATFORM:
            return platformRunner_ ? (taskWrapper_ != nullptr ? taskWrapper_->WillRunOnCurrentThread()
                                                              : platformRunner_->RunsTasksOnCurrentThread())
                                   : false;
        case TaskType::UI:
            return uiRunner_ ? (taskWrapper_ != nullptr ? taskWrapper_->WillRunOnCurrentThread()
                                                        : uiRunner_->RunsTasksOnCurrentThread())
                             : false;
        case TaskType::IO:
            return ioRunner_ ? ioRunner_->RunsTasksOnCurrentThread() : false;
        case TaskType::GPU:
            return gpuRunner_ ? gpuRunner_->RunsTasksOnCurrentThread() : false;
        case TaskType::JS:
            return jsRunner_ ? (taskWrapper_ != nullptr ? taskWrapper_->WillRunOnCurrentThread()
                                                        : jsRunner_->RunsTasksOnCurrentThread())
                             : false;
        case TaskType::BACKGROUND:
            // Always return false for background tasks.
            return false;
        default:
            return false;
    }
}

thread_local TaskExecutor::TaskType TaskExecutorImpl::localTaskType = TaskExecutor::TaskType::UNKNOWN;

#ifdef ACE_DEBUG
static const char* TaskTypeToString(TaskExecutor::TaskType type)
{
    switch (type) {
        case TaskExecutor::TaskType::PLATFORM:
            return "PLATFORM";
        case TaskExecutor::TaskType::UI:
            return "UI";
        case TaskExecutor::TaskType::IO:
            return "IO";
        case TaskExecutor::TaskType::GPU:
            return "GPU";
        case TaskExecutor::TaskType::JS:
            return "JS";
        case TaskExecutor::TaskType::BACKGROUND:
            return "BACKGROUND";
        case TaskExecutor::TaskType::UNKNOWN:
        default:
            return "UNKNOWN";
    }
}

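// Debug-only deadlock detection for synchronous tasks: before a sync task is posted, walk the
// chain of threads already blocked in sync calls; if the chain loops back to the current thread,
// dump the cycle and refuse the post.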
bool TaskExecutorImpl::OnPreSyncTask(TaskType type) const
{
    std::lock_guard<std::mutex> lock(tableMutex_);
    auto it = taskTypeTable_.find(type);
    // when task type not filled, just skip
    if (it == taskTypeTable_.end()) {
        return true;
    }

    auto itSync = syncTaskTable_.find(it->second.threadId);
    while (itSync != syncTaskTable_.end()) {
        if (itSync->second == std::this_thread::get_id()) {
            DumpDeadSyncTask(localTaskType, type);
            ACE_DCHECK(itSync->second != std::this_thread::get_id() && "DEAD LOCK HAPPENED !!!");
            return false;
        }

        itSync = syncTaskTable_.find(itSync->second);
    }

    syncTaskTable_.emplace(std::this_thread::get_id(), it->second.threadId);
    return true;
}

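// Removes the current thread from the sync-wait table once its synchronous task has completed.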
void TaskExecutorImpl::OnPostSyncTask() const
{
    std::lock_guard<std::mutex> lock(tableMutex_);
    syncTaskTable_.erase(std::this_thread::get_id());
}

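// Logs the pair of task types (with their thread ids and names) involved in a detected
// sync-task deadlock.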
void TaskExecutorImpl::DumpDeadSyncTask(TaskType from, TaskType to) const
{
    auto itFrom = taskTypeTable_.find(from);
    auto itTo = taskTypeTable_.find(to);

    ACE_DCHECK(itFrom != taskTypeTable_.end());
    ACE_DCHECK(itTo != taskTypeTable_.end());

    LOGE("DEAD LOCK HAPPEN: %{public}s(%{public}d, %{public}s) -> %{public}s(%{public}d, %{public}s)",
        TaskTypeToString(from), itFrom->second.tid, itFrom->second.threadName.c_str(), TaskTypeToString(to),
        itTo->second.tid, itTo->second.threadName.c_str());
}
#endif

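// Records the calling thread's id, tid, and name against the given task type so that
// sync-task bookkeeping and deadlock dumps can map task types back to threads.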
void TaskExecutorImpl::FillTaskTypeTable(TaskType type)
{
    constexpr size_t MAX_THREAD_NAME_SIZE = 32;
    char threadNameBuf[MAX_THREAD_NAME_SIZE] = { 0 };
    const char* threadName = threadNameBuf;
#if !defined(PREVIEW) and !defined(IOS_PLATFORM)
#ifdef OHOS_STANDARD_SYSTEM
    if (prctl(PR_GET_NAME, threadNameBuf) < 0) {
        threadName = "unknown";
    }
#else
    if (pthread_getname_np(pthread_self(), threadNameBuf, sizeof(threadNameBuf)) != 0) {
        threadName = "unknown";
    }
#endif
#endif

    localTaskType = type;
    ThreadInfo info = {
        .threadId = std::this_thread::get_id(),
#if !defined(PREVIEW) and !defined(IOS_PLATFORM)
        .tid = gettid(),
#endif
        .threadName = threadName,
    };

    std::lock_guard<std::mutex> lock(tableMutex_);
    taskTypeTable_.emplace(type, info);
}

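// Static helper used from posted lambdas: only fills the table if the executor is still alive.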
void TaskExecutorImpl::FillTaskTypeTable(const WeakPtr<TaskExecutorImpl>& weak, TaskType type)
{
    auto taskExecutor = weak.Upgrade();
    if (taskExecutor) {
        taskExecutor->FillTaskTypeTable(type);
    }
}
} // namespace OHOS::Ace