/*
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "native_async_work.h"

#ifdef ENABLE_HITRACE
#include "hitrace_meter.h"
#include "parameter.h"
#endif
#ifdef ENABLE_CONTAINER_SCOPE
#include "core/common/container_scope.h"
#endif

#include <cinttypes>
#include "native_api_internal.h"

#ifdef ENABLE_CONTAINER_SCOPE
using OHOS::Ace::ContainerScope;
#endif

#ifdef ENABLE_HITRACE
std::atomic<bool> g_napiTraceIdEnabled(false);
std::atomic<bool> g_ParamUpdated(false);
constexpr size_t TRACE_BUFFER_SIZE = 120;
constexpr size_t TRACEID_PARAM_SIZE = 10;
const std::string TRACE_POINT_QUEUE = "napi::NativeAsyncWork::Queue";
const std::string TRACE_POINT_QUEUE_WITH_QOS = "napi::NativeAsyncWork::QueueWithQos";
const std::string TRACE_POINT_QUEUE_ORDERED = "napi::NativeAsyncWork::QueueOrdered";
const std::string TRACE_POINT_ASYNCWORKCALLBACK = "napi::NativeAsyncWork::AsyncWorkCallback";
using namespace OHOS::HiviewDFX;
#endif

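// Backs the napi_async_work handle: construction captures the execute and
// complete callbacks plus user data, binds the uv_work_t request back to
// this wrapper, reads the "persist.hiviewdfx.napitraceid.enabled" system
// parameter once per process, and, when trace ids are enabled, attaches a
// HiTrace span and a human-readable trace description to the work item.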
NativeAsyncWork::NativeAsyncWork(NativeEngine* engine,
                                 NativeAsyncExecuteCallback execute,
                                 NativeAsyncCompleteCallback complete,
                                 const std::string &asyncResourceName,
                                 void* data)
    : work_({ 0 }), engine_(engine), engineId_(engine->GetId()), execute_(execute), complete_(complete), data_(data)
{
    work_.data = this;
    (void)asyncResourceName;
#ifdef ENABLE_HITRACE
    // Read the system parameter only once per process; later instances reuse the cached value.
    if (!g_ParamUpdated.load()) {
        char napiTraceIdEnabled[TRACEID_PARAM_SIZE] = {0};
        int ret = GetParameter("persist.hiviewdfx.napitraceid.enabled", "false",
                               napiTraceIdEnabled, sizeof(napiTraceIdEnabled));
        if (ret > 0 && strcmp(napiTraceIdEnabled, "true") == 0) {
            g_napiTraceIdEnabled.store(true);
        }
        g_ParamUpdated.store(true);
    }
    bool createdTraceId = false;

    HiTraceId thisId = HiTraceChain::GetId();
    if (g_napiTraceIdEnabled.load() && (!thisId.IsValid())) {
        thisId = HiTraceChain::Begin("New NativeAsyncWork", 0);
        createdTraceId = true;
    }
    if (thisId.IsValid()) {
        taskTraceId_ = HiTraceChain::CreateSpan();
    }
    char traceStr[TRACE_BUFFER_SIZE] = {0};
    // GetChainId() returns a 64-bit id, so print it with PRIx64 rather than %x.
    if (sprintf_s(traceStr, sizeof(traceStr),
                  "name:%s#%" PRIuPTR ", traceid:0x%" PRIx64,
                  asyncResourceName.c_str(),
                  reinterpret_cast<uintptr_t>(this),
                  taskTraceId_.GetChainId()) < 0) {
        HILOG_ERROR("Get traceStr failed");
    }
    traceDescription_ = traceStr;
    if (createdTraceId) {
        OHOS::HiviewDFX::HiTraceChain::ClearId();
    }
#endif
#ifdef ENABLE_CONTAINER_SCOPE
    if (engine->IsContainerScopeEnabled()) {
        containerScopeId_ = ContainerScope::CurrentId();
    }
#endif
}

NativeAsyncWork::~NativeAsyncWork() = default;

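// Schedules the work item on the libuv thread pool: the execute callback
// will run on a worker thread and the complete callback on the event loop
// thread. Returns false if no loop is available or uv_queue_work fails.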
bool NativeAsyncWork::Queue(NativeEngine* engine)
{
    VALID_ENGINE_CHECK(engine, engine_, engineId_);

    uv_loop_t* loop = nullptr;
    if (engine_->IsMainEnvContext()) {
        loop = engine_->GetUVLoop();
    } else {
        loop = engine_->GetParent()->GetUVLoop();
    }

    if (loop == nullptr) {
        HILOG_ERROR("Get loop failed");
        return false;
    }
    engine_->IncreaseWaitingRequestCounter();
#ifdef ENABLE_HITRACE
    StartTrace(HITRACE_TAG_ACE, "Native async work queue, " + this->GetTraceDescription());
    HiTraceId taskId = taskTraceId_;
    HiTraceChain::Tracepoint(HITRACE_TP_CS, taskId, "%s", TRACE_POINT_QUEUE.c_str());
#endif
    int status = uv_queue_work(loop, &work_, AsyncWorkCallback, AsyncAfterWorkCallback);
#ifdef ENABLE_HITRACE
    HiTraceChain::Tracepoint(HITRACE_TP_CR, taskId, "%s", TRACE_POINT_QUEUE.c_str());
    FinishTrace(HITRACE_TAG_ACE);
#endif
    if (status != 0) {
        HILOG_ERROR("uv_queue_work failed");
        engine_->DecreaseWaitingRequestCounter();
        return false;
    }
    HILOG_DEBUG("uv_queue_work succeeded");
    return true;
}

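// Same as Queue(), but tags the task with a QoS level so the thread pool
// can prioritize it; uv_queue_work_with_qos is an OpenHarmony extension
// of libuv.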
bool NativeAsyncWork::QueueWithQos(NativeEngine* engine, napi_qos_t qos)
{
    VALID_ENGINE_CHECK(engine, engine_, engineId_);

    uv_loop_t* loop = nullptr;
    if (engine_->IsMainEnvContext()) {
        loop = engine_->GetUVLoop();
    } else {
        loop = engine_->GetParent()->GetUVLoop();
    }

    if (loop == nullptr) {
        HILOG_ERROR("Get loop failed");
        return false;
    }
    engine_->IncreaseWaitingRequestCounter();
#ifdef ENABLE_HITRACE
    StartTrace(HITRACE_TAG_ACE, "Native async work queueWithQos, " + this->GetTraceDescription());
    HiTraceId taskId = taskTraceId_;
    HiTraceChain::Tracepoint(HITRACE_TP_CS, taskId, "%s", TRACE_POINT_QUEUE_WITH_QOS.c_str());
#endif
    int status = uv_queue_work_with_qos(loop, &work_, AsyncWorkCallback, AsyncAfterWorkCallback, uv_qos_t(qos));
#ifdef ENABLE_HITRACE
    HiTraceChain::Tracepoint(HITRACE_TP_CR, taskId, "%s", TRACE_POINT_QUEUE_WITH_QOS.c_str());
    FinishTrace(HITRACE_TAG_ACE);
#endif
    if (status != 0) {
        HILOG_ERROR("uv_queue_work_with_qos failed");
        engine_->DecreaseWaitingRequestCounter();
        return false;
    }
    HILOG_DEBUG("uv_queue_work_with_qos succeeded");
    return true;
}

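// Same as QueueWithQos(), but additionally keys the task by queueId so
// that work items sharing the same queue id execute in submission order.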
bool NativeAsyncWork::QueueOrdered(NativeEngine* engine, napi_qos_t qos, uintptr_t queueId)
{
    VALID_ENGINE_CHECK(engine, engine_, engineId_);

    uv_loop_t* loop = nullptr;
    if (engine_->IsMainEnvContext()) {
        loop = engine_->GetUVLoop();
    } else {
        loop = engine_->GetParent()->GetUVLoop();
    }

    if (loop == nullptr) {
        HILOG_ERROR("Get loop failed");
        return false;
    }
    engine_->IncreaseWaitingRequestCounter();
#ifdef ENABLE_HITRACE
    StartTrace(HITRACE_TAG_ACE, "Native async work queueOrdered, " + this->GetTraceDescription());
    HiTraceId taskId = taskTraceId_;
    HiTraceChain::Tracepoint(HITRACE_TP_CS, taskId, "%s", TRACE_POINT_QUEUE_ORDERED.c_str());
#endif
    int status = uv_queue_work_ordered(loop, &work_, AsyncWorkCallback, AsyncAfterWorkCallback,
                                       uv_qos_t(qos), queueId);
#ifdef ENABLE_HITRACE
    HiTraceChain::Tracepoint(HITRACE_TP_CR, taskId, "%s", TRACE_POINT_QUEUE_ORDERED.c_str());
    FinishTrace(HITRACE_TAG_ACE);
#endif
    if (status != 0) {
        HILOG_ERROR("uv_queue_work_ordered failed");
        engine_->DecreaseWaitingRequestCounter();
        return false;
    }
    HILOG_DEBUG("uv_queue_work_ordered succeeded");
    return true;
}

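// Attempts to cancel a pending work item via uv_cancel. Cancellation only
// succeeds while the request is still waiting in the thread pool; once the
// execute callback has started, uv_cancel fails and this returns false.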
bool NativeAsyncWork::Cancel(NativeEngine* engine)
{
    VALID_ENGINE_CHECK(engine, engine_, engineId_);

    int status = uv_cancel((uv_req_t*)&work_);
    if (status != 0) {
        HILOG_ERROR("uv_cancel failed");
        return false;
    }
    return true;
}

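// Thread-pool entry point: runs the user-supplied execute callback on a
// worker thread. When this work item carries a valid HiTrace span, the id
// is swapped in around the callback so cross-thread traces stay linked.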
void NativeAsyncWork::AsyncWorkCallback(uv_work_t* req)
{
    if (req == nullptr) {
        HILOG_ERROR("req is nullptr");
        return;
    }

    auto that = reinterpret_cast<NativeAsyncWork*>(req->data);
    HILOG_DEBUG("NativeAsyncWork::AsyncWorkCallback start to execute.");

#ifdef ENABLE_HITRACE
    StartTrace(HITRACE_TAG_ACE, "Native async work execute callback, " + that->GetTraceDescription());
    if (that->taskTraceId_.IsValid()) {
        HiTraceId currentId = HiTraceChain::SaveAndSet(that->taskTraceId_);
        HiTraceChain::Tracepoint(HITRACE_TP_SR, that->taskTraceId_, "%s", TRACE_POINT_ASYNCWORKCALLBACK.c_str());
        that->execute_(that->engine_, that->data_);
        FinishTrace(HITRACE_TAG_ACE);
        HiTraceChain::Tracepoint(HITRACE_TP_SS, that->taskTraceId_, "%s", TRACE_POINT_ASYNCWORKCALLBACK.c_str());
        HiTraceChain::Restore(currentId);
        return;
    }
#endif
    that->execute_(that->engine_, that->data_);
#ifdef ENABLE_HITRACE
    FinishTrace(HITRACE_TAG_ACE);
#endif
}

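// Event-loop completion: maps the libuv status to a napi_status, restores
// the container scope when enabled, runs the complete callback under a
// TryCatch, and forwards any uncaught JS exception to the engine.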
void NativeAsyncWork::AsyncAfterWorkCallback(uv_work_t* req, int status)
{
    if (req == nullptr) {
        HILOG_ERROR("req is nullptr");
        return;
    }

    auto that = reinterpret_cast<NativeAsyncWork*>(req->data);
    auto engine = that->engine_;
    engine->DecreaseWaitingRequestCounter();
    auto vm = engine->GetEcmaVm();
    panda::LocalScope scope(vm);
    napi_status nstatus = napi_generic_failure;
    switch (status) {
        case 0:
            nstatus = napi_ok;
            break;
        case (int)UV_EINVAL:
            nstatus = napi_invalid_arg;
            break;
        case (int)UV_ECANCELED:
            nstatus = napi_cancelled;
            break;
        default:
            nstatus = napi_generic_failure;
    }
#ifdef ENABLE_CONTAINER_SCOPE
    ContainerScope containerScope(that->containerScopeId_, engine->IsContainerScopeEnabled());
#endif

    TryCatch tryCatch(reinterpret_cast<napi_env>(engine));
    HILOG_DEBUG("NativeAsyncWork::AsyncAfterWorkCallback start to execute.");
#ifdef ENABLE_HITRACE
    StartTrace(HITRACE_TAG_ACE, "Native async work complete callback, " + that->GetTraceDescription());
    bool isValidTraceId = that->taskTraceId_.IsValid();
    if (isValidTraceId) {
        OHOS::HiviewDFX::HiTraceChain::SaveAndSet(that->taskTraceId_);
    }
#endif

    // The complete callback may free this work item, so `that` must not be used afterwards.
    that->complete_(engine, nstatus, that->data_);
    if (tryCatch.HasCaught()) {
        engine->HandleUncaughtException();
    }

#ifdef ENABLE_HITRACE
    FinishTrace(HITRACE_TAG_ACE);
    if (isValidTraceId) {
        OHOS::HiviewDFX::HiTraceChain::ClearId();
    }
#endif
}

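// Returns the trace label built in the constructor (empty if HiTrace
// support is compiled out).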
std::string NativeAsyncWork::GetTraceDescription()
{
    return traceDescription_;
}