/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ecmascript/compiler/post_schedule.h"

#include "ecmascript/stubs/runtime_stubs.h"

#include "ecmascript/compiler/circuit_builder-inl.h"
#include "ecmascript/js_thread.h"

namespace panda::ecmascript::kungfu {
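// Post-schedule pass entry: expands the gates that still need lowering after scheduling
// (HEAP_ALLOC and STORE) into explicit control flow, then optionally dumps the result.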
void PostSchedule::Run(ControlFlowGraph &cfg)
{
    GenerateExtraBB(cfg);

    if (IsLogEnabled()) {
        LOG_COMPILER(INFO) << "";
        LOG_COMPILER(INFO) << "\033[34m"
                           << "===================="
                           << " After post schedule "
                           << "[" << GetMethodName() << "]"
                           << "===================="
                           << "\033[0m";
        PrintGraph("Build extra basic block for scheduled gates", cfg);
        LOG_COMPILER(INFO) << "\033[34m" << "========================= End ==========================" << "\033[0m";
    }
}

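// Walks every basic block and every gate in it; when a visit splits the block
// (VisitHeapAlloc/VisitStore returning true), the block is re-traversed from its first gate,
// because both the block contents and the overall block count may have changed.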
void PostSchedule::GenerateExtraBB(ControlFlowGraph &cfg)
{
    size_t bbNum = cfg.size();
    size_t bbIdx = 0;
    while (bbIdx < bbNum) {
        const std::vector<GateRef>& bb = cfg.at(bbIdx);
        size_t instNum = bb.size();
        size_t instIdx = 0;
        while (instIdx < instNum) {
            const std::vector<GateRef>& currentBB = cfg.at(bbIdx);
            GateRef current = currentBB[instIdx];
            OpCode opcode = acc_.GetOpCode(current);
            bool needRetraverse = false;
            switch (opcode) {
                case OpCode::HEAP_ALLOC: {
                    needRetraverse = VisitHeapAlloc(current, cfg, bbIdx, instIdx);
                    break;
                }
                case OpCode::STORE: {
                    needRetraverse = VisitStore(current, cfg, bbIdx, instIdx);
                    break;
                }
                default: {
                    break;
                }
            }
            const std::vector<GateRef>& refreshedBB = cfg.at(bbIdx);
            instNum = refreshedBB.size();
            instIdx = needRetraverse ? 0 : (instIdx + 1);
        }
        bbNum = cfg.size();
        bbIdx++;
    }
}

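// Lowers a HEAP_ALLOC gate. With ASAN enabled only the runtime-call path is emitted and spliced
// into the current block; otherwise the allocation is split into a fast-path (success) block,
// a slow-path (fail) block and a merge (end) block.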
bool PostSchedule::VisitHeapAlloc(GateRef gate, ControlFlowGraph &cfg, size_t bbIdx, size_t instIdx)
{
    int64_t flag = static_cast<int64_t>(acc_.TryGetValue(gate));
    ASSERT(flag == RegionSpaceFlag::IN_YOUNG_SPACE ||
           flag == RegionSpaceFlag::IN_SHARED_OLD_SPACE ||
           flag == RegionSpaceFlag::IN_SHARED_NON_MOVABLE);
    std::vector<GateRef> currentBBGates;
    std::vector<GateRef> successBBGates;
    std::vector<GateRef> failBBGates;
    std::vector<GateRef> endBBGates;
    LoweringHeapAllocAndPrepareScheduleGate(gate, currentBBGates, successBBGates, failBBGates, endBBGates, flag);
#ifdef ARK_ASAN_ON
    ReplaceGateDirectly(currentBBGates, cfg, bbIdx, instIdx);
    return false;
#else
    ReplaceBBState(cfg, bbIdx, currentBBGates, endBBGates);
    ScheduleEndBB(endBBGates, cfg, bbIdx, instIdx);
    ScheduleNewBB(successBBGates, cfg, bbIdx);
    ScheduleNewBB(failBBGates, cfg, bbIdx);
    ScheduleCurrentBB(currentBBGates, cfg, bbIdx, instIdx);
    return true;
#endif
}

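// Splices the lowered gates into the current block at instIdx and drops the original gate,
// without creating any new basic block.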
void PostSchedule::ReplaceGateDirectly(std::vector<GateRef> &gates, ControlFlowGraph &cfg, size_t bbIdx, size_t instIdx)
{
    std::vector<GateRef>& bb = cfg.at(bbIdx);
    bb.insert(bb.begin() + instIdx, gates.begin(), gates.end());
    bb.erase(bb.begin() + instIdx + gates.size());
}

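// Inserts the end (merge) block right after the current block. Gates that were scheduled before
// the lowered gate are moved into it, just after its state gate.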
void PostSchedule::ScheduleEndBB(std::vector<GateRef> &gates, ControlFlowGraph &cfg, size_t bbIdx, size_t instIdx)
{
    std::vector<GateRef>& bb = cfg.at(bbIdx);
    if (instIdx > 0) {
        gates.insert(gates.begin() + 1, bb.begin(), bb.begin() + instIdx); // 1: after state gate
    }
    cfg.insert(cfg.begin() + bbIdx + 1, std::move(gates)); // 1: after current bb
}

void PostSchedule::ScheduleNewBB(std::vector<GateRef> &gates, ControlFlowGraph &cfg, size_t bbIdx)
{
    if (!gates.empty()) {
        cfg.insert(cfg.begin() + bbIdx + 1, std::move(gates));
    }
}

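// Rewrites the current block: the original leading gates (already moved to the end block) and the
// lowered gate are erased, and the newly generated current-block gates are placed at the front.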
void PostSchedule::ScheduleCurrentBB(const std::vector<GateRef> &gates, ControlFlowGraph &cfg, size_t bbIdx,
                                     size_t instIdx)
{
    std::vector<GateRef>& bb = cfg.at(bbIdx);
    if (instIdx == 0) {
        bb.erase(bb.begin());
    } else {
        bb.erase(bb.begin(), bb.begin() + instIdx + 1); // 1: include current gate
    }
    bb.insert(bb.begin(), gates.begin(), gates.end());
}

void PostSchedule::PrepareToScheduleNewGate(GateRef gate, std::vector<GateRef> &gates)
{
    gates.emplace_back(gate);
}

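// Rewires the state edges around the split. If the current block starts with a terminator
// (return/branch/deopt check), the new IF_BRANCH inherits its state input and the terminator is
// re-parented onto the end-block MERGE; if it starts with a block-head state, that state's users
// are redirected to the MERGE and the state itself becomes the IF_BRANCH's state input.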
void PostSchedule::ReplaceBBState(ControlFlowGraph &cfg, size_t bbIdx, std::vector<GateRef> &currentBBGates,
                                  std::vector<GateRef> &endBBGates)
{
    GateRef floatBranch = currentBBGates[0];
    ASSERT(acc_.GetOpCode(floatBranch) == OpCode::IF_BRANCH);
    GateRef endBBState = endBBGates[0];
    ASSERT(acc_.GetOpCode(endBBState) == OpCode::MERGE);
    std::vector<GateRef>& bb = cfg.at(bbIdx);
    GateRef currentBBState = bb[0];
    ASSERT(acc_.IsState(currentBBState));

    OpCode opcode = acc_.GetOpCode(currentBBState);
    switch (opcode) {
        case OpCode::DEOPT_CHECK:
        case OpCode::RETURN:
        case OpCode::RETURN_VOID:
        case OpCode::IF_BRANCH:
        case OpCode::SWITCH_BRANCH: {
            GateRef stateIn = acc_.GetState(currentBBState, 0);
            acc_.ReplaceStateIn(floatBranch, stateIn);
            acc_.ReplaceStateIn(currentBBState, endBBState);
            break;
        }
        case OpCode::STATE_ENTRY:
        case OpCode::ORDINARY_BLOCK:
        case OpCode::IF_TRUE:
        case OpCode::IF_FALSE:
        case OpCode::SWITCH_CASE:
        case OpCode::DEFAULT_CASE:
        case OpCode::MERGE:
        case OpCode::LOOP_BEGIN:
        case OpCode::LOOP_BACK: {
            acc_.ReplaceControlGate(currentBBState, endBBState);
            acc_.ReplaceStateIn(floatBranch, currentBBState);
            currentBBGates.insert(currentBBGates.begin(), currentBBState);
            bb[0] = builder_.Nop();
            break;
        }
        default: {
            LOG_ECMA(FATAL) << "this branch is unreachable with opcode:" << opcode;
            UNREACHABLE();
        }
    }
}

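// Expands HEAP_ALLOC into an inline bump-pointer allocation: load the space's top/end pointers
// from glue, take the fast path when top + size still fits, otherwise call the matching runtime
// allocator. The generated gates are collected per target block so the callers can schedule them.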
void PostSchedule::LoweringHeapAllocAndPrepareScheduleGate(GateRef gate,
                                                           std::vector<GateRef> &currentBBGates,
                                                           std::vector<GateRef> &successBBGates,
                                                           std::vector<GateRef> &failBBGates,
                                                           std::vector<GateRef> &endBBGates,
                                                           [[maybe_unused]] int64_t flag)
{
#ifdef ARK_ASAN_ON
    LoweringHeapAllocate(gate, currentBBGates, successBBGates, failBBGates, endBBGates, flag);
#else
    Environment env(gate, circuit_, &builder_);
    Label exit(&builder_);
    GateRef glue = acc_.GetValueIn(gate, 0);
    GateRef size = acc_.GetValueIn(gate, 1);
    GateRef hole = circuit_->GetConstantGateWithoutCache(
        MachineType::I64, JSTaggedValue::VALUE_HOLE, GateType::TaggedValue());
    DEFVALUE(result, (&builder_), VariableType::JS_ANY(), hole);
    Label success(&builder_);
    Label callRuntime(&builder_);
    size_t topOffset;
    size_t endOffset;
    if (flag == RegionSpaceFlag::IN_SHARED_OLD_SPACE) {
        topOffset = JSThread::GlueData::GetSOldSpaceAllocationTopAddressOffset(false);
        endOffset = JSThread::GlueData::GetSOldSpaceAllocationEndAddressOffset(false);
    } else if (flag == RegionSpaceFlag::IN_SHARED_NON_MOVABLE) {
        topOffset = JSThread::GlueData::GetSNonMovableSpaceAllocationTopAddressOffset(false);
        endOffset = JSThread::GlueData::GetSNonMovableSpaceAllocationEndAddressOffset(false);
    } else {
        ASSERT(flag == RegionSpaceFlag::IN_YOUNG_SPACE);
        topOffset = JSThread::GlueData::GetNewSpaceAllocationTopAddressOffset(false);
        endOffset = JSThread::GlueData::GetNewSpaceAllocationEndAddressOffset(false);
    }
    GateRef topAddrOffset = circuit_->GetConstantGateWithoutCache(MachineType::I64, topOffset, GateType::NJSValue());
    GateRef endAddrOffset = circuit_->GetConstantGateWithoutCache(MachineType::I64, endOffset, GateType::NJSValue());
    GateRef topAddrAddr = builder_.PtrAdd(glue, topAddrOffset);
    GateRef endAddrAddr = builder_.PtrAdd(glue, endAddrOffset);
    GateRef topAddress = builder_.Load(VariableType::NATIVE_POINTER(), topAddrAddr);
    GateRef endAddress = builder_.Load(VariableType::NATIVE_POINTER(), endAddrAddr);
    GateRef addrOffset = circuit_->GetConstantGateWithoutCache(MachineType::I64, 0, GateType::NJSValue());
    GateRef rawTopAddr = builder_.PtrAdd(topAddress, addrOffset);
    GateRef rawEndAddr = builder_.PtrAdd(endAddress, addrOffset);
    GateRef top = builder_.Load(VariableType::JS_POINTER(), rawTopAddr);
    GateRef end = builder_.Load(VariableType::JS_POINTER(), rawEndAddr);

    GateRef newTop = builder_.PtrAdd(top, size);
    GateRef condition = builder_.Int64GreaterThan(newTop, end);
    Label *currentLabel = env.GetCurrentLabel();
    builder_.Branch(condition, &callRuntime, &success);
    {
        GateRef ifBranch = currentLabel->GetControl();
        PrepareToScheduleNewGate(ifBranch, currentBBGates);
        PrepareToScheduleNewGate(condition, currentBBGates);
        PrepareToScheduleNewGate(newTop, currentBBGates);
        PrepareToScheduleNewGate(end, currentBBGates);
        PrepareToScheduleNewGate(top, currentBBGates);
        PrepareToScheduleNewGate(rawEndAddr, currentBBGates);
        PrepareToScheduleNewGate(rawTopAddr, currentBBGates);
        PrepareToScheduleNewGate(topAddress, currentBBGates);
        PrepareToScheduleNewGate(endAddress, currentBBGates);
        PrepareToScheduleNewGate(addrOffset, currentBBGates);
        PrepareToScheduleNewGate(topAddrAddr, currentBBGates);
        PrepareToScheduleNewGate(endAddrAddr, currentBBGates);
        PrepareToScheduleNewGate(topAddrOffset, currentBBGates);
        PrepareToScheduleNewGate(endAddrOffset, currentBBGates);
        PrepareToScheduleNewGate(hole, currentBBGates);
    }
    builder_.Bind(&success);
    {
        GateRef ifFalse = builder_.GetState();
        GateRef addr = builder_.PtrAdd(topAddress, addrOffset);
        builder_.StoreWithoutBarrier(VariableType::NATIVE_POINTER(), addr, newTop);
        GateRef store = builder_.GetDepend();
        result = top;
        builder_.Jump(&exit);
        {
            GateRef ordinaryBlock = success.GetControl();
            PrepareToScheduleNewGate(ordinaryBlock, successBBGates);
            PrepareToScheduleNewGate(store, successBBGates);
            PrepareToScheduleNewGate(addr, successBBGates);
            PrepareToScheduleNewGate(ifFalse, successBBGates);
        }
    }
    builder_.Bind(&callRuntime);
    {
        GateRef ifTrue = builder_.GetState();
        GateRef taggedIntMask = circuit_->GetConstantGateWithoutCache(
            MachineType::I64, JSTaggedValue::TAG_INT, GateType::NJSValue());
        GateRef taggedSize = builder_.Int64Or(size, taggedIntMask);
        GateRef target = Circuit::NullGate();
        if (flag == RegionSpaceFlag::IN_SHARED_OLD_SPACE) {
            target = circuit_->GetConstantGateWithoutCache(MachineType::ARCH, RTSTUB_ID(AllocateInSOld),
                                                           GateType::NJSValue());
        } else if (flag == RegionSpaceFlag::IN_SHARED_NON_MOVABLE) {
            target = circuit_->GetConstantGateWithoutCache(MachineType::ARCH, RTSTUB_ID(AllocateInSNonMovable),
                                                           GateType::NJSValue());
        } else {
            ASSERT(flag == RegionSpaceFlag::IN_YOUNG_SPACE);
            target = circuit_->GetConstantGateWithoutCache(MachineType::ARCH, RTSTUB_ID(AllocateInYoung),
                                                           GateType::NJSValue());
        }
        const CallSignature *cs = RuntimeStubCSigns::Get(RTSTUB_ID(CallRuntime));
        ASSERT(cs->IsRuntimeStub());
        GateRef reseverdFrameArgs = circuit_->GetConstantGateWithoutCache(MachineType::I64, 0, GateType::NJSValue());
        GateRef reseverdPc = circuit_->GetConstantGateWithoutCache(MachineType::I64, 0, GateType::NJSValue());
        GateRef slowResult = builder_.Call(cs, glue, target, builder_.GetDepend(),
                                           { taggedSize, reseverdFrameArgs, reseverdPc }, Circuit::NullGate(),
                                           "Heap alloc");
        result = slowResult;
        builder_.Jump(&exit);
        {
            GateRef ordinaryBlock = callRuntime.GetControl();
            PrepareToScheduleNewGate(ordinaryBlock, failBBGates);
            PrepareToScheduleNewGate(slowResult, failBBGates);
            PrepareToScheduleNewGate(target, failBBGates);
            PrepareToScheduleNewGate(taggedSize, failBBGates);
            PrepareToScheduleNewGate(reseverdFrameArgs, failBBGates);
            PrepareToScheduleNewGate(reseverdPc, failBBGates);
            PrepareToScheduleNewGate(taggedIntMask, failBBGates);
            PrepareToScheduleNewGate(ifTrue, failBBGates);
        }
    }
    builder_.Bind(&exit);
    {
        GateRef merge = builder_.GetState();
        GateRef phi = *result;
        PrepareToScheduleNewGate(merge, endBBGates);
        PrepareToScheduleNewGate(phi, endBBGates);
    }
    acc_.ReplaceGate(gate, builder_.GetState(), builder_.GetDepend(), *result);
#endif
}

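// ASAN-only lowering: always call the runtime allocator, with no inline bump-pointer fast path
// (presumably so allocations stay visible to the sanitizer-aware allocator).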
void PostSchedule::LoweringHeapAllocate(GateRef gate,
                                        std::vector<GateRef> &currentBBGates,
                                        std::vector<GateRef> &successBBGates,
                                        std::vector<GateRef> &failBBGates,
                                        std::vector<GateRef> &endBBGates,
                                        int64_t flag)
{
    Environment env(gate, circuit_, &builder_);
    (void)successBBGates;
    (void)failBBGates;
    (void)endBBGates;
    GateRef glue = acc_.GetValueIn(gate, 0);
    GateRef size = acc_.GetValueIn(gate, 1);
    GateRef taggedIntMask = circuit_->GetConstantGateWithoutCache(
        MachineType::I64, JSTaggedValue::TAG_INT, GateType::NJSValue());
    GateRef taggedSize = builder_.Int64Or(size, taggedIntMask);
    auto id = RTSTUB_ID(AllocateInYoung);
    if (flag == RegionSpaceFlag::IN_SHARED_OLD_SPACE) {
        id = RTSTUB_ID(AllocateInSOld);
    } else if (flag == RegionSpaceFlag::IN_SHARED_NON_MOVABLE) {
        id = RTSTUB_ID(AllocateInSNonMovable);
    } else {
        ASSERT(flag == RegionSpaceFlag::IN_YOUNG_SPACE);
    }
    GateRef target = circuit_->GetConstantGateWithoutCache(MachineType::ARCH, id, GateType::NJSValue());
    const CallSignature *cs = RuntimeStubCSigns::Get(RTSTUB_ID(CallRuntime));
    ASSERT(cs->IsRuntimeStub());
    GateRef reseverdFrameArgs = circuit_->GetConstantGateWithoutCache(MachineType::I64, 0, GateType::NJSValue());
    GateRef reseverdPc = circuit_->GetConstantGateWithoutCache(MachineType::I64, 0, GateType::NJSValue());
    GateRef result = builder_.Call(cs, glue, target, builder_.GetDepend(),
                                   { taggedSize, reseverdFrameArgs, reseverdPc }, Circuit::NullGate(), "Heap alloc");
    acc_.ReplaceGate(gate, builder_.GetState(), builder_.GetDepend(), result);

    // Must keep the order of value-in/depend-in
    PrepareToScheduleNewGate(result, currentBBGates);
    PrepareToScheduleNewGate(target, currentBBGates);
    PrepareToScheduleNewGate(taggedSize, currentBBGates);
    PrepareToScheduleNewGate(reseverdFrameArgs, currentBBGates);
    PrepareToScheduleNewGate(reseverdPc, currentBBGates);
    PrepareToScheduleNewGate(taggedIntMask, currentBBGates);
}

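// Lowers a STORE gate according to its barrier kind: UNKNOWN_BARRIER splits the block and checks
// the stored value at runtime, NEED_BARRIER emits the store plus an unconditional barrier call in
// place, and NO_BARRIER emits only the raw store.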
bool PostSchedule::VisitStore(GateRef gate, ControlFlowGraph &cfg, size_t bbIdx, size_t instIdx)
{
    std::vector<GateRef> currentBBGates;
    std::vector<GateRef> barrierBBGates;
    std::vector<GateRef> endBBGates;
    MemoryAttribute::Barrier kind = GetWriteBarrierKind(gate);
    switch (kind) {
        case MemoryAttribute::Barrier::UNKNOWN_BARRIER: {
            LoweringStoreUnknownBarrierAndPrepareScheduleGate(gate, currentBBGates, barrierBBGates, endBBGates);
            ReplaceBBState(cfg, bbIdx, currentBBGates, endBBGates);
            ScheduleEndBB(endBBGates, cfg, bbIdx, instIdx);
            ScheduleNewBB(barrierBBGates, cfg, bbIdx);
            ScheduleCurrentBB(currentBBGates, cfg, bbIdx, instIdx);
            return true;
        }
        case MemoryAttribute::Barrier::NEED_BARRIER: {
            LoweringStoreWithBarrierAndPrepareScheduleGate(gate, currentBBGates);
            ReplaceGateDirectly(currentBBGates, cfg, bbIdx, instIdx);
            return false;
        }
        case MemoryAttribute::Barrier::NO_BARRIER: {
            LoweringStoreNoBarrierAndPrepareScheduleGate(gate, currentBBGates);
            ReplaceGateDirectly(currentBBGates, cfg, bbIdx, instIdx);
            return false;
        }
        default: {
            UNREACHABLE();
            return false;
        }
    }
    return false;
}

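// A store that is not GC related never needs a barrier, regardless of its memory attribute.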
MemoryAttribute::Barrier PostSchedule::GetWriteBarrierKind(GateRef gate)
{
    MemoryAttribute mAttr = acc_.GetMemoryAttribute(gate);
    if (!acc_.IsGCRelated(gate)) {
        return MemoryAttribute::Barrier::NO_BARRIER;
    }
    return mAttr.GetBarrier();
}

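// Chooses the write-barrier stub for the given share flag, filling in the call signature and the
// call comment; returns the stub index used as the call target.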
int PostSchedule::SelectBarrier(MemoryAttribute::ShareFlag share, const CallSignature*& cs, std::string_view& comment)
{
    int index = 0;
    switch (share) {
        case MemoryAttribute::UNKNOWN:
            if (fastBarrier_) {
                index = RuntimeStubCSigns::ID_ASMFastWriteBarrier;
                cs = RuntimeStubCSigns::Get(index);
                comment = "asm store barrier\0";
            } else {
                index = CommonStubCSigns::SetValueWithBarrier;
                cs = CommonStubCSigns::Get(index);
                comment = "store barrier\0";
            }
            break;
        case MemoryAttribute::SHARED:
            index = CommonStubCSigns::SetSValueWithBarrier;
            cs = CommonStubCSigns::Get(index);
            comment = "store share barrier\0";
            break;
        case MemoryAttribute::NON_SHARE:
            index = CommonStubCSigns::SetNonSValueWithBarrier;
            cs = CommonStubCSigns::Get(index);
            comment = "store not share barrier\0";
            break;
        default:
            UNREACHABLE();
    }
    return index;
}

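// GC-irrelevant store: a plain StoreWithoutBarrier at base + offset, with no barrier call.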
void PostSchedule::LoweringStoreNoBarrierAndPrepareScheduleGate(GateRef gate, std::vector<GateRef> &currentBBGates)
{
    Environment env(gate, circuit_, &builder_);

    GateRef base = acc_.GetValueIn(gate, 1); // 1: object
    GateRef offset = acc_.GetValueIn(gate, 2); // 2: offset
    GateRef value = acc_.GetValueIn(gate, 3); // 3: value
    GateRef addr = builder_.PtrAdd(base, offset);
    VariableType type = VariableType(acc_.GetMachineType(gate), acc_.GetGateType(gate));
    builder_.StoreWithoutBarrier(type, addr, value, acc_.GetMemoryAttribute(gate));
    GateRef store = builder_.GetDepend();
    {
        PrepareToScheduleNewGate(store, currentBBGates);
        PrepareToScheduleNewGate(addr, currentBBGates);
    }
    acc_.ReplaceGate(gate, builder_.GetState(), builder_.GetDepend(), Circuit::NullGate());
}

MemoryAttribute::ShareFlag PostSchedule::GetShareKind(panda::ecmascript::kungfu::GateRef gate)
{
    MemoryAttribute mAttr = acc_.GetMemoryAttribute(gate);
    return mAttr.GetShare();
}

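// Store that is statically known to need a barrier: emit the raw store followed by an
// unconditional call to the barrier stub selected by the share flag.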
void PostSchedule::LoweringStoreWithBarrierAndPrepareScheduleGate(GateRef gate, std::vector<GateRef> &currentBBGates)
{
    Environment env(gate, circuit_, &builder_);

    GateRef glue = acc_.GetValueIn(gate, 0);
    GateRef base = acc_.GetValueIn(gate, 1); // 1: object
    GateRef offset = acc_.GetValueIn(gate, 2); // 2: offset
    GateRef value = acc_.GetValueIn(gate, 3); // 3: value
    GateRef addr = builder_.PtrAdd(base, offset);
    VariableType type = VariableType(acc_.GetMachineType(gate), acc_.GetGateType(gate));
    builder_.StoreWithoutBarrier(type, addr, value, acc_.GetMemoryAttribute(gate));
    GateRef store = builder_.GetDepend();
    MemoryAttribute::ShareFlag share = GetShareKind(gate);
    std::string_view comment;
    const CallSignature* cs = nullptr;
    int index = SelectBarrier(share, cs, comment);
    ASSERT(cs && (cs->IsCommonStub() || cs->IsASMCallBarrierStub()) && "Invalid call signature for barrier");
    GateRef target = circuit_->GetConstantGateWithoutCache(MachineType::ARCH, index, GateType::NJSValue());
    GateRef reseverdFrameArgs = circuit_->GetConstantGateWithoutCache(MachineType::I64, 0, GateType::NJSValue());
    GateRef reseverdPc = circuit_->GetConstantGateWithoutCache(MachineType::I64, 0, GateType::NJSValue());
    GateRef storeBarrier = builder_.Call(cs, glue, target, builder_.GetDepend(),
                                         {glue, base, offset, value, reseverdFrameArgs, reseverdPc},
                                         Circuit::NullGate(), comment.data());
    {
        PrepareToScheduleNewGate(storeBarrier, currentBBGates);
        PrepareToScheduleNewGate(reseverdPc, currentBBGates);
        PrepareToScheduleNewGate(reseverdFrameArgs, currentBBGates);
        PrepareToScheduleNewGate(target, currentBBGates);
        PrepareToScheduleNewGate(store, currentBBGates);
        PrepareToScheduleNewGate(addr, currentBBGates);
    }
    acc_.ReplaceGate(gate, builder_.GetState(), builder_.GetDepend(), Circuit::NullGate());
}

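// Store whose barrier requirement is unknown at compile time: emit the raw store, then test the
// stored value's heap-object tag and call the barrier stub only on the heap-object path; the two
// paths rejoin in the end (merge) block.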
void PostSchedule::LoweringStoreUnknownBarrierAndPrepareScheduleGate(GateRef gate,
                                                                     std::vector<GateRef> &currentBBGates,
                                                                     std::vector<GateRef> &barrierBBGates,
                                                                     std::vector<GateRef> &endBBGates)
{
    Environment env(gate, circuit_, &builder_);

    GateRef glue = acc_.GetValueIn(gate, 0);
    GateRef base = acc_.GetValueIn(gate, 1); // 1: object
    GateRef offset = acc_.GetValueIn(gate, 2); // 2: offset
    GateRef value = acc_.GetValueIn(gate, 3); // 3: value
    GateRef addr = builder_.PtrAdd(base, offset);
    VariableType type = VariableType(acc_.GetMachineType(gate), acc_.GetGateType(gate));
    builder_.StoreWithoutBarrier(type, addr, value, acc_.GetMemoryAttribute(gate));
    GateRef store = builder_.GetDepend();

    GateRef intVal = builder_.ChangeTaggedPointerToInt64(value);
    GateRef objMask = circuit_->GetConstantGateWithoutCache(
        MachineType::I64, JSTaggedValue::TAG_HEAPOBJECT_MASK, GateType::NJSValue());
    GateRef masked = builder_.Int64And(intVal, objMask, GateType::Empty(), "checkHeapObject");
    GateRef falseVal = circuit_->GetConstantGateWithoutCache(MachineType::I64, 0, GateType::NJSValue());
    GateRef condition = builder_.Equal(masked, falseVal, "checkHeapObject");
    Label exit(&builder_);
    Label isHeapObject(&builder_);
    Label *currentLabel = env.GetCurrentLabel();
    builder_.Branch(condition, &isHeapObject, &exit);
    {
        GateRef ifBranch = currentLabel->GetControl();
        PrepareToScheduleNewGate(ifBranch, currentBBGates);
        PrepareToScheduleNewGate(condition, currentBBGates);
        PrepareToScheduleNewGate(falseVal, currentBBGates);
        PrepareToScheduleNewGate(masked, currentBBGates);
        PrepareToScheduleNewGate(intVal, currentBBGates);
        PrepareToScheduleNewGate(objMask, currentBBGates);
        PrepareToScheduleNewGate(store, currentBBGates);
        PrepareToScheduleNewGate(addr, currentBBGates);
    }
    GateRef ifTrue = isHeapObject.GetControl();
    GateRef ifFalse = exit.GetControl();
    builder_.Bind(&isHeapObject);
    {
        MemoryAttribute::ShareFlag share = GetShareKind(gate);
        std::string_view comment;
        const CallSignature* cs = nullptr;
        int index = SelectBarrier(share, cs, comment);
        ASSERT(cs && (cs->IsCommonStub() || cs->IsASMCallBarrierStub()) && "Invalid call signature for barrier");
        GateRef target = circuit_->GetConstantGateWithoutCache(MachineType::ARCH, index, GateType::NJSValue());
        GateRef reseverdFrameArgs = circuit_->GetConstantGateWithoutCache(MachineType::I64, 0, GateType::NJSValue());
        GateRef reseverdPc = circuit_->GetConstantGateWithoutCache(MachineType::I64, 0, GateType::NJSValue());
        GateRef storeBarrier = builder_.Call(cs, glue, target, builder_.GetDepend(),
                                             { glue, base, offset, value, reseverdFrameArgs, reseverdPc },
                                             Circuit::NullGate(), comment.data());
        builder_.Jump(&exit);
        {
            GateRef ordinaryBlock = isHeapObject.GetControl();
            PrepareToScheduleNewGate(ordinaryBlock, barrierBBGates);
            PrepareToScheduleNewGate(storeBarrier, barrierBBGates);
            PrepareToScheduleNewGate(reseverdFrameArgs, barrierBBGates);
            PrepareToScheduleNewGate(reseverdPc, barrierBBGates);
            PrepareToScheduleNewGate(ifTrue, barrierBBGates);
        }
    }
    builder_.Bind(&exit);
    {
        GateRef merge = builder_.GetState();
        PrepareToScheduleNewGate(merge, endBBGates);
        PrepareToScheduleNewGate(ifFalse, endBBGates);
    }
    acc_.ReplaceGate(gate, builder_.GetState(), builder_.GetDepend(), Circuit::NullGate());
}

void PostSchedule::PrintGraph(const char* title, ControlFlowGraph &cfg)
{
    LOG_COMPILER(INFO) << "======================== " << title << " ========================";
    for (size_t bbIdx = 0; bbIdx < cfg.size(); bbIdx++) {
        LOG_COMPILER(INFO) << "B" << bbIdx << ":";
        const std::vector<GateRef>& bb = cfg.at(bbIdx);
        for (size_t instIdx = 0; instIdx < bb.size(); instIdx++) {
            GateRef gate = bb[instIdx];
            acc_.Print(gate);
        }
        LOG_COMPILER(INFO) << "";
    }
}
}  // namespace panda::ecmascript::kungfu