1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #ifndef ART_COMPILER_OPTIMIZING_OPTIMIZING_UNIT_TEST_H_
18 #define ART_COMPILER_OPTIMIZING_OPTIMIZING_UNIT_TEST_H_
19
#include <algorithm>
#include <array>
#include <initializer_list>
#include <iostream>
#include <list>
#include <memory>
#include <optional>
#include <ostream>
#include <sstream>
#include <string>
#include <string_view>
#include <tuple>
#include <utility>
#include <variant>
#include <vector>
27
28 #include "base/macros.h"
29 #include "base/indenter.h"
30 #include "base/malloc_arena_pool.h"
31 #include "base/scoped_arena_allocator.h"
32 #include "builder.h"
33 #include "common_compiler_test.h"
34 #include "dex/code_item_accessors-inl.h"
35 #include "dex/dex_file.h"
36 #include "dex/dex_instruction.h"
37 #include "dex/standard_dex_file.h"
38 #include "driver/dex_compilation_unit.h"
39 #include "graph_checker.h"
40 #include "gtest/gtest.h"
41 #include "handle_scope-inl.h"
42 #include "handle_scope.h"
43 #include "mirror/class_loader.h"
44 #include "mirror/dex_cache.h"
45 #include "nodes.h"
46 #include "scoped_thread_state_change.h"
47 #include "ssa_builder.h"
48 #include "ssa_liveness_analysis.h"
49
50 namespace art HIDDEN {
51
// Number of 16-bit code units in a variadic list of Dex instruction words.
#define NUM_INSTRUCTIONS(...) \
  (sizeof((uint16_t[]) {__VA_ARGS__}) /sizeof(uint16_t))

// Builds an in-memory Dex code item header (registers_size, ins_size,
// outs_size, tries_size, debug_info_off, insns_size_in_code_units) followed
// by the instruction words themselves.
#define N_REGISTERS_CODE_ITEM(NUM_REGS, ...) \
    { NUM_REGS, 0, 0, 0, 0, 0, NUM_INSTRUCTIONS(__VA_ARGS__), 0, __VA_ARGS__ }

// Convenience wrappers for the common register counts used in tests.
#define ZERO_REGISTER_CODE_ITEM(...)   N_REGISTERS_CODE_ITEM(0, __VA_ARGS__)
#define ONE_REGISTER_CODE_ITEM(...)    N_REGISTERS_CODE_ITEM(1, __VA_ARGS__)
#define TWO_REGISTERS_CODE_ITEM(...)   N_REGISTERS_CODE_ITEM(2, __VA_ARGS__)
#define THREE_REGISTERS_CODE_ITEM(...) N_REGISTERS_CODE_ITEM(3, __VA_ARGS__)
#define FOUR_REGISTERS_CODE_ITEM(...)  N_REGISTERS_CODE_ITEM(4, __VA_ARGS__)
#define FIVE_REGISTERS_CODE_ITEM(...)  N_REGISTERS_CODE_ITEM(5, __VA_ARGS__)
#define SIX_REGISTERS_CODE_ITEM(...)   N_REGISTERS_CODE_ITEM(6, __VA_ARGS__)
65
// Thin wrapper making an HInstruction* comparable and printable in gtest
// failure messages (see operator== / operator<< below).
struct InstructionDumper {
 public:
  HInstruction* ins_;
};
70
71 inline bool operator==(const InstructionDumper& a, const InstructionDumper& b) {
72 return a.ins_ == b.ins_;
73 }
74 inline bool operator!=(const InstructionDumper& a, const InstructionDumper& b) {
75 return !(a == b);
76 }
77
78 inline std::ostream& operator<<(std::ostream& os, const InstructionDumper& id) {
79 if (id.ins_ == nullptr) {
80 return os << "NULL";
81 } else {
82 return os << "(" << id.ins_ << "): " << id.ins_->DumpWithArgs();
83 }
84 }
85
// gtest-style assertions on HInstruction*. Wrapping in InstructionDumper makes
// failure messages dump the instruction instead of printing a raw pointer.
// IsRemoved() is defined later in this header.
#define EXPECT_INS_EQ(a, b) EXPECT_EQ(InstructionDumper{a}, InstructionDumper{b})
#define EXPECT_INS_REMOVED(a) EXPECT_TRUE(IsRemoved(a)) << "Not removed: " << (InstructionDumper{a})
#define EXPECT_INS_RETAINED(a) EXPECT_FALSE(IsRemoved(a)) << "Removed: " << (InstructionDumper{a})
#define ASSERT_INS_EQ(a, b) ASSERT_EQ(InstructionDumper{a}, InstructionDumper{b})
#define ASSERT_INS_REMOVED(a) ASSERT_TRUE(IsRemoved(a)) << "Not removed: " << (InstructionDumper{a})
#define ASSERT_INS_RETAINED(a) ASSERT_FALSE(IsRemoved(a)) << "Removed: " << (InstructionDumper{a})
92
93 inline LiveInterval* BuildInterval(const size_t ranges[][2],
94 size_t number_of_ranges,
95 ScopedArenaAllocator* allocator,
96 int reg = -1,
97 HInstruction* defined_by = nullptr) {
98 LiveInterval* interval =
99 LiveInterval::MakeInterval(allocator, DataType::Type::kInt32, defined_by);
100 if (defined_by != nullptr) {
101 defined_by->SetLiveInterval(interval);
102 }
103 for (size_t i = number_of_ranges; i > 0; --i) {
104 interval->AddRange(ranges[i - 1][0], ranges[i - 1][1]);
105 }
106 interval->SetRegister(reg);
107 return interval;
108 }
109
RemoveSuspendChecks(HGraph * graph)110 inline void RemoveSuspendChecks(HGraph* graph) {
111 for (HBasicBlock* block : graph->GetBlocks()) {
112 if (block != nullptr) {
113 if (block->GetLoopInformation() != nullptr) {
114 block->GetLoopInformation()->SetSuspendCheck(nullptr);
115 }
116 for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
117 HInstruction* current = it.Current();
118 if (current->IsSuspendCheck()) {
119 current->GetBlock()->RemoveInstruction(current);
120 }
121 }
122 }
123 }
124 }
125
// Bundles a malloc-backed arena pool with the two allocator flavors tests
// need, so all of them can be reset together (see ResetPoolAndAllocator()).
// Member order matters: allocators must be destroyed before the pool.
class ArenaPoolAndAllocator {
 public:
  ArenaPoolAndAllocator()
      : pool_(), allocator_(&pool_), arena_stack_(&pool_), scoped_allocator_(&arena_stack_) { }

  ArenaAllocator* GetAllocator() { return &allocator_; }
  ArenaStack* GetArenaStack() { return &arena_stack_; }
  ScopedArenaAllocator* GetScopedAllocator() { return &scoped_allocator_; }

 private:
  MallocArenaPool pool_;
  ArenaAllocator allocator_;
  ArenaStack arena_stack_;
  ScopedArenaAllocator scoped_allocator_;
};
141
// Builds an HGraph basic-block skeleton from a named edge list and keeps a
// bidirectional name <-> block mapping so tests can refer to blocks by name.
class AdjacencyListGraph {
 public:
  // An edge "src -> dest" between named blocks.
  using Edge = std::pair<const std::string_view, const std::string_view>;
  AdjacencyListGraph(
      HGraph* graph,
      ArenaAllocator* alloc,
      const std::string_view entry_name,
      const std::string_view exit_name,
      const std::vector<Edge>& adj) : graph_(graph) {
    // Allocates and registers a fresh block; used both directly and as the
    // GetOrCreate factory for names seen for the first time.
    auto create_block = [&]() {
      HBasicBlock* blk = new (alloc) HBasicBlock(graph_);
      graph_->AddBlock(blk);
      return blk;
    };
    HBasicBlock* entry = create_block();
    HBasicBlock* exit = create_block();
    graph_->SetEntryBlock(entry);
    graph_->SetExitBlock(exit);
    name_to_block_.Put(entry_name, entry);
    name_to_block_.Put(exit_name, exit);
    for (const auto& [src, dest] : adj) {
      HBasicBlock* src_blk = name_to_block_.GetOrCreate(src, create_block);
      HBasicBlock* dest_blk = name_to_block_.GetOrCreate(dest, create_block);
      src_blk->AddSuccessor(dest_blk);
    }
    // Recompute analyses now that the full CFG shape is known.
    graph_->ClearReachabilityInformation();
    graph_->ComputeDominanceInformation();
    graph_->ComputeReachabilityInformation();
    // Build the reverse mapping for GetName()/HasBlock().
    for (auto [name, blk] : name_to_block_) {
      block_to_name_.Put(blk, name);
    }
  }

  bool HasBlock(const HBasicBlock* blk) const {
    return block_to_name_.find(blk) != block_to_name_.end();
  }

  std::string_view GetName(const HBasicBlock* blk) const {
    return block_to_name_.Get(blk);
  }

  HBasicBlock* Get(const std::string_view& sv) const {
    return name_to_block_.Get(sv);
  }

  AdjacencyListGraph(AdjacencyListGraph&&) = default;
  AdjacencyListGraph(const AdjacencyListGraph&) = default;
  AdjacencyListGraph& operator=(AdjacencyListGraph&&) = default;
  AdjacencyListGraph& operator=(const AdjacencyListGraph&) = default;

  // Dumps the graph, labelling known blocks with their test-given names.
  std::ostream& Dump(std::ostream& os) const {
    struct Namer : public BlockNamer {
     public:
      explicit Namer(const AdjacencyListGraph& alg) : BlockNamer(), alg_(alg) {}
      std::ostream& PrintName(std::ostream& os, HBasicBlock* blk) const override {
        if (alg_.HasBlock(blk)) {
          return os << alg_.GetName(blk) << " (" << blk->GetBlockId() << ")";
        } else {
          return os << "<Unnamed B" << blk->GetBlockId() << ">";
        }
      }

      const AdjacencyListGraph& alg_;
    };
    Namer namer(*this);
    return graph_->Dump(os, /* codegen_= */ nullptr, namer);
  }

 private:
  HGraph* graph_;  // Not owned.
  SafeMap<const std::string_view, HBasicBlock*> name_to_block_;
  SafeMap<const HBasicBlock*, const std::string_view> block_to_name_;
};
215
216 // Have a separate helper so the OptimizingCFITest can inherit it without causing
217 // multiple inheritance errors from having two gtest as a parent twice.
218 class OptimizingUnitTestHelper {
219 public:
OptimizingUnitTestHelper()220 OptimizingUnitTestHelper()
221 : pool_and_allocator_(new ArenaPoolAndAllocator()),
222 graph_(nullptr),
223 entry_block_(nullptr),
224 return_block_(nullptr),
225 exit_block_(nullptr) { }
226
GetAllocator()227 ArenaAllocator* GetAllocator() { return pool_and_allocator_->GetAllocator(); }
GetArenaStack()228 ArenaStack* GetArenaStack() { return pool_and_allocator_->GetArenaStack(); }
GetScopedAllocator()229 ScopedArenaAllocator* GetScopedAllocator() { return pool_and_allocator_->GetScopedAllocator(); }
230
ResetPoolAndAllocator()231 void ResetPoolAndAllocator() {
232 pool_and_allocator_.reset(new ArenaPoolAndAllocator());
233 }
234
235 HGraph* CreateGraph(VariableSizedHandleScope* handles = nullptr) {
236 ArenaAllocator* const allocator = pool_and_allocator_->GetAllocator();
237
238 // Reserve a big array of 0s so the dex file constructor can offsets from the header.
239 static constexpr size_t kDexDataSize = 4 * KB;
240 const uint8_t* dex_data = reinterpret_cast<uint8_t*>(allocator->Alloc(kDexDataSize));
241
242 // Create the dex file based on the fake data. Call the constructor so that we can use virtual
243 // functions. Don't use the arena for the StandardDexFile otherwise the dex location leaks.
244 auto container =
245 std::make_shared<MemoryDexFileContainer>(dex_data, sizeof(StandardDexFile::Header));
246 dex_files_.emplace_back(new StandardDexFile(dex_data,
247 sizeof(StandardDexFile::Header),
248 "no_location",
249 /*location_checksum*/ 0,
250 /*oat_dex_file*/ nullptr,
251 std::move(container)));
252
253 graph_ = new (allocator) HGraph(
254 allocator,
255 pool_and_allocator_->GetArenaStack(),
256 handles,
257 *dex_files_.back(),
258 /*method_idx*/-1,
259 kRuntimeISA);
260 return graph_;
261 }
262
263 // Create a control-flow graph from Dex instructions.
264 HGraph* CreateCFG(const std::vector<uint16_t>& data,
265 DataType::Type return_type = DataType::Type::kInt32) {
266 ScopedObjectAccess soa(Thread::Current());
267 VariableSizedHandleScope handles(soa.Self());
268 HGraph* graph = CreateGraph(&handles);
269
270 // The code item data might not aligned to 4 bytes, copy it to ensure that.
271 const size_t code_item_size = data.size() * sizeof(data.front());
272 void* aligned_data = GetAllocator()->Alloc(code_item_size);
273 memcpy(aligned_data, &data[0], code_item_size);
274 CHECK_ALIGNED(aligned_data, StandardDexFile::CodeItem::kAlignment);
275 const dex::CodeItem* code_item = reinterpret_cast<const dex::CodeItem*>(aligned_data);
276
277 {
278 const DexCompilationUnit* dex_compilation_unit =
279 new (graph->GetAllocator()) DexCompilationUnit(
280 /* class_loader= */ Handle<mirror::ClassLoader>(), // Invalid handle.
281 /* class_linker= */ nullptr,
282 graph->GetDexFile(),
283 code_item,
284 /* class_def_idx= */ DexFile::kDexNoIndex16,
285 /* method_idx= */ dex::kDexNoIndex,
286 /* access_flags= */ 0u,
287 /* verified_method= */ nullptr,
288 /* dex_cache= */ Handle<mirror::DexCache>()); // Invalid handle.
289 CodeItemDebugInfoAccessor accessor(graph->GetDexFile(), code_item, /*dex_method_idx*/ 0u);
290 HGraphBuilder builder(graph, dex_compilation_unit, accessor, return_type);
291 bool graph_built = (builder.BuildGraph() == kAnalysisSuccess);
292 return graph_built ? graph : nullptr;
293 }
294 }
295
296 void InitGraph(VariableSizedHandleScope* handles = nullptr) {
297 CreateGraph(handles);
298 entry_block_ = AddNewBlock();
299 return_block_ = AddNewBlock();
300 exit_block_ = AddNewBlock();
301
302 graph_->SetEntryBlock(entry_block_);
303 graph_->SetExitBlock(exit_block_);
304
305 entry_block_->AddSuccessor(return_block_);
306 return_block_->AddSuccessor(exit_block_);
307
308 return_block_->AddInstruction(new (GetAllocator()) HReturnVoid());
309 exit_block_->AddInstruction(new (GetAllocator()) HExit());
310 }
311
AddParameter(HInstruction * parameter)312 void AddParameter(HInstruction* parameter) {
313 entry_block_->AddInstruction(parameter);
314 parameters_.push_back(parameter);
315 }
316
AddNewBlock()317 HBasicBlock* AddNewBlock() {
318 HBasicBlock* block = new (GetAllocator()) HBasicBlock(graph_);
319 graph_->AddBlock(block);
320 return block;
321 }
322
323 // Run GraphChecker with all checks.
324 //
325 // Return: the status whether the run is successful.
326 bool CheckGraph(std::ostream& oss = std::cerr) {
327 return CheckGraph(graph_, oss);
328 }
329
ManuallyBuildEnvFor(HInstruction * instruction,ArenaVector<HInstruction * > * current_locals)330 HEnvironment* ManuallyBuildEnvFor(HInstruction* instruction,
331 ArenaVector<HInstruction*>* current_locals) {
332 HEnvironment* environment = new (GetAllocator()) HEnvironment(
333 (GetAllocator()),
334 current_locals->size(),
335 graph_->GetArtMethod(),
336 instruction->GetDexPc(),
337 instruction);
338
339 environment->CopyFrom(ArrayRef<HInstruction* const>(*current_locals));
340 instruction->SetRawEnvironment(environment);
341 return environment;
342 }
343
EnsurePredecessorOrder(HBasicBlock * target,std::initializer_list<HBasicBlock * > preds)344 void EnsurePredecessorOrder(HBasicBlock* target, std::initializer_list<HBasicBlock*> preds) {
345 // Make sure the given preds and block predecessors have the same blocks.
346 BitVector bv(preds.size(), false, Allocator::GetMallocAllocator());
347 auto preds_and_idx = ZipCount(MakeIterationRange(target->GetPredecessors()));
348 bool correct_preds = preds.size() == target->GetPredecessors().size() &&
349 std::all_of(preds.begin(), preds.end(), [&](HBasicBlock* pred) {
350 return std::any_of(preds_and_idx.begin(),
351 preds_and_idx.end(),
352 // Make sure every target predecessor is used only
353 // once.
354 [&](std::pair<HBasicBlock*, uint32_t> cur) {
355 if (cur.first == pred && !bv.IsBitSet(cur.second)) {
356 bv.SetBit(cur.second);
357 return true;
358 } else {
359 return false;
360 }
361 });
362 }) &&
363 bv.NumSetBits() == preds.size();
364 auto dump_list = [](auto it) {
365 std::ostringstream oss;
366 oss << "[";
367 bool first = true;
368 for (HBasicBlock* b : it) {
369 if (!first) {
370 oss << ", ";
371 }
372 first = false;
373 oss << b->GetBlockId();
374 }
375 oss << "]";
376 return oss.str();
377 };
378 ASSERT_TRUE(correct_preds) << "Predecessors of " << target->GetBlockId() << " are "
379 << dump_list(target->GetPredecessors()) << " not "
380 << dump_list(preds);
381 if (correct_preds) {
382 std::copy(preds.begin(), preds.end(), target->predecessors_.begin());
383 }
384 }
385
SetupFromAdjacencyList(const std::string_view entry_name,const std::string_view exit_name,const std::vector<AdjacencyListGraph::Edge> & adj)386 AdjacencyListGraph SetupFromAdjacencyList(const std::string_view entry_name,
387 const std::string_view exit_name,
388 const std::vector<AdjacencyListGraph::Edge>& adj) {
389 return AdjacencyListGraph(graph_, GetAllocator(), entry_name, exit_name, adj);
390 }
391
ManuallyBuildEnvFor(HInstruction * ins,const std::initializer_list<HInstruction * > & env)392 void ManuallyBuildEnvFor(HInstruction* ins, const std::initializer_list<HInstruction*>& env) {
393 ArenaVector<HInstruction*> current_locals(env, GetAllocator()->Adapter(kArenaAllocInstruction));
394 OptimizingUnitTestHelper::ManuallyBuildEnvFor(ins, ¤t_locals);
395 }
396
397 HLoadClass* MakeClassLoad(std::optional<dex::TypeIndex> ti = std::nullopt,
398 std::optional<Handle<mirror::Class>> klass = std::nullopt) {
399 return new (GetAllocator()) HLoadClass(graph_->GetCurrentMethod(),
400 ti ? *ti : dex::TypeIndex(class_idx_++),
401 graph_->GetDexFile(),
402 /* klass= */ klass ? *klass : null_klass_,
403 /* is_referrers_class= */ false,
404 /* dex_pc= */ 0,
405 /* needs_access_check= */ false);
406 }
407
408 HNewInstance* MakeNewInstance(HInstruction* cls, uint32_t dex_pc = 0u) {
409 EXPECT_TRUE(cls->IsLoadClass() || cls->IsClinitCheck()) << *cls;
410 HLoadClass* load =
411 cls->IsLoadClass() ? cls->AsLoadClass() : cls->AsClinitCheck()->GetLoadClass();
412 return new (GetAllocator()) HNewInstance(cls,
413 dex_pc,
414 load->GetTypeIndex(),
415 graph_->GetDexFile(),
416 /* finalizable= */ false,
417 QuickEntrypointEnum::kQuickAllocObjectInitialized);
418 }
419
420 HInstanceFieldSet* MakeIFieldSet(HInstruction* inst,
421 HInstruction* data,
422 MemberOffset off,
423 uint32_t dex_pc = 0u) {
424 return new (GetAllocator()) HInstanceFieldSet(inst,
425 data,
426 /* field= */ nullptr,
427 /* field_type= */ data->GetType(),
428 /* field_offset= */ off,
429 /* is_volatile= */ false,
430 /* field_idx= */ 0,
431 /* declaring_class_def_index= */ 0,
432 graph_->GetDexFile(),
433 dex_pc);
434 }
435
436 HInstanceFieldGet* MakeIFieldGet(HInstruction* inst,
437 DataType::Type type,
438 MemberOffset off,
439 uint32_t dex_pc = 0u) {
440 return new (GetAllocator()) HInstanceFieldGet(inst,
441 /* field= */ nullptr,
442 /* field_type= */ type,
443 /* field_offset= */ off,
444 /* is_volatile= */ false,
445 /* field_idx= */ 0,
446 /* declaring_class_def_index= */ 0,
447 graph_->GetDexFile(),
448 dex_pc);
449 }
450
MakeInvoke(DataType::Type return_type,const std::vector<HInstruction * > & args)451 HInvokeStaticOrDirect* MakeInvoke(DataType::Type return_type,
452 const std::vector<HInstruction*>& args) {
453 MethodReference method_reference{/* file= */ &graph_->GetDexFile(), /* index= */ method_idx_++};
454 HInvokeStaticOrDirect* res = new (GetAllocator())
455 HInvokeStaticOrDirect(GetAllocator(),
456 args.size(),
457 return_type,
458 /* dex_pc= */ 0,
459 method_reference,
460 /* resolved_method= */ nullptr,
461 HInvokeStaticOrDirect::DispatchInfo{},
462 InvokeType::kStatic,
463 /* resolved_method_reference= */ method_reference,
464 HInvokeStaticOrDirect::ClinitCheckRequirement::kNone,
465 !graph_->IsDebuggable());
466 for (auto [ins, idx] : ZipCount(MakeIterationRange(args))) {
467 res->SetRawInputAt(idx, ins);
468 }
469 return res;
470 }
471
MakePhi(const std::vector<HInstruction * > & ins)472 HPhi* MakePhi(const std::vector<HInstruction*>& ins) {
473 EXPECT_GE(ins.size(), 2u) << "Phi requires at least 2 inputs";
474 HPhi* phi =
475 new (GetAllocator()) HPhi(GetAllocator(), kNoRegNumber, ins.size(), ins[0]->GetType());
476 for (auto [i, idx] : ZipCount(MakeIterationRange(ins))) {
477 phi->SetRawInputAt(idx, i);
478 }
479 return phi;
480 }
481
SetupExit(HBasicBlock * exit)482 void SetupExit(HBasicBlock* exit) {
483 exit->AddInstruction(new (GetAllocator()) HExit());
484 }
485
DefaultTypeIndexForType(DataType::Type type)486 dex::TypeIndex DefaultTypeIndexForType(DataType::Type type) {
487 switch (type) {
488 case DataType::Type::kBool:
489 return dex::TypeIndex(1);
490 case DataType::Type::kUint8:
491 case DataType::Type::kInt8:
492 return dex::TypeIndex(2);
493 case DataType::Type::kUint16:
494 case DataType::Type::kInt16:
495 return dex::TypeIndex(3);
496 case DataType::Type::kUint32:
497 case DataType::Type::kInt32:
498 return dex::TypeIndex(4);
499 case DataType::Type::kUint64:
500 case DataType::Type::kInt64:
501 return dex::TypeIndex(5);
502 case DataType::Type::kReference:
503 return dex::TypeIndex(6);
504 case DataType::Type::kFloat32:
505 return dex::TypeIndex(7);
506 case DataType::Type::kFloat64:
507 return dex::TypeIndex(8);
508 case DataType::Type::kVoid:
509 EXPECT_TRUE(false) << "No type for void!";
510 return dex::TypeIndex(1000);
511 }
512 }
513
514 // Creates a parameter. The instruction is automatically added to the entry-block
515 HParameterValue* MakeParam(DataType::Type type, std::optional<dex::TypeIndex> ti = std::nullopt) {
516 HParameterValue* val = new (GetAllocator()) HParameterValue(
517 graph_->GetDexFile(), ti ? *ti : DefaultTypeIndexForType(type), param_count_++, type);
518 graph_->GetEntryBlock()->AddInstruction(val);
519 return val;
520 }
521
522 protected:
CheckGraph(HGraph * graph,std::ostream & oss)523 bool CheckGraph(HGraph* graph, std::ostream& oss) {
524 GraphChecker checker(graph);
525 checker.Run();
526 checker.Dump(oss);
527 return checker.IsValid();
528 }
529
530 std::vector<std::unique_ptr<const StandardDexFile>> dex_files_;
531 std::unique_ptr<ArenaPoolAndAllocator> pool_and_allocator_;
532
533 HGraph* graph_;
534 HBasicBlock* entry_block_;
535 HBasicBlock* return_block_;
536 HBasicBlock* exit_block_;
537
538 std::vector<HInstruction*> parameters_;
539
540 size_t param_count_ = 0;
541 size_t class_idx_ = 42;
542 uint32_t method_idx_ = 100;
543
544 ScopedNullHandle<mirror::Class> null_klass_;
545 };
546
547 class OptimizingUnitTest : public CommonArtTest, public OptimizingUnitTestHelper {};
548
// Naive string diff data type: ordered (needle, replacement) pairs applied
// front-to-back by Patch().
using diff_t = std::list<std::pair<std::string, std::string>>;

// An alias for the empty string used to make it clear that a line is
// removed in a diff.
static const std::string removed = "";  // NOLINT [runtime/string] [4]
555
556 // Naive patch command: apply a diff to a string.
Patch(const std::string & original,const diff_t & diff)557 inline std::string Patch(const std::string& original, const diff_t& diff) {
558 std::string result = original;
559 for (const auto& p : diff) {
560 std::string::size_type pos = result.find(p.first);
561 DCHECK_NE(pos, std::string::npos)
562 << "Could not find: \"" << p.first << "\" in \"" << result << "\"";
563 result.replace(pos, p.first.size(), p.second);
564 }
565 return result;
566 }
567
// Returns whether the instruction has been removed from the graph: a removed
// instruction no longer has an owning block.
inline bool IsRemoved(HInstruction* instruction) {
  return instruction->GetBlock() == nullptr;
}
572
// Stream output for AdjacencyListGraph; delegates to its named-block Dump().
inline std::ostream& operator<<(std::ostream& oss, const AdjacencyListGraph& alg) {
  return alg.Dump(oss);
}
576
// Visitor that dispatches each instruction to the handler lambda whose
// parameter matches the instruction's concrete type, e.g.
//   PatternMatchGraphVisitor vis(graph, [](HAdd* a) {...}, [](HPhi* p) {...});
// At most one handler per concrete instruction kind.
class PatternMatchGraphVisitor final : public HGraphVisitor {
 private:
  // Type-erased base so handlers for different concrete types can live in a
  // single per-kind array.
  struct HandlerWrapper {
   public:
    virtual ~HandlerWrapper() {}
    virtual void operator()(HInstruction* h) = 0;
  };

  template <HInstruction::InstructionKind kKind, typename F>
  struct KindWrapper;

// Generates one KindWrapper specialization per concrete instruction kind,
// downcasting to the concrete type before invoking the handler.
#define GEN_HANDLER(nm, unused)                                                         \
  template <typename F>                                                                 \
  struct KindWrapper<HInstruction::InstructionKind::k##nm, F> : public HandlerWrapper { \
   public:                                                                              \
    explicit KindWrapper(F f) : f_(f) {}                                                \
    void operator()(HInstruction* h) override {                                         \
      if constexpr (std::is_invocable_v<F, H##nm*>) {                                   \
        f_(h->As##nm());                                                                \
      } else {                                                                          \
        LOG(FATAL) << "Incorrect call with " << #nm;                                    \
      }                                                                                 \
    }                                                                                   \
                                                                                        \
   private:                                                                             \
    F f_;                                                                               \
  };

  FOR_EACH_CONCRETE_INSTRUCTION(GEN_HANDLER)
#undef GEN_HANDLER

  // Wraps `f` in the KindWrapper matching the runtime `kind`.
  template <typename F>
  std::unique_ptr<HandlerWrapper> GetWrapper(HInstruction::InstructionKind kind, F f) {
    switch (kind) {
#define GEN_GETTER(nm, unused)               \
  case HInstruction::InstructionKind::k##nm: \
    return std::unique_ptr<HandlerWrapper>(  \
        new KindWrapper<HInstruction::InstructionKind::k##nm, F>(f));
      FOR_EACH_CONCRETE_INSTRUCTION(GEN_GETTER)
#undef GEN_GETTER
      default:
        LOG(FATAL) << "Unable to handle kind " << kind;
        return nullptr;
    }
  }

 public:
  template <typename... Inst>
  explicit PatternMatchGraphVisitor(HGraph* graph, Inst... handlers) : HGraphVisitor(graph) {
    FillHandlers(handlers...);
  }

  void VisitInstruction(HInstruction* instruction) override {
    auto& h = handlers_[instruction->GetKind()];
    if (h.get() != nullptr) {
      (*h)(instruction);
    }
  }

 private:
  // Deduces, at compile time, the instruction kind a handler accepts, and
  // static_asserts that it is exactly one concrete (non-abstract) type.
  template <typename Func>
  constexpr HInstruction::InstructionKind GetKind() {
#define CHECK_INST(nm, unused)                     \
  if constexpr (std::is_invocable_v<Func, H##nm*>) { \
    return HInstruction::InstructionKind::k##nm;     \
  }
    FOR_EACH_CONCRETE_INSTRUCTION(CHECK_INST);
#undef CHECK_INST
    static_assert(!std::is_invocable_v<Func, HInstruction*>,
                  "Use on generic HInstruction not allowed");
#define STATIC_ASSERT_ABSTRACT(nm, unused) && !std::is_invocable_v<Func, H##nm*>
    static_assert(true FOR_EACH_ABSTRACT_INSTRUCTION(STATIC_ASSERT_ABSTRACT),
                  "Must not be abstract instruction");
#undef STATIC_ASSERT_ABSTRACT
#define STATIC_ASSERT_CONCRETE(nm, unused) || std::is_invocable_v<Func, H##nm*>
    static_assert(false FOR_EACH_CONCRETE_INSTRUCTION(STATIC_ASSERT_CONCRETE),
                  "Must be a concrete instruction");
#undef STATIC_ASSERT_CONCRETE
    // Unreachable for valid handlers; checked at runtime in FillHandlers().
    return HInstruction::InstructionKind::kLastInstructionKind;
  }
  // Base case: register a single handler under its deduced kind.
  template <typename First>
  void FillHandlers(First h1) {
    HInstruction::InstructionKind type = GetKind<First>();
    CHECK_NE(type, HInstruction::kLastInstructionKind)
        << "Unknown instruction kind. Only concrete ones please.";
    handlers_[type] = GetWrapper(type, h1);
  }

  // Recursive case: peel off one handler at a time.
  template <typename First, typename... Inst>
  void FillHandlers(First h1, Inst... handlers) {
    FillHandlers(h1);
    FillHandlers<Inst...>(handlers...);
  }

  // One optional handler slot per instruction kind.
  std::array<std::unique_ptr<HandlerWrapper>, HInstruction::InstructionKind::kLastInstructionKind>
      handlers_;
};
674
// Collects every instruction of each Target type, searching either the whole
// graph (std::nullopt), a single block, or an explicit list of blocks.
// Returns one vector per Target type, in template-argument order.
template <typename... Target>
std::tuple<std::vector<Target*>...> FindAllInstructions(
    HGraph* graph,
    std::variant<std::nullopt_t, HBasicBlock*, std::initializer_list<HBasicBlock*>> blks =
        std::nullopt) {
  std::tuple<std::vector<Target*>...> res;
  PatternMatchGraphVisitor vis(
      graph, [&](Target* t) { std::get<std::vector<Target*>>(res).push_back(t); }...);

  if (std::holds_alternative<std::initializer_list<HBasicBlock*>>(blks)) {
    for (HBasicBlock* blk : std::get<std::initializer_list<HBasicBlock*>>(blks)) {
      vis.VisitBasicBlock(blk);
    }
  } else if (std::holds_alternative<std::nullopt_t>(blks)) {
    vis.VisitInsertionOrder();
  } else {
    vis.VisitBasicBlock(std::get<HBasicBlock*>(blks));
  }
  return res;
}
695
// Like FindAllInstructions, but expects at most one instruction per Target
// type in the searched scope; finding a second triggers a test failure.
// Slots for types not found remain nullptr.
template <typename... Target>
std::tuple<Target*...> FindSingleInstructions(
    HGraph* graph,
    std::variant<std::nullopt_t, HBasicBlock*, std::initializer_list<HBasicBlock*>> blks =
        std::nullopt) {
  std::tuple<Target*...> res;
  PatternMatchGraphVisitor vis(graph, [&](Target* t) {
    EXPECT_EQ(std::get<Target*>(res), nullptr)
        << *std::get<Target*>(res) << " already found but found " << *t << "!";
    std::get<Target*>(res) = t;
  }...);
  if (std::holds_alternative<std::initializer_list<HBasicBlock*>>(blks)) {
    for (HBasicBlock* blk : std::get<std::initializer_list<HBasicBlock*>>(blks)) {
      vis.VisitBasicBlock(blk);
    }
  } else if (std::holds_alternative<std::nullopt_t>(blks)) {
    vis.VisitInsertionOrder();
  } else {
    vis.VisitBasicBlock(std::get<HBasicBlock*>(blks));
  }
  return res;
}
718
// Single-type convenience wrapper around FindSingleInstructions; returns the
// unique matching instruction, or nullptr if none exists in the scope.
template <typename Target>
Target* FindSingleInstruction(
    HGraph* graph,
    std::variant<std::nullopt_t, HBasicBlock*, std::initializer_list<HBasicBlock*>> blks =
        std::nullopt) {
  return std::get<Target*>(FindSingleInstructions<Target>(graph, blks));
}
726
727 } // namespace art
728
729 #endif // ART_COMPILER_OPTIMIZING_OPTIMIZING_UNIT_TEST_H_
730