// Copyright (C) 2019 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

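// Benchmarks for the generated test tables (RootTestTable, ChildTestTable)
// declared in py_tables_benchmark_py.h, covering row insertion, iteration,
// filtering and sorting.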
#include <cstdint>
#include <cstdlib>
#include <optional>
#include <random>

#include <benchmark/benchmark.h>

#include "src/trace_processor/tables/py_tables_benchmark_py.h"

namespace perfetto {
namespace trace_processor {
namespace tables {

RootTestTable::~RootTestTable() = default;
ChildTestTable::~ChildTestTable() = default;

}  // namespace tables
}  // namespace trace_processor
}  // namespace perfetto

namespace {

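// Returns true when the BENCHMARK_FUNCTIONAL_TEST_ONLY environment variable is
// set, i.e. when the benchmarks should only be smoke-tested with a single
// small argument instead of running a full performance sweep.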
bool IsBenchmarkFunctionalOnly() {
  return getenv("BENCHMARK_FUNCTIONAL_TEST_ONLY") != nullptr;
}

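// Configures the row counts used by the filter benchmarks: a single small
// table in functional-only mode, otherwise sizes from 1K to 2M rows.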
void TableFilterArgs(benchmark::internal::Benchmark* b) {
  if (IsBenchmarkFunctionalOnly()) {
    b->Arg(1024);
  } else {
    b->RangeMultiplier(8);
    b->Range(1024, 2 * 1024 * 1024);
  }
}

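// Configures the row counts used by the sort benchmarks: a single small table
// in functional-only mode, otherwise sizes from 1K to 256K rows.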
void TableSortArgs(benchmark::internal::Benchmark* b) {
  if (IsBenchmarkFunctionalOnly()) {
    b->Arg(64);
  } else {
    b->RangeMultiplier(8);
    b->Range(1024, 256 * 1024);
  }
}

}  // namespace

using perfetto::trace_processor::RowMap;
using perfetto::trace_processor::SqlValue;
using perfetto::trace_processor::StringPool;
using perfetto::trace_processor::Table;
using perfetto::trace_processor::tables::ChildTestTable;
using perfetto::trace_processor::tables::RootTestTable;

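// Benchmarks the cost of inserting empty rows into the root table.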
static void BM_TableInsert(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);

  for (auto _ : state) {
    benchmark::DoNotOptimize(root.Insert({}));
  }
}
BENCHMARK(BM_TableInsert);

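// Benchmarks iterating over every column of a child table through the generic
// Table interface, restarting the iterator once it is exhausted.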
static void BM_TableIteratorChild(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);
  ChildTestTable child(&pool, &root);

  uint32_t size = static_cast<uint32_t>(state.range(0));
  for (uint32_t i = 0; i < size; ++i) {
    child.Insert({});
    root.Insert({});
  }

  auto it = static_cast<Table&>(child).IterateRows();
  for (auto _ : state) {
    for (uint32_t i = 0; i < child.GetColumnCount(); ++i) {
      benchmark::DoNotOptimize(it.Get(i));
    }
    it.Next();
    if (!it)
      it = static_cast<Table&>(child).IterateRows();
  }
}
BENCHMARK(BM_TableIteratorChild)->Apply(TableFilterArgs);

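// Benchmarks filtering the root table on a low-cardinality column and then
// sorting the filtered result on a second, random-valued column.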
static void BM_TableFilterAndSortRoot(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);

  uint32_t size = static_cast<uint32_t>(state.range(0));
  uint32_t partitions = 8;

  std::minstd_rand0 rnd_engine(45);
  for (uint32_t i = 0; i < size; ++i) {
    RootTestTable::Row row;
    row.root_non_null = rnd_engine() % partitions;
    row.root_non_null_2 = static_cast<uint32_t>(rnd_engine());
    root.Insert(row);
  }

  for (auto _ : state) {
    Table filtered = root.Filter({root.root_non_null().eq(5)},
                                 RowMap::OptimizeFor::kLookupSpeed);
    benchmark::DoNotOptimize(
        filtered.Sort({root.root_non_null_2().ascending()}));
  }
}
BENCHMARK(BM_TableFilterAndSortRoot)->Apply(TableFilterArgs);

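// Benchmarks an equality filter on the root table's id column, which matches
// at most one row.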
static void BM_TableFilterRootId(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);

  uint32_t size = static_cast<uint32_t>(state.range(0));
  for (uint32_t i = 0; i < size; ++i)
    root.Insert({});

  for (auto _ : state) {
    benchmark::DoNotOptimize(root.Filter({root.id().eq(30)}));
  }
}
BENCHMARK(BM_TableFilterRootId)->Apply(TableFilterArgs);

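// Benchmarks filtering the root table on its id column combined with a second
// constraint on a non-null column.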
static void BM_TableFilterRootIdAndOther(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);

  uint32_t size = static_cast<uint32_t>(state.range(0));

  for (uint32_t i = 0; i < size; ++i) {
    RootTestTable::Row root_row;
    root_row.root_non_null = i * 4;
    root.Insert(root_row);
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(root.Filter(
        {root.id().eq(root.row_count() - 1), root.root_non_null().gt(100)}));
  }
}
BENCHMARK(BM_TableFilterRootIdAndOther)->Apply(TableFilterArgs);

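// Benchmarks an equality filter on a child table's id column.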
static void BM_TableFilterChildId(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);
  ChildTestTable child(&pool, &root);

  uint32_t size = static_cast<uint32_t>(state.range(0));
  for (uint32_t i = 0; i < size; ++i) {
    root.Insert({});
    child.Insert({});
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(child.Filter({child.id().eq(30)}));
  }
}
BENCHMARK(BM_TableFilterChildId)->Apply(TableFilterArgs);

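// Benchmarks filtering a child table on its id column combined with a
// constraint on a sorted column defined in the root table.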
static void BM_TableFilterChildIdAndSortedInRoot(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);
  ChildTestTable child(&pool, &root);

  uint32_t size = static_cast<uint32_t>(state.range(0));
  for (uint32_t i = 0; i < size; ++i) {
    RootTestTable::Row root_row;
    root_row.root_sorted = i * 2;
    root.Insert(root_row);

    ChildTestTable::Row child_row;
    child_row.root_sorted = i * 2 + 1;
    child.Insert(child_row);
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(
        child.Filter({child.id().eq(30), child.root_sorted().gt(1024)}));
  }
}
BENCHMARK(BM_TableFilterChildIdAndSortedInRoot)->Apply(TableFilterArgs);

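// Benchmarks an equality filter on a non-null root column where many rows
// (on average ~1024) match the filtered value.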
static void BM_TableFilterRootNonNullEqMatchMany(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);

  uint32_t size = static_cast<uint32_t>(state.range(0));
  uint32_t partitions = size / 1024;

  std::minstd_rand0 rnd_engine;
  for (uint32_t i = 0; i < size; ++i) {
    RootTestTable::Row row(static_cast<uint32_t>(rnd_engine() % partitions));
    root.Insert(row);
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(root.Filter({root.root_non_null().eq(0)}));
  }
}
BENCHMARK(BM_TableFilterRootNonNullEqMatchMany)->Apply(TableFilterArgs);

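// Benchmarks combining two less-than constraints across two non-null root
// columns.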
static void BM_TableFilterRootMultipleNonNull(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);

  uint32_t size = static_cast<uint32_t>(state.range(0));
  uint32_t partitions = size / 512;

  std::minstd_rand0 rnd_engine;
  for (uint32_t i = 0; i < size; ++i) {
    RootTestTable::Row row;
    row.root_non_null = rnd_engine() % partitions;
    row.root_non_null_2 = rnd_engine() % partitions;
    root.Insert(row);
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(root.Filter(
        {root.root_non_null().lt(4), root.root_non_null_2().lt(10)}));
  }
}
BENCHMARK(BM_TableFilterRootMultipleNonNull)->Apply(TableFilterArgs);

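// Benchmarks an equality filter on a nullable root column where roughly half
// of the rows are null and many of the remaining rows match.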
static void BM_TableFilterRootNullableEqMatchMany(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);

  uint32_t size = static_cast<uint32_t>(state.range(0));
  uint32_t partitions = size / 512;

  std::minstd_rand0 rnd_engine;
  for (uint32_t i = 0; i < size; ++i) {
    uint32_t value = rnd_engine() % partitions;

    RootTestTable::Row row;
    row.root_nullable =
        value % 2 == 0 ? std::nullopt : std::make_optional(value);
    root.Insert(row);
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(root.Filter({root.root_nullable().eq(1)}));
  }
}
BENCHMARK(BM_TableFilterRootNullableEqMatchMany)->Apply(TableFilterArgs);

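// Benchmarks an equality filter on a non-null column defined in the child
// table itself, with many matching rows.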
static void BM_TableFilterChildNonNullEqMatchMany(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);
  ChildTestTable child(&pool, &root);

  uint32_t size = static_cast<uint32_t>(state.range(0));
  uint32_t partitions = size / 1024;

  std::minstd_rand0 rnd_engine;
  for (uint32_t i = 0; i < size; ++i) {
    ChildTestTable::Row row;
    row.child_non_null = static_cast<uint32_t>(rnd_engine() % partitions);
    root.Insert({});
    child.Insert(row);
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(child.Filter({child.child_non_null().eq(0)}));
  }
}
BENCHMARK(BM_TableFilterChildNonNullEqMatchMany)->Apply(TableFilterArgs);

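// Benchmarks an equality filter on a nullable column defined in the child
// table itself, with many matching rows.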
static void BM_TableFilterChildNullableEqMatchMany(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);
  ChildTestTable child(&pool, &root);

  uint32_t size = static_cast<uint32_t>(state.range(0));
  uint32_t partitions = size / 512;

  std::minstd_rand0 rnd_engine;
  for (uint32_t i = 0; i < size; ++i) {
    uint32_t value = rnd_engine() % partitions;

    ChildTestTable::Row row;
    row.child_nullable =
        value % 2 == 0 ? std::nullopt : std::make_optional(value);
    root.Insert({});
    child.Insert(row);
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(child.Filter({child.child_nullable().eq(1)}));
  }
}
BENCHMARK(BM_TableFilterChildNullableEqMatchMany)->Apply(TableFilterArgs);

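// Benchmarks filtering a child table on a non-null column that is defined in
// the parent table, with many matching rows.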
static void BM_TableFilterChildNonNullEqMatchManyInParent(
    benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);
  ChildTestTable child(&pool, &root);

  uint32_t size = static_cast<uint32_t>(state.range(0));
  uint32_t partitions = size / 1024;

  std::minstd_rand0 rnd_engine;
  for (uint32_t i = 0; i < size; ++i) {
    ChildTestTable::Row row;
    row.root_non_null = static_cast<uint32_t>(rnd_engine() % partitions);
    root.Insert({});
    child.Insert(row);
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(child.Filter({child.root_non_null().eq(0)}));
  }
}
BENCHMARK(BM_TableFilterChildNonNullEqMatchManyInParent)
    ->Apply(TableFilterArgs);

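// Benchmarks filtering a child table on a nullable column that is defined in
// the parent table, with many matching rows.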
static void BM_TableFilterChildNullableEqMatchManyInParent(
    benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);
  ChildTestTable child(&pool, &root);

  uint32_t size = static_cast<uint32_t>(state.range(0));
  uint32_t partitions = size / 512;

  std::minstd_rand0 rnd_engine;
  for (uint32_t i = 0; i < size; ++i) {
    ChildTestTable::Row row;
    row.root_nullable = static_cast<uint32_t>(rnd_engine() % partitions);
    root.Insert({});
    child.Insert(row);
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(child.Filter({child.root_nullable().eq(1)}));
  }
}
BENCHMARK(BM_TableFilterChildNullableEqMatchManyInParent)
    ->Apply(TableFilterArgs);

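// Benchmarks an equality filter on a sorted column in the root table.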
static void BM_TableFilterParentSortedEq(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);

  uint32_t size = static_cast<uint32_t>(state.range(0));

  for (uint32_t i = 0; i < size; ++i) {
    RootTestTable::Row row;
    row.root_sorted = i * 2;
    root.Insert(row);
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(root.Filter({root.root_sorted().eq(22)}));
  }
}
BENCHMARK(BM_TableFilterParentSortedEq)->Apply(TableFilterArgs);

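// Benchmarks an equality filter on a sorted root column combined with a second
// constraint on an unsorted column.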
static void BM_TableFilterParentSortedAndOther(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);

  uint32_t size = static_cast<uint32_t>(state.range(0));

  for (uint32_t i = 0; i < size; ++i) {
    // Group the rows into batches of 10. This emulates the behaviour of e.g.
    // the args table.
    RootTestTable::Row row;
    row.root_sorted = (i / 10) * 10;
    row.root_non_null = i;
    root.Insert(row);
  }

  // Search for the last group so that any O(n^2) behaviour is more easily
  // visible.
  uint32_t last_group = ((size - 1) / 10) * 10;
  for (auto _ : state) {
    benchmark::DoNotOptimize(root.Filter({root.root_sorted().eq(last_group),
                                          root.root_non_null().eq(size - 1)}));
  }
}
BENCHMARK(BM_TableFilterParentSortedAndOther)->Apply(TableFilterArgs);

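// Benchmarks an equality filter on a sorted column defined in the child table
// itself.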
static void BM_TableFilterChildSortedEq(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);
  ChildTestTable child(&pool, &root);

  uint32_t size = static_cast<uint32_t>(state.range(0));

  for (uint32_t i = 0; i < size; ++i) {
    ChildTestTable::Row row;
    row.child_sorted = i * 2;
    root.Insert({});
    child.Insert(row);
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(child.Filter({child.child_sorted().eq(22)}));
  }
}
BENCHMARK(BM_TableFilterChildSortedEq)->Apply(TableFilterArgs);

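// Benchmarks filtering a child table with an equality constraint on a sorted
// column defined in the parent table.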
static void BM_TableFilterChildSortedEqInParent(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);
  ChildTestTable child(&pool, &root);

  uint32_t size = static_cast<uint32_t>(state.range(0));

  for (uint32_t i = 0; i < size; ++i) {
    RootTestTable::Row root_row;
    root_row.root_sorted = i * 4;
    root.Insert(root_row);

    ChildTestTable::Row row;
    row.root_sorted = i * 4 + 2;
    child.Insert(row);
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(child.Filter({child.root_sorted().eq(22)}));
  }
}
BENCHMARK(BM_TableFilterChildSortedEqInParent)->Apply(TableFilterArgs);

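// Benchmarks sorting the root table on a non-null column with random values.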
static void BM_TableSortRootNonNull(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);

  uint32_t size = static_cast<uint32_t>(state.range(0));

  std::minstd_rand0 rnd_engine;
  for (uint32_t i = 0; i < size; ++i) {
    const uint32_t root_value = static_cast<uint32_t>(rnd_engine());

    RootTestTable::Row row;
    row.root_non_null = root_value;
    root.Insert(row);
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(root.Sort({root.root_non_null().ascending()}));
  }
}
BENCHMARK(BM_TableSortRootNonNull)->Apply(TableSortArgs);

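// Benchmarks sorting the root table on a nullable column where roughly half of
// the values are null.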
static void BM_TableSortRootNullable(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);

  uint32_t size = static_cast<uint32_t>(state.range(0));

  std::minstd_rand0 rnd_engine;
  for (uint32_t i = 0; i < size; ++i) {
    const uint32_t root_value = static_cast<uint32_t>(rnd_engine());

    RootTestTable::Row row;
    row.root_nullable =
        root_value % 2 == 0 ? std::nullopt : std::make_optional(root_value);
    root.Insert(row);
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(root.Sort({root.root_nullable().ascending()}));
  }
}
BENCHMARK(BM_TableSortRootNullable)->Apply(TableSortArgs);

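// Benchmarks sorting a child table on a non-null column defined in the parent
// table.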
static void BM_TableSortChildNonNullInParent(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);
  ChildTestTable child(&pool, &root);

  uint32_t size = static_cast<uint32_t>(state.range(0));

  std::minstd_rand0 rnd_engine;
  for (uint32_t i = 0; i < size; ++i) {
    const uint32_t root_value = static_cast<uint32_t>(rnd_engine());

    RootTestTable::Row root_row;
    root_row.root_non_null = root_value;
    root.Insert(root_row);

    const uint32_t child_value = static_cast<uint32_t>(rnd_engine());

    ChildTestTable::Row child_row;
    child_row.root_non_null = child_value;
    child.Insert(child_row);
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(child.Sort({child.root_non_null().ascending()}));
  }
}
BENCHMARK(BM_TableSortChildNonNullInParent)->Apply(TableSortArgs);

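// Benchmarks sorting a child table on a nullable column defined in the parent
// table, with roughly half of the values null.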
static void BM_TableSortChildNullableInParent(benchmark::State& state) {
  StringPool pool;
  RootTestTable root(&pool);
  ChildTestTable child(&pool, &root);

  uint32_t size = static_cast<uint32_t>(state.range(0));

  std::minstd_rand0 rnd_engine;
  for (uint32_t i = 0; i < size; ++i) {
    const uint32_t root_value = static_cast<uint32_t>(rnd_engine());

    RootTestTable::Row root_row;
    root_row.root_nullable =
        root_value % 2 == 0 ? std::nullopt : std::make_optional(root_value);
    root.Insert(root_row);

    const uint32_t child_value = static_cast<uint32_t>(rnd_engine());

    ChildTestTable::Row child_row;
    child_row.root_nullable =
        child_value % 2 == 0 ? std::nullopt : std::make_optional(child_value);
    child.Insert(child_row);
  }

  for (auto _ : state) {
    benchmark::DoNotOptimize(child.Sort({child.root_nullable().ascending()}));
  }
}
BENCHMARK(BM_TableSortChildNullableInParent)->Apply(TableSortArgs);