/external/llvm-project/compiler-rt/lib/xray/ |
D | xray_allocator.h |
    168 static constexpr auto Size = nearest_boundary(N, kCacheLineSize);
    195 reinterpret_cast<uintptr_t>(AlignedNextBlock), kCacheLineSize); in Alloc()
    208 DCHECK_EQ(reinterpret_cast<uintptr_t>(AlignedNextBlock) % kCacheLineSize, in Alloc()
    226 : MaxMemory(RoundUpTo(M, kCacheLineSize)), in Allocator()
|
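The xray_allocator.h hits above show the usual arithmetic: sizes are rounded up to a multiple of the cache line and the bump pointer is checked for cache-line alignment. Below is a minimal stand-alone sketch of that arithmetic; the kCacheLineSize value and the RoundUpTo helper are simplified stand-ins, not the XRay definitions.

// Minimal sketch (not the XRay implementation): round sizes and pointers up
// to the cache-line boundary, in the spirit of the nearest_boundary/RoundUpTo
// calls listed above.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdlib>

constexpr std::size_t kCacheLineSize = 64;  // assumed value for this sketch

// Round n up to the next multiple of boundary (boundary must be a power of two).
constexpr std::uintptr_t RoundUpTo(std::uintptr_t n, std::uintptr_t boundary) {
  return (n + boundary - 1) & ~(boundary - 1);
}

int main() {
  static_assert(RoundUpTo(100, kCacheLineSize) == 128, "");

  // Align a raw pointer the same way, then verify it, as the DCHECK_EQ above does.
  void* raw = std::malloc(1024);
  auto aligned = RoundUpTo(reinterpret_cast<std::uintptr_t>(raw), kCacheLineSize);
  assert(aligned % kCacheLineSize == 0);
  std::free(raw);
  return 0;
}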
D | xray_buffer_queue.h | 45 char Buffer[kCacheLineSize];
|
D | xray_buffer_queue.cpp | 62 unsigned char Storage[kCacheLineSize];
|
D | xray_segmented_array.h | 65 SegmentControlBlockSize + next_pow2(sizeof(T)), kCacheLineSize);
|
/external/llvm-project/compiler-rt/lib/xray/tests/unit/ |
D | allocator_test.cpp |
    52 GetPageSizeCached() / RoundUpTo(sizeof(OddSizedData), kCacheLineSize); in TEST()
    71 GetPageSizeCached() / RoundUpTo(sizeof(OddSizedData), kCacheLineSize); in TEST()
|
/external/compiler-rt/lib/sanitizer_common/ |
D | sanitizer_quarantine.h |
    77 char pad0_[kCacheLineSize];
    81 char pad1_[kCacheLineSize];
    85 char pad2_[kCacheLineSize];
|
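The pad0_/pad1_/pad2_ members above are cache-line-sized filler placed between fields that different threads update, so those fields never land on the same cache line (false-sharing avoidance). A minimal sketch of the pattern, with assumed field names and a plain std::atomic in place of the sanitizers' own types:

// Minimal sketch (assumed layout, not the sanitizer code): keep frequently
// updated counters on separate cache lines so writers do not contend.
#include <atomic>
#include <cstddef>

constexpr std::size_t kCacheLineSize = 64;  // assumption; sanitizers pick this per target

struct Counters {
  char pad0_[kCacheLineSize];
  std::atomic<std::size_t> bytes_queued{0};
  char pad1_[kCacheLineSize];
  std::atomic<std::size_t> bytes_recycled{0};
  char pad2_[kCacheLineSize];
};

int main() {
  Counters c;
  c.bytes_queued.fetch_add(128, std::memory_order_relaxed);
  c.bytes_recycled.fetch_add(64, std::memory_order_relaxed);
  return static_cast<int>(c.bytes_queued.load() - 128);
}

The pad_[kCacheLineSize] members in the tsan/sanitizer mutex tests further down serve the same purpose: each test thread's hot data gets its own line.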
D | sanitizer_common.h |
    40 const uptr kCacheLineSize = 128; variable
    42 const uptr kCacheLineSize = 64; variable
|
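The two definitions above are alternatives selected by the preprocessor: the constant is 128 bytes on targets with larger lines and 64 bytes otherwise (the newer llvm-project copy routes this through SANITIZER_CACHE_LINE_SIZE instead). A minimal sketch of the pattern; the __powerpc64__ test here is illustrative, not necessarily the guard used in the header:

// Minimal sketch: pick the cache-line constant per target at compile time.
#include <cstdio>

typedef unsigned long uptr;  // stand-in for the sanitizer uptr typedef

#if defined(__powerpc64__)
const uptr kCacheLineSize = 128;
#else
const uptr kCacheLineSize = 64;
#endif

int main() {
  std::printf("kCacheLineSize = %lu\n", static_cast<unsigned long>(kCacheLineSize));
  return 0;
}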
D | sanitizer_allocator.h |
    520 COMPILER_CHECK(sizeof(RegionInfo) >= kCacheLineSize);
    864 char padding[kCacheLineSize - sizeof(uptr) - sizeof(IntrusiveList<Batch>)];
    866 COMPILER_CHECK(sizeof(SizeClassInfo) == kCacheLineSize);
|
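Here the per-size-class record is padded out to exactly one cache line and the arithmetic is verified with COMPILER_CHECK (the sanitizers' compile-time assert). A minimal sketch with assumed stand-in fields, verified via static_assert:

// Minimal sketch (assumed field set): pad a per-size-class record to exactly
// one cache line, mirroring the padding[] member and COMPILER_CHECK lines above.
#include <cstddef>
#include <cstdint>

constexpr std::size_t kCacheLineSize = 64;  // assumption for this sketch

struct FreeList {                 // stand-in for IntrusiveList<Batch>
  void* first = nullptr;
  void* last = nullptr;
  std::size_t size = 0;
};

struct SizeClassInfo {
  std::uintptr_t mutex_word;      // stand-in for the real spin mutex
  FreeList free_list;
  char padding[kCacheLineSize - sizeof(std::uintptr_t) - sizeof(FreeList)];
};

// One element per size class; the check keeps neighbouring array elements
// from sharing a cache line.
static_assert(sizeof(SizeClassInfo) == kCacheLineSize, "SizeClassInfo must fill one line");

int main() { return 0; }

The primary64/primary32 entries below relax this to sizeof(...) % kCacheLineSize == 0, allowing records larger than one line while still keeping array elements line-aligned.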
/external/llvm-project/compiler-rt/lib/sanitizer_common/ |
D | sanitizer_quarantine.h |
    142 char pad0_[kCacheLineSize];
    146 char pad1_[kCacheLineSize];
    150 char pad2_[kCacheLineSize];
|
D | sanitizer_allocator_primary64.h |
    90 DCHECK_EQ(SpaceEnd() % kCacheLineSize, 0); in Init()
    614 COMPILER_CHECK(sizeof(RegionInfo) % kCacheLineSize == 0);
|
D | sanitizer_allocator_primary32.h | 284 COMPILER_CHECK(sizeof(SizeClassInfo) % kCacheLineSize == 0);
|
D | sanitizer_common.h | 41 const uptr kCacheLineSize = SANITIZER_CACHE_LINE_SIZE; variable
|
/external/compiler-rt/lib/tsan/tests/unit/ |
D | tsan_mutex_test.cc | 61 char pad_[kCacheLineSize];
|
/external/llvm-project/compiler-rt/lib/tsan/tests/unit/ |
D | tsan_mutex_test.cpp | 60 char pad_[kCacheLineSize];
|
/external/llvm-project/compiler-rt/lib/sanitizer_common/tests/ |
D | sanitizer_mutex_test.cpp | 65 char pad_[kCacheLineSize];
|
/external/compiler-rt/lib/sanitizer_common/tests/ |
D | sanitizer_mutex_test.cc | 66 char pad_[kCacheLineSize];
|
/external/tensorflow/tensorflow/lite/kernels/ |
D | cpu_backend_gemm_custom_gemv.h |
    339 static constexpr int kCacheLineSize = 64;
    341 k += kCacheLineSize / sizeof(RhsScalar)) {
    653 static constexpr int kCacheLineSize = 64;
    655 k += kCacheLineSize / sizeof(float)) {
|
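The GEMV loops above advance k by kCacheLineSize / sizeof(element), i.e. one element per cache line, which is the typical stride for a software-prefetch pass over a column. A minimal sketch of that loop shape; __builtin_prefetch (a GCC/Clang builtin) stands in for the kernel's own preload helpers:

// Minimal sketch (not the TFLite kernel): touch one element per cache line so
// each iteration requests a new line rather than re-requesting the same one.
#include <cstddef>
#include <vector>

static constexpr int kCacheLineSize = 64;

void PrefetchColumn(const float* data, int depth) {
  for (int k = 0; k < depth; k += kCacheLineSize / sizeof(float)) {
    __builtin_prefetch(data + k, /*rw=*/0, /*locality=*/3);
  }
}

int main() {
  std::vector<float> column(4096, 1.0f);
  PrefetchColumn(column.data(), static_cast<int>(column.size()));
  return 0;
}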
/external/stressapptest/src/ |
D | sattypes.h | 243 static const int kCacheLineSize = 64; variable
|
D | sat.cc |
    673 channel_hash_ = kCacheLineSize; in Sat()
    1418 if (line_size < kCacheLineSize) in InitializeThreads()
    1419 line_size = kCacheLineSize; in InitializeThreads()
|
D | worker.cc |
    1093 int length = size_in_bytes / kCacheLineSize; in AdlerAddrMemcpyWarm()
    1094 int mem_increment = kCacheLineSize / sizeof(*dstmem64); in AdlerAddrMemcpyWarm()
|
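AdlerAddrMemcpyWarm's arithmetic above splits the buffer into size_in_bytes / kCacheLineSize blocks and steps kCacheLineSize / sizeof(uint64) words per block, i.e. it processes the data one cache line at a time. A minimal sketch of that loop structure (plain memcpy here; stressapptest interleaves checksumming and warming that this sketch omits):

// Minimal sketch (not stressapptest's checksumming copy): walk the buffers one
// cache line at a time, mirroring the length/mem_increment arithmetic above.
#include <cstdint>
#include <cstring>
#include <vector>

static const int kCacheLineSize = 64;

void CopyByCacheLine(uint64_t* dst, const uint64_t* src, std::size_t size_in_bytes) {
  const std::size_t lines = size_in_bytes / kCacheLineSize;          // "length"
  const std::size_t words_per_line = kCacheLineSize / sizeof(*dst);  // "mem_increment"
  for (std::size_t i = 0; i < lines; ++i) {
    std::memcpy(dst + i * words_per_line, src + i * words_per_line, kCacheLineSize);
  }
}

int main() {
  std::vector<uint64_t> src(512, 0x0102030405060708ull), dst(512, 0);
  CopyByCacheLine(dst.data(), src.data(), src.size() * sizeof(uint64_t));
  return dst[511] == src[511] ? 0 : 1;
}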
/external/libgav1/libgav1/src/utils/ |
D | constants.h | 178 kCacheLineSize = 64, enumerator
|
/external/gemmlowp/standalone/ |
D | neon-gemm-kernel-benchmark.cc |
    94 const int kCacheLineSize = 64; variable
    5656 int res = posix_memalign(reinterpret_cast<void**>(&data_), kCacheLineSize, in CacheLineAlignedBuffer()
    5812 cache_size_bytes - 2 * kCacheLineSize; in BenchmarkDepthToFitInCache()
    5841 return RoundDown<kCacheLineSize>(clamped_unrounded_depth); in BenchmarkDepthToFitInCache()
|
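The gemmlowp benchmark above allocates its test buffers with posix_memalign so the base address starts on a cache-line boundary, and it rounds the benchmark depth down to a whole number of cache lines so the working set fits the measured cache level. A minimal, assumed simplification of such a buffer wrapper (not the gemmlowp class itself):

// Minimal sketch: cache-line-aligned storage via posix_memalign (POSIX API),
// so the buffer starts on a fresh line and no load in the benchmark straddles one.
#include <cassert>
#include <cstdint>
#include <cstdlib>

const int kCacheLineSize = 64;

template <typename T>
class CacheLineAlignedBuffer {
 public:
  explicit CacheLineAlignedBuffer(std::size_t count) : size_(count) {
    int res = posix_memalign(reinterpret_cast<void**>(&data_), kCacheLineSize,
                             size_ * sizeof(T));
    assert(res == 0);
    (void)res;
  }
  ~CacheLineAlignedBuffer() { std::free(data_); }
  T* data() { return data_; }
  std::size_t size() const { return size_; }

 private:
  T* data_ = nullptr;
  std::size_t size_ = 0;
};

int main() {
  CacheLineAlignedBuffer<float> buf(1024);
  assert(reinterpret_cast<std::uintptr_t>(buf.data()) % kCacheLineSize == 0);
  return 0;
}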