Lines matching refs: uptr
87 void Extend(uptr npcs);
88 void Add(uptr pc, u32 *guard);
89 void IndirCall(uptr caller, uptr callee, uptr callee_cache[],
90 uptr cache_size);
102 void InitializeGuards(s32 *guards, uptr n, const char *module_name,
103 uptr caller_pc);
104 void InitializeCounters(u8 *counters, uptr n);
106 uptr GetNumberOf8bitCounters();
107 uptr Update8bitCounterBitsetAndClearCounters(u8 *bitset);
109 uptr *data();
110 uptr size();
114 void UpdateModuleNameVec(uptr caller_pc, uptr range_beg, uptr range_end);
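Read together, the declarations above outline the CoverageData lifecycle. The flow below is a hedged reconstruction from the names alone; the call sites are assumptions, not the actual instrumentation hooks:

    // Hedged reconstruction of the lifecycle implied by the declarations:
    //
    //   module load:    InitializeGuards(guards, n, module_name, caller_pc);
    //                   InitializeCounters(counters, n);
    //   each edge hit:  Add(pc, guard);                  // record covered PC
    //   indirect call:  IndirCall(caller, callee, cache, cache_size);
    //   dump time:      data() / size() expose the recorded PCs;
    //                   Update8bitCounterBitsetAndClearCounters(bitset) folds
    //                   per-edge counters into a persistent bitset.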
118 static const uptr kPcArrayMaxSize = FIRST_32_SECOND_64(
122 static const uptr kPcArrayMmapSize = 64 * 1024;
126 uptr *pc_array;
132 uptr pc_array_mapped_size;
141 uptr beg, end; // elements [beg,end) in pc_array.
150 uptr n;
154 uptr num_8bit_counters;
157 static const uptr kCcArrayMaxSize = FIRST_32_SECOND_64(1 << 18, 1 << 24);
158 uptr **cc_array;
169 static const uptr kTrEventArrayMaxSize = FIRST_32_SECOND_64(1 << 22, 1 << 30);
171 uptr tr_event_array_size;
173 static const uptr kTrPcArrayMaxSize = FIRST_32_SECOND_64(1 << 22, 1 << 27);
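The capacity constants above are sized per target word width via FIRST_32_SECOND_64. A minimal sketch of that macro's semantics, assuming the conventional definition (the real one lives in sanitizer_internal_defs.h and may differ in detail):

    // Sketch: pick the first argument on 32-bit targets, the second on 64-bit.
    #ifndef SANITIZER_WORDSIZE
    # if defined(__LP64__) || defined(_WIN64)
    #  define SANITIZER_WORDSIZE 64
    # else
    #  define SANITIZER_WORDSIZE 32
    # endif
    #endif

    #if SANITIZER_WORDSIZE == 64
    # define FIRST_32_SECOND_64(a, b) (b)
    #else
    # define FIRST_32_SECOND_64(a, b) (a)
    #endif

    // e.g. kTrEventArrayMaxSize (line 169) evaluates to 1 << 22 entries on
    // 32-bit targets and 1 << 30 on 64-bit targets.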
180 void CovUpdateMapping(const char *path, uptr caller_pc = 0);
203 pc_array = reinterpret_cast<uptr *>( in Enable()
204 MmapNoReserveOrDie(sizeof(uptr) * kPcArrayMaxSize, "CovInit")); in Enable()
212 cc_array = reinterpret_cast<uptr **>(MmapNoReserveOrDie( in Enable()
213 sizeof(uptr *) * kCcArrayMaxSize, "CovInit::cc_array")); in Enable()
222 reinterpret_cast<uptr>(&tr_event_array[kTrEventArrayMaxSize]), in Enable()
234 uptr idx = atomic_load_relaxed(&pc_array_index); in InitializeGuardArray()
242 UnmapOrDie(pc_array, sizeof(uptr) * kPcArrayMaxSize); in Disable()
246 UnmapOrDie(cc_array, sizeof(uptr *) * kCcArrayMaxSize); in Disable()
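Enable() reserves the worst-case arrays up front with MmapNoReserveOrDie, and Disable() releases them with UnmapOrDie. A standalone POSIX sketch of that reserve-now, commit-lazily pattern (the real helpers live in sanitizer_common and are more portable):

    #include <sys/mman.h>
    #include <cstdio>
    #include <cstdlib>

    // Sketch: reserve a large virtual range without committing memory; pages
    // are backed lazily on first touch, so a huge kPcArrayMaxSize is cheap.
    static void *mmap_no_reserve_or_die(size_t size, const char *mem_type) {
      void *p = mmap(nullptr, size, PROT_READ | PROT_WRITE,
                     MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
      if (p == MAP_FAILED) {
        fprintf(stderr, "mmap(%zu) failed for %s\n", size, mem_type);
        abort();
      }
      return p;
    }

    static void unmap_or_die(void *addr, size_t size) {
      if (munmap(addr, size)) abort();
    }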
265 for (uptr i = 0; i < guard_array_vec.size(); i++) in ReinitializeGuards()
275 uptr size = atomic_load(&pc_array_size, memory_order_relaxed); in ReInit()
276 uptr npcs = size / sizeof(uptr); in ReInit()
301 void CoverageData::Extend(uptr npcs) { in Extend()
305 uptr size = atomic_load(&pc_array_size, memory_order_relaxed); in Extend()
306 size += npcs * sizeof(uptr); in Extend()
312 uptr new_mapped_size = pc_array_mapped_size; in Extend()
314 CHECK_LE(new_mapped_size, sizeof(uptr) * kPcArrayMaxSize); in Extend()
317 uptr res = internal_ftruncate(pc_fd, new_mapped_size); in Extend()
324 uptr next_map_base = ((uptr)pc_array) + pc_array_mapped_size; in Extend()
328 CHECK_EQ((uptr)p, next_map_base); in Extend()
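When coverage is mirrored to a file, Extend() grows the backing file with internal_ftruncate and maps the new tail directly after the existing mapping, so pc_array stays contiguous. A hedged POSIX sketch of that step, with the atomic bookkeeping and error paths elided and illustrative names:

    #include <sys/mman.h>
    #include <unistd.h>
    #include <cstdlib>

    // Sketch: grow a file-backed, contiguous array in place. The new pages
    // are mapped MAP_FIXED at the address just past the current mapping,
    // mirroring CHECK_EQ((uptr)p, next_map_base) above.
    static void extend_file_backed_array(int pc_fd, void *pc_array,
                                         size_t mapped_size,
                                         size_t new_mapped_size) {
      if (ftruncate(pc_fd, new_mapped_size)) abort();
      char *next_map_base = (char *)pc_array + mapped_size;
      void *p = mmap(next_map_base, new_mapped_size - mapped_size,
                     PROT_READ | PROT_WRITE, MAP_SHARED | MAP_FIXED, pc_fd,
                     mapped_size);
      if (p != (void *)next_map_base) abort();
    }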
335 void CoverageData::InitializeCounters(u8 *counters, uptr n) { in InitializeCounters()
337 CHECK_EQ(reinterpret_cast<uptr>(counters) % 16, 0); in InitializeCounters()
344 void CoverageData::UpdateModuleNameVec(uptr caller_pc, uptr range_beg, in UpdateModuleNameVec()
345 uptr range_end) { in UpdateModuleNameVec()
358 void CoverageData::InitializeGuards(s32 *guards, uptr n, in InitializeGuards()
360 uptr caller_pc) { in InitializeGuards()
367 uptr range_end = atomic_load(&pc_array_index, memory_order_relaxed); in InitializeGuards()
368 uptr range_beg = range_end - n; in InitializeGuards()
374 static const uptr kBundleCounterBits = 16;
378 uptr BundlePcAndCounter(uptr pc, uptr counter) { in BundlePcAndCounter()
381 static const uptr kMaxCounter = (1 << kBundleCounterBits) - 1; in BundlePcAndCounter()
388 uptr UnbundlePc(uptr bundle) { in UnbundlePc()
394 uptr UnbundleCounter(uptr bundle) { in UnbundleCounter()
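Lines 374-394 bundle an execution counter into the high bits of each stored PC. A self-contained sketch of the scheme those names imply, assuming a 64-bit uptr whose top kBundleCounterBits = 16 bits hold a saturating counter (on 32-bit targets no bits are spare, so bundling presumably degenerates to the raw PC):

    #include <cassert>
    #include <cstdint>

    using uptr = uint64_t;  // bundling assumes a 64-bit word here
    static const uptr kBundleCounterBits = 16;
    static const uptr kMaxCounter = (1ULL << kBundleCounterBits) - 1;

    // Saturate the counter into the top 16 bits; the PC must fit in the
    // low 48 bits, which holds for current user-space address spaces.
    uptr BundlePcAndCounter(uptr pc, uptr counter) {
      if (counter > kMaxCounter) counter = kMaxCounter;
      assert(pc >> (64 - kBundleCounterBits) == 0);
      return pc | (counter << (64 - kBundleCounterBits));
    }

    uptr UnbundlePc(uptr bundle) {
      return bundle & ((1ULL << (64 - kBundleCounterBits)) - 1);
    }

    uptr UnbundleCounter(uptr bundle) {
      return bundle >> (64 - kBundleCounterBits);
    }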
402 void CoverageData::Add(uptr pc, u32 *guard) { in Add()
410 uptr idx = -guard_value - 1; in Add()
413 CHECK_LT(idx * sizeof(uptr), in Add()
415 uptr counter = atomic_fetch_add(&coverage_counter, 1, memory_order_relaxed); in Add()
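Add() recovers the pc_array slot from the guard's value: line 410 implies InitializeGuards() stores -(index + 1) into each s32 guard, so a zero guard means "uninitialized" and a negative guard decodes back to the slot index. A tiny sketch of that inferred encoding:

    #include <cstdint>

    using uptr = uintptr_t;
    using s32 = int32_t;

    // Sketch: slot index 0 maps to guard -1, index 1 to guard -2, and so on;
    // the sign convention is inferred from idx = -guard_value - 1 above.
    inline s32 index_to_guard(uptr idx) { return -static_cast<s32>(idx + 1); }
    inline uptr guard_to_index(s32 guard) {
      return static_cast<uptr>(-guard) - 1;
    }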
426 void CoverageData::IndirCall(uptr caller, uptr callee, uptr callee_cache[], in IndirCall()
427 uptr cache_size) { in IndirCall()
431 uptr zero = 0; in IndirCall()
434 uptr idx = atomic_fetch_add(&cc_array_index, 1, memory_order_relaxed); in IndirCall()
435 CHECK_LT(idx * sizeof(uptr), in IndirCall()
441 for (uptr i = 2; i < cache_size; i++) { in IndirCall()
442 uptr was = 0; in IndirCall()
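IndirCall() maintains a small lock-free cache per indirect call site. From the lines above and the layout read back in DumpCallerCalleePairs() (caller in slot 0, count in slot 1, callees from slot 2), a hedged standalone sketch of the protocol, with registration in the global cc_array omitted:

    #include <atomic>
    #include <cstdint>

    using uptr = uintptr_t;

    // Sketch: slot 0 is claimed once with the caller's PC, slot 1 holds the
    // cache length, and slots [2, cache_size) record distinct callees,
    // first writer wins via compare-and-swap.
    void indir_call_sketch(uptr caller, uptr callee,
                           std::atomic<uptr> cache[], uptr cache_size) {
      uptr zero = 0;
      if (cache[0].compare_exchange_strong(zero, caller))
        cache[1].store(cache_size);  // first arrival registers the cache
      for (uptr i = 2; i < cache_size; i++) {
        uptr expected = 0;
        if (cache[i].compare_exchange_strong(expected, callee))
          return;  // recorded a new callee in an empty slot
        if (expected == callee)
          return;  // this callee is already recorded
      }
      // Cache full: further distinct callees at this site are dropped.
    }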
453 uptr CoverageData::GetNumberOf8bitCounters() { in GetNumberOf8bitCounters()
458 uptr CoverageData::Update8bitCounterBitsetAndClearCounters(u8 *bitset) { in Update8bitCounterBitsetAndClearCounters()
459 uptr num_new_bits = 0; in Update8bitCounterBitsetAndClearCounters()
460 uptr cur = 0; in Update8bitCounterBitsetAndClearCounters()
462 static const uptr kBatchSize = 8; in Update8bitCounterBitsetAndClearCounters()
463 CHECK_EQ(reinterpret_cast<uptr>(bitset) % kBatchSize, 0); in Update8bitCounterBitsetAndClearCounters()
464 for (uptr i = 0, len = counters_vec.size(); i < len; i++) { in Update8bitCounterBitsetAndClearCounters()
466 uptr n = counters_vec[i].n; in Update8bitCounterBitsetAndClearCounters()
469 CHECK_EQ(reinterpret_cast<uptr>(c) % kBatchSize, 0); in Update8bitCounterBitsetAndClearCounters()
475 for (uptr j = 0; j < n; j += kBatchSize, cur += kBatchSize) { in Update8bitCounterBitsetAndClearCounters()
484 for (uptr k = 0; k < kBatchSize; k++) { in Update8bitCounterBitsetAndClearCounters()
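Update8bitCounterBitsetAndClearCounters() walks each registered counter range in 8-byte batches (lines 462-484): a batch whose 64-bit load is zero is skipped outright; otherwise each non-zero byte sets the corresponding bit in the persistent bitset, newly set bits are counted, and the counters are cleared. A hedged sketch for a single range, with the global bit cursor folded into a base offset:

    #include <cstdint>
    #include <cstring>

    static const uint64_t kBatchSize = 8;

    // Sketch: fold one counter range into `bitset`, returning how many bits
    // were newly set. Assumes n is a multiple of kBatchSize, which the real
    // code enforces with alignment/size CHECKs.
    uint64_t update_bitset_and_clear(uint8_t *counters, uint64_t n,
                                     uint8_t *bitset, uint64_t base_bit) {
      uint64_t num_new_bits = 0;
      for (uint64_t j = 0; j < n; j += kBatchSize) {
        uint64_t batch;
        memcpy(&batch, counters + j, sizeof(batch));
        if (!batch) continue;  // no counter in this batch fired
        for (uint64_t k = 0; k < kBatchSize; k++) {
          if (!counters[j + k]) continue;
          uint64_t bit = base_bit + j + k;
          uint8_t mask = 1 << (bit % 8);
          if (!(bitset[bit / 8] & mask)) {
            bitset[bit / 8] |= mask;  // first time this edge fired
            num_new_bits++;
          }
          counters[j + k] = 0;  // clear for the next round
        }
      }
      return num_new_bits;
    }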
511 uptr *CoverageData::data() { in data()
515 uptr CoverageData::size() { in size()
542 CHECK_EQ((uptr)block.data(), (uptr)(CovHeader *)block.data()); in CovWritePacked()
543 uptr header_size_with_module = sizeof(header) + module_name_length; in CovWritePacked()
593 uptr max_idx = tr_event_pointer - tr_event_array; in DumpTrace()
599 for (uptr i = 0, n = size(); i < n; i++) { in DumpTrace()
601 uptr module_address = 0; in DumpTrace()
615 for (uptr i = 0; i < comp_unit_name_vec.size(); i++) in DumpTrace()
622 uptr bytes_to_write = max_idx * sizeof(tr_event_array[0]); in DumpTrace()
626 uptr actually_written; in DumpTrace()
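DumpTrace() writes the used prefix of a bump-pointer event buffer: tr_event_pointer advances once per recorded event, and max_idx is the event count. A minimal sketch of that buffer discipline; the u32 event type is an assumption, since the listing only shows sizeof(tr_event_array[0]):

    #include <cstdint>
    #include <cstdio>

    // Sketch: each executed block appends one event id; at dump time only
    // the used prefix [begin, cursor) is written out verbatim.
    struct TraceBuffer {
      uint32_t *begin, *cursor, *end;

      void add_event(uint32_t id) {
        if (cursor < end) *cursor++ = id;  // drop events once full
      }

      void dump(FILE *f) {
        size_t max_idx = cursor - begin;  // events recorded so far
        size_t bytes_to_write = max_idx * sizeof(*begin);
        fwrite(begin, 1, bytes_to_write, f);
      }
    };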
643 uptr max_idx = atomic_load(&cc_array_index, memory_order_relaxed); in DumpCallerCalleePairs()
649 uptr total = 0; in DumpCallerCalleePairs()
650 for (uptr i = 0; i < max_idx; i++) { in DumpCallerCalleePairs()
651 uptr *cc_cache = cc_array[i]; in DumpCallerCalleePairs()
653 uptr caller = cc_cache[0]; in DumpCallerCalleePairs()
654 uptr n_callees = cc_cache[1]; in DumpCallerCalleePairs()
656 uptr caller_module_address = 0; in DumpCallerCalleePairs()
659 for (uptr j = 2; j < n_callees; j++) { in DumpCallerCalleePairs()
660 uptr callee = cc_cache[j]; in DumpCallerCalleePairs()
664 uptr callee_module_address = 0; in DumpCallerCalleePairs()
695 uptr n = coverage_data.GetNumberOf8bitCounters(); in DumpCounters()
701 for (uptr m = 0; m < module_name_vec.size(); m++) { in DumpCounters()
722 for (uptr m = 0; m < module_name_vec.size(); m++) { in DumpAsBitSet()
723 uptr n_set_bits = 0; in DumpAsBitSet()
728 for (uptr i = r.beg; i < r.end; i++) { in DumpAsBitSet()
729 uptr pc = UnbundlePc(pc_array[i]); in DumpAsBitSet()
749 InternalMmapVector<uptr> offsets(0); in DumpOffsets()
751 for (uptr m = 0; m < module_name_vec.size(); m++) { in DumpOffsets()
753 uptr num_words_for_magic = SANITIZER_WORDSIZE == 64 ? 1 : 2; in DumpOffsets()
754 for (uptr i = 0; i < num_words_for_magic; i++) in DumpOffsets()
760 for (uptr i = r.beg; i < r.end; i++) { in DumpOffsets()
761 uptr pc = UnbundlePc(pc_array[i]); in DumpOffsets()
762 uptr counter = UnbundleCounter(pc_array[i]); in DumpOffsets()
764 uptr offset = 0; in DumpOffsets()
771 for (uptr i = 0; i < offsets.size(); i++) in DumpOffsets()
774 uptr num_offsets = offsets.size() - num_words_for_magic; in DumpOffsets()
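DumpOffsets() emits, per module, an 8-byte magic header (one uptr-sized word on 64-bit targets, two on 32-bit, hence num_words_for_magic on line 753) followed by module-relative PC offsets. A hedged sketch of that layout; kOffsetMagic is a hypothetical placeholder, as the actual magic value is not shown in the listing:

    #include <cstdint>
    #include <cstdio>

    using uptr = uintptr_t;

    // Hypothetical placeholder; the real 8-byte magic identifies the dump
    // format (and word size) to offline tooling.
    static const uint64_t kOffsetMagic = 0;

    // Sketch: magic header first, then one word per covered PC, each stored
    // as an offset relative to its module's load address, which matches
    // num_offsets = offsets.size() - num_words_for_magic on line 774.
    void write_offsets_file(FILE *f, const uptr *offsets, uptr num_offsets) {
      fwrite(&kOffsetMagic, sizeof(kOffsetMagic), 1, f);
      fwrite(offsets, sizeof(uptr), num_offsets, f);
    }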
883 __sanitizer_cov_indir_call16(uptr callee, uptr callee_cache16[]) { in __sanitizer_cov_indir_call16()
896 __sanitizer_cov_module_init(s32 *guards, uptr npcs, u8 *counters, in __sanitizer_cov_module_init()
913 uptr __sanitizer_get_total_unique_coverage() { in __sanitizer_get_total_unique_coverage()
918 uptr __sanitizer_get_total_unique_caller_callee_pairs() { in __sanitizer_get_total_unique_caller_callee_pairs()
941 uptr __sanitizer_get_coverage_guards(uptr **data) { in __sanitizer_get_coverage_guards()
947 uptr __sanitizer_get_number_of_counters() { in __sanitizer_get_number_of_counters()
952 uptr __sanitizer_update_counter_bitset_and_clear_counters(u8 *bitset) { in __sanitizer_update_counter_bitset_and_clear_counters()
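The extern "C" entry points above form the public face of this (legacy) coverage runtime. A hedged usage sketch of a driver loop polling for coverage growth between runs; the prototypes are copied from the listing, while RunOneInput is a hypothetical stand-in:

    #include <cstdint>
    #include <cstdio>

    using uptr = uintptr_t;

    // Prototypes copied from the listing above; these entry points belonged
    // to the legacy sanitizer coverage interface.
    extern "C" uptr __sanitizer_get_total_unique_coverage();
    extern "C" uptr __sanitizer_get_total_unique_caller_callee_pairs();

    // Hypothetical stand-in for running the instrumented target once.
    static void RunOneInput() {}

    // Sketch: watch for coverage growth between runs, as a fuzzer driver
    // might.
    void fuzz_loop(int iterations) {
      uptr last = 0;
      for (int i = 0; i < iterations; i++) {
        RunOneInput();
        uptr cov = __sanitizer_get_total_unique_coverage();
        if (cov > last) {
          printf("iter %d: coverage grew to %zu unique PCs\n", i,
                 (size_t)cov);
          last = cov;
        }
      }
    }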