Lines matching refs:uptr (cross-reference listing from the sanitizer coverage runtime; each entry gives the source line number, the matching line, and, where applicable, the enclosing function as "in Foo()").
52 static const uptr kNumWordsForMagic = SANITIZER_WORDSIZE == 64 ? 1 : 2;
89 void Extend(uptr npcs);
90 void Add(uptr pc, u32 *guard);
91 void IndirCall(uptr caller, uptr callee, uptr callee_cache[],
92 uptr cache_size);
104 void InitializeGuards(s32 *guards, uptr n, const char *module_name,
105 uptr caller_pc);
106 void InitializeCounters(u8 *counters, uptr n);
108 uptr GetNumberOf8bitCounters();
109 uptr Update8bitCounterBitsetAndClearCounters(u8 *bitset);
111 uptr *data();
112 uptr size() const;
113 uptr *buffer() const { return pc_buffer; } in buffer()
118 uptr beg, end; // elements [beg,end) in pc_array.
122 void UpdateModuleNameVec(uptr caller_pc, uptr range_beg, uptr range_end);
124 InternalMmapVector<uptr>* offsets) const;
128 static const uptr kPcArrayMaxSize = FIRST_32_SECOND_64(
132 static const uptr kPcArrayMmapSize = 64 * 1024;
136 uptr *pc_array;
142 uptr pc_array_mapped_size;
146 uptr *pc_buffer;
157 uptr n;
161 uptr num_8bit_counters;
164 static const uptr kCcArrayMaxSize = FIRST_32_SECOND_64(1 << 18, 1 << 24);
165 uptr **cc_array;
176 static const uptr kTrEventArrayMaxSize = FIRST_32_SECOND_64(1 << 22, 1 << 30);
178 uptr tr_event_array_size;
180 static const uptr kTrPcArrayMaxSize = FIRST_32_SECOND_64(1 << 22, 1 << 27);
187 void CovUpdateMapping(const char *path, uptr caller_pc = 0);
210 pc_array = reinterpret_cast<uptr *>( in Enable()
211 MmapNoReserveOrDie(sizeof(uptr) * kPcArrayMaxSize, "CovInit")); in Enable()
221 pc_buffer = reinterpret_cast<uptr *>(MmapNoReserveOrDie( in Enable()
222 sizeof(uptr) * kPcArrayMaxSize, "CovInit::pc_buffer")); in Enable()
224 cc_array = reinterpret_cast<uptr **>(MmapNoReserveOrDie( in Enable()
225 sizeof(uptr *) * kCcArrayMaxSize, "CovInit::cc_array")); in Enable()
234 reinterpret_cast<uptr>(&tr_event_array[kTrEventArrayMaxSize]), in Enable()
246 uptr idx = atomic_load_relaxed(&pc_array_index); in InitializeGuardArray()
254 UnmapOrDie(pc_array, sizeof(uptr) * kPcArrayMaxSize); in Disable()
258 UnmapOrDie(cc_array, sizeof(uptr *) * kCcArrayMaxSize); in Disable()
262 UnmapOrDie(pc_buffer, sizeof(uptr) * kPcArrayMaxSize); in Disable()
281 for (uptr i = 0; i < guard_array_vec.size(); i++) in ReinitializeGuards()
291 uptr size = atomic_load(&pc_array_size, memory_order_relaxed); in ReInit()
292 uptr npcs = size / sizeof(uptr); in ReInit()
317 void CoverageData::Extend(uptr npcs) { in Extend()
321 uptr size = atomic_load(&pc_array_size, memory_order_relaxed); in Extend()
322 size += npcs * sizeof(uptr); in Extend()
328 uptr new_mapped_size = pc_array_mapped_size; in Extend()
330 CHECK_LE(new_mapped_size, sizeof(uptr) * kPcArrayMaxSize); in Extend()
333 uptr res = internal_ftruncate(pc_fd, new_mapped_size); in Extend()
340 uptr next_map_base = ((uptr)pc_array) + pc_array_mapped_size; in Extend()
344 CHECK_EQ((uptr)p, next_map_base); in Extend()
351 void CoverageData::InitializeCounters(u8 *counters, uptr n) { in InitializeCounters()
353 CHECK_EQ(reinterpret_cast<uptr>(counters) % 16, 0); in InitializeCounters()
360 void CoverageData::UpdateModuleNameVec(uptr caller_pc, uptr range_beg, in UpdateModuleNameVec()
361 uptr range_end) { in UpdateModuleNameVec()
374 void CoverageData::InitializeGuards(s32 *guards, uptr n, in InitializeGuards()
376 uptr caller_pc) { in InitializeGuards()
383 uptr range_end = atomic_load(&pc_array_index, memory_order_relaxed); in InitializeGuards()
384 uptr range_beg = range_end - n; in InitializeGuards()
390 static const uptr kBundleCounterBits = 16;
394 uptr BundlePcAndCounter(uptr pc, uptr counter) { in BundlePcAndCounter()
397 static const uptr kMaxCounter = (1 << kBundleCounterBits) - 1; in BundlePcAndCounter()
404 uptr UnbundlePc(uptr bundle) { in UnbundlePc()
410 uptr UnbundleCounter(uptr bundle) { in UnbundleCounter()
418 void CoverageData::Add(uptr pc, u32 *guard) { in Add()
426 uptr idx = -guard_value - 1; in Add()
429 CHECK_LT(idx * sizeof(uptr), in Add()
431 uptr counter = atomic_fetch_add(&coverage_counter, 1, memory_order_relaxed); in Add()
443 void CoverageData::IndirCall(uptr caller, uptr callee, uptr callee_cache[], in IndirCall()
444 uptr cache_size) { in IndirCall()
448 uptr zero = 0; in IndirCall()
451 uptr idx = atomic_fetch_add(&cc_array_index, 1, memory_order_relaxed); in IndirCall()
452 CHECK_LT(idx * sizeof(uptr), in IndirCall()
458 for (uptr i = 2; i < cache_size; i++) { in IndirCall()
459 uptr was = 0; in IndirCall()
470 uptr CoverageData::GetNumberOf8bitCounters() { in GetNumberOf8bitCounters()
475 uptr CoverageData::Update8bitCounterBitsetAndClearCounters(u8 *bitset) { in Update8bitCounterBitsetAndClearCounters()
476 uptr num_new_bits = 0; in Update8bitCounterBitsetAndClearCounters()
477 uptr cur = 0; in Update8bitCounterBitsetAndClearCounters()
479 static const uptr kBatchSize = 8; in Update8bitCounterBitsetAndClearCounters()
480 CHECK_EQ(reinterpret_cast<uptr>(bitset) % kBatchSize, 0); in Update8bitCounterBitsetAndClearCounters()
481 for (uptr i = 0, len = counters_vec.size(); i < len; i++) { in Update8bitCounterBitsetAndClearCounters()
483 uptr n = counters_vec[i].n; in Update8bitCounterBitsetAndClearCounters()
486 CHECK_EQ(reinterpret_cast<uptr>(c) % kBatchSize, 0); in Update8bitCounterBitsetAndClearCounters()
492 for (uptr j = 0; j < n; j += kBatchSize, cur += kBatchSize) { in Update8bitCounterBitsetAndClearCounters()
501 for (uptr k = 0; k < kBatchSize; k++) { in Update8bitCounterBitsetAndClearCounters()
528 uptr *CoverageData::data() { in data()
532 uptr CoverageData::size() const { in size()
559 CHECK_EQ((uptr)block.data(), (uptr)(CovHeader *)block.data()); in CovWritePacked()
560 uptr header_size_with_module = sizeof(header) + module_name_length; in CovWritePacked()
610 uptr max_idx = tr_event_pointer - tr_event_array; in DumpTrace()
616 for (uptr i = 0, n = size(); i < n; i++) { in DumpTrace()
618 uptr module_address = 0; in DumpTrace()
632 for (uptr i = 0; i < comp_unit_name_vec.size(); i++) in DumpTrace()
639 uptr bytes_to_write = max_idx * sizeof(tr_event_array[0]); in DumpTrace()
643 uptr actually_written; in DumpTrace()
660 uptr max_idx = atomic_load(&cc_array_index, memory_order_relaxed); in DumpCallerCalleePairs()
666 uptr total = 0; in DumpCallerCalleePairs()
667 for (uptr i = 0; i < max_idx; i++) { in DumpCallerCalleePairs()
668 uptr *cc_cache = cc_array[i]; in DumpCallerCalleePairs()
670 uptr caller = cc_cache[0]; in DumpCallerCalleePairs()
671 uptr n_callees = cc_cache[1]; in DumpCallerCalleePairs()
673 uptr caller_module_address = 0; in DumpCallerCalleePairs()
676 for (uptr j = 2; j < n_callees; j++) { in DumpCallerCalleePairs()
677 uptr callee = cc_cache[j]; in DumpCallerCalleePairs()
681 uptr callee_module_address = 0; in DumpCallerCalleePairs()
712 uptr n = coverage_data.GetNumberOf8bitCounters(); in DumpCounters()
718 for (uptr m = 0; m < module_name_vec.size(); m++) { in DumpCounters()
739 for (uptr m = 0; m < module_name_vec.size(); m++) { in DumpAsBitSet()
740 uptr n_set_bits = 0; in DumpAsBitSet()
745 for (uptr i = r.beg; i < r.end; i++) { in DumpAsBitSet()
746 uptr pc = UnbundlePc(pc_array[i]); in DumpAsBitSet()
764 InternalMmapVector<uptr>* offsets) const { in GetRangeOffsets()
766 for (uptr i = 0; i < kNumWordsForMagic; i++) in GetRangeOffsets()
771 for (uptr i = r.beg; i < r.end; i++) { in GetRangeOffsets()
772 uptr pc = UnbundlePc(pc_array[i]); in GetRangeOffsets()
773 uptr counter = UnbundleCounter(pc_array[i]); in GetRangeOffsets()
775 uptr offset = 0; in GetRangeOffsets()
782 for (uptr i = 0; i < offsets->size(); i++) in GetRangeOffsets()
799 for (uptr i = 0; i < sancov_argv.size(); ++i) { in GenerateHtmlReport()
832 InternalMmapVector<uptr> offsets(0); in DumpOffsets()
837 for (uptr i = 0; i < cov_files.size(); ++i) { in DumpOffsets()
842 for (uptr m = 0; m < module_name_vec.size(); m++) { in DumpOffsets()
846 uptr num_offsets = offsets.size() - kNumWordsForMagic; in DumpOffsets()
958 __sanitizer_cov_indir_call16(uptr callee, uptr callee_cache16[]) { in __sanitizer_cov_indir_call16()
971 __sanitizer_cov_module_init(s32 *guards, uptr npcs, u8 *counters, in __sanitizer_cov_module_init()
988 uptr __sanitizer_get_total_unique_coverage() { in __sanitizer_get_total_unique_coverage()
993 uptr __sanitizer_get_total_unique_caller_callee_pairs() { in __sanitizer_get_total_unique_caller_callee_pairs()
1016 uptr __sanitizer_get_coverage_guards(uptr **data) { in __sanitizer_get_coverage_guards()
1022 uptr __sanitizer_get_coverage_pc_buffer(uptr **data) { in __sanitizer_get_coverage_pc_buffer()
1028 uptr __sanitizer_get_number_of_counters() { in __sanitizer_get_number_of_counters()
1033 uptr __sanitizer_update_counter_bitset_and_clear_counters(u8 *bitset) { in __sanitizer_update_counter_bitset_and_clear_counters()
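The bundling helpers referenced around source lines 390-410 (kBundleCounterBits, BundlePcAndCounter, UnbundlePc, UnbundleCounter) pack a pc and its hit counter into a single uptr before it is stored in pc_array. The following is a minimal standalone sketch of that scheme, assuming the counter is clamped to kMaxCounter and kept in the top kBundleCounterBits of a 64-bit word while 32-bit targets keep only the pc; it is a reconstruction from the signatures above, not the runtime's exact code.

// Sketch of the pc/counter bundling suggested by BundlePcAndCounter,
// UnbundlePc and UnbundleCounter in the listing above.
// Assumption: on 64-bit targets the counter is saturated at kMaxCounter
// and stored in the top kBundleCounterBits; 32-bit targets drop it.
#include <cassert>
#include <cstdint>
#include <cstdio>

typedef uintptr_t uptr;  // stand-in for sanitizer_common's uptr

static const uptr kWordSizeBits = sizeof(uptr) * 8;
static const uptr kBundleCounterBits = 16;

uptr BundlePcAndCounter(uptr pc, uptr counter) {
  if (kWordSizeBits != 64) return pc;  // no room for a counter on 32-bit
  static const uptr kMaxCounter = (1 << kBundleCounterBits) - 1;
  if (counter > kMaxCounter) counter = kMaxCounter;  // saturate
  assert((pc >> (kWordSizeBits - kBundleCounterBits)) == 0);  // pc must leave the top bits free
  return pc | (counter << (kWordSizeBits - kBundleCounterBits));
}

uptr UnbundlePc(uptr bundle) {
  if (kWordSizeBits != 64) return bundle;
  return (bundle << kBundleCounterBits) >> kBundleCounterBits;  // clear the counter bits
}

uptr UnbundleCounter(uptr bundle) {
  if (kWordSizeBits != 64) return 0;
  return bundle >> (kWordSizeBits - kBundleCounterBits);
}

int main() {
  uptr bundle = BundlePcAndCounter(0x401234, 70000);  // counter saturates at 65535
  std::printf("pc=0x%zx counter=%zu\n",
              (size_t)UnbundlePc(bundle), (size_t)UnbundleCounter(bundle));
}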
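The extern "C" entry points listed near the bottom (source lines 958-1033) form the query side of this coverage runtime. The sketch below shows how an in-process client might poll them; the declarations are restated locally with uintptr_t standing in for uptr rather than taken from a specific sanitizer header, and the program only links against a sanitizer runtime that still provides this legacy coverage interface, so treat it as an illustration under those assumptions.

// Sketch of a client polling the coverage runtime through the
// entry points whose definitions appear in the listing above.
// Assumption: the declarations below mirror those definitions; they are
// written out here instead of being included from a sanitizer header.
#include <cstdint>
#include <cstdio>

extern "C" {
uintptr_t __sanitizer_get_total_unique_coverage();
uintptr_t __sanitizer_get_total_unique_caller_callee_pairs();
uintptr_t __sanitizer_get_coverage_guards(uintptr_t **data);
uintptr_t __sanitizer_get_coverage_pc_buffer(uintptr_t **data);
}

int main() {
  // Total number of unique covered blocks (or edges) seen so far.
  uintptr_t unique = __sanitizer_get_total_unique_coverage();

  // Raw access to the guard array and the optional pc buffer.
  uintptr_t *guards = nullptr;
  uintptr_t n_guards = __sanitizer_get_coverage_guards(&guards);

  uintptr_t *pcs = nullptr;
  uintptr_t n_pcs = __sanitizer_get_coverage_pc_buffer(&pcs);

  std::printf("unique=%zu guards=%zu buffered pcs=%zu\n",
              (size_t)unique, (size_t)n_guards, (size_t)n_pcs);
}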