Lines matching refs:ptr in kernel/kcsan/core.c, the core of the Kernel Concurrency Sanitizer (KCSAN) runtime. Each entry gives the source line number, the matching line, and the enclosing function.

210 check_access(const volatile void *ptr, size_t size, int type, unsigned long ip);
223 check_access(scoped_access->ptr, scoped_access->size, in kcsan_check_scoped_accesses()
231 is_atomic(struct kcsan_ctx *ctx, const volatile void *ptr, size_t size, int type) in is_atomic() argument
246 !(type & KCSAN_ACCESS_COMPOUND) && IS_ALIGNED((unsigned long)ptr, size)) in is_atomic()
268 should_watch(struct kcsan_ctx *ctx, const volatile void *ptr, size_t size, int type) in should_watch() argument
277 if (is_atomic(ctx, ptr, size, type)) in should_watch()
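Lines 231-277 are the filtering step: should_watch() (268) refuses to set up a watchpoint for accesses that is_atomic() (231) deems race-free. The fragment at 246 is the "assume plain aligned writes up to word size are atomic" heuristic. A minimal standalone sketch of that predicate follows; IS_ALIGNED() is spelled out in simplified form, the KCSAN_ACCESS_* flag values are assumed for illustration, and the write/word-size half of the conjunction is inferred from KCSAN's documented behavior, since line 246 shows only the tail of the condition.

#include <stdbool.h>
#include <stddef.h>

#define KCSAN_ACCESS_WRITE    (1 << 0) /* flag values assumed for illustration */
#define KCSAN_ACCESS_COMPOUND (1 << 1)

/* Simplified IS_ALIGNED(): natural alignment check. */
#define IS_ALIGNED(x, a) (((x) & ((a) - 1)) == 0)

/*
 * Sketch of the heuristic at line 246: a plain (non-compound) write no
 * wider than a machine word, naturally aligned, is treated as atomic,
 * so no watchpoint is set up for it.
 */
static bool plain_aligned_write_is_atomic(const volatile void *ptr, size_t size, int type)
{
	return (type & KCSAN_ACCESS_WRITE) && size <= sizeof(long) &&
	       !(type & KCSAN_ACCESS_COMPOUND) &&
	       IS_ALIGNED((unsigned long)ptr, size);
}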
338 static __always_inline u64 read_instrumented_memory(const volatile void *ptr, size_t size) in read_instrumented_memory() argument
350 case 1: return *(const volatile u8 *)ptr; in read_instrumented_memory()
351 case 2: return *(const volatile u16 *)ptr; in read_instrumented_memory()
352 case 4: return *(const volatile u32 *)ptr; in read_instrumented_memory()
353 case 8: return *(const volatile u64 *)ptr; in read_instrumented_memory()
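Lines 338-353 give read_instrumented_memory() nearly verbatim; it is the helper the watchpoint code uses to snapshot the watched value. Reassembled below as a standalone sketch (the default arm returning 0 is an assumption for widths the runtime does not diff):

#include <stdint.h>
#include <stddef.h>

typedef uint8_t  u8;  /* kernel fixed-width types for a standalone build */
typedef uint16_t u16;
typedef uint32_t u32;
typedef uint64_t u64;

static inline u64 read_instrumented_memory(const volatile void *ptr, size_t size)
{
	switch (size) {
	case 1: return *(const volatile u8 *)ptr;
	case 2: return *(const volatile u16 *)ptr;
	case 4: return *(const volatile u32 *)ptr;
	case 8: return *(const volatile u64 *)ptr;
	default: return 0; /* unsupported width: value is never compared */
	}
}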
401 find_reorder_access(struct kcsan_ctx *ctx, const volatile void *ptr, size_t size, in find_reorder_access() argument
413 return reorder_access->ptr == ptr && reorder_access->size == size && in find_reorder_access()
418 set_reorder_access(struct kcsan_ctx *ctx, const volatile void *ptr, size_t size, in set_reorder_access() argument
433 reorder_access->ptr = ptr; in set_reorder_access()
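Lines 401-433 are the bookkeeping for accesses that instrumentation may move past a later barrier: set_reorder_access() (418) records the access, and find_reorder_access() (401) only treats a later access as the same one if every attribute matches (the record is also flushed on function return, line 1130). A sketch of the record and the match test at line 413; the exact field set and the type/ip comparisons are inferred from the fragments.

#include <stdbool.h>
#include <stddef.h>

/* Per-task record of a memory access that may be reordered (sketch). */
struct reorder_access_sketch {
	const volatile void *ptr;
	size_t size;
	int type;
	unsigned long ip;
};

/* Line 413: reuse the record only on an exact match, never a stale one. */
static bool matches_reorder_access(const struct reorder_access_sketch *ra,
				   const volatile void *ptr, size_t size,
				   int type, unsigned long ip)
{
	return ra->ptr == ptr && ra->size == size &&
	       ra->type == type && ra->ip == ip;
}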
455 static noinline void kcsan_found_watchpoint(const volatile void *ptr, in kcsan_found_watchpoint() argument
483 if (ctx->access_mask && !find_reorder_access(ctx, ptr, size, type, ip)) in kcsan_found_watchpoint()
493 if (!is_assert && kcsan_ignore_address(ptr)) in kcsan_found_watchpoint()
507 kcsan_report_set_info(ptr, size, type, ip, watchpoint - watchpoints); in kcsan_found_watchpoint()
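kcsan_found_watchpoint() (455-507) is the consumer side of the algorithm: this CPU's access has hit a watchpoint armed by another CPU, and before reporting (507) exactly one of the racing threads must claim the slot. A minimal C11 sketch of that claim, with a hypothetical CONSUMED_WATCHPOINT sentinel value standing in for the runtime's encoding:

#include <stdatomic.h>
#include <stdbool.h>

#define CONSUMED_WATCHPOINT 1L /* sentinel value: an assumption */

/*
 * Sketch: swap the still-armed encoded value for the sentinel; only the
 * thread whose compare-exchange succeeds goes on to report the race.
 */
static bool try_consume_watchpoint(atomic_long *watchpoint, long encoded)
{
	long expected = encoded;

	return atomic_compare_exchange_strong(watchpoint, &expected,
					      CONSUMED_WATCHPOINT);
}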
527 kcsan_setup_watchpoint(const volatile void *ptr, size_t size, int type, unsigned long ip) in kcsan_setup_watchpoint() argument
554 if (!is_assert && kcsan_ignore_address(ptr)) in kcsan_setup_watchpoint()
557 if (!check_encodable((unsigned long)ptr, size)) { in kcsan_setup_watchpoint()
570 is_reorder_access = find_reorder_access(ctx, ptr, size, type, ip); in kcsan_setup_watchpoint()
591 watchpoint = insert_watchpoint((unsigned long)ptr, size, is_write); in kcsan_setup_watchpoint()
609 old = is_reorder_access ? 0 : read_instrumented_memory(ptr, size); in kcsan_setup_watchpoint()
622 new = read_instrumented_memory(ptr, size); in kcsan_setup_watchpoint()
678 kcsan_report_known_origin(ptr, size, type, ip, in kcsan_setup_watchpoint()
689 kcsan_report_unknown_origin(ptr, size, type, ip, in kcsan_setup_watchpoint()
713 set_reorder_access(ctx, ptr, size, type, ip); in kcsan_setup_watchpoint()
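Lines 527-713 sample the producer side, kcsan_setup_watchpoint(), which implements KCSAN's soft-watchpoint protocol. A condensed sketch of the control flow visible in the fragments, with the kernel helpers declared as stand-ins (the remove_watchpoint() return convention and delay_window() are assumptions; the real code also skips the initial read for reorder accesses, line 609):

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/* Stand-ins for the kernel internals sampled above. */
extern long *insert_watchpoint(unsigned long addr, size_t size, bool is_write);
extern bool remove_watchpoint(long *watchpoint); /* false if a racer consumed it */
extern void delay_window(void);                  /* stall so racers can trip it */
extern uint64_t read_instrumented_memory(const volatile void *ptr, size_t size);
extern void report_known_origin(const volatile void *ptr, size_t size);
extern void report_unknown_origin(const volatile void *ptr, size_t size);

/*
 * Sketch: snapshot the value, arm a watchpoint, wait, re-read. A race is
 * detected either because a concurrent access consumed the watchpoint
 * (known origin, line 678) or because the value changed underneath us
 * without anything tripping the watchpoint (unknown origin, line 689).
 */
static void setup_watchpoint_sketch(const volatile void *ptr, size_t size, bool is_write)
{
	uint64_t old, new;
	long *watchpoint;

	old = read_instrumented_memory(ptr, size);                           /* line 609 */
	watchpoint = insert_watchpoint((unsigned long)ptr, size, is_write);  /* line 591 */
	delay_window();
	new = read_instrumented_memory(ptr, size);                           /* line 622 */

	if (!remove_watchpoint(watchpoint))
		report_known_origin(ptr, size);
	else if (old != new)
		report_unknown_origin(ptr, size);
}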
719 check_access(const volatile void *ptr, size_t size, int type, unsigned long ip) in check_access() argument
737 watchpoint = find_watchpoint((unsigned long)ptr, size, in check_access()
748 kcsan_found_watchpoint(ptr, size, type, ip, watchpoint, encoded_watchpoint); in check_access()
752 if (unlikely(should_watch(ctx, ptr, size, type))) { in check_access()
753 kcsan_setup_watchpoint(ptr, size, type, ip); in check_access()
765 ptr = reorder_access->ptr; in check_access()
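check_access() (719-765) ties the two halves together and is the entry point every instrumentation hook funnels into, so the common case must stay cheap: the watchpoint lookup is the only work unless an armed watchpoint is hit or should_watch() elects this access as the next watcher. Line 765 is the separate barrier path, where the recorded reorder access is substituted back and re-checked. A sketch of the fast-path split, with stand-in declarations for the functions sampled above:

#include <stdbool.h>
#include <stddef.h>

struct kcsan_ctx; /* opaque in this sketch */

extern struct kcsan_ctx *get_ctx(void);
extern long *find_watchpoint(unsigned long addr, size_t size, bool expect_write,
			     long *encoded_watchpoint);
extern void kcsan_found_watchpoint(const volatile void *ptr, size_t size, int type,
				   unsigned long ip, long *wp, long encoded);
extern bool should_watch(struct kcsan_ctx *ctx, const volatile void *ptr,
			 size_t size, int type);
extern void kcsan_setup_watchpoint(const volatile void *ptr, size_t size, int type,
				   unsigned long ip);

/* Sketch of the hot path: lookup first, rarely become the watcher. */
static void check_access_sketch(const volatile void *ptr, size_t size, int type,
				unsigned long ip, bool is_write)
{
	long encoded;
	long *watchpoint = find_watchpoint((unsigned long)ptr, size, is_write,
					   &encoded);                     /* line 737 */

	if (watchpoint)
		kcsan_found_watchpoint(ptr, size, type, ip, watchpoint,
				       encoded);                          /* line 748 */
	else if (should_watch(get_ctx(), ptr, size, type))                 /* line 752 */
		kcsan_setup_watchpoint(ptr, size, type, ip);               /* line 753 */
}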
907 kcsan_begin_scoped_access(const volatile void *ptr, size_t size, int type, in kcsan_begin_scoped_access() argument
912 check_access(ptr, size, type, _RET_IP_); in kcsan_begin_scoped_access()
917 sa->ptr = ptr; in kcsan_begin_scoped_access()
952 check_access(sa->ptr, sa->size, sa->type, sa->ip); in kcsan_end_scoped_access()
956 void __kcsan_check_access(const volatile void *ptr, size_t size, int type) in __kcsan_check_access() argument
958 check_access(ptr, size, type, _RET_IP_); in __kcsan_check_access()
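Lines 907-958 show the scoped-access API: the access is checked once when the scope is opened (912), again by kcsan_check_scoped_accesses() on every later check_access() in the same context (line 223), and one final time when the scope closes (952). __kcsan_check_access() (956) is the plain one-shot variant. A hedged usage sketch; obj is hypothetical, the caller provides storage for the struct, and KCSAN_ACCESS_ASSERT is used here on the assumption that the scope is meant as an exclusivity assertion:

/* Hypothetical kernel-side usage: extend race checking of obj->state
 * across a whole critical section rather than a single access. */
struct kcsan_scoped_access sa;

kcsan_begin_scoped_access(&obj->state, sizeof(obj->state),
			  KCSAN_ACCESS_ASSERT, &sa);
/* ... critical section: concurrent accesses to obj->state are reported ... */
kcsan_end_scoped_access(&sa);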
992 void __tsan_read##size(void *ptr); \
993 void __tsan_read##size(void *ptr) \
995 check_access(ptr, size, 0, _RET_IP_); \
998 void __tsan_unaligned_read##size(void *ptr) \
1001 void __tsan_write##size(void *ptr); \
1002 void __tsan_write##size(void *ptr) \
1004 check_access(ptr, size, KCSAN_ACCESS_WRITE, _RET_IP_); \
1007 void __tsan_unaligned_write##size(void *ptr) \
1010 void __tsan_read_write##size(void *ptr); \
1011 void __tsan_read_write##size(void *ptr) \
1013 check_access(ptr, size, \
1018 void __tsan_unaligned_read_write##size(void *ptr) \
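Lines 992-1018 are the body of the template (DEFINE_TSAN_READ_WRITE(size) in the runtime) that stamps out the entry points the compiler calls for every plain load and store. Hand-expanded for size 4; the compound read-write flags at line 1013 are completed from the surrounding pattern, an inference since the call is truncated in the listing:

void __tsan_read4(void *ptr)
{
	check_access(ptr, 4, 0, _RET_IP_);
}

void __tsan_write4(void *ptr)
{
	check_access(ptr, 4, KCSAN_ACCESS_WRITE, _RET_IP_);
}

void __tsan_read_write4(void *ptr)
{
	check_access(ptr, 4, KCSAN_ACCESS_COMPOUND | KCSAN_ACCESS_WRITE, _RET_IP_);
}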
1028 void __tsan_read_range(void *ptr, size_t size);
1029 void __tsan_read_range(void *ptr, size_t size) in __tsan_read_range() argument
1031 check_access(ptr, size, 0, _RET_IP_); in __tsan_read_range()
1035 void __tsan_write_range(void *ptr, size_t size);
1036 void __tsan_write_range(void *ptr, size_t size) in __tsan_write_range() argument
1038 check_access(ptr, size, KCSAN_ACCESS_WRITE, _RET_IP_); in __tsan_write_range()
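The range variants (1028-1038) cover accesses whose size is not a fixed power of two, such as memcpy()-style or whole-struct copies. A hand-written illustration of what instrumented code conceptually becomes (not actual compiler output; struct foo is hypothetical):

struct foo dst, src; /* hypothetical */

__tsan_read_range(&src, sizeof(src));  /* compiler-inserted, conceptually */
__tsan_write_range(&dst, sizeof(dst));
dst = src;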
1052 void __tsan_volatile_read##size(void *ptr); \
1053 void __tsan_volatile_read##size(void *ptr) \
1056 IS_ALIGNED((unsigned long)ptr, size); \
1059 check_access(ptr, size, is_atomic ? KCSAN_ACCESS_ATOMIC : 0, \
1063 void __tsan_unaligned_volatile_read##size(void *ptr) \
1066 void __tsan_volatile_write##size(void *ptr); \
1067 void __tsan_volatile_write##size(void *ptr) \
1070 IS_ALIGNED((unsigned long)ptr, size); \
1073 check_access(ptr, size, \
1079 void __tsan_unaligned_volatile_write##size(void *ptr) \
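Lines 1052-1079 handle volatile accesses, which the kernel uses for READ_ONCE()/WRITE_ONCE(): an access that is naturally aligned (1056, 1070) and no wider than a machine word is marked KCSAN_ACCESS_ATOMIC instead of plain. Hand expansion for size 8; the size-versus-word-size half of the conjunction is truncated in the listing and inferred here:

void __tsan_volatile_read8(void *ptr)
{
	const bool is_atomic = 8 <= sizeof(long long) &&
			       IS_ALIGNED((unsigned long)ptr, 8);

	check_access(ptr, 8, is_atomic ? KCSAN_ACCESS_ATOMIC : 0, _RET_IP_);
}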
1130 check_access(reorder_access->ptr, reorder_access->size, in __tsan_func_exit()
1171 u##bits __tsan_atomic##bits##_load(const u##bits *ptr, int memorder); \
1172 u##bits __tsan_atomic##bits##_load(const u##bits *ptr, int memorder) \
1176 check_access(ptr, bits / BITS_PER_BYTE, KCSAN_ACCESS_ATOMIC, _RET_IP_); \
1178 return __atomic_load_n(ptr, memorder); \
1181 void __tsan_atomic##bits##_store(u##bits *ptr, u##bits v, int memorder); \
1182 void __tsan_atomic##bits##_store(u##bits *ptr, u##bits v, int memorder) \
1186 check_access(ptr, bits / BITS_PER_BYTE, \
1189 __atomic_store_n(ptr, v, memorder); \
1194 u##bits __tsan_atomic##bits##_##op(u##bits *ptr, u##bits v, int memorder); \
1195 u##bits __tsan_atomic##bits##_##op(u##bits *ptr, u##bits v, int memorder) \
1199 check_access(ptr, bits / BITS_PER_BYTE, \
1203 return __atomic_##op##suffix(ptr, v, memorder); \
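Lines 1171-1203 are the templates for the __atomic builtins: each instruments the access and then forwards to the real builtin. Hand expansion for bits = 32; the store's KCSAN_ACCESS_WRITE | KCSAN_ACCESS_ATOMIC flags and the RMW's additional KCSAN_ACCESS_COMPOUND are inferred from the truncated calls at 1186 and 1199:

u32 __tsan_atomic32_load(const u32 *ptr, int memorder)
{
	check_access(ptr, 32 / BITS_PER_BYTE, KCSAN_ACCESS_ATOMIC, _RET_IP_);
	return __atomic_load_n(ptr, memorder);
}

void __tsan_atomic32_store(u32 *ptr, u32 v, int memorder)
{
	check_access(ptr, 32 / BITS_PER_BYTE,
		     KCSAN_ACCESS_WRITE | KCSAN_ACCESS_ATOMIC, _RET_IP_);
	__atomic_store_n(ptr, v, memorder);
}

u32 __tsan_atomic32_fetch_add(u32 *ptr, u32 v, int memorder)
{
	check_access(ptr, 32 / BITS_PER_BYTE,
		     KCSAN_ACCESS_COMPOUND | KCSAN_ACCESS_WRITE | KCSAN_ACCESS_ATOMIC,
		     _RET_IP_);
	return __atomic_fetch_add(ptr, v, memorder);
}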
1225 int __tsan_atomic##bits##_compare_exchange_##strength(u##bits *ptr, u##bits *exp, \
1227 int __tsan_atomic##bits##_compare_exchange_##strength(u##bits *ptr, u##bits *exp, \
1232 check_access(ptr, bits / BITS_PER_BYTE, \
1236 return __atomic_compare_exchange_n(ptr, exp, val, weak, mo, fail_mo); \
1241 u##bits __tsan_atomic##bits##_compare_exchange_val(u##bits *ptr, u##bits exp, u##bits val, \
1243 u##bits __tsan_atomic##bits##_compare_exchange_val(u##bits *ptr, u##bits exp, u##bits val, \
1248 check_access(ptr, bits / BITS_PER_BYTE, \
1252 __atomic_compare_exchange_n(ptr, &exp, val, 0, mo, fail_mo); \
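The compare-exchange wrappers (1225-1252) come in the two GCC/TSan flavors: the _strength variant (weak or strong) returns the int success flag from __atomic_compare_exchange_n(), while the _val variant discards the flag and returns the prior value, which the builtin leaves in exp either way. Hand expansion of the _val variant for bits = 32; the trailing return is inferred, since the listing cuts off after line 1252:

u32 __tsan_atomic32_compare_exchange_val(u32 *ptr, u32 exp, u32 val,
					 int mo, int fail_mo)
{
	check_access(ptr, 32 / BITS_PER_BYTE,
		     KCSAN_ACCESS_COMPOUND | KCSAN_ACCESS_WRITE | KCSAN_ACCESS_ATOMIC,
		     _RET_IP_);
	__atomic_compare_exchange_n(ptr, &exp, val, 0, mo, fail_mo);
	return exp; /* inferred: the builtin leaves the prior value in exp */
}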