/lib/ |
D | test_kasan.c |
    128  char *ptr;   in kmalloc_oob_right() local
    131  ptr = kmalloc(size, GFP_KERNEL);   in kmalloc_oob_right()
    132  KUNIT_ASSERT_NOT_ERR_OR_NULL(test, ptr);   in kmalloc_oob_right()
    134  OPTIMIZER_HIDE_VAR(ptr);   in kmalloc_oob_right()
    140  KUNIT_EXPECT_KASAN_FAIL(test, ptr[size] = 'x');   in kmalloc_oob_right()
    146  KUNIT_EXPECT_KASAN_FAIL(test, ptr[size + 5] = 'y');   in kmalloc_oob_right()
    149  KUNIT_EXPECT_KASAN_FAIL(test, ptr[0] =   in kmalloc_oob_right()
    150      ptr[size + KASAN_GRANULE_SIZE + 5]);   in kmalloc_oob_right()
    152  kfree(ptr);   in kmalloc_oob_right()
    157  char *ptr;   in kmalloc_oob_left() local
    [all …]
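The hits above trace the shape of a KASAN KUnit case: allocate, hide the pointer from the optimizer, then perform an access just past the object inside KUNIT_EXPECT_KASAN_FAIL. A minimal sketch of that pattern follows; the test name is made up, and KUNIT_EXPECT_KASAN_FAIL is a helper defined by the KASAN test itself rather than a generic KUnit macro.

#include <kunit/test.h>
#include <linux/slab.h>

/* Hypothetical test, not part of lib/test_kasan.c. */
static void kmalloc_oob_write_sketch(struct kunit *test)
{
        size_t size = 128;
        char *ptr;

        ptr = kmalloc(size, GFP_KERNEL);
        KUNIT_ASSERT_NOT_ERR_OR_NULL(test, ptr);

        /* Stop the compiler from proving the access below is out of bounds. */
        OPTIMIZER_HIDE_VAR(ptr);

        /* One byte past the object: KASAN should report this write. */
        KUNIT_EXPECT_KASAN_FAIL(test, ptr[size] = 'x');

        kfree(ptr);
}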
|
D | assoc_array.c |
     27  const struct assoc_array_ptr *cursor, *ptr, *parent;   in assoc_array_subtree_iterate() local
     52  ptr = READ_ONCE(node->slots[slot]); /* Address dependency. */   in assoc_array_subtree_iterate()
     53  has_meta |= (unsigned long)ptr;   in assoc_array_subtree_iterate()
     54  if (ptr && assoc_array_ptr_is_leaf(ptr)) {   in assoc_array_subtree_iterate()
     59  ret = iterator(assoc_array_ptr_to_leaf(ptr),   in assoc_array_subtree_iterate()
     81  ptr = READ_ONCE(node->slots[slot]); /* Address dependency. */   in assoc_array_subtree_iterate()
     82  if (assoc_array_ptr_is_meta(ptr)) {   in assoc_array_subtree_iterate()
     83  cursor = ptr;   in assoc_array_subtree_iterate()
    177  struct assoc_array_ptr *cursor, *ptr;   in assoc_array_walk() local
    209  ptr = READ_ONCE(node->slots[slot]); /* Address dependency. */   in assoc_array_walk()
    [all …]
|
D | test_vmalloc.c |
     78  void *ptr;   in random_size_align_alloc_test() local
     94  ptr = __vmalloc_node(size, align, GFP_KERNEL | __GFP_ZERO, 0,   in random_size_align_alloc_test()
     96  if (!ptr)   in random_size_align_alloc_test()
     99  vfree(ptr);   in random_size_align_alloc_test()
    111  void *ptr;   in align_shift_alloc_test() local
    117  ptr = __vmalloc_node(PAGE_SIZE, align, GFP_KERNEL|__GFP_ZERO, 0,   in align_shift_alloc_test()
    119  if (!ptr)   in align_shift_alloc_test()
    122  vfree(ptr);   in align_shift_alloc_test()
    130  void *ptr;   in fix_align_alloc_test() local
    134  ptr = __vmalloc_node(5 * PAGE_SIZE, THREAD_ALIGN << 1,   in fix_align_alloc_test()
    [all …]
|
D | ubsan.c |
    197  unsigned long ptr)   in handle_misaligned_access() argument
    206  (void *)ptr, data->type->type_name);   in handle_misaligned_access()
    213  unsigned long ptr)   in handle_object_size_mismatch() argument
    221  (void *) ptr);   in handle_object_size_mismatch()
    227  unsigned long ptr)   in ubsan_type_mismatch_common() argument
    231  if (!ptr)   in ubsan_type_mismatch_common()
    233  else if (data->alignment && !IS_ALIGNED(ptr, data->alignment))   in ubsan_type_mismatch_common()
    234  handle_misaligned_access(data, ptr);   in ubsan_type_mismatch_common()
    236  handle_object_size_mismatch(data, ptr);   in ubsan_type_mismatch_common()
    242  void *ptr)   in __ubsan_handle_type_mismatch() argument
    [all …]
|
D | devres.c |
     30  void __iomem **ptr, *addr = NULL;   in __devm_ioremap() local
     32  ptr = devres_alloc(devm_ioremap_release, sizeof(*ptr), GFP_KERNEL);   in __devm_ioremap()
     33  if (!ptr)   in __devm_ioremap()
     52  *ptr = addr;   in __devm_ioremap()
     53  devres_add(dev, ptr);   in __devm_ioremap()
     55  devres_free(ptr);   in __devm_ioremap()
    293  void __iomem **ptr, *addr;   in devm_ioport_map() local
    295  ptr = devres_alloc(devm_ioport_map_release, sizeof(*ptr), GFP_KERNEL);   in devm_ioport_map()
    296  if (!ptr)   in devm_ioport_map()
    301  *ptr = addr;   in devm_ioport_map()
    [all …]
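These hits follow the standard devres idiom: devres_alloc() a small holder for the real resource, perform the allocation, then either stash the result and devres_add() it so it is released automatically on driver detach, or devres_free() the holder on failure. A minimal sketch of that idiom, with hypothetical my_resource_*() names; only devres_alloc()/devres_add()/devres_free() are the real devres API.

#include <linux/device.h>

struct my_resource;                                              /* hypothetical */
struct my_resource *my_resource_get(struct device *dev);         /* hypothetical */
void my_resource_put(struct my_resource *r);                     /* hypothetical */

static void devm_my_resource_release(struct device *dev, void *res)
{
        my_resource_put(*(struct my_resource **)res);
}

struct my_resource *devm_my_resource_get(struct device *dev)
{
        struct my_resource **ptr, *r;

        ptr = devres_alloc(devm_my_resource_release, sizeof(*ptr), GFP_KERNEL);
        if (!ptr)
                return NULL;

        r = my_resource_get(dev);
        if (r) {
                *ptr = r;
                devres_add(dev, ptr);   /* auto-released on driver detach */
        } else {
                devres_free(ptr);       /* nothing acquired, drop the bookkeeping */
        }

        return r;
}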
|
D | test_ubsan.c |
     61  bool val, val2, *ptr;   in test_ubsan_load_invalid_value() local
     70  ptr = &val2;   in test_ubsan_load_invalid_value()
     85  volatile int *ptr, val = 6;   in test_ubsan_misaligned_access() local
     88  ptr = (int *)(arr + 1);   in test_ubsan_misaligned_access()
     89  *ptr = val;   in test_ubsan_misaligned_access()
|
D | vsprintf.c |
    653  static char *err_ptr(char *buf, char *end, void *ptr,   in err_ptr() argument
    656  int err = PTR_ERR(ptr);   in err_ptr()
    692  static const char *check_pointer_msg(const void *ptr)   in check_pointer_msg() argument
    694  if (!ptr)   in check_pointer_msg()
    697  if ((unsigned long)ptr < PAGE_SIZE || IS_ERR_VALUE(ptr))   in check_pointer_msg()
    703  static int check_pointer(char **buf, char *end, const void *ptr,   in check_pointer() argument
    708  err_msg = check_pointer_msg(ptr);   in check_pointer()
    728  const void *ptr,   in pointer_string() argument
    734  spec.field_width = 2 * sizeof(ptr);   in pointer_string()
    738  return number(buf, end, (unsigned long int)ptr, spec);   in pointer_string()
    [all …]
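check_pointer_msg()/check_pointer() decide whether a pointer handed to a %p extension is safe to dereference before formatting it. A rough restatement of that check as a standalone helper (name and exact wording are hypothetical; treat this as a sketch, not the vsprintf.c code):

#include <linux/err.h>
#include <linux/mm.h>

/* Hypothetical helper mirroring the check shown above. */
static const char *pointer_ok_msg(const void *ptr)
{
        if (!ptr)
                return "(null)";        /* print a marker instead of oopsing */

        /* First page or an ERR_PTR() value: not a real object. */
        if ((unsigned long)ptr < PAGE_SIZE || IS_ERR_VALUE((unsigned long)ptr))
                return "(efault)";

        return NULL;                    /* looks safe to dereference */
}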
|
D | hexdump.c |
    138  const u8 *ptr = buf;   in hex_dump_to_buffer() local
    201  ch = ptr[j];   in hex_dump_to_buffer()
    224  ch = ptr[j];   in hex_dump_to_buffer()
    273  const u8 *ptr = buf;   in print_hex_dump() local
    284  hex_dump_to_buffer(ptr + i, linelen, rowsize, groupsize,   in print_hex_dump()
    290  level, prefix_str, ptr + i, linebuf);   in print_hex_dump()
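For callers, the usual entry point is print_hex_dump(), which walks the buffer and formats each row with hex_dump_to_buffer() as seen above. A small usage sketch (the wrapper, buffer name and prefix string are made up):

#include <linux/types.h>
#include <linux/printk.h>

/* Hypothetical caller: 16 bytes per row, 1-byte groups, offset prefix, ASCII column. */
static void dump_frame(const void *frame, size_t len)
{
        print_hex_dump(KERN_DEBUG, "frame: ", DUMP_PREFIX_OFFSET,
                       16, 1, frame, len, true);
}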
|
D | test_hmm.c |
     44  void *ptr;   member
    108  bounce->ptr = vmalloc(size);   in dmirror_bounce_init()
    109  if (!bounce->ptr)   in dmirror_bounce_init()
    116  vfree(bounce->ptr);   in dmirror_bounce_fini()
    319  void *ptr;   in dmirror_do_read() local
    321  ptr = bounce->ptr + ((start - bounce->addr) & PAGE_MASK);   in dmirror_do_read()
    334  memcpy(ptr, tmp, PAGE_SIZE);   in dmirror_do_read()
    337  ptr += PAGE_SIZE;   in dmirror_do_read()
    375  if (copy_to_user(u64_to_user_ptr(cmd->ptr), bounce.ptr,   in dmirror_read()
    388  void *ptr;   in dmirror_do_write() local
    [all …]
|
D | test_kasan_module.c |
     81  struct kasan_rcu_info *ptr;   in kasan_rcu_uaf() local
     84  ptr = kmalloc(sizeof(struct kasan_rcu_info), GFP_KERNEL);   in kasan_rcu_uaf()
     85  if (!ptr) {   in kasan_rcu_uaf()
     90  global_rcu_ptr = rcu_dereference_protected(ptr, NULL);   in kasan_rcu_uaf()
|
D | idr.c |
     33  int idr_alloc_u32(struct idr *idr, void *ptr, u32 *nextid,   in idr_alloc_u32() argument
     52  radix_tree_iter_replace(&idr->idr_rt, &iter, slot, ptr);   in idr_alloc_u32()
     79  int idr_alloc(struct idr *idr, void *ptr, int start, int end, gfp_t gfp)   in idr_alloc() argument
     87  ret = idr_alloc_u32(idr, ptr, &id, end > 0 ? end - 1 : INT_MAX, gfp);   in idr_alloc()
    117  int idr_alloc_cyclic(struct idr *idr, void *ptr, int start, int end, gfp_t gfp)   in idr_alloc_cyclic() argument
    125  err = idr_alloc_u32(idr, ptr, &id, max, gfp);   in idr_alloc_cyclic()
    128  err = idr_alloc_u32(idr, ptr, &id, max, gfp);   in idr_alloc_cyclic()
    290  void *idr_replace(struct idr *idr, void *ptr, unsigned long id)   in idr_replace() argument
    302  __radix_tree_replace(&idr->idr_rt, node, slot, ptr);   in idr_replace()
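idr_alloc() and friends hand out small integer IDs that map back to a pointer. A hypothetical user of that API (struct session, the IDR name and the helpers are made up; callers supply their own locking around the IDR):

#include <linux/idr.h>
#include <linux/slab.h>

struct session { int id; };             /* hypothetical object */

static DEFINE_IDR(session_idr);         /* hypothetical IDR */

static int session_register(struct session *s)
{
        int id;

        /* start = 1, end = 0 means "no upper bound"; GFP_KERNEL may sleep. */
        id = idr_alloc(&session_idr, s, 1, 0, GFP_KERNEL);
        if (id < 0)
                return id;              /* -ENOMEM or -ENOSPC */

        s->id = id;
        return 0;
}

static void session_unregister(struct session *s)
{
        idr_remove(&session_idr, s->id);
}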
|
D | decompress_unxz.c |
    158  #define kfree(ptr) free(ptr)   argument
    160  #define vfree(ptr) do { if (ptr != NULL) free(ptr); } while (0)   argument
|
D | decompress_unlzma.c |
     42  static long long INIT read_int(unsigned char *ptr, int size)   in read_int() argument
     48  ret = (ret << 8) | ptr[size-i-1];   in read_int()
     69  uint8_t *ptr;   member
     96  rc->ptr = rc->buffer;   in rc_read()
    112  rc->ptr = rc->buffer;   in rc_init()
    123  if (rc->ptr >= rc->buffer_end)   in rc_init_code()
    125  rc->code = (rc->code << 8) | *rc->ptr++;   in rc_init_code()
    133  if (rc->ptr >= rc->buffer_end)   in rc_do_normalize()
    136  rc->code = (rc->code << 8) | *rc->ptr++;   in rc_do_normalize()
    581  if (rc.ptr >= rc.buffer_end)   in unlzma()
    [all …]
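read_int() at line 42 assembles a multi-byte integer from the header: the loop at line 48 walks from the most significant byte down to byte 0, i.e. it reads a little-endian value. The same loop spelled out as a standalone sketch (hypothetical name, without the INIT section annotation):

/* Read a little-endian integer of `size` bytes; byte 0 is least significant. */
static long long read_le_int(const unsigned char *ptr, int size)
{
        long long ret = 0;
        int i;

        for (i = 0; i < size; i++)
                ret = (ret << 8) | ptr[size - i - 1];   /* MSB first, down to byte 0 */

        return ret;
}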
|
D | random32.c |
     77  u8 *ptr = buf;   in prandom_bytes_state() local
     80  put_unaligned(prandom_u32_state(state), (u32 *) ptr);   in prandom_bytes_state()
     81  ptr += sizeof(u32);   in prandom_bytes_state()
     88  *ptr++ = (u8) rem;   in prandom_bytes_state()
    403  u8 *ptr = buf;   in prandom_bytes() local
    406  put_unaligned(siprand_u32(state), (u32 *)ptr);   in prandom_bytes()
    407  ptr += sizeof(u32);   in prandom_bytes()
    415  *ptr++ = (u8)rem;   in prandom_bytes()
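Both prandom_bytes_state() and prandom_bytes() fill the destination the same way: whole 32-bit words via put_unaligned(), then one final word spilled a byte at a time for the tail. A generic sketch of that pattern, with a hypothetical next_u32() standing in for the PRNG:

#include <linux/types.h>
#include <asm/unaligned.h>

/* Hypothetical generic version of the fill loop shown above. */
static void fill_bytes(void *buf, size_t bytes, u32 (*next_u32)(void))
{
        u8 *ptr = buf;

        while (bytes >= sizeof(u32)) {
                put_unaligned(next_u32(), (u32 *)ptr);
                ptr += sizeof(u32);
                bytes -= sizeof(u32);
        }

        if (bytes > 0) {
                u32 rem = next_u32();

                do {
                        *ptr++ = (u8)rem;       /* lowest byte first */
                        rem >>= 8;
                } while (--bytes > 0);
        }
}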
|
D | test_overflow.c |
    495  #define free0(free, arg, ptr) free(ptr)   argument
    496  #define free1(free, arg, ptr) free(arg, ptr)   argument
    506  void *ptr; \
    509  ptr = alloc ## want_arg ## want_gfp ## want_node (func, arg, 1);\
    510  if (!ptr) { \
    514  free ## want_arg (free_func, arg, ptr); \
    517  ptr = alloc ## want_arg ## want_gfp ## want_node (func, arg, \
    519  if (!ptr) { \
    523  free ## want_arg (free_func, arg, ptr); \
    526  ptr = alloc ## want_arg ## want_gfp ## want_node (func, arg, \
    [all …]
|
D | memweight.c |
     11  size_t memweight(const void *ptr, size_t bytes)   in memweight() argument
     15  const unsigned char *bitmap = ptr;   in memweight()
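memweight() counts the set bits in an arbitrary, byte-granular memory region, so unlike the unsigned-long-based bitmap helpers it does not require an aligned bitmap. A trivial usage sketch (wrapper and parameter names are made up):

#include <linux/string.h>

/* Hypothetical caller: how many pages are marked used in a byte-sized bitmap? */
static size_t pages_in_use(const void *page_bitmap, size_t bitmap_bytes)
{
        return memweight(page_bitmap, bitmap_bytes);
}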
|
D | test_stackinit.c |
    106  #define DO_NOTHING_RETURN_SCALAR(ptr) *(ptr)   argument
    107  #define DO_NOTHING_RETURN_STRING(ptr) /**/   argument
    108  #define DO_NOTHING_RETURN_STRUCT(ptr) /**/   argument
    260  do_nothing_ ## name(var_type *ptr) \
    263  if ((unsigned long)ptr > 0x2) \
    264  return DO_NOTHING_RETURN_ ## which(ptr); \
    266  return DO_NOTHING_RETURN_ ## which(ptr + 1); \
|
D | test_meminit.c |
     29  static int __init count_nonzero_bytes(void *ptr, size_t size)   in count_nonzero_bytes() argument
     32  unsigned char *p = (unsigned char *)ptr;   in count_nonzero_bytes()
     41  static void __init fill_with_garbage_skip(void *ptr, int size, size_t skip)   in fill_with_garbage_skip() argument
     43  unsigned int *p = (unsigned int *)((char *)ptr + skip);   in fill_with_garbage_skip()
     58  static void __init fill_with_garbage(void *ptr, size_t size)   in fill_with_garbage() argument
     60  fill_with_garbage_skip(ptr, size, 0);   in fill_with_garbage()
|
D | test_lockup.c |
    413  void *ptr = (void *)addr;   in test_kernel_ptr() local
    421  (access_ok((void __user *)ptr, 1) ||   in test_kernel_ptr()
    422  access_ok((void __user *)ptr + size - 1, 1))) {   in test_kernel_ptr()
    427  if (get_kernel_nofault(buf, ptr) ||   in test_kernel_ptr()
    428  get_kernel_nofault(buf, ptr + size - 1)) {   in test_kernel_ptr()
    439  void *ptr = (void *)addr + offset;   in test_magic() local
    445  if (get_kernel_nofault(magic, ptr) || magic != expected) {   in test_magic()
|
/lib/zstd/ |
D | zstd_common.c |
     30  void *const ptr = ZSTD_PTR_ALIGN((stack)->ptr); \
     31  (stack)->ptr = (char *)ptr + (size); \
     32  (stack)->ptr <= (stack)->end ? ptr : NULL; \
     45  stack->ptr = workspace;   in ZSTD_initStack()
     54  *size = (BYTE const *)stack->end - (BYTE *)ZSTD_PTR_ALIGN(stack->ptr);   in ZSTD_stackAllocAll()
     71  void ZSTD_free(void *ptr, ZSTD_customMem customMem)   in ZSTD_free() argument
     73  if (ptr != NULL)   in ZSTD_free()
     74  customMem.customFree(customMem.opaque, ptr);   in ZSTD_free()
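Lines 30-32 are the body of ZSTD's stack allocator macro: align the cursor, bump it by the request, and return NULL if it ran past the end of the workspace. The same idea restated as a plain function, under the assumption of a power-of-two alignment (struct and names here are hypothetical, not the ZSTD types):

#include <linux/types.h>

struct bump_stack {                     /* hypothetical */
        char *ptr;                      /* next free byte */
        char *end;                      /* one past the end of the workspace */
};

static void *bump_alloc(struct bump_stack *stack, size_t size, size_t align)
{
        /* align must be a power of two */
        uintptr_t p = ((uintptr_t)stack->ptr + align - 1) & ~(align - 1);
        char *ret = (char *)p;

        if (ret + size > stack->end)
                return NULL;            /* workspace exhausted */

        stack->ptr = ret + size;
        return ret;
}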
|
D | bitstream.h |
     74  char *ptr;   member
    106  const char *ptr;   member
    167  bitC->ptr = bitC->startPtr;   in BIT_initCStream()
    168  bitC->endPtr = bitC->startPtr + dstCapacity - sizeof(bitC->ptr);   in BIT_initCStream()
    169  if (dstCapacity <= sizeof(bitC->ptr))   in BIT_initCStream()
    196  ZSTD_writeLEST(bitC->ptr, bitC->bitContainer);   in BIT_flushBitsFast()
    197  bitC->ptr += nbBytes;   in BIT_flushBitsFast()
    208  ZSTD_writeLEST(bitC->ptr, bitC->bitContainer);   in BIT_flushBits()
    209  bitC->ptr += nbBytes;   in BIT_flushBits()
    210  if (bitC->ptr > bitC->endPtr)   in BIT_flushBits()
    [all …]
|
D | fse_decompress.c |
    165  void *ptr = dt;   in FSE_buildDTable_rle() local
    166  FSE_DTableHeader *const DTableH = (FSE_DTableHeader *)ptr;   in FSE_buildDTable_rle()
    182  void *ptr = dt;   in FSE_buildDTable_raw() local
    183  FSE_DTableHeader *const DTableH = (FSE_DTableHeader *)ptr;   in FSE_buildDTable_raw()
    277  const void *ptr = dt;   in FSE_decompress_usingDTable() local
    278  const FSE_DTableHeader *DTableH = (const FSE_DTableHeader *)ptr;   in FSE_decompress_usingDTable()
|
D | fse.h |
    429  const void *ptr = ct;   in FSE_initCState() local
    430  const U16 *u16ptr = (const U16 *)ptr;   in FSE_initCState()
    431  const U32 tableLog = ZSTD_read16(ptr);   in FSE_initCState()
    483  const void *ptr = dt;   in FSE_initDState() local
    484  const FSE_DTableHeader *const DTableH = (const FSE_DTableHeader *)ptr;   in FSE_initDState()
|
/lib/lz4/ |
D | lz4defs.h |
    105  static FORCE_INLINE U16 LZ4_read16(const void *ptr)   in LZ4_read16() argument
    107  return get_unaligned((const U16 *)ptr);   in LZ4_read16()
    110  static FORCE_INLINE U32 LZ4_read32(const void *ptr)   in LZ4_read32() argument
    112  return get_unaligned((const U32 *)ptr);   in LZ4_read32()
    115  static FORCE_INLINE size_t LZ4_read_ARCH(const void *ptr)   in LZ4_read_ARCH() argument
    117  return get_unaligned((const size_t *)ptr);   in LZ4_read_ARCH()
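The LZ4_read*() helpers wrap get_unaligned() so the compressor can read the input stream at arbitrary byte offsets without faulting on strict-alignment architectures. The same pattern, shown for a hypothetical 64-bit reader:

#include <linux/types.h>
#include <asm/unaligned.h>

/* Hypothetical 64-bit counterpart of the accessors listed above. */
static inline u64 read64_unaligned(const void *ptr)
{
        return get_unaligned((const u64 *)ptr);
}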
|
/lib/livepatch/ |
D | test_klp_shadow_vars.c |
     20  void *ptr;   member
     35  static int ptr_id(void *ptr)   in ptr_id() argument
     41  if (sp->ptr == ptr)   in ptr_id()
     48  sp->ptr = ptr;   in ptr_id()
|