/kernel/dma/

ops_helpers.c
    23    struct page *page = dma_common_vaddr_to_page(cpu_addr);  in dma_common_get_sgtable()  [local]
    43    struct page *page = dma_common_vaddr_to_page(cpu_addr);  in dma_common_mmap()  [local]
    66    struct page *page;  in dma_common_alloc_pages()  [local]
    89    void dma_common_free_pages(struct device *dev, size_t size, struct page *page,  in dma_common_free_pages()
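
The ops_helpers.c hits all concern exporting or mapping the page behind a coherent allocation. As a hedged illustration of the single-entry scatterlist pattern that dma_common_get_sgtable() is built around (the wrapper name below is assumed, not kernel code):

```c
#include <linux/dma-mapping.h>
#include <linux/gfp.h>
#include <linux/mm.h>
#include <linux/scatterlist.h>

/* Minimal sketch: describe a coherent buffer's backing page as a
 * one-entry scatterlist, the pattern dma_common_get_sgtable() uses. */
static int export_coherent_page(struct sg_table *sgt, struct page *page,
				size_t size)
{
	int ret = sg_alloc_table(sgt, 1, GFP_KERNEL);

	if (!ret)
		sg_set_page(sgt->sgl, page, PAGE_ALIGN(size), 0);
	return ret;
}
```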
|
direct.c
    100   static void __dma_direct_free_pages(struct device *dev, struct page *page,  in __dma_direct_free_pages()
    110   struct page *page = swiotlb_alloc(dev, size);  in dma_direct_alloc_swiotlb()  [local]
    124   struct page *page = NULL;  in __dma_direct_alloc_pages()  [local]
    177   struct page *page;  in dma_direct_alloc_from_pool()  [local]
    195   struct page *page;  in dma_direct_alloc_no_mapping()  [local]
    214   struct page *page;  in dma_direct_alloc()  [local]
    365   struct page *page;  in dma_direct_alloc_pages()  [local]
    386   struct page *page, dma_addr_t dma_addr,  in dma_direct_free_pages()
    529   struct page *page = dma_direct_to_page(dev, dma_addr);  in dma_direct_get_sgtable()  [local]
|
debug.h
    65    static inline void debug_dma_map_page(struct device *dev, struct page *page,  in debug_dma_map_page()
    137   static inline void debug_dma_alloc_pages(struct device *dev, struct page *page,  in debug_dma_alloc_pages()
    144   static inline void debug_dma_free_pages(struct device *dev, struct page *page,  in debug_dma_free_pages()
|
mapping.c
    155   dma_addr_t dma_map_page_attrs(struct device *dev, struct page *page,  in dma_map_page_attrs()
    710   struct page *page = __dma_alloc_pages(dev, size, dma_handle, dir, gfp);  in dma_alloc_pages()  [local]
    723   static void __dma_free_pages(struct device *dev, size_t size, struct page *page,  in __dma_free_pages()
    737   void dma_free_pages(struct device *dev, size_t size, struct page *page,  in dma_free_pages()
    747   size_t size, struct page *page)  in dma_mmap_pages()
    763   struct page *page;  in alloc_single_sgt()  [local]
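
mapping.c holds the driver-facing entry points listed above: dma_map_page_attrs(), dma_alloc_pages(), dma_free_pages() and dma_mmap_pages(). A minimal usage sketch of the alloc/free pair, assuming a hypothetical "mydev" driver and DMA_BIDIRECTIONAL traffic:

```c
#include <linux/dma-mapping.h>
#include <linux/gfp.h>

/* Hedged sketch: allocate and release a non-coherent DMA buffer via the
 * dma_alloc_pages()/dma_free_pages() API implemented in kernel/dma/mapping.c.
 * The "mydev_*" names are placeholders. */
static struct page *mydev_alloc_buffer(struct device *dev, size_t size,
				       dma_addr_t *dma)
{
	return dma_alloc_pages(dev, size, dma, DMA_BIDIRECTIONAL, GFP_KERNEL);
}

static void mydev_free_buffer(struct device *dev, size_t size,
			      struct page *page, dma_addr_t dma)
{
	dma_free_pages(dev, size, page, dma, DMA_BIDIRECTIONAL);
}
```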
|
pool.c
    83    struct page *page = NULL;  in atomic_pool_expand()  [local]
    270   struct page *page;  in dma_alloc_from_pool()  [local]
|
contiguous.c
    373   struct page *page;  in dma_alloc_contiguous()  [local]
    406   void dma_free_contiguous(struct device *dev, struct page *page, size_t size)  in dma_free_contiguous()
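
dma_alloc_contiguous() and dma_free_contiguous() are DMA-core internals declared in <linux/dma-map-ops.h> that hand CMA-backed pages to dma-direct; the wrappers below only illustrate how the pair is balanced and are not a driver-facing API:

```c
#include <linux/dma-map-ops.h>
#include <linux/gfp.h>

/* Hedged sketch (illustrative names): CMA-backed page allocation as it is
 * used internally by dma-direct. */
static struct page *grab_cma_pages(struct device *dev, size_t size)
{
	return dma_alloc_contiguous(dev, size, GFP_KERNEL);
}

static void drop_cma_pages(struct device *dev, struct page *page, size_t size)
{
	dma_free_contiguous(dev, page, size);
}
```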
|
remap.c
    40    void *dma_common_contiguous_remap(struct page *page, size_t size,  in dma_common_contiguous_remap()
|
debug.c
    1058  struct page *page, size_t offset)  in check_for_stack()
    1210  void debug_dma_map_page(struct device *dev, struct page *page, size_t offset,  in debug_dma_map_page()
    1388  struct page *page;  in virt_to_paddr()  [local]
    1590  void debug_dma_alloc_pages(struct device *dev, struct page *page,  in debug_dma_alloc_pages()
    1614  void debug_dma_free_pages(struct device *dev, struct page *page,  in debug_dma_free_pages()
|
dummy.c
    14    static dma_addr_t dma_dummy_map_page(struct device *dev, struct page *page,  in dma_dummy_map_page()
|
direct.h
    84    struct page *page, unsigned long offset, size_t size,  in dma_direct_map_page()
|
swiotlb.c
    577   struct page *page;  in alloc_dma_pages()  [local]
    615   struct page *page;  in swiotlb_alloc_tlb()  [local]
    898   struct page *page;  in swiotlb_bounce()  [local]
    1779  bool swiotlb_free(struct device *dev, struct page *page, size_t size)  in swiotlb_free()
|
coherent.c
    204   int page = (vaddr - mem->virt_base) >> PAGE_SHIFT;  in __dma_release_from_coherent()  [local]
|
/kernel/

kexec_core.c
    294   static void kimage_free_pages(struct page *page)  in kimage_free_pages()
    310   struct page *page, *next;  in kimage_free_page_list()  [local]
    479   struct page *page;  in kimage_add_entry()  [local]
    507   static int kimage_add_page(struct kimage *image, unsigned long page)  in kimage_add_page()
    540   struct page *page;  in kimage_free_entry()  [local]
    595   unsigned long page)  in kimage_dst_used()
    635   struct page *page;  in kimage_alloc_page()  [local]
    731   struct page *page;  in kimage_load_normal_segment()  [local]
    802   struct page *page;  in kimage_load_crash_segment()  [local]
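
The kimage_* helpers above all do page-at-a-time bookkeeping for the new kernel image. Underneath them sits ordinary order-based page allocation; a stripped-down sketch with illustrative names (the real kimage_alloc_pages()/kimage_free_pages() additionally flag the pages and chain them on the image's lists, which is omitted here):

```c
#include <linux/gfp.h>
#include <linux/mm.h>

/* Hedged sketch: the raw alloc/free primitive beneath kexec's image
 * page management. */
static struct page *grab_image_pages(gfp_t gfp_mask, unsigned int order)
{
	return alloc_pages(gfp_mask, order);
}

static void drop_image_pages(struct page *page, unsigned int order)
{
	__free_pages(page, order);
}
```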
|
watch_queue.c
    61    struct page *page;  in watch_queue_pipe_buf_release()  [local]
    103   struct page *page;  in post_one_notification()  [local]
|
panic.c
    120   char *page)  in warn_count_show()
|
/kernel/power/

snapshot.c
    88    static inline void hibernate_map_page(struct page *page)  in hibernate_map_page()
    100   static inline void hibernate_unmap_page(struct page *page)  in hibernate_unmap_page()
    229   struct page *page;  in alloc_image_page()  [local]
    257   struct page *page;  in free_image_page()  [local]
    1042  void swsusp_set_page_free(struct page *page)  in swsusp_set_page_free()
    1048  static int swsusp_page_is_free(struct page *page)  in swsusp_page_is_free()
    1054  void swsusp_unset_page_free(struct page *page)  in swsusp_unset_page_free()
    1060  static void swsusp_set_page_forbidden(struct page *page)  in swsusp_set_page_forbidden()
    1066  int swsusp_page_is_forbidden(struct page *page)  in swsusp_page_is_forbidden()
    1072  static void swsusp_unset_page_forbidden(struct page *page)  in swsusp_unset_page_forbidden()
    [all …]
|
swap.c
    250   struct page *page = bio_first_page_all(bio);  in hib_end_io()  [local]
    275   struct page *page = virt_to_page(addr);  in hib_submit_io()  [local]
    712   unsigned char *page = NULL;  in save_compressed_image()  [local]
    1237  unsigned char **page = NULL;  in load_compressed_image()  [local]
|
/kernel/events/

ring_buffer.c
    617   struct page *page;  in rb_alloc_aux_page()  [local]
    643   struct page *page = virt_to_page(rb->aux_pages[idx]);  in rb_free_aux_page()  [local]
    738   struct page *page;  in rb_alloc_aux()  [local]
    759   struct page *page = virt_to_page(rb->aux_pages[0]);  in rb_alloc_aux()  [local]
    818   struct page *page;  in perf_mmap_alloc_page()  [local]
    831   struct page *page = virt_to_page(addr);  in perf_mmap_free_page()  [local]
    906   struct page *page = vmalloc_to_page(addr);  in perf_mmap_unmark_page()  [local]
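
perf_mmap_alloc_page() and perf_mmap_free_page() cover the ring buffer's data pages. A hedged sketch of the node-local, zeroed single-page allocation pattern involved (illustrative function name; the real code also special-cases cpu == -1 directly):

```c
#include <linux/gfp.h>
#include <linux/mm.h>
#include <linux/numa.h>
#include <linux/topology.h>

/* Rough sketch: allocate one zeroed page close to the CPU that will
 * write the ring buffer, returning its kernel virtual address. */
static void *alloc_rb_page(int cpu)
{
	int node = (cpu == -1) ? NUMA_NO_NODE : cpu_to_node(cpu);
	struct page *page = alloc_pages_node(node, GFP_KERNEL | __GFP_ZERO, 0);

	return page ? page_address(page) : NULL;
}
```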
|
uprobes.c
    109   struct page *page;  [member]
    262   static void copy_from_page(struct page *page, unsigned long vaddr, void *dst, int len)  in copy_from_page()
    269   static void copy_to_page(struct page *page, unsigned long vaddr, const void *src, int len)  in copy_to_page()
    276   static int verify_opcode(struct page *page, unsigned long vaddr, uprobe_opcode_t *new_opcode)  in verify_opcode()
    390   struct page *page;  in __update_ref_ctr()  [local]
    867   struct page *page;  in __copy_insn()  [local]
    1737  void __weak arch_uprobe_copy_ixol(struct page *page, unsigned long vaddr,  in arch_uprobe_copy_ixol()
    2093  struct page *page;  in is_trap_at_addr()  [local]
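
copy_from_page() and copy_to_page() are the byte-copy helpers uprobes relies on when reading or patching probed instructions. A sketch of the kmap-based copy they perform (shown for illustration, not quoted verbatim; copy_to_page() is the mirror image with the memcpy arguments swapped):

```c
#include <linux/highmem.h>
#include <linux/mm.h>
#include <linux/string.h>

/* Hedged sketch: copy bytes out of a page at the offset given by a user
 * virtual address, the pattern behind uprobes' copy_from_page(). */
static void copy_bytes_from_page(struct page *page, unsigned long vaddr,
				 void *dst, int len)
{
	void *kaddr = kmap_atomic(page);

	memcpy(dst, kaddr + (vaddr & ~PAGE_MASK), len);
	kunmap_atomic(kaddr);
}
```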
|
/kernel/module/

decompress.c
    36    struct page *page;  in module_get_next_page()  [local]
    115   struct page *page = module_get_next_page(info);  in module_gzip_decompress()  [local]
    174   struct page *page = module_get_next_page(info);  in module_xz_decompress()  [local]
    258   struct page *page = module_get_next_page(info);  in module_zstd_decompress()  [local]
|
/kernel/bpf/

arena.c
    146   struct page *page;  in existing_page_cb()  [local]
    260   struct page *page;  in arena_vm_fault()  [local]
    503   struct page *page;  in arena_free_pages()  [local]
|
/kernel/trace/

ring_buffer.c
    337   struct buffer_data_page *page; /* Actual data page */  [member]
    595   struct buffer_page *page = cpu_buffer->commit_page;  in verify_event()  [local]
    1188  rb_is_head_page(struct buffer_page *page, struct list_head *list)  in rb_is_head_page()
    1207  static bool rb_is_reader_page(struct buffer_page *page)  in rb_is_reader_page()
    1331  struct buffer_page *page;  in rb_set_head_page()  [local]
    2074  struct page *page;  in __rb_allocate_pages()  [local]
    2186  struct page *page;  in rb_allocate_cpu_buffer()  [local]
    5995  static void rb_clear_buffer_page(struct buffer_page *page)  in rb_clear_buffer_page()
    6025  struct buffer_page *page;  in rb_reset_cpu()  [local]
    6450  struct page *page;  in ring_buffer_alloc_read_page()  [local]
    [all …]
|
trace_sched_switch.c
    204   struct page *page;  in allocate_cmdlines_buffer()  [local]
|
/kernel/debug/kdb/

kdb_support.c
    321   struct page *page;  in kdb_getphys()  [local]
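
kdb_getphys() reads physical memory for the kernel debugger by turning a physical address into a struct page and mapping it. A hedged sketch of that pfn -> page -> kmap walk (illustrative name, simplified error handling):

```c
#include <linux/errno.h>
#include <linux/highmem.h>
#include <linux/mm.h>
#include <linux/string.h>

/* Rough sketch: read `size` bytes starting at physical address `addr`. */
static int read_phys_bytes(void *res, unsigned long addr, size_t size)
{
	unsigned long pfn = addr >> PAGE_SHIFT;
	struct page *page;
	void *vaddr;

	if (!pfn_valid(pfn))
		return -EINVAL;

	page = pfn_to_page(pfn);
	vaddr = kmap(page);
	memcpy(res, vaddr + (addr & ~PAGE_MASK), size);
	kunmap(page);
	return 0;
}
```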
|
/kernel/futex/

core.c
    228   struct page *page;  in get_futex_key()  [local]
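
get_futex_key() pins the page behind the futex word so it can tell anonymous memory (process-private key) from file-backed memory (shared key). A hedged sketch of that pinning step (illustrative wrapper name; the folio fast paths and retry logic of the real code are omitted):

```c
#include <linux/errno.h>
#include <linux/mm.h>

/* Rough sketch: resolve a user futex address to its struct page.
 * The caller inspects (*pagep)->mapping and then calls put_page(). */
static int pin_futex_page(unsigned long address, struct page **pagep)
{
	if (get_user_pages_fast(address, 1, FOLL_WRITE, pagep) != 1)
		return -EFAULT;
	return 0;
}
```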
|