Home
last modified time | relevance | path

Searched refs:vaddr (Results 1 – 8 of 8) sorted by relevance

/kernel/events/
uprobes.c  110 unsigned long vaddr; /* Page(s) of instruction slots */ member
136 static loff_t vaddr_to_offset(struct vm_area_struct *vma, unsigned long vaddr) in vaddr_to_offset() argument
138 return ((loff_t)vma->vm_pgoff << PAGE_SHIFT) + (vaddr - vma->vm_start); in vaddr_to_offset()
244 static void copy_from_page(struct page *page, unsigned long vaddr, void *dst, int len) in copy_from_page() argument
247 memcpy(dst, kaddr + (vaddr & ~PAGE_MASK), len); in copy_from_page()
251 static void copy_to_page(struct page *page, unsigned long vaddr, const void *src, int len) in copy_to_page() argument
254 memcpy(kaddr + (vaddr & ~PAGE_MASK), src, len); in copy_to_page()
258 static int verify_opcode(struct page *page, unsigned long vaddr, uprobe_opcode_t *new_opcode) in verify_opcode() argument
272 copy_from_page(page, vaddr, &old_opcode, UPROBE_SWBP_INSN_SIZE); in verify_opcode()
345 unsigned long vaddr = offset_to_vaddr(vma, uprobe->ref_ctr_offset); in valid_ref_ctr_vma() local
[all …]
/kernel/dma/
remap.c  25 void *vaddr; in dma_common_pages_remap() local
27 vaddr = vmap(pages, PAGE_ALIGN(size) >> PAGE_SHIFT, in dma_common_pages_remap()
29 if (vaddr) in dma_common_pages_remap()
30 find_vm_area(vaddr)->pages = pages; in dma_common_pages_remap()
31 return vaddr; in dma_common_pages_remap()
43 void *vaddr; in dma_common_contiguous_remap() local
51 vaddr = vmap(pages, count, VM_DMA_COHERENT, prot); in dma_common_contiguous_remap()
54 return vaddr; in dma_common_contiguous_remap()
coherent.c  201 int order, void *vaddr) in __dma_release_from_coherent() argument
203 if (mem && vaddr >= mem->virt_base && vaddr < in __dma_release_from_coherent()
205 int page = (vaddr - mem->virt_base) >> PAGE_SHIFT; in __dma_release_from_coherent()
228 int dma_release_from_dev_coherent(struct device *dev, int order, void *vaddr) in dma_release_from_dev_coherent() argument
232 return __dma_release_from_coherent(mem, order, vaddr); in dma_release_from_dev_coherent()
236 struct vm_area_struct *vma, void *vaddr, size_t size, int *ret) in __dma_mmap_from_coherent() argument
238 if (mem && vaddr >= mem->virt_base && vaddr + size <= in __dma_mmap_from_coherent()
241 int start = (vaddr - mem->virt_base) >> PAGE_SHIFT; in __dma_mmap_from_coherent()
273 void *vaddr, size_t size, int *ret) in dma_mmap_from_dev_coherent() argument
277 return __dma_mmap_from_coherent(mem, vma, vaddr, size, ret); in dma_mmap_from_dev_coherent()
[all …]
mapping.c  26 void *vaddr; member
35 dma_free_attrs(dev, this->size, this->vaddr, this->dma_handle, in dmam_release()
43 if (this->vaddr == match->vaddr) { in dmam_match()
60 void dmam_free_coherent(struct device *dev, size_t size, void *vaddr, in dmam_free_coherent() argument
63 struct dma_devres match_data = { size, vaddr, dma_handle }; in dmam_free_coherent()
65 dma_free_coherent(dev, size, vaddr, dma_handle); in dmam_free_coherent()
88 void *vaddr; in dmam_alloc_attrs() local
94 vaddr = dma_alloc_attrs(dev, size, dma_handle, gfp, attrs); in dmam_alloc_attrs()
95 if (!vaddr) { in dmam_alloc_attrs()
100 dr->vaddr = vaddr; in dmam_alloc_attrs()
[all …]
swiotlb.c  167 void *vaddr; in swiotlb_update_mem_attributes() local
172 vaddr = phys_to_virt(mem->start); in swiotlb_update_mem_attributes()
174 set_memory_decrypted((unsigned long)vaddr, bytes >> PAGE_SHIFT); in swiotlb_update_mem_attributes()
175 memset(vaddr, 0, bytes); in swiotlb_update_mem_attributes()
181 void *vaddr = phys_to_virt(start); in swiotlb_init_io_tlb_mem() local
199 memset(vaddr, 0, bytes); in swiotlb_init_io_tlb_mem()
374 unsigned char *vaddr = phys_to_virt(tlb_addr); in swiotlb_bounce() local
420 memcpy(vaddr, buffer + offset, sz); in swiotlb_bounce()
422 memcpy(buffer + offset, vaddr, sz); in swiotlb_bounce()
428 vaddr += sz; in swiotlb_bounce()
[all …]
direct.c  79 static int dma_set_decrypted(struct device *dev, void *vaddr, size_t size) in dma_set_decrypted() argument
83 return set_memory_decrypted((unsigned long)vaddr, PFN_UP(size)); in dma_set_decrypted()
86 static int dma_set_encrypted(struct device *dev, void *vaddr, size_t size) in dma_set_encrypted() argument
92 ret = set_memory_encrypted((unsigned long)vaddr, PFN_UP(size)); in dma_set_encrypted()
365 void *vaddr = page_address(page); in dma_direct_free_pages() local
369 dma_free_from_pool(dev, vaddr, size)) in dma_direct_free_pages()
372 if (dma_set_encrypted(dev, vaddr, size)) in dma_direct_free_pages()
/kernel/trace/
trace_uprobe.c  27 unsigned long vaddr[]; member
129 void __user *vaddr = (void __force __user *)src; in probe_mem_read() local
131 return copy_from_user(dest, vaddr, size) ? -EFAULT : 0; in probe_mem_read()
188 void __user *vaddr = (void __force __user *) addr; in fetch_store_strlen() local
193 len = strnlen_user(vaddr, MAX_STRING_SIZE); in fetch_store_strlen()
209 udd = (void *) current->utask->vaddr; in translate_user_vaddr()
976 entry->vaddr[0] = func; in __uprobe_trace_func()
977 entry->vaddr[1] = instruction_pointer(regs); in __uprobe_trace_func()
980 entry->vaddr[0] = instruction_pointer(regs); in __uprobe_trace_func()
1036 entry->vaddr[1], entry->vaddr[0]); in print_uprobe_event()
[all …]
/kernel/debug/kdb/
kdb_support.c  321 void *vaddr; in kdb_getphys() local
328 vaddr = kmap_atomic(page); in kdb_getphys()
329 memcpy(res, vaddr + (addr & (PAGE_SIZE - 1)), size); in kdb_getphys()
330 kunmap_atomic(vaddr); in kdb_getphys()