Lines matching refs:start (uses of the identifier start in fs/proc/vmcore.c)
220 static int vmcoredd_copy_dumps(void *dst, u64 start, size_t size, int userbuf) in vmcoredd_copy_dumps() argument
230 if (start < offset + dump->size) { in vmcoredd_copy_dumps()
231 tsz = min(offset + (u64)dump->size - start, (u64)size); in vmcoredd_copy_dumps()
232 buf = dump->buf + start - offset; in vmcoredd_copy_dumps()
239 start += tsz; in vmcoredd_copy_dumps()
256 u64 start, size_t size) in vmcoredd_mmap_dumps() argument
266 if (start < offset + dump->size) { in vmcoredd_mmap_dumps()
267 tsz = min(offset + (u64)dump->size - start, (u64)size); in vmcoredd_mmap_dumps()
268 buf = dump->buf + start - offset; in vmcoredd_mmap_dumps()
275 start += tsz; in vmcoredd_mmap_dumps()
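The vmcoredd_copy_dumps() and vmcoredd_mmap_dumps() hits above (lines 220-275) share one pattern: walk the list of device dumps, clamp the request against what remains of the dump that contains start, then advance start by the consumed amount. A minimal userspace sketch of that clamp-and-advance walk, with invented names (struct dump_node, copy_dumps) standing in for the kernel's vmcoredd_node list and its actual copy/remap calls:

#include <stddef.h>
#include <stdint.h>
#include <string.h>

struct dump_node {
    struct dump_node *next;
    char *buf;      /* in-memory dump data */
    size_t size;    /* length of buf       */
};

/* Copy `size` bytes starting at logical offset `start` of the concatenated
 * dumps into dst; returns 0 on success, -1 if the request runs past the end. */
static int copy_dumps(void *dst, const struct dump_node *list,
                      uint64_t start, size_t size)
{
    uint64_t offset = 0;    /* offset of `dump` within the concatenation */

    for (const struct dump_node *dump = list; dump && size; dump = dump->next) {
        if (start < offset + dump->size) {
            /* Clamp to what is left of this dump and of the request
             * (the min() on lines 231 and 267 above). */
            uint64_t left = offset + dump->size - start;
            size_t tsz = (size_t)(left < (uint64_t)size ? left : (uint64_t)size);

            memcpy(dst, dump->buf + (start - offset), tsz);
            size  -= tsz;
            start += tsz;
            dst    = (char *)dst + tsz;
        }
        offset += dump->size;
    }
    return size ? -1 : 0;
}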
300 u64 start; in __read_vmcore() local
343 start = *fpos - elfcorebuf_sz; in __read_vmcore()
344 if (vmcoredd_copy_dumps(buffer, start, tsz, userbuf)) in __read_vmcore()
379 start = m->paddr + *fpos - m->offset; in __read_vmcore()
380 tmp = read_from_oldmem(buffer, tsz, &start, in __read_vmcore()
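In __read_vmcore(), once the file position has cleared the ELF header area (line 343 rebases it into the device-dump window), each remaining memory range is checked and the position is turned into a physical address for read_from_oldmem() (line 379). A standalone sketch of that translation, assuming the ranges are sorted by file offset and the position is already past the header/notes area; struct vc_range and fpos_to_paddr() are invented names:

#include <stddef.h>
#include <stdint.h>

struct vc_range {
    uint64_t paddr;    /* physical address of the chunk in the crashed kernel */
    uint64_t offset;   /* byte offset of the chunk inside the vmcore file     */
    uint64_t size;     /* chunk length                                        */
};

/* Returns how many bytes can be read at file position fpos (at most `want`),
 * or 0 when fpos lies past every range; *start receives the physical address
 * to read from, mirroring start = m->paddr + *fpos - m->offset (line 379). */
static size_t fpos_to_paddr(const struct vc_range *r, size_t n,
                            uint64_t fpos, size_t want, uint64_t *start)
{
    for (size_t i = 0; i < n; i++) {
        if (fpos < r[i].offset + r[i].size) {
            uint64_t left = r[i].offset + r[i].size - fpos;

            *start = r[i].paddr + fpos - r[i].offset;
            return (size_t)(left < (uint64_t)want ? left : (uint64_t)want);
        }
    }
    return 0;
}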
553 u64 start, end, len, tsz; in mmap_vmcore() local
556 start = (u64)vma->vm_pgoff << PAGE_SHIFT; in mmap_vmcore()
557 end = start + size; in mmap_vmcore()
571 if (start < elfcorebuf_sz) { in mmap_vmcore()
574 tsz = min(elfcorebuf_sz - (size_t)start, size); in mmap_vmcore()
575 pfn = __pa(elfcorebuf + start) >> PAGE_SHIFT; in mmap_vmcore()
580 start += tsz; in mmap_vmcore()
587 if (start < elfcorebuf_sz + elfnotes_sz) { in mmap_vmcore()
603 if (start < elfcorebuf_sz + vmcoredd_orig_sz) { in mmap_vmcore()
607 (size_t)start, size); in mmap_vmcore()
608 start_off = start - elfcorebuf_sz; in mmap_vmcore()
614 start += tsz; in mmap_vmcore()
624 tsz = min(elfcorebuf_sz + elfnotes_sz - (size_t)start, size); in mmap_vmcore()
625 kaddr = elfnotes_buf + start - elfcorebuf_sz - vmcoredd_orig_sz; in mmap_vmcore()
631 start += tsz; in mmap_vmcore()
639 if (start < m->offset + m->size) { in mmap_vmcore()
643 m->offset + m->size - start, size); in mmap_vmcore()
644 paddr = m->paddr + start - m->offset; in mmap_vmcore()
650 start += tsz; in mmap_vmcore()
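mmap_vmcore() satisfies the request [start, start + size) piecewise, in the file's layout order: ELF headers, device dumps, ELF notes, then the old-memory ranges. Every branch clamps with min() against the end of the region that contains start and advances start by the mapped amount. A simplified sketch of that splitting loop; struct region, map_piece() and map_window() are invented, with map_piece() standing in for the kernel's per-region remap calls:

#include <stddef.h>
#include <stdint.h>

struct region {
    uint64_t file_off;   /* where the region starts inside the vmcore file */
    uint64_t size;
};

/* Stand-in for the real per-region mappers; here it just pretends to succeed. */
static int map_piece(uint64_t vm_addr, uint64_t file_off, size_t len)
{
    (void)vm_addr; (void)file_off; (void)len;
    return 0;
}

/* Map the window [start, start + size) piece by piece across ordered regions. */
static int map_window(const struct region *r, size_t n,
                      uint64_t start, size_t size, uint64_t vm_addr)
{
    for (size_t i = 0; i < n && size; i++) {
        if (start < r[i].file_off + r[i].size) {
            uint64_t left = r[i].file_off + r[i].size - start;
            size_t tsz = (size_t)(left < (uint64_t)size ? left : (uint64_t)size);

            if (map_piece(vm_addr, start, tsz))
                return -1;
            size    -= tsz;
            start   += tsz;
            vm_addr += tsz;
        }
    }
    return size ? -1 : 0;
}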
1096 u64 paddr, start, end, size; in process_ptload_program_headers_elf64() local
1102 start = rounddown(paddr, PAGE_SIZE); in process_ptload_program_headers_elf64()
1104 size = end - start; in process_ptload_program_headers_elf64()
1110 new->paddr = start; in process_ptload_program_headers_elf64()
1115 phdr_ptr->p_offset = vmcore_off + (paddr - start); in process_ptload_program_headers_elf64()
1139 u64 paddr, start, end, size; in process_ptload_program_headers_elf32() local
1145 start = rounddown(paddr, PAGE_SIZE); in process_ptload_program_headers_elf32()
1147 size = end - start; in process_ptload_program_headers_elf32()
1153 new->paddr = start; in process_ptload_program_headers_elf32()
1158 phdr_ptr->p_offset = vmcore_off + (paddr - start); in process_ptload_program_headers_elf32()
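The ELF64 and ELF32 PT_LOAD processors above (lines 1096-1158) do the same page alignment: round the segment's physical address down and its end up to page boundaries, record the whole-page range, and give the header a file offset that preserves the address's displacement inside its first page. A self-contained illustration of that arithmetic; the ROUNDDOWN/ROUNDUP macros, the PAGE_SIZE value and the example numbers exist only for this sketch:

#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE 4096ULL
#define ROUNDDOWN(x, a) ((x) & ~((a) - 1))   /* a must be a power of two */
#define ROUNDUP(x, a)   (((x) + (a) - 1) & ~((a) - 1))

int main(void)
{
    uint64_t vmcore_off = 2 * PAGE_SIZE;   /* assumed space already used by headers/notes */
    uint64_t paddr = 0x100234;             /* example PT_LOAD physical address */
    uint64_t memsz = 0x5000;               /* example PT_LOAD memory size      */

    uint64_t start = ROUNDDOWN(paddr, PAGE_SIZE);        /* 0x100000 */
    uint64_t end   = ROUNDUP(paddr + memsz, PAGE_SIZE);  /* 0x106000 */
    uint64_t size  = end - start;                        /*   0x6000 */

    /* The recorded range covers whole pages (paddr = start, size), while the
     * header's file offset keeps paddr's position inside its first page,
     * as on lines 1115 and 1158 above. */
    uint64_t p_offset = vmcore_off + (paddr - start);

    printf("range paddr=%#llx size=%#llx p_offset=%#llx\n",
           (unsigned long long)start, (unsigned long long)size,
           (unsigned long long)p_offset);

    vmcore_off += size;   /* the next range is laid out after this one */
    return 0;
}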
1371 u64 start, end, size; in vmcoredd_update_program_headers() local
1390 start = rounddown(phdr->p_offset, PAGE_SIZE); in vmcoredd_update_program_headers()
1393 size = end - start; in vmcoredd_update_program_headers()
1394 phdr->p_offset = vmcore_off + (phdr->p_offset - start); in vmcoredd_update_program_headers()
1410 start = rounddown(phdr->p_offset, PAGE_SIZE); in vmcoredd_update_program_headers()
1413 size = end - start; in vmcoredd_update_program_headers()
1414 phdr->p_offset = vmcore_off + (phdr->p_offset - start); in vmcoredd_update_program_headers()
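vmcoredd_update_program_headers() applies the same rounding to every program header, rebasing each p_offset onto a running vmcore offset (lines 1390-1394 for ELF64, 1410-1414 for ELF32). A sketch of that loop; struct phdr_stub and update_offsets() are invented stand-ins for the real Elf64_Phdr/Elf32_Phdr handling:

#include <stddef.h>
#include <stdint.h>

#define PAGE_SIZE 4096ULL
#define ROUNDDOWN(x, a) ((x) & ~((a) - 1))
#define ROUNDUP(x, a)   (((x) + (a) - 1) & ~((a) - 1))

struct phdr_stub {
    uint64_t p_offset;
    uint64_t p_memsz;
};

/* Relocate each header's file offset onto the running vmcore offset while
 * keeping its displacement inside the first page of its rounded range. */
static void update_offsets(struct phdr_stub *phdr, size_t n, uint64_t vmcore_off)
{
    for (size_t i = 0; i < n; i++) {
        uint64_t start = ROUNDDOWN(phdr[i].p_offset, PAGE_SIZE);
        uint64_t end   = ROUNDUP(phdr[i].p_offset + phdr[i].p_memsz, PAGE_SIZE);
        uint64_t size  = end - start;

        phdr[i].p_offset = vmcore_off + (phdr[i].p_offset - start);
        vmcore_off += size;    /* next header's data follows this page-aligned chunk */
    }
}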