Searched refs:last_addr (Results 1 – 13 of 13) sorted by relevance
/arch/mips/mm/
  ioremap.c, in ioremap_prot():
    52: phys_addr_t last_addr;    (local)
    63: last_addr = phys_addr + size - 1;
    64: if (!size || last_addr < phys_addr)
    71: if (IS_LOW512(phys_addr) && IS_LOW512(last_addr) &&
    80: last_pfn = PFN_DOWN(last_addr);
    84: &phys_addr, &last_addr);
    93: size = PAGE_ALIGN(last_addr + 1) - phys_addr;
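Most of the ioremap() hits above and below share the same front end: compute last_addr = phys_addr + size - 1, reject an empty or wrapping request, split off the sub-page offset, and round the request out to whole pages. A minimal stand-alone sketch of that shape follows; the PAGE_* constants, the phys_addr_t typedef and prepare_remap() itself are stand-ins invented for the example, not the kernel's own definitions.

    /*
     * Minimal user-space sketch of the shared ioremap() front end; all
     * names below are local stand-ins, not the kernel's definitions.
     */
    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t phys_addr_t;

    #define PAGE_SIZE     4096ULL
    #define PAGE_MASK     (~(PAGE_SIZE - 1))
    #define PAGE_ALIGN(x) (((x) + PAGE_SIZE - 1) & PAGE_MASK)

    /* Returns 0 on success, -1 for an empty or wrapping range. */
    static int prepare_remap(phys_addr_t phys_addr, size_t size,
                             phys_addr_t *aligned_base, size_t *aligned_size)
    {
        phys_addr_t last_addr = phys_addr + size - 1;
        phys_addr_t offset;

        /* Reject a zero-sized request and a range whose end wraps past zero. */
        if (!size || last_addr < phys_addr)
            return -1;

        /* Keep the sub-page offset, map whole pages. */
        offset = phys_addr & ~PAGE_MASK;
        phys_addr &= PAGE_MASK;

        *aligned_base = phys_addr;
        *aligned_size = PAGE_ALIGN(last_addr + 1) - phys_addr;
        (void)offset;  /* a real ioremap() adds this back into the returned pointer */
        return 0;
    }

    int main(void)
    {
        phys_addr_t base;
        size_t size;

        if (prepare_remap(0x1000123, 0x20, &base, &size) == 0)
            printf("map 0x%llx, %zu bytes\n", (unsigned long long)base, size);
        return 0;
    }

The per-architecture fast paths seen in the hits (IS_LOW512() on MIPS, the PXSEG()/P3SEG test on SH, IS_MAPPABLE_UNCACHEABLE() on nios2) then check both phys_addr and last_addr against a directly mappable window before falling back to a page-table mapping.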
/arch/sh/mm/
  ioremap.c, in __ioremap_29bit():
    42: phys_addr_t last_addr = offset + size - 1;    (local)
    50: if (likely(PXSEG(offset) < P3SEG && PXSEG(last_addr) < P3SEG)) {
  ioremap.c, in __ioremap_caller():
    89: unsigned long offset, last_addr, addr, orig_addr;    (local)
    101: last_addr = phys_addr + size - 1;
    102: if (!size || last_addr < phys_addr)
    124: size = PAGE_ALIGN(last_addr+1) - phys_addr;
  pmb.c, in pmb_remap_caller():
    414: phys_addr_t offset, last_addr;    (local)
    435: last_addr = phys + size;
    439: aligned = ALIGN(last_addr, pmb_sizes[i].size) - phys;
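The pmb.c hit takes a different path: last_addr there is the exclusive end (phys + size), and line 439 rounds it up to a candidate PMB entry size to see how large a span an entry of that granule would have to cover. A small sketch of that calculation; the size table is illustrative, not the real SH-4A PMB size table.

    /*
     * Sketch of the granule calculation at pmb.c line 439: round the
     * exclusive end of the request up to each candidate entry size and
     * report the span such an entry would cover. Sizes are illustrative.
     */
    #include <stdint.h>
    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((uint64_t)(a) - 1))

    static const uint64_t pmb_sizes[] = {
        512ULL << 20, 128ULL << 20, 64ULL << 20, 16ULL << 20
    };

    int main(void)
    {
        uint64_t phys = 0x0c000000, size = 0x03000000;   /* example request */
        uint64_t last_addr = phys + size;                /* exclusive end, as in pmb.c */
        unsigned int i;

        for (i = 0; i < sizeof(pmb_sizes) / sizeof(pmb_sizes[0]); i++) {
            uint64_t aligned = ALIGN(last_addr, pmb_sizes[i]) - phys;

            printf("granule %4llu MiB -> span %4llu MiB\n",
                   (unsigned long long)(pmb_sizes[i] >> 20),
                   (unsigned long long)(aligned >> 20));
        }
        return 0;
    }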
/arch/parisc/mm/
  ioremap.c, in ioremap():
    32: unsigned long offset, last_addr;    (local)
    44: last_addr = phys_addr + size - 1;
    45: if (!size || last_addr < phys_addr)
    73: size = PAGE_ALIGN(last_addr + 1) - phys_addr;
/arch/openrisc/mm/
  ioremap.c, in ioremap():
    41: unsigned long offset, last_addr;    (local)
    45: last_addr = addr + size - 1;
    46: if (!size || last_addr < addr)
    54: size = PAGE_ALIGN(last_addr + 1) - p;
/arch/hexagon/mm/
  ioremap.c, in ioremap():
    14: unsigned long last_addr, addr;    (local)
    21: last_addr = phys_addr + size - 1;
    24: if (!size || (last_addr < phys_addr))
/arch/arm64/mm/
  ioremap.c, in __ioremap_caller():
    24: unsigned long last_addr;    (local)
    40: last_addr = phys_addr + size - 1;
    41: if (!size || last_addr < phys_addr || (last_addr & ~PHYS_MASK))
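The arm64 variant adds a third condition to the usual check: the end of the range must also fit inside the supported physical address space. A short sketch of that test, assuming a 48-bit physical address width for the example (the kernel derives PHYS_MASK from its configured PA size):

    /*
     * Sketch of the extra arm64 condition: besides the empty/wrap test,
     * the end of the range must lie within the supported physical
     * address space. The 48-bit width is an assumption for this sketch.
     */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define PHYS_MASK_SHIFT 48
    #define PHYS_MASK       ((1ULL << PHYS_MASK_SHIFT) - 1)

    static bool remap_range_ok(uint64_t phys_addr, uint64_t size)
    {
        uint64_t last_addr = phys_addr + size - 1;

        /* non-empty, no wrap-around, and entirely below 2^PHYS_MASK_SHIFT */
        return size && last_addr >= phys_addr && !(last_addr & ~PHYS_MASK);
    }

    int main(void)
    {
        printf("%d\n", remap_range_ok(0xfffffffff000ULL, 0x1000));  /* 1: ends at 2^48 - 1 */
        printf("%d\n", remap_range_ok(0xfffffffff000ULL, 0x2000));  /* 0: end crosses 2^48 */
        return 0;
    }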
/arch/nios2/mm/
  ioremap.c, in ioremap():
    123: unsigned long last_addr;    (local)
    127: last_addr = phys_addr + size - 1;
    129: if (!size || last_addr < phys_addr)
    150: IS_MAPPABLE_UNCACHEABLE(last_addr))
    156: size = PAGE_ALIGN(last_addr + 1) - phys_addr;
/arch/powerpc/boot/
  devtree.c, in dt_xlate():
    240: u32 last_addr[MAX_ADDR_CELLS];    (local)
    261: copy_val(last_addr, prop_buf + offset, naddr);
    287: offset = find_range(last_addr, prop_buf, prev_naddr,
    294: if (!sub_reg(last_addr, this_addr))
    299: if (!add_reg(last_addr, this_addr, naddr))
    306: ret_addr = ((u64)be32_to_cpu(last_addr[2]) << 32) | be32_to_cpu(last_addr[3]);
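In dt_xlate() the translated address is carried as an array of big-endian 32-bit cells, and line 306 folds the low two cells into a single 64-bit value. A stand-alone sketch of that final step, with made-up cell contents and a hand-rolled byte swap standing in for the kernel's be32_to_cpu():

    /*
     * Sketch of the final step of dt_xlate() (line 306): the low two
     * big-endian address cells become one 64-bit value. The cell
     * contents are made up, and the byte swap below assumes a
     * little-endian host.
     */
    #include <stdint.h>
    #include <stdio.h>

    #define MAX_ADDR_CELLS 4

    static uint32_t be32_to_cpu(uint32_t v)
    {
        return ((v & 0x000000ffu) << 24) | ((v & 0x0000ff00u) << 8) |
               ((v & 0x00ff0000u) >> 8)  | ((v & 0xff000000u) >> 24);
    }

    int main(void)
    {
        /* big-endian cells as read from the blob: high word 0x1, low word 0x5000 */
        uint32_t last_addr[MAX_ADDR_CELLS] = { 0, 0, 0x01000000, 0x00500000 };
        uint64_t ret_addr = ((uint64_t)be32_to_cpu(last_addr[2]) << 32) |
                            be32_to_cpu(last_addr[3]);

        printf("translated address: 0x%llx\n", (unsigned long long)ret_addr);
        return 0;
    }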
/arch/x86/mm/
  ioremap.c, in __ioremap_caller():
    182: resource_size_t last_addr;    (local)
    193: last_addr = phys_addr + size - 1;
    194: if (!size || last_addr < phys_addr)
    211: &phys_addr, &last_addr);
    220: size = PAGE_ALIGN(last_addr+1) - phys_addr;
/arch/arm/mm/
  ioremap.c, in __arm_ioremap_caller():
    325: phys_addr_t last_addr;    (local)
    332: last_addr = phys_addr + size - 1;
    333: if (!size || last_addr < phys_addr)
/arch/x86/kernel/
  e820.c, in e820__update_table():
    327: unsigned long long last_addr;    (local)
    369: last_addr = 0;    /* Start with 0 as last starting address */
    399: new_entries[new_nr_entries].size = change_point[chg_idx]->addr - last_addr;
    409: last_addr = change_point[chg_idx]->addr;
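e820__update_table() uses last_addr differently: it walks sorted change points (the starts and ends of the raw entries) and, whenever the effective type changes, emits an entry whose size is the distance from last_addr (where the previous entry began) to the current change point, then advances last_addr (lines 399 and 409). A simplified sweep in the same spirit, with made-up ranges and a "highest open type wins" rule standing in for the kernel's overlap handling:

    /*
     * Simplified sketch of the change-point sweep in e820__update_table().
     * Each range start/end becomes a change point; walking them in order,
     * an entry is emitted whenever the effective type changes, sized as
     * the distance from last_addr to the current change point.
     */
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct range { uint64_t start, end; int type; };          /* end is exclusive */
    struct change_point { uint64_t addr; struct range *r; int is_start; };

    static int cmp_cp(const void *a, const void *b)
    {
        const struct change_point *x = a, *y = b;

        if (x->addr != y->addr)
            return x->addr < y->addr ? -1 : 1;
        return y->is_start - x->is_start;                     /* starts sort before ends */
    }

    int main(void)
    {
        struct range ranges[] = {
            { 0x0000, 0x9000, 1 },                            /* usable             */
            { 0x6000, 0x8000, 2 },                            /* reserved, overlaps */
            { 0xa000, 0xc000, 1 },
        };
        int nr = sizeof(ranges) / sizeof(ranges[0]);
        struct change_point cp[2 * sizeof(ranges) / sizeof(ranges[0])];
        int open[3] = { 0 };                                  /* open ranges per type */
        uint64_t last_addr = 0;                               /* start of the entry being built */
        int last_type = 0, i;

        for (i = 0; i < nr; i++) {
            cp[2 * i]     = (struct change_point){ ranges[i].start, &ranges[i], 1 };
            cp[2 * i + 1] = (struct change_point){ ranges[i].end,   &ranges[i], 0 };
        }
        qsort(cp, 2 * nr, sizeof(cp[0]), cmp_cp);

        for (i = 0; i < 2 * nr; i++) {
            int t, cur = 0;

            open[cp[i].r->type] += cp[i].is_start ? 1 : -1;
            for (t = 2; t > 0; t--)                           /* highest open type wins */
                if (open[t]) { cur = t; break; }

            if (cur != last_type) {
                if (last_type)                                /* close the previous entry */
                    printf("[0x%04llx, 0x%04llx) type %d\n",
                           (unsigned long long)last_addr,
                           (unsigned long long)cp[i].addr, last_type);
                last_addr = cp[i].addr;                       /* next entry starts here */
                last_type = cur;
            }
        }
        return 0;
    }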
/arch/s390/pci/
  pci.c, in __ioremap():
    235: phys_addr_t last_addr;    (local)
    237: last_addr = addr + size - 1;
    238: if (!size || last_addr < addr)