
Searched refs:ia64_ctx (Results 1 – 4 of 4) sorted by relevance

/arch/ia64/include/asm/
mmu_context.h
34 struct ia64_ctx { struct
44 extern struct ia64_ctx ia64_ctx; argument
68 spin_lock_irqsave(&ia64_ctx.lock, flags); in delayed_tlb_flush()
73 spin_unlock_irqrestore(&ia64_ctx.lock, flags); in delayed_tlb_flush()
86 spin_lock_irqsave(&ia64_ctx.lock, flags); in get_mmu_context()
91 if (ia64_ctx.next >= ia64_ctx.limit) { in get_mmu_context()
92 ia64_ctx.next = find_next_zero_bit(ia64_ctx.bitmap, in get_mmu_context()
93 ia64_ctx.max_ctx, ia64_ctx.next); in get_mmu_context()
94 ia64_ctx.limit = find_next_bit(ia64_ctx.bitmap, in get_mmu_context()
95 ia64_ctx.max_ctx, ia64_ctx.next); in get_mmu_context()
[all …]
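
The hits above touch six fields of ia64_ctx: lock, next, limit, max_ctx, bitmap and flushmap. A minimal sketch of the structure, reconstructed only from those references (field types and comments are inferred, not copied from the header):

	struct ia64_ctx {
		spinlock_t lock;
		unsigned int next;       /* next context number to hand out */
		unsigned int limit;      /* end of the current free run in bitmap */
		unsigned int max_ctx;    /* largest context number every CPU supports */
		unsigned long *bitmap;   /* one bit per context number in use */
		unsigned long *flushmap; /* contexts with a TLB flush still pending */
	};

get_mmu_context() (lines 86-95 above) takes ia64_ctx.lock and, once next reaches limit, rescans bitmap with find_next_zero_bit()/find_next_bit() to locate the next free run of context numbers.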
tlbflush.h
56 set_bit(mm->context, ia64_ctx.flushmap); in flush_tlb_mm()
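
The tlbflush.h hit is the lazy half of the scheme: flush_tlb_mm() does not flush anything synchronously; it records the context in ia64_ctx.flushmap so the flush can be done later. A hedged sketch of that path (the in-tree function likely also invalidates mm->context, notifies other CPUs, and handles the exit_mmap() case):

	static inline void
	flush_tlb_mm (struct mm_struct *mm)
	{
		if (!mm)
			return;

		/* remember that this context's TLB entries are stale;
		   the flush itself is deferred until the contexts wrap */
		set_bit(mm->context, ia64_ctx.flushmap);
	}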
/arch/ia64/mm/
tlb.c
42 struct ia64_ctx ia64_ctx = { variable
43 .lock = __SPIN_LOCK_UNLOCKED(ia64_ctx.lock),
62 ia64_ctx.bitmap = alloc_bootmem((ia64_ctx.max_ctx+1)>>3); in mmu_context_init()
63 ia64_ctx.flushmap = alloc_bootmem((ia64_ctx.max_ctx+1)>>3); in mmu_context_init()
75 for (i=0; i <= ia64_ctx.max_ctx / BITS_PER_LONG; i++) { in wrap_mmu_context()
76 flush_bit = xchg(&ia64_ctx.flushmap[i], 0); in wrap_mmu_context()
77 ia64_ctx.bitmap[i] ^= flush_bit; in wrap_mmu_context()
81 ia64_ctx.next = find_next_zero_bit(ia64_ctx.bitmap, in wrap_mmu_context()
82 ia64_ctx.max_ctx, 300); in wrap_mmu_context()
83 ia64_ctx.limit = find_next_bit(ia64_ctx.bitmap, in wrap_mmu_context()
[all …]
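
Lines 75-83 above are the recycling step in wrap_mmu_context(): every context flagged in flushmap was released by flush_tlb_mm() and can be handed out again once the TLB is actually flushed, so xchg() fetches and clears each flushmap word atomically and the XOR into bitmap marks those numbers free; the free range is then recomputed starting at 300 (apparently to keep the low numbers for long-lived daemons). A sketch of just that loop; the elided remainder of the function presumably forces the actual TLB flush:

	int i;
	unsigned long flush_bit;

	for (i = 0; i <= ia64_ctx.max_ctx / BITS_PER_LONG; i++) {
		/* atomically fetch and clear one word of pending flushes */
		flush_bit = xchg(&ia64_ctx.flushmap[i], 0);
		/* dropping those bits frees the corresponding contexts */
		ia64_ctx.bitmap[i] ^= flush_bit;
	}

	/* recompute the free range, skipping context numbers below 300 */
	ia64_ctx.next  = find_next_zero_bit(ia64_ctx.bitmap, ia64_ctx.max_ctx, 300);
	ia64_ctx.limit = find_next_bit(ia64_ctx.bitmap, ia64_ctx.max_ctx, ia64_ctx.next);

mmu_context_init() (lines 62-63 above) sizes both bitmaps at (max_ctx+1)>>3 bytes, i.e. one bit per context number.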
/arch/ia64/kernel/
setup.c
1026 while (max_ctx < ia64_ctx.max_ctx) { in cpu_init()
1027 unsigned int old = ia64_ctx.max_ctx; in cpu_init()
1028 if (cmpxchg(&ia64_ctx.max_ctx, old, max_ctx) == old) in cpu_init()
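
The setup.c hit shows how the global limit converges on the most restrictive CPU: each CPU computes the largest context number it supports and retries a cmpxchg() until it has either published its smaller value or seen that another CPU already lowered ia64_ctx.max_ctx below it. A sketch of that loop, assuming the local variable max_ctx holds the per-CPU limit:

	while (max_ctx < ia64_ctx.max_ctx) {
		unsigned int old = ia64_ctx.max_ctx;

		/* lower the global limit; retry if another CPU raced us */
		if (cmpxchg(&ia64_ctx.max_ctx, old, max_ctx) == old)
			break;
	}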