/arch/x86/include/asm/ |
D | xor.h |
      52  #define BLK64(pf, op, i) \                              argument
      53          pf(i) \
      54          op(i, 0) \
      55          op(i + 1, 1) \
      56          op(i + 2, 2) \
      57          op(i + 3, 3)
      68  #define BLOCK(i) \                                      in xor_sse_2()  argument
      69          LD(i, 0) \                                      in xor_sse_2()
      70          LD(i + 1, 1) \                                  in xor_sse_2()
      71          PF1(i) \                                        in xor_sse_2()
      [all …]
|
D | xor_32.h |
      32  #define BLOCK(i) \                                      in xor_pII_mmx_2()  argument
      33          LD(i, 0) \                                      in xor_pII_mmx_2()
      34          LD(i + 1, 1) \                                  in xor_pII_mmx_2()
      35          LD(i + 2, 2) \                                  in xor_pII_mmx_2()
      36          LD(i + 3, 3) \                                  in xor_pII_mmx_2()
      37          XO1(i, 0) \                                     in xor_pII_mmx_2()
      38          ST(i, 0) \                                      in xor_pII_mmx_2()
      39          XO1(i+1, 1) \                                   in xor_pII_mmx_2()
      40          ST(i+1, 1) \                                    in xor_pII_mmx_2()
      41          XO1(i + 2, 2) \                                 in xor_pII_mmx_2()
      [all …]
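Note: the BLOCK()/BLK64() macros above unroll the XOR of several consecutive registers per pass (the SSE variant adds prefetch via PF1()). A plain-C sketch of the same idea for the two-source case, with a made-up function name and no prefetch or vector registers:

    #include <stddef.h>

    /* XOR the words of p2 into p1, eight words per loop pass, mirroring
     * the unrolled structure of the BLOCK()/BLK64() macros in plain C. */
    static void xor_8words_2(unsigned long bytes, unsigned long *p1,
                             const unsigned long *p2)
    {
            size_t lines = bytes / (8 * sizeof(*p1));

            while (lines--) {
                    p1[0] ^= p2[0];
                    p1[1] ^= p2[1];
                    p1[2] ^= p2[2];
                    p1[3] ^= p2[3];
                    p1[4] ^= p2[4];
                    p1[5] ^= p2[5];
                    p1[6] ^= p2[6];
                    p1[7] ^= p2[7];
                    p1 += 8;
                    p2 += 8;
            }
    }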
|
/arch/arm/mach-pxa/ |
D | mfp-pxa2xx.c |
     135  int i, gpio;                                            in pxa2xx_mfp_config()  local
     137  for (i = 0, c = mfp_cfgs; i < num; i++, c++) {          in pxa2xx_mfp_config()
     225  int i;                                                  in pxa25x_mfp_init()  local
     233  for (i = 0; i <= pxa_last_gpio; i++)                    in pxa25x_mfp_init()
     234          gpio_desc[i].valid = 1;                         in pxa25x_mfp_init()
     236  for (i = 0; i <= 15; i++) {                             in pxa25x_mfp_init()
     237          gpio_desc[i].can_wakeup = 1;                    in pxa25x_mfp_init()
     238          gpio_desc[i].mask = GPIO_bit(i);                in pxa25x_mfp_init()
     244  for (i = 86; i <= pxa_last_gpio; i++)                   in pxa25x_mfp_init()
     245          gpio_desc[i].dir_inverted = 1;                  in pxa25x_mfp_init()
     [all …]
|
D | am300epd.c |
     109  int i;                                                  in am300_init_gpio_regs()  local
     113  for (i = 0; i < ARRAY_SIZE(gpios); i++) {               in am300_init_gpio_regs()
     114          err = gpio_request(gpios[i], gpio_names[i]);    in am300_init_gpio_regs()
     117          "gpio %s, err=%d\n", gpio_names[i], err);       in am300_init_gpio_regs()
     123  for (i = DB0_GPIO_PIN; i <= DB15_GPIO_PIN; i++) {       in am300_init_gpio_regs()
     124          sprintf(dbname, "DB%d", i);                     in am300_init_gpio_regs()
     125          err = gpio_request(i, dbname);                  in am300_init_gpio_regs()
     128          "gpio %d, err=%d\n", i, err);                   in am300_init_gpio_regs()
     147  for (i = DB0_GPIO_PIN; i <= DB15_GPIO_PIN; i++)         in am300_init_gpio_regs()
     148          gpio_direction_output(i, 0);                    in am300_init_gpio_regs()
     [all …]
|
/arch/x86/oprofile/ |
D | op_model_ppro.c |
      35  int i;                                                  in ppro_shutdown()  local
      37  for (i = 0; i < num_counters; ++i) {                    in ppro_shutdown()
      38          if (!msrs->counters[i].addr)                    in ppro_shutdown()
      40          release_perfctr_nmi(MSR_P6_PERFCTR0 + i);       in ppro_shutdown()
      41          release_evntsel_nmi(MSR_P6_EVNTSEL0 + i);       in ppro_shutdown()
      47  int i;                                                  in ppro_fill_in_addresses()  local
      49  for (i = 0; i < num_counters; i++) {                    in ppro_fill_in_addresses()
      50          if (!reserve_perfctr_nmi(MSR_P6_PERFCTR0 + i))  in ppro_fill_in_addresses()
      52          if (!reserve_evntsel_nmi(MSR_P6_EVNTSEL0 + i)) { in ppro_fill_in_addresses()
      53          release_perfctr_nmi(MSR_P6_PERFCTR0 + i);       in ppro_fill_in_addresses()
      [all …]
|
D | op_model_p4.c |
     384  #define VIRT_CTR(stagger, i) ((i) + ((num_counters) * (stagger)))    argument
     390  int i;                                                  in p4_shutdown()  local
     392  for (i = 0; i < num_counters; ++i) {                    in p4_shutdown()
     393          if (msrs->counters[i].addr)                     in p4_shutdown()
     394          release_perfctr_nmi(msrs->counters[i].addr);    in p4_shutdown()
     401  for (i = num_counters; i < num_controls; ++i) {         in p4_shutdown()
     402          if (msrs->controls[i].addr)                     in p4_shutdown()
     403          release_evntsel_nmi(msrs->controls[i].addr);    in p4_shutdown()
     409  unsigned int i;                                         in p4_fill_in_addresses()  local
     416  for (i = 0; i < num_counters; ++i) {                    in p4_fill_in_addresses()
     [all …]
|
/arch/csky/mm/ |
D | cachev2.c |
      17  unsigned long i = start & ~(L1_CACHE_BYTES - 1);        in icache_inv_range()  local
      19  for (; i < end; i += L1_CACHE_BYTES)                    in icache_inv_range()
      20          asm volatile("icache.iva %0\n"::"r"(i):"memory");   in icache_inv_range()
      32  unsigned long i = start & ~(L1_CACHE_BYTES - 1);        in dcache_wb_range()  local
      34  for (; i < end; i += L1_CACHE_BYTES)                    in dcache_wb_range()
      35          asm volatile("dcache.cval1 %0\n"::"r"(i):"memory");  in dcache_wb_range()
      41  unsigned long i = start & ~(L1_CACHE_BYTES - 1);        in dcache_inv_range()  local
      43  for (; i < end; i += L1_CACHE_BYTES)                    in dcache_inv_range()
      44          asm volatile("dcache.civa %0\n"::"r"(i):"memory");   in dcache_inv_range()
      50  unsigned long i = start & ~(L1_CACHE_BYTES - 1);        in cache_wbinv_range()  local
      [all …]
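Note: each routine above first rounds start down to a cache-line boundary with start & ~(L1_CACHE_BYTES - 1), then steps through the range one line at a time. A minimal user-space sketch of that address walk, assuming a 64-byte line and a stub in place of the per-line cache instruction (icache.iva / dcache.civa above):

    #include <stdint.h>
    #include <stdio.h>

    #define LINE_BYTES 64   /* stand-in for L1_CACHE_BYTES */

    static void touch_line(uintptr_t addr)
    {
            /* the kernel issues the per-line cache op here; just print instead */
            printf("line at %#lx\n", (unsigned long)addr);
    }

    static void range_walk(uintptr_t start, uintptr_t end)
    {
            /* round down so a partially covered first line is included too */
            uintptr_t i = start & ~(uintptr_t)(LINE_BYTES - 1);

            for (; i < end; i += LINE_BYTES)
                    touch_line(i);
    }

    int main(void)
    {
            range_walk(0x1010, 0x1105);     /* touches lines 0x1000 .. 0x1100 */
            return 0;
    }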
|
/arch/ia64/include/asm/ |
D | atomic.h |
      22  #define ATOMIC_INIT(i)   { (i) }                        argument
      23  #define ATOMIC64_INIT(i) { (i) }                        argument
      28  #define atomic_set(v,i)   WRITE_ONCE(((v)->counter), (i))    argument
      29  #define atomic64_set(v,i) WRITE_ONCE(((v)->counter), (i))    argument
      33  ia64_atomic_##op (int i, atomic_t *v) \
      41          new = old c_op i; \
      48  ia64_atomic_fetch_##op (int i, atomic_t *v) \
      56          new = old c_op i; \
      69  #define __ia64_atomic_const(i) \                        argument
      70          static const int __ia64_atomic_p = __builtin_constant_p(i) ? \
      [all …]
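Note: the ia64_atomic_fetch_##op template above expands to a read / compute / retry loop built on cmpxchg (only fragments of it are visible in this excerpt). A user-space sketch of that pattern for fetch-add, using GCC's __sync_val_compare_and_swap in place of the ia64 primitive; an illustration of the idea, not the kernel code:

    #include <stdio.h>

    typedef struct { volatile int counter; } my_atomic_t;   /* illustrative type */

    /* Retry until the CAS confirms nobody changed the counter between the
     * read of old and the swap; return the pre-update value, as fetch_* does. */
    static int fetch_add_sketch(int i, my_atomic_t *v)
    {
            int old, new;

            do {
                    old = v->counter;
                    new = old + i;
            } while (__sync_val_compare_and_swap(&v->counter, old, new) != old);

            return old;
    }

    int main(void)
    {
            my_atomic_t v = { 5 };
            int old = fetch_add_sketch(3, &v);

            printf("old=%d new=%d\n", old, v.counter);
            return 0;
    }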
|
/arch/sparc/kernel/ |
D | kgdb_32.c |
      22  int i;                                                  in pt_regs_to_gdb_regs()  local
      25  for (i = 0; i < 15; i++)                                in pt_regs_to_gdb_regs()
      26          gdb_regs[GDB_G1 + i] = regs->u_regs[UREG_G1 + i];    in pt_regs_to_gdb_regs()
      29  for (i = 0; i < 8; i++)                                 in pt_regs_to_gdb_regs()
      30          gdb_regs[GDB_L0 + i] = win->locals[i];          in pt_regs_to_gdb_regs()
      31  for (i = 0; i < 8; i++)                                 in pt_regs_to_gdb_regs()
      32          gdb_regs[GDB_I0 + i] = win->ins[i];             in pt_regs_to_gdb_regs()
      34  for (i = GDB_F0; i <= GDB_F31; i++)                     in pt_regs_to_gdb_regs()
      35          gdb_regs[i] = 0;                                in pt_regs_to_gdb_regs()
      51  int i;                                                  in sleeping_thread_to_gdb_regs()  local
      [all …]
|
D | kgdb_64.c |
      22  int i;                                                  in pt_regs_to_gdb_regs()  local
      25  for (i = 0; i < 15; i++)                                in pt_regs_to_gdb_regs()
      26          gdb_regs[GDB_G1 + i] = regs->u_regs[UREG_G1 + i];    in pt_regs_to_gdb_regs()
      29  for (i = 0; i < 8; i++)                                 in pt_regs_to_gdb_regs()
      30          gdb_regs[GDB_L0 + i] = win->locals[i];          in pt_regs_to_gdb_regs()
      31  for (i = 0; i < 8; i++)                                 in pt_regs_to_gdb_regs()
      32          gdb_regs[GDB_I0 + i] = win->ins[i];             in pt_regs_to_gdb_regs()
      34  for (i = GDB_F0; i <= GDB_F62; i++)                     in pt_regs_to_gdb_regs()
      35          gdb_regs[i] = 0;                                in pt_regs_to_gdb_regs()
      52  int i;                                                  in sleeping_thread_to_gdb_regs()  local
      [all …]
|
/arch/x86/mm/ |
D | testmmiotrace.c |
      23  static unsigned v16(unsigned i)                         in v16()  argument
      25          return i * 12 + 7;                              in v16()
      28  static unsigned v32(unsigned i)                         in v32()  argument
      30          return i * 212371 + 13;                         in v32()
      35  unsigned int i;                                         in do_write_test()  local
      39  for (i = 0; i < 256; i++)                               in do_write_test()
      40          iowrite8(i, p + i);                             in do_write_test()
      42  for (i = 1024; i < (5 * 1024); i += 2)                  in do_write_test()
      43          iowrite16(v16(i), p + i);                       in do_write_test()
      45  for (i = (5 * 1024); i < (16 * 1024); i += 4)           in do_write_test()
      [all …]
|
/arch/mips/lantiq/xway/ |
D | prom.c |
      41  void __init ltq_soc_detect(struct ltq_soc_info *i)      in ltq_soc_detect()  argument
      43          i->partnum = (ltq_r32(LTQ_MPS_CHIPID) & PART_MASK) >> PART_SHIFT;    in ltq_soc_detect()
      44          i->rev = (ltq_r32(LTQ_MPS_CHIPID) & REV_MASK) >> REV_SHIFT;          in ltq_soc_detect()
      45          sprintf(i->rev_type, "1.%d", i->rev);           in ltq_soc_detect()
      46          switch (i->partnum) {                           in ltq_soc_detect()
      49          i->name = SOC_DANUBE;                           in ltq_soc_detect()
      50          i->type = SOC_TYPE_DANUBE;                      in ltq_soc_detect()
      51          i->compatible = COMP_DANUBE;                    in ltq_soc_detect()
      55          i->name = SOC_TWINPASS;                         in ltq_soc_detect()
      56          i->type = SOC_TYPE_DANUBE;                      in ltq_soc_detect()
      [all …]
|
/arch/sparc/mm/ |
D | extable.c |
      21  int i;                                                  in search_extable()  local
      41  for (i = 0; i < num; i++) {                             in search_extable()
      42          if (base[i].fixup == 0) {                       in search_extable()
      44          i++;                                            in search_extable()
      49          if (base[i].fixup == -1)                        in search_extable()
      52          if (base[i].insn == value)                      in search_extable()
      53          return &base[i];                                in search_extable()
      57  for (i = 0; i < (num - 1); i++) {                       in search_extable()
      58          if (base[i].fixup)                              in search_extable()
      61          if (base[i].insn <= value && base[i + 1].insn > value)    in search_extable()
      [all …]
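Note: the second for loop above is an interval lookup: it returns the entry whose [insn, next.insn) range contains the faulting address. A generic sketch of that containment test over a hypothetical table sorted by insn (the idea only, not the sparc range encoding with its fixup == 0 / -1 markers):

    #include <stddef.h>

    struct ex_entry {
            unsigned long insn;     /* address the entry covers from */
            unsigned long fixup;    /* unused in this sketch */
    };

    /* Return the entry whose [insn, next.insn) interval contains addr,
     * or NULL if no interval matches. */
    static const struct ex_entry *
    find_range(const struct ex_entry *base, size_t num, unsigned long addr)
    {
            for (size_t i = 0; i + 1 < num; i++)
                    if (base[i].insn <= addr && base[i + 1].insn > addr)
                            return &base[i];
            return NULL;
    }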
|
D | init_32.c |
      62  int i;                                                  in calc_highpages()  local
      65  for (i = 0; sp_banks[i].num_bytes != 0; i++) {          in calc_highpages()
      66          unsigned long start_pfn = sp_banks[i].base_addr >> PAGE_SHIFT;    in calc_highpages()
      67          unsigned long end_pfn = (sp_banks[i].base_addr + sp_banks[i].num_bytes) >> PAGE_SHIFT;    in calc_highpages()
      83  int i;                                                  in calc_max_low_pfn()  local
      88  for (i = 1; sp_banks[i].num_bytes != 0; i++) {          in calc_max_low_pfn()
      89          curr_pfn = sp_banks[i].base_addr >> PAGE_SHIFT;      in calc_max_low_pfn()
      97          last_pfn = (sp_banks[i].base_addr + sp_banks[i].num_bytes) >> PAGE_SHIFT;    in calc_max_low_pfn()
     138  int i;                                                  in bootmem_init()  local
     144  for (i = 0; sp_banks[i].num_bytes != 0; i++) {          in bootmem_init()
     [all …]
|
/arch/unicore32/kernel/ |
D | dma.c |
      38  int i, found = 0;                                       in puv3_request_dma()  local
      48  for (i = 0; i < MAX_DMA_CHANNELS; i++) {                in puv3_request_dma()
      49          if ((dma_channels[i].prio == prio) &&           in puv3_request_dma()
      50                  !dma_channels[i].name) {                in puv3_request_dma()
      59          dma_channels[i].name = name;                    in puv3_request_dma()
      60          dma_channels[i].irq_handler = irq_handler;      in puv3_request_dma()
      61          dma_channels[i].err_handler = err_handler;      in puv3_request_dma()
      62          dma_channels[i].data = data;                    in puv3_request_dma()
      66          i = -ENODEV;                                    in puv3_request_dma()
      70  return i;                                               in puv3_request_dma()
      [all …]
|
/arch/arm/tools/ |
D | gen-mach-types |
      38  for (i = 0; i < nr; i++)
      39          if (num[i] ~ /..*/)
      40                  printf("#define %-30s %d\n", mach_type[i], num[i]);
      44  for (i = 0; i < nr; i++)
      45          if (num[i] ~ /..*/) {
      46                  printf("#ifdef %s\n", config[i]);
      51                  printf("# define machine_arch_type\t%s\n", mach_type[i]);
      53                  printf("# define %s()\t(machine_arch_type == %s)\n", machine_is[i], mach_type[i]);
      55                  printf("# define %s()\t(0)\n", machine_is[i]);
      60  for (i = 0; i < nr; i++)
      [all …]
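Note: the printf calls above emit the generated mach-types.h header. Each machine entry becomes a block of roughly the following shape; the machine name, config symbol, and number below are made up for illustration, and the #else/#endif lines come from script lines not shown in this excerpt:

    /* Hypothetical excerpt of the generated mach-types.h for a made-up
     * machine (config symbol CONFIG_MACH_EXAMPLE, machine number 9999). */
    #define MACH_TYPE_EXAMPLE              9999

    #ifdef CONFIG_MACH_EXAMPLE
    # define machine_arch_type      MACH_TYPE_EXAMPLE
    # define machine_is_example()   (machine_arch_type == MACH_TYPE_EXAMPLE)
    #else
    # define machine_is_example()   (0)
    #endif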
|
/arch/m68k/atari/ |
D | nvram.c |
      37  static unsigned char __nvram_read_byte(int i)           in __nvram_read_byte()  argument
      39          return CMOS_READ(NVRAM_FIRST_BYTE + i);         in __nvram_read_byte()
      43  static void __nvram_write_byte(unsigned char c, int i)  in __nvram_write_byte()  argument
      45          CMOS_WRITE(c, NVRAM_FIRST_BYTE + i);            in __nvram_write_byte()
      57  int i;                                                  in __nvram_check_checksum()  local
      60  for (i = ATARI_CKS_RANGE_START; i <= ATARI_CKS_RANGE_END; ++i)    in __nvram_check_checksum()
      61          sum += __nvram_read_byte(i);                    in __nvram_check_checksum()
      68  int i;                                                  in __nvram_set_checksum()  local
      71  for (i = ATARI_CKS_RANGE_START; i <= ATARI_CKS_RANGE_END; ++i)    in __nvram_set_checksum()
      72          sum += __nvram_read_byte(i);                    in __nvram_set_checksum()
      [all …]
|
/arch/xtensa/kernel/ |
D | hw_breakpoint.c |
     136  size_t i;                                               in alloc_slot()  local
     138  for (i = 0; i < n; ++i) {                               in alloc_slot()
     139          if (!slot[i]) {                                 in alloc_slot()
     140                  slot[i] = bp;                           in alloc_slot()
     141                  return i;                               in alloc_slot()
     173  int i;                                                  in arch_install_hw_breakpoint()  local
     177  i = alloc_slot(this_cpu_ptr(bp_on_reg), XCHAL_NUM_IBREAK, bp);    in arch_install_hw_breakpoint()
     178  if (i < 0)                                              in arch_install_hw_breakpoint()
     179          return i;                                       in arch_install_hw_breakpoint()
     180  set_ibreak_regs(i, bp);                                 in arch_install_hw_breakpoint()
     [all …]
|
/arch/mips/ar7/ |
D | prom.c |
      32  int i;                                                  in prom_getenv()  local
      34  for (i = 0; (i < MAX_ENTRY) && adam2_env[i].name; i++)  in prom_getenv()
      35          if (!strcmp(name, adam2_env[i].name))           in prom_getenv()
      36                  return adam2_env[i].value;              in prom_getenv()
      44  int i;                                                  in ar7_init_cmdline()  local
      46  for (i = 1; i < argc; i++) {                            in ar7_init_cmdline()
      47          strlcat(arcs_cmdline, argv[i], COMMAND_LINE_SIZE);    in ar7_init_cmdline()
      48          if (i < (argc - 1))                             in ar7_init_cmdline()
     120  int i;                                                  in lookup_psp_var_map()  local
     122  for (i = 0; i < ARRAY_SIZE(psp_var_map); i++)           in lookup_psp_var_map()
     [all …]
|
/arch/powerpc/net/ |
D | bpf_jit32.h |
      75  #define PPC_LBZ_OFFS(r, base, i) do { if ((i) < 32768) PPC_LBZ(r, base, i); \    argument
      76                  else { PPC_ADDIS(r, base, IMM_HA(i)); \
      77                  PPC_LBZ(r, r, IMM_L(i)); } } while(0)
      79  #define PPC_LD_OFFS(r, base, i) do { if ((i) < 32768) PPC_LD(r, base, i); \      argument
      80                  else { PPC_ADDIS(r, base, IMM_HA(i)); \
      81                  PPC_LD(r, r, IMM_L(i)); } } while(0)
      83  #define PPC_LWZ_OFFS(r, base, i) do { if ((i) < 32768) PPC_LWZ(r, base, i); \    argument
      84                  else { PPC_ADDIS(r, base, IMM_HA(i)); \
      85                  PPC_LWZ(r, r, IMM_L(i)); } } while(0)
      87  #define PPC_LHZ_OFFS(r, base, i) do { if ((i) < 32768) PPC_LHZ(r, base, i); \    argument
      [all …]
|
D | bpf_jit.h |
      26  #define IMM_H(i)  ((uintptr_t)(i)>>16)                  argument
      27  #define IMM_HA(i) (((uintptr_t)(i)>>16) + \             argument
      28                  (((uintptr_t)(i) & 0x8000) >> 15))
      29  #define IMM_L(i)  ((uintptr_t)(i) & 0xffff)             argument
      41  #define PPC_ADDI(d, a, i) EMIT(PPC_INST_ADDI | ___PPC_RT(d) | \    argument
      42                  ___PPC_RA(a) | IMM_L(i))
      44  #define PPC_LI(r, i)      PPC_ADDI(r, 0, i)             argument
      45  #define PPC_ADDIS(d, a, i) EMIT(PPC_INST_ADDIS | \      argument
      46                  ___PPC_RT(d) | ___PPC_RA(a) | IMM_L(i))
      47  #define PPC_LIS(r, i)     PPC_ADDIS(r, 0, i)            argument
      [all …]
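Note: IMM_HA()/IMM_L() split a 32-bit offset so that an addis of the high half followed by a signed 16-bit displacement (as in PPC_ADDI or the *_OFFS load macros in bpf_jit32.h above) reconstructs the original value; the +1 adjustment in IMM_HA compensates for the sign extension of a low half whose bit 15 is set. A small self-checking sketch of that arithmetic, using uint32_t in place of the kernel's uintptr_t:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    #define IMM_H(i)  ((uint32_t)(i) >> 16)
    #define IMM_HA(i) (((uint32_t)(i) >> 16) + (((uint32_t)(i) & 0x8000) >> 15))
    #define IMM_L(i)  ((uint32_t)(i) & 0xffff)

    int main(void)
    {
            uint32_t samples[] = { 0x00001234, 0x00018000, 0x7fff8001, 0xdeadbeef };

            for (unsigned n = 0; n < sizeof(samples) / sizeof(samples[0]); n++) {
                    uint32_t i = samples[n];
                    /* addi/lwz sign-extend their 16-bit field, hence the cast */
                    int32_t lo = (int16_t)IMM_L(i);
                    uint32_t rebuilt = (IMM_HA(i) << 16) + (uint32_t)lo;

                    assert(rebuilt == i);
                    printf("%#010x = (%#06x << 16) %+d\n", i, IMM_HA(i), lo);
            }
            return 0;
    }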
|
/arch/mips/sgi-ip22/ |
D | ip22-int.c |
     248  int i;                                                  in arch_init_irq()  local
     251  for (i = 0; i < 256; i++) {                             in arch_init_irq()
     252          if (i & 0x80) {                                 in arch_init_irq()
     253                  lc0msk_to_irqnr[i] = SGINT_LOCAL0 + 7;  in arch_init_irq()
     254                  lc1msk_to_irqnr[i] = SGINT_LOCAL1 + 7;  in arch_init_irq()
     255                  lc2msk_to_irqnr[i] = SGINT_LOCAL2 + 7;  in arch_init_irq()
     256                  lc3msk_to_irqnr[i] = SGINT_LOCAL3 + 7;  in arch_init_irq()
     257          } else if (i & 0x40) {                          in arch_init_irq()
     258                  lc0msk_to_irqnr[i] = SGINT_LOCAL0 + 6;  in arch_init_irq()
     259                  lc1msk_to_irqnr[i] = SGINT_LOCAL1 + 6;  in arch_init_irq()
     [all …]
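Note: the loop above fills lookup tables mapping an 8-bit interrupt-status mask to SGINT_LOCALn plus the index of its highest set bit (bit 7 checked first, then bit 6, and so on), one table per local interrupt group. A generic sketch of that precomputation with an illustrative table name:

    #include <stdint.h>

    static uint8_t msk_to_bit[256];     /* mask value -> index of its highest set bit */

    static void build_msk_table(void)
    {
            /* entry 0 (no bits set) is left as 0 here */
            for (int i = 1; i < 256; i++) {
                    int bit = 7;

                    while (!(i & (1 << bit)))
                            bit--;
                    msk_to_bit[i] = bit;
            }
    }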
|
/arch/s390/include/asm/ |
D | atomic.h |
      18  #define ATOMIC_INIT(i)  { (i) }                         argument
      30  static inline void atomic_set(atomic_t *v, int i)       in atomic_set()  argument
      34          : "=Q" (v->counter) : "d" (i));                 in atomic_set()
      37  static inline int atomic_add_return(int i, atomic_t *v) in atomic_add_return()  argument
      39          return __atomic_add_barrier(i, &v->counter) + i;     in atomic_add_return()
      42  static inline int atomic_fetch_add(int i, atomic_t *v)  in atomic_fetch_add()  argument
      44          return __atomic_add_barrier(i, &v->counter);    in atomic_fetch_add()
      47  static inline void atomic_add(int i, atomic_t *v)       in atomic_add()  argument
      50          if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {    in atomic_add()
      51                  __atomic_add_const(i, &v->counter);     in atomic_add()
      [all …]
|
/arch/mips/kernel/ |
D | rtlx-cmp.c |
      24  int i;                                                  in rtlx_interrupt()  local
      34  for (i = 0; i < RTLX_CHANNELS; i++) {                   in rtlx_interrupt()
      35          wake_up(&channel_wqs[i].lx_queue);              in rtlx_interrupt()
      36          wake_up(&channel_wqs[i].rt_queue);              in rtlx_interrupt()
      50  int i, err;                                             in rtlx_module_init()  local
      71  for (i = 0; i < RTLX_CHANNELS; i++) {                   in rtlx_module_init()
      72          init_waitqueue_head(&channel_wqs[i].rt_queue);  in rtlx_module_init()
      73          init_waitqueue_head(&channel_wqs[i].lx_queue);  in rtlx_module_init()
      74          atomic_set(&channel_wqs[i].in_open, 0);         in rtlx_module_init()
      75          mutex_init(&channel_wqs[i].mutex);              in rtlx_module_init()
      [all …]
|
/arch/x86/kernel/cpu/mtrr/ |
D | cleanup.c |
      71  int i;                                                  in x86_get_mtrr_mem_range()  local
      73  for (i = 0; i < num_var_ranges; i++) {                  in x86_get_mtrr_mem_range()
      74          type = range_state[i].type;                     in x86_get_mtrr_mem_range()
      77          base = range_state[i].base_pfn;                 in x86_get_mtrr_mem_range()
      78          size = range_state[i].size_pfn;                 in x86_get_mtrr_mem_range()
      84  for (i = 0; i < nr_range; i++)                          in x86_get_mtrr_mem_range()
      86          range[i].start, range[i].end);                  in x86_get_mtrr_mem_range()
      90  for (i = 0; i < num_var_ranges; i++) {                  in x86_get_mtrr_mem_range()
      91          type = range_state[i].type;                     in x86_get_mtrr_mem_range()
      95          size = range_state[i].size_pfn;                 in x86_get_mtrr_mem_range()
      [all …]
|