
Searched refs:addrs (Results 1 – 25 of 61) sorted by relevance

/drivers/char/ipmi/
ipmi_si_hardcode.c
19 static unsigned long addrs[SI_MAX_PARMS]; variable
39 module_param_hw_array(addrs, ulong, iomem, &num_addrs, 0);
40 MODULE_PARM_DESC(addrs, "Sets the memory address of each interface, the"
143 if (i < num_addrs && addrs[i]) in ipmi_hardcode_init()
144 ipmi_hardcode_init_one(si_type[i], i, addrs[i], in ipmi_hardcode_init()
170 if (addrs[i] == addr) in ipmi_si_hardcode_match()
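The ipmi_si_hardcode.c hits above are the standard module_param_hw_array() pattern: a module-parameter array plus a count that the parameter core fills in with how many values were actually passed. Below is a minimal sketch of that pattern under an assumed, hypothetical module named "demo"; only the macro names and types come from the driver excerpt, the rest is illustrative.

    /* Sketch only: "demo" is a made-up module, not ipmi_si. */
    #include <linux/init.h>
    #include <linux/module.h>
    #include <linux/moduleparam.h>
    #include <linux/printk.h>

    #define DEMO_MAX_PARMS 4

    static unsigned long addrs[DEMO_MAX_PARMS];
    static unsigned int num_addrs;
    /* "iomem" marks these values as hardware memory addresses. */
    module_param_hw_array(addrs, ulong, iomem, &num_addrs, 0);
    MODULE_PARM_DESC(addrs, "Memory address of each interface");

    static int __init demo_init(void)
    {
            unsigned int i;

            /* num_addrs holds the number of values the user passed. */
            for (i = 0; i < num_addrs; i++)
                    pr_info("demo: interface %u at 0x%lx\n", i, addrs[i]);
            return 0;
    }
    module_init(demo_init);

    static void __exit demo_exit(void)
    {
    }
    module_exit(demo_exit);

    MODULE_LICENSE("GPL");

Loaded as "modprobe demo addrs=0xe0000000,0xe0001000", num_addrs would end up as 2.
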
/drivers/gpu/drm/armada/
armada_plane.c
37 void armada_drm_plane_calc(struct drm_plane_state *state, u32 addrs[2][3], in armada_drm_plane_calc()
54 addrs[0][0] = addr + fb->offsets[0] + y * fb->pitches[0] + in armada_drm_plane_calc()
62 addrs[0][i] = addr + fb->offsets[i] + y * fb->pitches[i] + in armada_drm_plane_calc()
67 addrs[0][i] = 0; in armada_drm_plane_calc()
72 addrs[1][i] = addrs[0][i] + pitches[i]; in armada_drm_plane_calc()
77 addrs[1][i] = addrs[0][i]; in armada_drm_plane_calc()
152 armada_drm_plane_calc(state, st->addrs, st->pitches, interlace); in armada_drm_plane_atomic_check()
armada_plane.h
9 u32 addrs[2][3]; member
19 #define armada_addr(state, f, p) to_armada_plane_state(state)->addrs[f][p]
22 void armada_drm_plane_calc(struct drm_plane_state *state, u32 addrs[2][3],
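The armada_drm_plane_calc() excerpts compute a scanout start address per colour plane from the framebuffer layout. A one-line sketch of that calculation follows, assuming a hypothetical helper demo_plane_addr() and a caller-supplied cpp (bytes per pixel); the real function additionally fills the second row of addrs[2][3] for interlaced scanout.

    #include <linux/types.h>
    #include <drm/drm_framebuffer.h>

    /* Start address of colour plane 'plane' for an (x, y) source origin. */
    static u32 demo_plane_addr(u32 base, const struct drm_framebuffer *fb,
                               int plane, int x, int y, int cpp)
    {
            return base + fb->offsets[plane] + y * fb->pitches[plane] + x * cpp;
    }
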
/drivers/net/wireless/intel/iwlwifi/mvm/
offloading.c
119 struct iwl_targ_addr *addrs; in iwl_mvm_send_proto_offload() local
127 addrs = cmd.v3s.targ_addrs; in iwl_mvm_send_proto_offload()
132 addrs = cmd.v3l.targ_addrs; in iwl_mvm_send_proto_offload()
161 addrs[i].addr = mvmvif->target_ipv6_addrs[i]; in iwl_mvm_send_proto_offload()
162 addrs[i].config_num = cpu_to_le32(j); in iwl_mvm_send_proto_offload()
/drivers/xen/xenbus/
xenbus_client.c
64 unsigned long addrs[XENBUS_MAX_RING_GRANTS]; member
462 phys_addr_t *addrs, in __xenbus_map_ring() argument
476 gnttab_set_map_op(&map[i], addrs[i], flags, gnt_refs[i], in __xenbus_map_ring()
500 gnttab_set_unmap_op(&unmap[j], (phys_addr_t)addrs[i], in __xenbus_map_ring()
526 unsigned long addrs[XENBUS_MAX_RING_GRANTS]; member
538 info->addrs[info->idx] = vaddr; in xenbus_map_ring_setup_grant_hvm()
599 xenbus_unmap_ring(dev, node->handles, nr_grefs, info.addrs); in xenbus_map_ring_valloc_hvm()
799 unsigned long addrs[XENBUS_MAX_RING_GRANTS]; member
809 info->addrs[info->idx] = (unsigned long)gfn_to_virt(gfn); in xenbus_unmap_ring_setup_grant_hvm()
849 info.addrs); in xenbus_unmap_ring_vfree_hvm()
/drivers/staging/vc04_services/interface/vchiq_arm/
vchiq_2835_arm.c
361 u32 *addrs; in create_pagelist() local
398 addrs = pagelist->addrs; in create_pagelist()
399 pages = (struct page **)(addrs + num_pages); in create_pagelist()
508 ((addrs[k - 1] & PAGE_MASK) + in create_pagelist()
509 (((addrs[k - 1] & ~PAGE_MASK) + 1) << PAGE_SHIFT)) in create_pagelist()
511 addrs[k - 1] += ((len + PAGE_SIZE - 1) >> PAGE_SHIFT); in create_pagelist()
513 addrs[k++] = (addr & PAGE_MASK) | in create_pagelist()
vchiq_pagelist.h
15 u32 addrs[1]; /* N.B. 12 LSBs hold the number member
/drivers/net/xen-netback/
hash.c
193 memcpy(&data[0], &flow.addrs.v4addrs.src, 4); in xenvif_set_skb_hash()
194 memcpy(&data[4], &flow.addrs.v4addrs.dst, 4); in xenvif_set_skb_hash()
203 memcpy(&data[0], &flow.addrs.v4addrs.src, 4); in xenvif_set_skb_hash()
204 memcpy(&data[4], &flow.addrs.v4addrs.dst, 4); in xenvif_set_skb_hash()
217 memcpy(&data[0], &flow.addrs.v6addrs.src, 16); in xenvif_set_skb_hash()
218 memcpy(&data[16], &flow.addrs.v6addrs.dst, 16); in xenvif_set_skb_hash()
227 memcpy(&data[0], &flow.addrs.v6addrs.src, 16); in xenvif_set_skb_hash()
228 memcpy(&data[16], &flow.addrs.v6addrs.dst, 16); in xenvif_set_skb_hash()
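The xen-netback lines above (and the enic, mlx5 and sfc falcon hits further down) all read the source and destination addresses out of a struct flow_keys filled by the flow dissector. A minimal sketch of that extraction step, assuming a hypothetical helper demo_print_flow(); the dissector call and field names are real kernel API, the helper is not.

    #include <linux/printk.h>
    #include <linux/skbuff.h>
    #include <net/flow_dissector.h>

    static void demo_print_flow(const struct sk_buff *skb)
    {
            struct flow_keys keys;

            /* Dissect the packet headers into keys; returns false on failure. */
            if (!skb_flow_dissect_flow_keys(skb, &keys, 0))
                    return;

            switch (keys.control.addr_type) {
            case FLOW_DISSECTOR_KEY_IPV4_ADDRS:
                    /* v4addrs.src/.dst are __be32 (network byte order). */
                    pr_info("v4 %pI4 -> %pI4\n",
                            &keys.addrs.v4addrs.src, &keys.addrs.v4addrs.dst);
                    break;
            case FLOW_DISSECTOR_KEY_IPV6_ADDRS:
                    pr_info("v6 %pI6c -> %pI6c\n",
                            &keys.addrs.v6addrs.src, &keys.addrs.v6addrs.dst);
                    break;
            }
    }
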
/drivers/net/ethernet/cisco/enic/
enic_clsf.c
38 data.u.ipv4.src_addr = ntohl(keys->addrs.v4addrs.src); in enic_addfltr_5t()
39 data.u.ipv4.dst_addr = ntohl(keys->addrs.v4addrs.dst); in enic_addfltr_5t()
162 if (tpos->keys.addrs.v4addrs.src == k->addrs.v4addrs.src && in htbl_key_search()
163 tpos->keys.addrs.v4addrs.dst == k->addrs.v4addrs.dst && in htbl_key_search()
/drivers/gpu/drm/omapdrm/
omap_gem.c
228 dma_addr_t *addrs; in omap_gem_attach_pages() local
249 addrs = kmalloc_array(npages, sizeof(*addrs), GFP_KERNEL); in omap_gem_attach_pages()
250 if (!addrs) { in omap_gem_attach_pages()
256 addrs[i] = dma_map_page(dev->dev, pages[i], in omap_gem_attach_pages()
259 if (dma_mapping_error(dev->dev, addrs[i])) { in omap_gem_attach_pages()
264 dma_unmap_page(dev->dev, addrs[i], in omap_gem_attach_pages()
273 addrs = kcalloc(npages, sizeof(*addrs), GFP_KERNEL); in omap_gem_attach_pages()
274 if (!addrs) { in omap_gem_attach_pages()
280 omap_obj->dma_addrs = addrs; in omap_gem_attach_pages()
286 kfree(addrs); in omap_gem_attach_pages()
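omap_gem_attach_pages() above maps each page for DMA and unwinds the already-mapped pages on error. A compact sketch of that streaming-DMA pattern, with a hypothetical demo_map_pages() (and a fixed DMA_TO_DEVICE direction) standing in for the driver code:

    #include <linux/dma-mapping.h>
    #include <linux/mm.h>
    #include <linux/slab.h>

    static dma_addr_t *demo_map_pages(struct device *dev, struct page **pages,
                                      unsigned int npages)
    {
            dma_addr_t *addrs;
            unsigned int i;

            addrs = kmalloc_array(npages, sizeof(*addrs), GFP_KERNEL);
            if (!addrs)
                    return NULL;

            for (i = 0; i < npages; i++) {
                    addrs[i] = dma_map_page(dev, pages[i], 0, PAGE_SIZE,
                                            DMA_TO_DEVICE);
                    /* On failure, unmap everything mapped so far. */
                    if (dma_mapping_error(dev, addrs[i]))
                            goto err_unmap;
            }
            return addrs;

    err_unmap:
            while (i--)
                    dma_unmap_page(dev, addrs[i], PAGE_SIZE, DMA_TO_DEVICE);
            kfree(addrs);
            return NULL;
    }
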
/drivers/net/ethernet/seeq/
ether3.c
589 unsigned char addrs[16]; in ether3_rx() local
610 ether3_readbuffer(dev, addrs+2, 12); in ether3_rx()
617 printk("%02X ", addrs[i]); in ether3_rx()
625 if (!(*(unsigned long *)&dev->dev_addr[0] ^ *(unsigned long *)&addrs[2+6]) && in ether3_rx()
626 !(*(unsigned short *)&dev->dev_addr[4] ^ *(unsigned short *)&addrs[2+10])) { in ether3_rx()
645 *(unsigned short *)(buf + 0) = *(unsigned short *)(addrs + 2); in ether3_rx()
646 *(unsigned long *)(buf + 2) = *(unsigned long *)(addrs + 4); in ether3_rx()
647 *(unsigned long *)(buf + 6) = *(unsigned long *)(addrs + 8); in ether3_rx()
648 *(unsigned short *)(buf + 10) = *(unsigned short *)(addrs + 12); in ether3_rx()
/drivers/gpu/drm/arm/
malidp_mw.c
26 dma_addr_t addrs[2]; member
174 mw_state->addrs[i] = obj->paddr + fb->offsets[i]; in malidp_mw_encoder_atomic_check()
258 &mw_state->addrs[0], in malidp_mw_atomic_commit()
262 hwdev->hw->enable_memwrite(hwdev, mw_state->addrs, in malidp_mw_atomic_commit()
malidp_hw.c
504 dma_addr_t *addrs, s32 *pitches, in malidp500_enable_memwrite() argument
523 malidp_hw_write(hwdev, lower_32_bits(addrs[1]), base + MALIDP_MW_P2_PTR_LOW); in malidp500_enable_memwrite()
524 malidp_hw_write(hwdev, upper_32_bits(addrs[1]), base + MALIDP_MW_P2_PTR_HIGH); in malidp500_enable_memwrite()
528 malidp_hw_write(hwdev, lower_32_bits(addrs[0]), base + MALIDP_MW_P1_PTR_LOW); in malidp500_enable_memwrite()
529 malidp_hw_write(hwdev, upper_32_bits(addrs[0]), base + MALIDP_MW_P1_PTR_HIGH); in malidp500_enable_memwrite()
845 dma_addr_t *addrs, s32 *pitches, in malidp550_enable_memwrite() argument
860 malidp_hw_write(hwdev, lower_32_bits(addrs[1]), base + MALIDP_MW_P2_PTR_LOW); in malidp550_enable_memwrite()
861 malidp_hw_write(hwdev, upper_32_bits(addrs[1]), base + MALIDP_MW_P2_PTR_HIGH); in malidp550_enable_memwrite()
865 malidp_hw_write(hwdev, lower_32_bits(addrs[0]), base + MALIDP_MW_P1_PTR_LOW); in malidp550_enable_memwrite()
866 malidp_hw_write(hwdev, upper_32_bits(addrs[0]), base + MALIDP_MW_P1_PTR_HIGH); in malidp550_enable_memwrite()
/drivers/net/ethernet/mellanox/mlx5/core/
en_arfs.c
619 tuple->src_ipv4 = fk->addrs.v4addrs.src; in arfs_alloc_rule()
620 tuple->dst_ipv4 = fk->addrs.v4addrs.dst; in arfs_alloc_rule()
622 memcpy(&tuple->src_ipv6, &fk->addrs.v6addrs.src, in arfs_alloc_rule()
624 memcpy(&tuple->dst_ipv6, &fk->addrs.v6addrs.dst, in arfs_alloc_rule()
646 return tuple->src_ipv4 == fk->addrs.v4addrs.src && in arfs_cmp()
647 tuple->dst_ipv4 == fk->addrs.v4addrs.dst; in arfs_cmp()
649 return !memcmp(&tuple->src_ipv6, &fk->addrs.v6addrs.src, in arfs_cmp()
651 !memcmp(&tuple->dst_ipv6, &fk->addrs.v6addrs.dst, in arfs_cmp()
/drivers/i3c/master/
dw-i3c-master.c
243 u8 addrs[MAX_DEVS]; member
318 if (addr == master->addrs[pos]) in dw_i3c_master_get_addr_pos()
781 master->addrs[pos] = ret; in dw_i3c_master_daa()
814 i3c_master_add_i3c_dev_locked(m, master->addrs[pos]); in dw_i3c_master_daa()
907 master->addrs[data->index] = dev->info.dyn_addr; in dw_i3c_master_reattach_i3c_dev()
928 master->addrs[pos] = dev->info.dyn_addr ? : dev->info.static_addr; in dw_i3c_master_attach_i3c_dev()
932 writel(DEV_ADDR_TABLE_DYNAMIC_ADDR(master->addrs[pos]), in dw_i3c_master_attach_i3c_dev()
950 master->addrs[data->index] = 0; in dw_i3c_master_detach_i3c_dev()
1036 master->addrs[pos] = dev->addr; in dw_i3c_master_attach_i2c_dev()
1059 master->addrs[data->index] = 0; in dw_i3c_master_detach_i2c_dev()
/drivers/watchdog/
f71808e_wdt.c
850 static const unsigned short addrs[] = { 0x2e, 0x4e }; in f71808e_init() local
854 for (i = 0; i < ARRAY_SIZE(addrs); i++) { in f71808e_init()
855 err = f71808e_find(addrs[i]); in f71808e_init()
859 if (i == ARRAY_SIZE(addrs)) in f71808e_init()
862 return watchdog_init(addrs[i]); in f71808e_init()
/drivers/mtd/nand/raw/
nand_base.c
970 static int nand_fill_column_cycles(struct nand_chip *chip, u8 *addrs, in nand_fill_column_cycles() argument
998 addrs[0] = offset_in_page; in nand_fill_column_cycles()
1007 addrs[1] = offset_in_page >> 8; in nand_fill_column_cycles()
1019 u8 addrs[4]; in nand_sp_exec_read_page_op() local
1022 NAND_OP_ADDR(3, addrs, PSEC_TO_NSEC(sdr->tWB_max)), in nand_sp_exec_read_page_op()
1040 ret = nand_fill_column_cycles(chip, addrs, offset_in_page); in nand_sp_exec_read_page_op()
1044 addrs[1] = page; in nand_sp_exec_read_page_op()
1045 addrs[2] = page >> 8; in nand_sp_exec_read_page_op()
1048 addrs[3] = page >> 16; in nand_sp_exec_read_page_op()
1061 u8 addrs[5]; in nand_lp_exec_read_page_op() local
[all …]
meson_nand.c
140 u32 addrs[MAX_CYCLE_ADDRS]; member
580 u32 *addrs = nfc->cmdfifo.rw.addrs; in meson_nfc_rw_cmd_prepare_and_execute() local
590 addrs[0] = cs | NFC_CMD_ALE | 0; in meson_nfc_rw_cmd_prepare_and_execute()
595 addrs[1] = cs | NFC_CMD_ALE | 0; in meson_nfc_rw_cmd_prepare_and_execute()
599 addrs[row_start] = cs | NFC_CMD_ALE | ROW_ADDER(page, 0); in meson_nfc_rw_cmd_prepare_and_execute()
600 addrs[row_start + 1] = cs | NFC_CMD_ALE | ROW_ADDER(page, 1); in meson_nfc_rw_cmd_prepare_and_execute()
603 addrs[row_start + 2] = in meson_nfc_rw_cmd_prepare_and_execute()
919 cmd |= instr->ctx.addr.addrs[i] & 0xff; in meson_nfc_exec_op()
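The nand_base.c and meson_nand.c results in this directory both build NAND address cycles by hand: column bytes first, then the row (page) bytes, with an extra row cycle for larger chips. A sketch of that packing, using a hypothetical demo_fill_addrs() helper and a has_third_row_cycle flag in place of the chip-size checks the real code performs:

    #include <linux/types.h>

    static unsigned int demo_fill_addrs(u8 *addrs, unsigned int column,
                                        unsigned int page,
                                        bool has_third_row_cycle)
    {
            unsigned int n = 0;

            /* Column address (offset within the page), LSB first. */
            addrs[n++] = column & 0xff;
            addrs[n++] = (column >> 8) & 0xff;

            /* Row address (page number), LSB first. */
            addrs[n++] = page & 0xff;
            addrs[n++] = (page >> 8) & 0xff;
            if (has_third_row_cycle)
                    addrs[n++] = (page >> 16) & 0xff;

            return n;       /* number of address cycles to issue */
    }
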
/drivers/net/dsa/
bcm_sf2_cfp.c
262 struct flow_dissector_key_ipv4_addrs *addrs, in bcm_sf2_cfp_slice_ipv4() argument
297 (be32_to_cpu(addrs->dst) & 0x0000ff00) >> 8; in bcm_sf2_cfp_slice_ipv4()
308 reg = (u32)(be32_to_cpu(addrs->dst) & 0xff) << 24 | in bcm_sf2_cfp_slice_ipv4()
309 (u32)(be32_to_cpu(addrs->dst) >> 16) << 8 | in bcm_sf2_cfp_slice_ipv4()
310 (be32_to_cpu(addrs->src) & 0x0000ff00) >> 8; in bcm_sf2_cfp_slice_ipv4()
323 reg = (u32)(be32_to_cpu(addrs->src) & 0xff) << 24 | in bcm_sf2_cfp_slice_ipv4()
324 (u32)(be32_to_cpu(addrs->src) >> 16) << 8 | in bcm_sf2_cfp_slice_ipv4()
/drivers/media/platform/
rcar_fdp1.c
522 dma_addr_t addrs[3]; member
926 fdp1_write(fdp1, job->previous->addrs[0], FD1_RPF0_ADDR_Y); in fdp1_configure_rpf()
929 fdp1_write(fdp1, job->active->addrs[0], FD1_RPF1_ADDR_Y); in fdp1_configure_rpf()
930 fdp1_write(fdp1, job->active->addrs[1], FD1_RPF1_ADDR_C0); in fdp1_configure_rpf()
931 fdp1_write(fdp1, job->active->addrs[2], FD1_RPF1_ADDR_C1); in fdp1_configure_rpf()
935 fdp1_write(fdp1, job->next->addrs[0], FD1_RPF2_ADDR_Y); in fdp1_configure_rpf()
994 fdp1_write(fdp1, job->dst->addrs[0], FD1_WPF_ADDR_Y); in fdp1_configure_wpf()
995 fdp1_write(fdp1, job->dst->addrs[1], FD1_WPF_ADDR_C0); in fdp1_configure_wpf()
996 fdp1_write(fdp1, job->dst->addrs[2], FD1_WPF_ADDR_C1); in fdp1_configure_wpf()
1804 fbuf->addrs[i] = vb2_dma_contig_plane_dma_addr(&vbuf->vb2_buf, i); in fdp1_buf_prepare_field()
[all …]
/drivers/mtd/nand/raw/atmel/
nand-controller.c
187 u8 addrs[5]; member
551 u8 *addrs = nc->op.addrs; in atmel_nfc_exec_op() local
562 regmap_write(nc->base.smc, ATMEL_HSMC_NFC_ADDR, *addrs++); in atmel_nfc_exec_op()
570 addr = addrs[0] | (addrs[1] << 8) | (addrs[2] << 16) | in atmel_nfc_exec_op()
571 (addrs[3] << 24); in atmel_nfc_exec_op()
611 nc->op.addrs[nc->op.naddrs++] = dat; in atmel_hsmc_nand_cmd_ctrl()
699 nc->op.addrs[nc->op.naddrs++] = column; in atmel_nfc_set_op_addr()
705 nc->op.addrs[nc->op.naddrs++] = column >> 8; in atmel_nfc_set_op_addr()
709 nc->op.addrs[nc->op.naddrs++] = page; in atmel_nfc_set_op_addr()
710 nc->op.addrs[nc->op.naddrs++] = page >> 8; in atmel_nfc_set_op_addr()
[all …]
/drivers/gpu/drm/vmwgfx/
vmwgfx_ttm_buffer.c
305 return viter->addrs[viter->i]; in __vmw_piter_dma_addr()
339 viter->addrs = vsgt->addrs; in vmw_piter_start()
429 vsgt->addrs = vmw_tt->dma_ttm.dma_address; in vmw_ttm_map_dma()
/drivers/net/ipvlan/
ipvlan_main.c
177 list_for_each_entry_rcu(addr, &ipvlan->addrs, anode) in ipvlan_open()
196 list_for_each_entry_rcu(addr, &ipvlan->addrs, anode) in ipvlan_stop()
556 INIT_LIST_HEAD(&ipvlan->addrs); in ipvlan_link_new()
636 list_for_each_entry_safe(addr, next, &ipvlan->addrs, anode) { in ipvlan_link_delete()
795 list_add_tail_rcu(&addr->anode, &ipvlan->addrs); in ipvlan_add_addr()
/drivers/gpu/drm/
drm_prime.c
948 dma_addr_t *addrs, int max_entries) in drm_prime_sg_to_page_addr_arrays() argument
967 if (addrs) in drm_prime_sg_to_page_addr_arrays()
968 addrs[index] = addr; in drm_prime_sg_to_page_addr_arrays()
/drivers/net/ethernet/sfc/falcon/
rx.c
861 spec.rem_host[0] = fk.addrs.v4addrs.src; in ef4_filter_rfs()
862 spec.loc_host[0] = fk.addrs.v4addrs.dst; in ef4_filter_rfs()
864 memcpy(spec.rem_host, &fk.addrs.v6addrs.src, sizeof(struct in6_addr)); in ef4_filter_rfs()
865 memcpy(spec.loc_host, &fk.addrs.v6addrs.dst, sizeof(struct in6_addr)); in ef4_filter_rfs()
