Searched refs:dma_buff (Results 1 – 6 of 6) sorted by relevance
/drivers/net/ethernet/cirrus/
D  cs89x0.c
     153  unsigned char *dma_buff;  /* points to the beginning of the buffer */   member
     452  memcpy(skb_put(skb, length - semi_cnt), lp->dma_buff,   in dma_rx()
     475  if (lp->dma_buff) {   in release_dma_buff()
     476  free_pages((unsigned long)(lp->dma_buff),   in release_dma_buff()
     478  lp->dma_buff = NULL;   in release_dma_buff()
     886  lp->dma_buff = (unsigned char *)__get_dma_pages(GFP_KERNEL,   in net_open()
     888  if (!lp->dma_buff) {   in net_open()
     895  (unsigned long)lp->dma_buff,   in net_open()
     896  (unsigned long)isa_virt_to_bus(lp->dma_buff));   in net_open()
     897  if ((unsigned long)lp->dma_buff >= MAX_DMA_ADDRESS ||   in net_open()
     [all …]
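
These cs89x0 hits trace the full lifecycle of a legacy ISA DMA bounce buffer: net_open() allocates low memory with __get_dma_pages() and checks that the ISA controller can address it, dma_rx() copies received data out, and release_dma_buff() returns the pages. A minimal sketch of that pattern follows; the demo_* names and the 16 KiB buffer size are assumptions for illustration, not the driver's actual code.

    /* Sketch of the cs89x0-style ISA DMA buffer lifecycle.
     * demo_priv, demo_open, demo_release_dma_buff and RX_DMA_ORDER are
     * hypothetical names; the real driver keeps this state in net_local. */
    #include <linux/errno.h>
    #include <linux/gfp.h>
    #include <asm/dma.h>

    #define RX_DMA_ORDER	get_order(16384)	/* assumed 16 KiB DMA ring */

    struct demo_priv {
    	unsigned char *dma_buff;	/* start of the ISA DMA buffer */
    };

    static int demo_open(struct demo_priv *lp)
    {
    	/* __get_dma_pages() ORs in GFP_DMA, so the pages come from
    	 * ZONE_DMA and stay reachable by a 24-bit ISA DMA controller. */
    	lp->dma_buff = (unsigned char *)__get_dma_pages(GFP_KERNEL, RX_DMA_ORDER);
    	if (!lp->dma_buff)
    		return -ENOMEM;

    	/* Belt and braces: one of the checks the driver makes at line 897
    	 * above is that the buffer sits below the ISA DMA limit. */
    	if ((unsigned long)lp->dma_buff >= MAX_DMA_ADDRESS) {
    		free_pages((unsigned long)lp->dma_buff, RX_DMA_ORDER);
    		lp->dma_buff = NULL;
    		return -ENOMEM;
    	}
    	return 0;
    }

    static void demo_release_dma_buff(struct demo_priv *lp)
    {
    	if (lp->dma_buff) {
    		free_pages((unsigned long)lp->dma_buff, RX_DMA_ORDER);
    		lp->dma_buff = NULL;
    	}
    }
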
/drivers/net/ethernet/intel/i40evf/
D  i40e_adminq.c
     676  struct i40e_dma_mem *dma_buff = NULL;   in i40evf_asq_send_command() local
     764  dma_buff = &(hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use]);   in i40evf_asq_send_command()
     766  memcpy(dma_buff->va, buff, buff_size);   in i40evf_asq_send_command()
     773  cpu_to_le32(upper_32_bits(dma_buff->pa));   in i40evf_asq_send_command()
     775  cpu_to_le32(lower_32_bits(dma_buff->pa));   in i40evf_asq_send_command()
     809  memcpy(buff, dma_buff->va, buff_size);   in i40evf_asq_send_command()
/drivers/net/ethernet/intel/i40e/
D  i40e_adminq.c
     743  struct i40e_dma_mem *dma_buff = NULL;   in i40e_asq_send_command() local
     831  dma_buff = &(hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use]);   in i40e_asq_send_command()
     833  memcpy(dma_buff->va, buff, buff_size);   in i40e_asq_send_command()
     840  cpu_to_le32(upper_32_bits(dma_buff->pa));   in i40e_asq_send_command()
     842  cpu_to_le32(lower_32_bits(dma_buff->pa));   in i40e_asq_send_command()
     876  memcpy(buff, dma_buff->va, buff_size);   in i40e_asq_send_command()
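
Both the i40e admin queue and its i40evf VF counterpart follow the same send-command pattern: copy the caller's payload into a pre-allocated, pre-mapped DMA buffer, publish the buffer's 64-bit bus address in the descriptor as two little-endian 32-bit halves, and on completion copy the firmware's reply back out of the same buffer. A condensed sketch, with hypothetical demo_* types standing in for the drivers' i40e_dma_mem and descriptor structs:

    /* Sketch of the i40e/i40evf asq_send_command DMA-buffer handoff.
     * demo_dma_mem, demo_desc and the demo_* functions are illustrative. */
    #include <linux/types.h>
    #include <linux/kernel.h>
    #include <linux/string.h>
    #include <asm/byteorder.h>

    struct demo_dma_mem {
    	void *va;	/* CPU virtual address of the mapped buffer */
    	dma_addr_t pa;	/* bus address the firmware will DMA from/to */
    };

    struct demo_desc {
    	__le16 datalen;
    	__le32 addr_high;
    	__le32 addr_low;
    };

    static void demo_post_command(struct demo_desc *desc,
    			      struct demo_dma_mem *dma_buff,
    			      const void *buff, u16 buff_size)
    {
    	/* Stage the payload in the long-lived DMA buffer... */
    	memcpy(dma_buff->va, buff, buff_size);

    	/* ...and hand the hardware its 64-bit bus address split into
    	 * little-endian high/low words, as at lines 773/775 and 840/842. */
    	desc->addr_high = cpu_to_le32(upper_32_bits(dma_buff->pa));
    	desc->addr_low  = cpu_to_le32(lower_32_bits(dma_buff->pa));
    	desc->datalen   = cpu_to_le16(buff_size);
    }

    static void demo_read_response(void *buff, struct demo_dma_mem *dma_buff,
    			       u16 buff_size)
    {
    	/* On completion the reply lands in the same buffer (lines 809/876). */
    	memcpy(buff, dma_buff->va, buff_size);
    }
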
/drivers/video/fbdev/
D  pxafb.c
     521  fbi->palette_cpu = (u16 *)&fbi->dma_buff->palette[0];   in pxafb_set_par()
    1071  dma_desc = &fbi->dma_buff->dma_desc[dma];   in setup_frame_dma()
    1082  pal_desc = &fbi->dma_buff->pal_desc[pal];   in setup_frame_dma()
    1132  dma_desc = &fbi->dma_buff->dma_desc[DMA_CMD];   in setup_smart_dma()
    1300  fbi->smart_cmds = (uint16_t *) fbi->dma_buff->cmd_buff;   in pxafb_smart_init()
    2164  fbi->dma_buff = dma_alloc_coherent(fbi->dev, fbi->dma_buff_size,   in pxafb_probe()
    2166  if (fbi->dma_buff == NULL) {   in pxafb_probe()
    2251  fbi->dma_buff, fbi->dma_buff_phys);   in pxafb_probe()
    2289  fbi->dma_buff, fbi->dma_buff_phys);   in pxafb_remove()
D  pxafb.h
     116  struct pxafb_dma_buff *dma_buff;   member
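
Here pxafb packs its DMA descriptors, palettes and smart-panel command buffer into a single struct pxafb_dma_buff, backed by one dma_alloc_coherent() allocation made in pxafb_probe() and freed both on the probe error path and in pxafb_remove(). A minimal sketch of that allocate/free pairing, with an illustrative demo_fbi in place of the driver's pxafb_info:

    /* Sketch of pxafb's coherent DMA allocation; demo_* names are assumed. */
    #include <linux/device.h>
    #include <linux/dma-mapping.h>
    #include <linux/errno.h>
    #include <linux/gfp.h>

    struct demo_fbi {
    	struct device *dev;
    	size_t dma_buff_size;
    	void *dma_buff;			/* descriptors + palettes + cmd buffer */
    	dma_addr_t dma_buff_phys;	/* matching bus address for the LCD DMA */
    };

    static int demo_probe(struct demo_fbi *fbi)
    {
    	/* Coherent memory needs no explicit cache maintenance: both the
    	 * CPU (writing descriptors) and the LCD controller (reading them)
    	 * always see the same contents. */
    	fbi->dma_buff = dma_alloc_coherent(fbi->dev, fbi->dma_buff_size,
    					   &fbi->dma_buff_phys, GFP_KERNEL);
    	if (!fbi->dma_buff)
    		return -ENOMEM;
    	return 0;
    }

    static void demo_remove(struct demo_fbi *fbi)
    {
    	dma_free_coherent(fbi->dev, fbi->dma_buff_size,
    			  fbi->dma_buff, fbi->dma_buff_phys);
    }
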
/drivers/net/ethernet/ti/
D  netcp_core.c
     625  dma_addr_t dma_desc, dma_buff;   in netcp_process_one_rx_packet() local
     641  get_pkt_info(&dma_buff, &buf_len, &dma_desc, desc);   in netcp_process_one_rx_packet()
     651  dma_unmap_single(netcp->dev, dma_buff, buf_len, DMA_FROM_DEVICE);   in netcp_process_one_rx_packet()
     674  get_pkt_info(&dma_buff, &buf_len, &dma_desc, ndesc);   in netcp_process_one_rx_packet()
     677  if (likely(dma_buff && buf_len && page)) {   in netcp_process_one_rx_packet()
     678  dma_unmap_page(netcp->dev, dma_buff, PAGE_SIZE,   in netcp_process_one_rx_packet()
     682  (void *)dma_buff, buf_len, page);   in netcp_process_one_rx_packet()
     687  offset_in_page(dma_buff), buf_len, PAGE_SIZE);   in netcp_process_one_rx_packet()
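
netcp's receive path uses streaming DMA instead of coherent memory: the head fragment is unmapped with dma_unmap_single() (line 651), while payload pages are unmapped a full PAGE_SIZE at a time with dma_unmap_page() (line 678) before being attached to the skb at offset_in_page(dma_buff) (line 687). A sketch of that unmap-and-attach step; the helper and its head/fragment split are illustrative, condensed from the logic above:

    /* Sketch of the netcp RX unmap pattern; demo_rx_unmap is hypothetical. */
    #include <linux/dma-mapping.h>
    #include <linux/mm.h>
    #include <linux/skbuff.h>

    static void demo_rx_unmap(struct device *dev, dma_addr_t dma_buff,
    			  u32 buf_len, struct page *page, struct sk_buff *skb)
    {
    	if (!page) {
    		/* Head buffer: mapped with dma_map_single(), so unmap
    		 * exactly the length the hardware reported. */
    		dma_unmap_single(dev, dma_buff, buf_len, DMA_FROM_DEVICE);
    		return;
    	}

    	/* Payload fragments were mapped one whole page at a time, so the
    	 * unmap size is PAGE_SIZE even though only buf_len bytes arrived. */
    	dma_unmap_page(dev, dma_buff, PAGE_SIZE, DMA_FROM_DEVICE);

    	/* Attach the page to the skb at the offset the hardware wrote to;
    	 * offset_in_page() recovers it from the bus address. */
    	skb_add_rx_frag(skb, skb_shinfo(skb)->nr_frags, page,
    			offset_in_page(dma_buff), buf_len, PAGE_SIZE);
    }
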