/drivers/dma/ |
D | dmaengine.c |
      191  bitmap_and(has.bits, want->bits, device->cap_mask.bits,  in __dma_device_satisfies_mask()
      245  } else if (!dma_has_cap(DMA_PRIVATE, chan->device->cap_mask))  in dma_chan_get()
      375  if (dma_has_cap(DMA_PRIVATE, device->cap_mask))  in dma_issue_pending_all()
      412  if (!dma_has_cap(cap, device->cap_mask) ||  in min_chan()
      413  dma_has_cap(DMA_PRIVATE, device->cap_mask))  in min_chan()
      457  if (dma_has_cap(DMA_PRIVATE, device->cap_mask))  in dma_channel_rebalance()
      488  if (dev->chancnt > 1 && !dma_has_cap(DMA_PRIVATE, dev->cap_mask))  in private_candidate()
      591  dma_cap_set(DMA_PRIVATE, device->cap_mask);  in __dma_request_channel()
      605  dma_cap_clear(DMA_PRIVATE, device->cap_mask);  in __dma_request_channel()
      667  dma_cap_clear(DMA_PRIVATE, chan->device->cap_mask);  in dma_release_channel()
      [all …]
|
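The dmaengine.c hits above are the core of the capability-mask API: __dma_request_channel() sets DMA_PRIVATE on a device's cap_mask while matching the caller's requested mask against it, and the bit is cleared again when the request fails or the channel is released. A minimal consumer-side sketch of that API (dma_cap_zero/dma_cap_set, dma_request_channel() and dma_release_channel() are the real dmaengine calls; picking DMA_MEMCPY is just an example):

  #include <linux/dmaengine.h>

  /* Grab any public channel whose device advertises DMA_MEMCPY. */
  static struct dma_chan *get_memcpy_chan(void)
  {
          dma_cap_mask_t mask;
          struct dma_chan *chan;

          dma_cap_zero(mask);
          dma_cap_set(DMA_MEMCPY, mask);

          /* No filter callback: any channel matching the mask is acceptable. */
          chan = dma_request_channel(mask, NULL, NULL);

          return chan;    /* may be NULL; caller must dma_release_channel() it */
  }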
D | iop-adma.c |
      487  iop_chan->device->common.cap_mask))  in iop_adma_alloc_chan_resources()
      490  iop_chan->device->common.cap_mask))  in iop_adma_alloc_chan_resources()
      1018  if (!dma_has_cap(DMA_XOR_VAL, dma_chan->device->cap_mask))  in iop_adma_xor_val_self_test()
      1322  dma_dev->cap_mask = plat_data->cap_mask;  in iop_adma_probe()
      1337  if (dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask))  in iop_adma_probe()
      1339  if (dma_has_cap(DMA_XOR, dma_dev->cap_mask)) {  in iop_adma_probe()
      1343  if (dma_has_cap(DMA_XOR_VAL, dma_dev->cap_mask))  in iop_adma_probe()
      1346  if (dma_has_cap(DMA_PQ, dma_dev->cap_mask)) {  in iop_adma_probe()
      1350  if (dma_has_cap(DMA_PQ_VAL, dma_dev->cap_mask))  in iop_adma_probe()
      1353  if (dma_has_cap(DMA_INTERRUPT, dma_dev->cap_mask))  in iop_adma_probe()
      [all …]
|
D | mv_xor.c |
      986  int idx, dma_cap_mask_t cap_mask, int irq)  in mv_xor_channel_add() argument
      1022  dma_dev->cap_mask = cap_mask;  in mv_xor_channel_add()
      1035  if (dma_has_cap(DMA_INTERRUPT, dma_dev->cap_mask))  in mv_xor_channel_add()
      1037  if (dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask))  in mv_xor_channel_add()
      1039  if (dma_has_cap(DMA_XOR, dma_dev->cap_mask)) {  in mv_xor_channel_add()
      1070  if (dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask)) {  in mv_xor_channel_add()
      1077  if (dma_has_cap(DMA_XOR, dma_dev->cap_mask)) {  in mv_xor_channel_add()
      1085  dma_has_cap(DMA_XOR, dma_dev->cap_mask) ? "xor " : "",  in mv_xor_channel_add()
      1086  dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask) ? "cpy " : "",  in mv_xor_channel_add()
      1087  dma_has_cap(DMA_INTERRUPT, dma_dev->cap_mask) ? "intr " : "");  in mv_xor_channel_add()
      [all …]
|
D | at_hdmac.c |
      1396  dma_cap_set(DMA_MEMCPY, at91sam9rl_config.cap_mask);  in at_dma_probe()
      1397  dma_cap_set(DMA_MEMCPY, at91sam9g45_config.cap_mask);  in at_dma_probe()
      1398  dma_cap_set(DMA_SLAVE, at91sam9g45_config.cap_mask);  in at_dma_probe()
      1420  atdma->dma_common.cap_mask = plat_dat->cap_mask;  in at_dma_probe()
      1500  if (dma_has_cap(DMA_MEMCPY, atdma->dma_common.cap_mask))  in at_dma_probe()
      1503  if (dma_has_cap(DMA_SLAVE, atdma->dma_common.cap_mask)) {  in at_dma_probe()
      1506  dma_cap_set(DMA_CYCLIC, atdma->dma_common.cap_mask);  in at_dma_probe()
      1514  dma_has_cap(DMA_MEMCPY, atdma->dma_common.cap_mask) ? "cpy " : "",  in at_dma_probe()
      1515  dma_has_cap(DMA_SLAVE, atdma->dma_common.cap_mask) ? "slave " : "",  in at_dma_probe()
|
D | ste_dma40.c |
      1974  dma_cap_mask_t cap = d40c->chan.device->cap_mask;  in d40_config_memcpy()
      2850  if (dma_has_cap(DMA_SLAVE, dev->cap_mask))  in d40_ops_init()
      2853  if (dma_has_cap(DMA_MEMCPY, dev->cap_mask)) {  in d40_ops_init()
      2863  if (dma_has_cap(DMA_SG, dev->cap_mask))  in d40_ops_init()
      2866  if (dma_has_cap(DMA_CYCLIC, dev->cap_mask))  in d40_ops_init()
      2885  dma_cap_zero(base->dma_slave.cap_mask);  in d40_dmaengine_init()
      2886  dma_cap_set(DMA_SLAVE, base->dma_slave.cap_mask);  in d40_dmaengine_init()
      2887  dma_cap_set(DMA_CYCLIC, base->dma_slave.cap_mask);  in d40_dmaengine_init()
      2901  dma_cap_zero(base->dma_memcpy.cap_mask);  in d40_dmaengine_init()
      2902  dma_cap_set(DMA_MEMCPY, base->dma_memcpy.cap_mask);  in d40_dmaengine_init()
      [all …]
|
D | ep93xx_dma.c |
      1348  dma_cap_zero(dma_dev->cap_mask);  in ep93xx_dma_probe()
      1349  dma_cap_set(DMA_SLAVE, dma_dev->cap_mask);  in ep93xx_dma_probe()
      1350  dma_cap_set(DMA_CYCLIC, dma_dev->cap_mask);  in ep93xx_dma_probe()
      1364  dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask);  in ep93xx_dma_probe()
      1372  dma_cap_set(DMA_PRIVATE, dma_dev->cap_mask);  in ep93xx_dma_probe()
|
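The ep93xx_dma.c block above (and most of the other drivers/dma/ entries in this list) shows the provider side of the same mechanism: the probe routine zeroes cap_mask, sets only the transfer types the hardware supports, optionally adds DMA_PRIVATE so the channels are never handed out through the public pool, and then registers the device. A condensed sketch of that pattern with hypothetical foo_* names; the cap_mask macros and dma_async_device_register() are the real dmaengine API, and the mandatory prep/status callbacks are elided:

  #include <linux/dmaengine.h>
  #include <linux/platform_device.h>
  #include <linux/slab.h>

  static int foo_dma_probe(struct platform_device *pdev)
  {
          struct dma_device *dma_dev;

          dma_dev = devm_kzalloc(&pdev->dev, sizeof(*dma_dev), GFP_KERNEL);
          if (!dma_dev)
                  return -ENOMEM;

          /* Advertise only what the controller actually implements. */
          dma_cap_zero(dma_dev->cap_mask);
          dma_cap_set(DMA_SLAVE, dma_dev->cap_mask);
          dma_cap_set(DMA_CYCLIC, dma_dev->cap_mask);
          /* Keep the channels private: hand them out on explicit request only. */
          dma_cap_set(DMA_PRIVATE, dma_dev->cap_mask);

          dma_dev->dev = &pdev->dev;
          /* ... set device_prep_slave_sg(), device_tx_status(), etc. here ... */

          return dma_async_device_register(dma_dev);
  }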
D | bcm2835-dma.c |
      613  dma_cap_set(DMA_SLAVE, od->ddev.cap_mask);  in bcm2835_dma_probe()
      614  dma_cap_set(DMA_PRIVATE, od->ddev.cap_mask);  in bcm2835_dma_probe()
      615  dma_cap_set(DMA_CYCLIC, od->ddev.cap_mask);  in bcm2835_dma_probe()
|
D | mmp_tdma.c |
      578  dma_cap_mask_t mask = tdev->device.cap_mask;  in mmp_tdma_xlate()
      661  dma_cap_set(DMA_SLAVE, tdev->device.cap_mask);  in mmp_tdma_probe()
      662  dma_cap_set(DMA_CYCLIC, tdev->device.cap_mask);  in mmp_tdma_probe()
|
D | moxart-dma.c |
      612  dma_cap_zero(mdc->dma_slave.cap_mask);  in moxart_probe()
      613  dma_cap_set(DMA_SLAVE, mdc->dma_slave.cap_mask);  in moxart_probe()
      614  dma_cap_set(DMA_PRIVATE, mdc->dma_slave.cap_mask);  in moxart_probe()
|
D | s3c24xx-dma.c |
      1293  dma_cap_set(DMA_MEMCPY, s3cdma->memcpy.cap_mask);  in s3c24xx_dma_probe()
      1294  dma_cap_set(DMA_PRIVATE, s3cdma->memcpy.cap_mask);  in s3c24xx_dma_probe()
      1306  dma_cap_set(DMA_SLAVE, s3cdma->slave.cap_mask);  in s3c24xx_dma_probe()
      1307  dma_cap_set(DMA_CYCLIC, s3cdma->slave.cap_mask);  in s3c24xx_dma_probe()
      1308  dma_cap_set(DMA_PRIVATE, s3cdma->slave.cap_mask);  in s3c24xx_dma_probe()
|
D | sirf-dma.c |
      750  dma_cap_set(DMA_SLAVE, dma->cap_mask);  in sirfsoc_dma_probe()
      751  dma_cap_set(DMA_CYCLIC, dma->cap_mask);  in sirfsoc_dma_probe()
      752  dma_cap_set(DMA_INTERLEAVE, dma->cap_mask);  in sirfsoc_dma_probe()
      753  dma_cap_set(DMA_PRIVATE, dma->cap_mask);  in sirfsoc_dma_probe()
|
D | imx-dma.c |
      1053  return dma_request_channel(imxdma->dma_device.cap_mask,  in imxdma_xlate()
      1132  dma_cap_set(DMA_SLAVE, imxdma->dma_device.cap_mask);  in imxdma_probe()
      1133  dma_cap_set(DMA_CYCLIC, imxdma->dma_device.cap_mask);  in imxdma_probe()
      1134  dma_cap_set(DMA_MEMCPY, imxdma->dma_device.cap_mask);  in imxdma_probe()
      1135  dma_cap_set(DMA_INTERLEAVE, imxdma->dma_device.cap_mask);  in imxdma_probe()
|
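imx-dma.c (like the mmp_tdma.c and mxs-dma.c xlate hits earlier in the list) also reuses the registered device's own cap_mask when translating a device-tree phandle into a channel: the xlate callback simply calls dma_request_channel() with that mask plus a filter function. A sketch of that shape, again with hypothetical foo_* names and assuming the driver registered its struct dma_device as the of_dma_data; of_phandle_args, of_dma and dma_request_channel() are the real interfaces:

  #include <linux/dmaengine.h>
  #include <linux/of_dma.h>

  /* Hypothetical filter: accept the channel whose id matches the DT cell. */
  static bool foo_dma_filter(struct dma_chan *chan, void *param)
  {
          unsigned int request = *(unsigned int *)param;

          return chan->chan_id == request;
  }

  static struct dma_chan *foo_dma_xlate(struct of_phandle_args *dma_spec,
                                        struct of_dma *ofdma)
  {
          struct dma_device *dma_dev = ofdma->of_dma_data;
          unsigned int request;

          if (dma_spec->args_count != 1)
                  return NULL;
          request = dma_spec->args[0];

          /* Reuse the device's own cap_mask, as imxdma_xlate() does above. */
          return dma_request_channel(dma_dev->cap_mask, foo_dma_filter, &request);
  }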
D | mic_x100_dma.c |
      574  dma_cap_zero(mic_dma_dev->dma_dev.cap_mask);  in mic_dma_register_dma_device()
      579  dma_cap_set(DMA_MEMCPY, mic_dma_dev->dma_dev.cap_mask);  in mic_dma_register_dma_device()
      582  dma_cap_set(DMA_PRIVATE, mic_dma_dev->dma_dev.cap_mask);  in mic_dma_register_dma_device()
|
D | edma.c |
      1062  dma_cap_zero(ecc->dma_slave.cap_mask);  in edma_probe()
      1063  dma_cap_set(DMA_SLAVE, ecc->dma_slave.cap_mask);  in edma_probe()
      1064  dma_cap_set(DMA_CYCLIC, ecc->dma_slave.cap_mask);  in edma_probe()
      1065  dma_cap_set(DMA_MEMCPY, ecc->dma_slave.cap_mask);  in edma_probe()
|
D | txx9dmac.c |
      1117  dma_cap_set(DMA_MEMCPY, dc->dma.cap_mask);  in txx9dmac_chan_probe()
      1120  dma_cap_set(DMA_SLAVE, dc->dma.cap_mask);  in txx9dmac_chan_probe()
      1121  dma_cap_set(DMA_PRIVATE, dc->dma.cap_mask);  in txx9dmac_chan_probe()
      1163  dma_has_cap(DMA_MEMCPY, dc->dma.cap_mask) ? " memcpy" : "",  in txx9dmac_chan_probe()
      1164  dma_has_cap(DMA_SLAVE, dc->dma.cap_mask) ? " slave" : "");  in txx9dmac_chan_probe()
|
D | mmp_pdma.c |
      1052  dma_cap_set(DMA_SLAVE, pdev->device.cap_mask);  in mmp_pdma_probe()
      1053  dma_cap_set(DMA_MEMCPY, pdev->device.cap_mask);  in mmp_pdma_probe()
      1054  dma_cap_set(DMA_CYCLIC, pdev->device.cap_mask);  in mmp_pdma_probe()
      1055  dma_cap_set(DMA_PRIVATE, pdev->device.cap_mask);  in mmp_pdma_probe()
|
D | mxs-dma.c |
      771  dma_cap_mask_t mask = mxs_dma->dma_device.cap_mask;  in mxs_dma_xlate()
      825  dma_cap_set(DMA_SLAVE, mxs_dma->dma_device.cap_mask);  in mxs_dma_probe()
      826  dma_cap_set(DMA_CYCLIC, mxs_dma->dma_device.cap_mask);  in mxs_dma_probe()
|
D | intel_mid_dma.c |
      1136  dma_cap_zero(dma->common.cap_mask);  in mid_setup_dma()
      1137  dma_cap_set(DMA_MEMCPY, dma->common.cap_mask);  in mid_setup_dma()
      1138  dma_cap_set(DMA_SLAVE, dma->common.cap_mask);  in mid_setup_dma()
      1139  dma_cap_set(DMA_PRIVATE, dma->common.cap_mask);  in mid_setup_dma()
|
/drivers/pci/pcie/ |
D | portdrv_core.c |
      258  int cap_mask = 0;  in get_port_device_capability() local
      264  cap_mask = PCIE_PORT_SERVICE_PME | PCIE_PORT_SERVICE_HP  in get_port_device_capability()
      267  cap_mask |= PCIE_PORT_SERVICE_AER;  in get_port_device_capability()
      270  err = pcie_port_platform_notify(dev, &cap_mask);  in get_port_device_capability()
      276  if ((cap_mask & PCIE_PORT_SERVICE_HP) &&  in get_port_device_capability()
      291  if ((cap_mask & PCIE_PORT_SERVICE_AER)  in get_port_device_capability()
      304  if ((cap_mask & PCIE_PORT_SERVICE_PME)  in get_port_device_capability()
|
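In portdrv_core.c the identifier has nothing to do with dmaengine: get_port_device_capability() builds a plain int bitmask of port services (PME, hotplug, AER), lets the platform adjust it through pcie_port_platform_notify(), and then tests each bit before registering the corresponding service driver. A stand-alone sketch of that shape; the flag values below are illustrative stand-ins for the real PCIE_PORT_SERVICE_* definitions, and the registration calls are reduced to printf():

  #include <stdio.h>

  /* Illustrative stand-ins for PCIE_PORT_SERVICE_*; the real values differ. */
  #define PORT_SERVICE_PME        0x1
  #define PORT_SERVICE_HP         0x2
  #define PORT_SERVICE_AER        0x4

  static int get_port_capability(int port_has_aer)
  {
          int cap_mask;

          /* Default service set for this sketch... */
          cap_mask = PORT_SERVICE_PME | PORT_SERVICE_HP;
          /* ...plus AER only when that capability is actually present. */
          if (port_has_aer)
                  cap_mask |= PORT_SERVICE_AER;

          /* A platform hook (pcie_port_platform_notify()) may trim bits here. */
          return cap_mask;
  }

  int main(void)
  {
          int cap_mask = get_port_capability(1);

          if (cap_mask & PORT_SERVICE_HP)
                  printf("register hotplug service\n");
          if (cap_mask & PORT_SERVICE_AER)
                  printf("register AER service\n");
          if (cap_mask & PORT_SERVICE_PME)
                  printf("register PME service\n");
          return 0;
  }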
/drivers/net/ethernet/qlogic/qlcnic/ |
D | qlcnic_minidump.c |
      296  hdr->drv_cap_mask = hdr->cap_mask;  in qlcnic_82xx_cache_tmpl_hdr_values()
      297  fw_dump->cap_mask = hdr->cap_mask;  in qlcnic_82xx_cache_tmpl_hdr_values()
      350  hdr->drv_cap_mask = hdr->cap_mask;  in qlcnic_83xx_cache_tmpl_hdr_values()
      351  fw_dump->cap_mask = hdr->cap_mask;  in qlcnic_83xx_cache_tmpl_hdr_values()
      1278  fw_dump->cap_mask);  in qlcnic_fw_cmd_get_minidump_temp()
      1322  if (i & fw_dump->cap_mask)  in qlcnic_dump_fw()
      1353  if (!(entry->hdr.mask & fw_dump->cap_mask)) {  in qlcnic_dump_fw()
|
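The qlcnic hits use cap_mask differently again: the firmware minidump template header carries a capability mask, the driver caches it into fw_dump->cap_mask, and qlcnic_dump_fw() skips every template entry whose hdr.mask shares no bits with it. A small self-contained sketch of that filtering step, with hypothetical, trimmed-down struct layouts (the real ones live in the qlcnic driver headers):

  #include <stdint.h>
  #include <stdio.h>

  /* Hypothetical, trimmed-down stand-ins for the driver's structures. */
  struct dump_entry_hdr {
          uint32_t type;
          uint32_t mask;          /* capability bits this entry belongs to */
  };

  struct fw_dump_state {
          uint32_t cap_mask;      /* capabilities selected for this dump */
  };

  /* Mirrors the check "if (!(entry->hdr.mask & fw_dump->cap_mask)) skip". */
  static int entry_selected(const struct dump_entry_hdr *hdr,
                            const struct fw_dump_state *fw_dump)
  {
          return (hdr->mask & fw_dump->cap_mask) != 0;
  }

  int main(void)
  {
          struct fw_dump_state fw_dump = { .cap_mask = 0x00ff };
          struct dump_entry_hdr ctrl = { .type = 1, .mask = 0x0003 };
          struct dump_entry_hdr ext  = { .type = 2, .mask = 0x0100 };

          printf("ctrl entry: %s\n", entry_selected(&ctrl, &fw_dump) ? "dump" : "skip");
          printf("ext entry:  %s\n", entry_selected(&ext, &fw_dump) ? "dump" : "skip");
          return 0;
  }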
/drivers/dma/ppc4xx/ |
D | adma.c |
      1418  if (dma_has_cap(cap, ref->chan->device->cap_mask)) {  in ppc440spe_async_tx_find_best_channel()
      3794  dma_cap_set(DMA_MEMCPY, adev->common.cap_mask);  in ppc440spe_adma_init_capabilities()
      3795  dma_cap_set(DMA_INTERRUPT, adev->common.cap_mask);  in ppc440spe_adma_init_capabilities()
      3796  dma_cap_set(DMA_PQ, adev->common.cap_mask);  in ppc440spe_adma_init_capabilities()
      3797  dma_cap_set(DMA_PQ_VAL, adev->common.cap_mask);  in ppc440spe_adma_init_capabilities()
      3798  dma_cap_set(DMA_XOR_VAL, adev->common.cap_mask);  in ppc440spe_adma_init_capabilities()
      3801  dma_cap_set(DMA_XOR, adev->common.cap_mask);  in ppc440spe_adma_init_capabilities()
      3802  dma_cap_set(DMA_PQ, adev->common.cap_mask);  in ppc440spe_adma_init_capabilities()
      3803  dma_cap_set(DMA_INTERRUPT, adev->common.cap_mask);  in ppc440spe_adma_init_capabilities()
      3804  adev->common.cap_mask = adev->common.cap_mask;  in ppc440spe_adma_init_capabilities()
      [all …]
|
/drivers/dma/ioat/ |
D | dma_v3.c |
      1223  if (!dma_has_cap(DMA_XOR, dma->cap_mask))  in ioat_xor_val_self_test()
      1317  if (!dma_has_cap(DMA_XOR_VAL, dma_chan->device->cap_mask))  in ioat_xor_val_self_test()
      1582  dma_cap_set(DMA_INTERRUPT, dma->cap_mask);  in ioat3_dma_probe()
      1598  dma_cap_set(DMA_XOR, dma->cap_mask);  in ioat3_dma_probe()
      1601  dma_cap_set(DMA_XOR_VAL, dma->cap_mask);  in ioat3_dma_probe()
      1610  dma_cap_set(DMA_PQ, dma->cap_mask);  in ioat3_dma_probe()
      1611  dma_cap_set(DMA_PQ_VAL, dma->cap_mask);  in ioat3_dma_probe()
      1622  dma_cap_set(DMA_XOR, dma->cap_mask);  in ioat3_dma_probe()
      1623  dma_cap_set(DMA_XOR_VAL, dma->cap_mask);  in ioat3_dma_probe()
|
D | dma.c |
      1043  dma_cap_set(DMA_MEMCPY, dma->cap_mask);  in ioat_probe()
      1119  dma_has_cap(DMA_PQ, dma->cap_mask) ? " pq" : "",  in cap_show()
      1120  dma_has_cap(DMA_PQ_VAL, dma->cap_mask) ? " pq_val" : "",  in cap_show()
      1121  dma_has_cap(DMA_XOR, dma->cap_mask) ? " xor" : "",  in cap_show()
      1122  dma_has_cap(DMA_XOR_VAL, dma->cap_mask) ? " xor_val" : "",  in cap_show()
      1123  dma_has_cap(DMA_INTERRUPT, dma->cap_mask) ? " intr" : "");  in cap_show()
|
/drivers/rapidio/devices/ |
D | tsi721_dma.c |
      894  dma_cap_zero(mport->dma.cap_mask);  in tsi721_register_dma()
      895  dma_cap_set(DMA_PRIVATE, mport->dma.cap_mask);  in tsi721_register_dma()
      896  dma_cap_set(DMA_SLAVE, mport->dma.cap_mask);  in tsi721_register_dma()
|
/drivers/net/ethernet/mellanox/mlx4/ |
D | fw.h |
      203  u32 cap_mask;  member
|