/drivers/dma/

D | dmaengine.c
    328  if (!dma_has_cap(cap, device->cap_mask) ||    in min_chan()
    329  dma_has_cap(DMA_PRIVATE, device->cap_mask))    in min_chan()
    374  if (dma_has_cap(DMA_PRIVATE, device->cap_mask))    in dma_channel_rebalance()
    397  bitmap_and(has.bits, want->bits, device->cap_mask.bits,    in dma_device_satisfies_mask()
    476  if (!dma_has_cap(DMA_PRIVATE, chan->device->cap_mask))    in dma_chan_get()
    561  if (dma_has_cap(DMA_PRIVATE, device->cap_mask))    in dma_issue_pending_all()
    581  if (!(test_bit(DMA_SLAVE, device->cap_mask.bits) ||    in dma_get_slave_caps()
    582  test_bit(DMA_CYCLIC, device->cap_mask.bits)))    in dma_get_slave_caps()
    632  if (dev->chancnt > 1 && !dma_has_cap(DMA_PRIVATE, dev->cap_mask))    in private_candidate()
    669  dma_cap_set(DMA_PRIVATE, device->cap_mask);    in find_candidate()
    [all …]

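The dmaengine core is the consumer of these masks: channel allocation compares a requester's wanted capabilities against each device's cap_mask and skips devices marked DMA_PRIVATE. A minimal client-side sketch of that request path, assuming only the stock <linux/dmaengine.h> API (the helper name is hypothetical):

    #include <linux/dmaengine.h>

    /* Build a capability mask and let the core pick any public channel
     * whose device advertises DMA_MEMCPY in its cap_mask. */
    static struct dma_chan *example_request_memcpy_chan(void)
    {
            dma_cap_mask_t mask;

            dma_cap_zero(mask);
            dma_cap_set(DMA_MEMCPY, mask);

            /* NULL filter/param: accept the first capable, non-private channel. */
            return dma_request_channel(mask, NULL, NULL);
    }

A channel obtained this way is handed back with dma_release_channel() once the client is done.
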
D | mv_xor.c
    1036  int idx, dma_cap_mask_t cap_mask, int irq)    in mv_xor_channel_add() argument
    1078  dma_dev->cap_mask = cap_mask;    in mv_xor_channel_add()
    1089  if (dma_has_cap(DMA_INTERRUPT, dma_dev->cap_mask))    in mv_xor_channel_add()
    1091  if (dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask))    in mv_xor_channel_add()
    1093  if (dma_has_cap(DMA_XOR, dma_dev->cap_mask)) {    in mv_xor_channel_add()
    1127  if (dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask)) {    in mv_xor_channel_add()
    1134  if (dma_has_cap(DMA_XOR, dma_dev->cap_mask)) {    in mv_xor_channel_add()
    1143  dma_has_cap(DMA_XOR, dma_dev->cap_mask) ? "xor " : "",    in mv_xor_channel_add()
    1144  dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask) ? "cpy " : "",    in mv_xor_channel_add()
    1145  dma_has_cap(DMA_INTERRUPT, dma_dev->cap_mask) ? "intr " : "");    in mv_xor_channel_add()
    [all …]

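mv_xor_channel_add() shows the provider side of the same mechanism: fill cap_mask first, then install only the prep callbacks whose capability bit is set, and reuse the same bits for the probe banner. A condensed, hypothetical sketch of that pattern (the foo_* symbols are stand-ins, not the mv_xor code):

    #include <linux/dmaengine.h>

    /* Stand-in prep callbacks, assumed to be implemented elsewhere. */
    extern struct dma_async_tx_descriptor *foo_prep_memcpy(struct dma_chan *chan,
                    dma_addr_t dst, dma_addr_t src, size_t len, unsigned long flags);
    extern struct dma_async_tx_descriptor *foo_prep_xor(struct dma_chan *chan,
                    dma_addr_t dst, dma_addr_t *src, unsigned int src_cnt,
                    size_t len, unsigned long flags);

    static int foo_register(struct dma_device *dma_dev)
    {
            /* Advertise what the engine can do... */
            dma_cap_zero(dma_dev->cap_mask);
            dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask);
            dma_cap_set(DMA_XOR, dma_dev->cap_mask);

            /* ...and hook up one prep routine per advertised capability. */
            if (dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask))
                    dma_dev->device_prep_dma_memcpy = foo_prep_memcpy;
            if (dma_has_cap(DMA_XOR, dma_dev->cap_mask)) {
                    dma_dev->max_xor = 8;   /* illustrative engine limit */
                    dma_dev->device_prep_dma_xor = foo_prep_xor;
            }

            return dma_async_device_register(dma_dev);
    }

The at_hdmac, bcm-sba-raid, xgene and ioat entries below follow the same shape, differing only in which DMA_* bits they set.
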
D | at_hdmac.c
    510  dma_cap_mask_t cap_mask;    member
    1937  dma_cap_set(DMA_MEMCPY, at91sam9rl_config.cap_mask);    in at_dma_probe()
    1938  dma_cap_set(DMA_INTERLEAVE, at91sam9g45_config.cap_mask);    in at_dma_probe()
    1939  dma_cap_set(DMA_MEMCPY, at91sam9g45_config.cap_mask);    in at_dma_probe()
    1940  dma_cap_set(DMA_MEMSET, at91sam9g45_config.cap_mask);    in at_dma_probe()
    1941  dma_cap_set(DMA_MEMSET_SG, at91sam9g45_config.cap_mask);    in at_dma_probe()
    1942  dma_cap_set(DMA_PRIVATE, at91sam9g45_config.cap_mask);    in at_dma_probe()
    1943  dma_cap_set(DMA_SLAVE, at91sam9g45_config.cap_mask);    in at_dma_probe()
    1965  atdma->dma_device.cap_mask = plat_dat->cap_mask;    in at_dma_probe()
    2033  if (dma_has_cap(DMA_INTERLEAVE, atdma->dma_device.cap_mask))    in at_dma_probe()
    [all …]

D | bcm-sba-raid.c
    1553  dma_cap_zero(dma_dev->cap_mask);    in sba_async_register()
    1554  dma_cap_set(DMA_INTERRUPT, dma_dev->cap_mask);    in sba_async_register()
    1555  dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask);    in sba_async_register()
    1556  dma_cap_set(DMA_XOR, dma_dev->cap_mask);    in sba_async_register()
    1557  dma_cap_set(DMA_PQ, dma_dev->cap_mask);    in sba_async_register()
    1573  if (dma_has_cap(DMA_INTERRUPT, dma_dev->cap_mask))    in sba_async_register()
    1577  if (dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask))    in sba_async_register()
    1581  if (dma_has_cap(DMA_XOR, dma_dev->cap_mask)) {    in sba_async_register()
    1587  if (dma_has_cap(DMA_PQ, dma_dev->cap_mask)) {    in sba_async_register()
    1605  dma_has_cap(DMA_INTERRUPT, dma_dev->cap_mask) ? "interrupt " : "",    in sba_async_register()
    [all …]

D | mcf-edma-main.c
    215  dma_cap_set(DMA_PRIVATE, mcf_edma->dma_dev.cap_mask);    in mcf_edma_probe()
    216  dma_cap_set(DMA_SLAVE, mcf_edma->dma_dev.cap_mask);    in mcf_edma_probe()
    217  dma_cap_set(DMA_CYCLIC, mcf_edma->dma_dev.cap_mask);    in mcf_edma_probe()

D | xgene-dma.c
    1489  dma_cap_zero(dma_dev->cap_mask);    in xgene_dma_set_caps()
    1504  dma_cap_set(DMA_PQ, dma_dev->cap_mask);    in xgene_dma_set_caps()
    1505  dma_cap_set(DMA_XOR, dma_dev->cap_mask);    in xgene_dma_set_caps()
    1508  dma_cap_set(DMA_XOR, dma_dev->cap_mask);    in xgene_dma_set_caps()
    1518  if (dma_has_cap(DMA_XOR, dma_dev->cap_mask)) {    in xgene_dma_set_caps()
    1524  if (dma_has_cap(DMA_PQ, dma_dev->cap_mask)) {    in xgene_dma_set_caps()
    1568  dma_has_cap(DMA_XOR, dma_dev->cap_mask) ? "XOR " : "",    in xgene_dma_async_register()
    1569  dma_has_cap(DMA_PQ, dma_dev->cap_mask) ? "PQ " : "");    in xgene_dma_async_register()

D | mv_xor_v2.c
    814  dma_cap_zero(dma_dev->cap_mask);    in mv_xor_v2_probe()
    815  dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask);    in mv_xor_v2_probe()
    816  dma_cap_set(DMA_XOR, dma_dev->cap_mask);    in mv_xor_v2_probe()
    817  dma_cap_set(DMA_INTERRUPT, dma_dev->cap_mask);    in mv_xor_v2_probe()

D | dmatest.c
    844  dev->cap_mask) &&    in dmatest_func()
    1023  if (dma_has_cap(DMA_COMPLETION_NO_ORDER, dma_dev->cap_mask) &&    in dmatest_add_channel()
    1029  if (dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask)) {    in dmatest_add_channel()
    1036  if (dma_has_cap(DMA_MEMSET, dma_dev->cap_mask)) {    in dmatest_add_channel()
    1043  if (dma_has_cap(DMA_XOR, dma_dev->cap_mask)) {    in dmatest_add_channel()
    1047  if (dma_has_cap(DMA_PQ, dma_dev->cap_mask)) {    in dmatest_add_channel()

D | fsl-edma-main.c
    572  dma_cap_set(DMA_PRIVATE, fsl_edma->dma_dev.cap_mask);    in fsl_edma_probe()
    573  dma_cap_set(DMA_SLAVE, fsl_edma->dma_dev.cap_mask);    in fsl_edma_probe()
    574  dma_cap_set(DMA_CYCLIC, fsl_edma->dma_dev.cap_mask);    in fsl_edma_probe()
    575  dma_cap_set(DMA_MEMCPY, fsl_edma->dma_dev.cap_mask);    in fsl_edma_probe()

D | ep93xx_dma.c
    1360  dma_cap_zero(dma_dev->cap_mask);    in ep93xx_dma_probe()
    1361  dma_cap_set(DMA_SLAVE, dma_dev->cap_mask);    in ep93xx_dma_probe()
    1362  dma_cap_set(DMA_CYCLIC, dma_dev->cap_mask);    in ep93xx_dma_probe()
    1378  dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask);    in ep93xx_dma_probe()
    1386  dma_cap_set(DMA_PRIVATE, dma_dev->cap_mask);    in ep93xx_dma_probe()

D | sun4i-dma.c
    1172  dma_cap_zero(priv->slave.cap_mask);    in sun4i_dma_probe()
    1173  dma_cap_set(DMA_PRIVATE, priv->slave.cap_mask);    in sun4i_dma_probe()
    1174  dma_cap_set(DMA_MEMCPY, priv->slave.cap_mask);    in sun4i_dma_probe()
    1175  dma_cap_set(DMA_CYCLIC, priv->slave.cap_mask);    in sun4i_dma_probe()
    1176  dma_cap_set(DMA_SLAVE, priv->slave.cap_mask);    in sun4i_dma_probe()

D | img-mdc-dma.c
    912  dma_cap_zero(mdma->dma_dev.cap_mask);    in mdc_dma_probe()
    913  dma_cap_set(DMA_SLAVE, mdma->dma_dev.cap_mask);    in mdc_dma_probe()
    914  dma_cap_set(DMA_PRIVATE, mdma->dma_dev.cap_mask);    in mdc_dma_probe()
    915  dma_cap_set(DMA_CYCLIC, mdma->dma_dev.cap_mask);    in mdc_dma_probe()
    916  dma_cap_set(DMA_MEMCPY, mdma->dma_dev.cap_mask);    in mdc_dma_probe()

D | moxart-dma.c
    586  dma_cap_zero(mdc->dma_slave.cap_mask);    in moxart_probe()
    587  dma_cap_set(DMA_SLAVE, mdc->dma_slave.cap_mask);    in moxart_probe()
    588  dma_cap_set(DMA_PRIVATE, mdc->dma_slave.cap_mask);    in moxart_probe()

D | uniphier-xdmac.c
    507  dma_cap_zero(ddev->cap_mask);    in uniphier_xdmac_probe()
    508  dma_cap_set(DMA_MEMCPY, ddev->cap_mask);    in uniphier_xdmac_probe()
    509  dma_cap_set(DMA_SLAVE, ddev->cap_mask);    in uniphier_xdmac_probe()

D | mmp_tdma.c
    615  dma_cap_mask_t mask = tdev->device.cap_mask;    in mmp_tdma_xlate()
    693  dma_cap_set(DMA_SLAVE, tdev->device.cap_mask);    in mmp_tdma_probe()
    694  dma_cap_set(DMA_CYCLIC, tdev->device.cap_mask);    in mmp_tdma_probe()

D | ste_dma40.c
    1985  dma_cap_mask_t cap = d40c->chan.device->cap_mask;    in d40_config_memcpy()
    2839  if (dma_has_cap(DMA_SLAVE, dev->cap_mask)) {    in d40_ops_init()
    2844  if (dma_has_cap(DMA_MEMCPY, dev->cap_mask)) {    in d40_ops_init()
    2854  if (dma_has_cap(DMA_CYCLIC, dev->cap_mask))    in d40_ops_init()
    2877  dma_cap_zero(base->dma_slave.cap_mask);    in d40_dmaengine_init()
    2878  dma_cap_set(DMA_SLAVE, base->dma_slave.cap_mask);    in d40_dmaengine_init()
    2879  dma_cap_set(DMA_CYCLIC, base->dma_slave.cap_mask);    in d40_dmaengine_init()
    2893  dma_cap_zero(base->dma_memcpy.cap_mask);    in d40_dmaengine_init()
    2894  dma_cap_set(DMA_MEMCPY, base->dma_memcpy.cap_mask);    in d40_dmaengine_init()
    2909  dma_cap_zero(base->dma_both.cap_mask);    in d40_dmaengine_init()
    [all …]

D | bcm2835-dma.c
    910  dma_cap_set(DMA_SLAVE, od->ddev.cap_mask);    in bcm2835_dma_probe()
    911  dma_cap_set(DMA_PRIVATE, od->ddev.cap_mask);    in bcm2835_dma_probe()
    912  dma_cap_set(DMA_CYCLIC, od->ddev.cap_mask);    in bcm2835_dma_probe()
    913  dma_cap_set(DMA_MEMCPY, od->ddev.cap_mask);    in bcm2835_dma_probe()

/drivers/dma/ioat/

D | sysfs.c
    22  dma_has_cap(DMA_PQ, dma->cap_mask) ? " pq" : "",    in cap_show()
    23  dma_has_cap(DMA_PQ_VAL, dma->cap_mask) ? " pq_val" : "",    in cap_show()
    24  dma_has_cap(DMA_XOR, dma->cap_mask) ? " xor" : "",    in cap_show()
    25  dma_has_cap(DMA_XOR_VAL, dma->cap_mask) ? " xor_val" : "",    in cap_show()
    26  dma_has_cap(DMA_INTERRUPT, dma->cap_mask) ? " intr" : "");    in cap_show()

D | init.c
    511  dma_cap_set(DMA_MEMCPY, dma->cap_mask);    in ioat_probe()
    794  if (!dma_has_cap(DMA_XOR, dma->cap_mask))    in ioat_xor_val_self_test()
    902  if (!dma_has_cap(DMA_XOR_VAL, dma_chan->device->cap_mask))    in ioat_xor_val_self_test()
    1091  dma_cap_set(DMA_INTERRUPT, dma->cap_mask);    in ioat3_dma_probe()
    1107  dma_cap_set(DMA_XOR, dma->cap_mask);    in ioat3_dma_probe()
    1110  dma_cap_set(DMA_XOR_VAL, dma->cap_mask);    in ioat3_dma_probe()
    1118  dma_cap_set(DMA_PQ, dma->cap_mask);    in ioat3_dma_probe()
    1119  dma_cap_set(DMA_PQ_VAL, dma->cap_mask);    in ioat3_dma_probe()
    1129  dma_cap_set(DMA_XOR, dma->cap_mask);    in ioat3_dma_probe()
    1130  dma_cap_set(DMA_XOR_VAL, dma->cap_mask);    in ioat3_dma_probe()
    [all …]

/drivers/dma/idxd/

D | dma.c
    212  dma_cap_set(DMA_INTERRUPT, dma->cap_mask);    in idxd_register_dma_device()
    213  dma_cap_set(DMA_PRIVATE, dma->cap_mask);    in idxd_register_dma_device()
    214  dma_cap_set(DMA_COMPLETION_NO_ORDER, dma->cap_mask);    in idxd_register_dma_device()
    219  dma_cap_set(DMA_MEMCPY, dma->cap_mask);    in idxd_register_dma_device()

/drivers/infiniband/hw/hfi1/

D | driver.c
    50  module_param_cb(cap_mask, &cap_ops, &hfi1_cap_mask, S_IWUSR | S_IRUGO);
    51  MODULE_PARM_DESC(cap_mask, "Bit mask of enabled/disabled HW features");
    73  cap_mask = *cap_mask_ptr, value, diff,    in hfi1_caps_set() local
    83  diff = value ^ (cap_mask & ~HFI1_CAP_LOCKED_SMASK);    in hfi1_caps_set()
    95  cap_mask &= ~diff;    in hfi1_caps_set()
    97  cap_mask |= (value & diff);    in hfi1_caps_set()
    99  diff = (cap_mask & (HFI1_CAP_MUST_HAVE_KERN << HFI1_CAP_USER_SHIFT)) ^    in hfi1_caps_set()
    100  ((cap_mask & HFI1_CAP_MUST_HAVE_KERN) << HFI1_CAP_USER_SHIFT);    in hfi1_caps_set()
    101  cap_mask &= ~diff;    in hfi1_caps_set()
    103  *cap_mask_ptr = cap_mask;    in hfi1_caps_set()
    [all …]

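In hfi1, cap_mask is not a dmaengine mask but a writable module parameter, and hfi1_caps_set() sanitizes writes so that locked and kernel-mandatory bits cannot be cleared from userspace. A stripped-down sketch of a bitmask parameter with a custom setter in that spirit (the names and the EXAMPLE_LOCKED_BITS value are illustrative, not the hfi1 logic):

    #include <linux/kernel.h>
    #include <linux/module.h>
    #include <linux/moduleparam.h>
    #include <linux/sysfs.h>

    #define EXAMPLE_LOCKED_BITS     0x3UL   /* bits userspace may not change */

    static unsigned long example_cap_mask = EXAMPLE_LOCKED_BITS;

    static int example_caps_set(const char *val, const struct kernel_param *kp)
    {
            unsigned long *mask = kp->arg;
            unsigned long value;
            int ret;

            ret = kstrtoul(val, 0, &value);
            if (ret)
                    return ret;

            /* Keep the locked bits as they are, take the rest from userspace. */
            *mask = (*mask & EXAMPLE_LOCKED_BITS) | (value & ~EXAMPLE_LOCKED_BITS);
            return 0;
    }

    static int example_caps_get(char *buffer, const struct kernel_param *kp)
    {
            return sysfs_emit(buffer, "0x%lx\n", *(unsigned long *)kp->arg);
    }

    static const struct kernel_param_ops example_cap_ops = {
            .set = example_caps_set,
            .get = example_caps_get,
    };
    module_param_cb(cap_mask, &example_cap_ops, &example_cap_mask, 0644);
    MODULE_PARM_DESC(cap_mask, "Bit mask of enabled features (example)");
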
/drivers/net/ethernet/qlogic/qlcnic/

D | qlcnic_minidump.c
    295  hdr->drv_cap_mask = hdr->cap_mask;    in qlcnic_82xx_cache_tmpl_hdr_values()
    296  fw_dump->cap_mask = hdr->cap_mask;    in qlcnic_82xx_cache_tmpl_hdr_values()
    349  hdr->drv_cap_mask = hdr->cap_mask;    in qlcnic_83xx_cache_tmpl_hdr_values()
    350  fw_dump->cap_mask = hdr->cap_mask;    in qlcnic_83xx_cache_tmpl_hdr_values()
    1278  fw_dump->cap_mask);    in qlcnic_fw_cmd_get_minidump_temp()
    1322  if (i & fw_dump->cap_mask)    in qlcnic_dump_fw()
    1353  if (!(entry->hdr.mask & fw_dump->cap_mask)) {    in qlcnic_dump_fw()
    1450  fw_dump->cap_mask = 0x1f;    in qlcnic_83xx_get_minidump_template()

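Here cap_mask is a firmware minidump capture mask: qlcnic_dump_fw() captures a template entry only if the entry's level bit is present in fw_dump->cap_mask and skips the rest. A tiny self-contained illustration of that gating (the struct and field names are invented for the example, not the qlcnic layout):

    #include <linux/types.h>

    struct example_dump_entry {
            u32 mask;       /* capture level(s) this entry belongs to */
            u32 size;       /* bytes the entry would add to the dump */
            u32 skipped;
    };

    /* Sum the sizes of the entries selected by cap_mask and flag the
     * rest as skipped, the way the minidump walker gates its entries. */
    static u32 example_capture_size(struct example_dump_entry *e, int count,
                                    u32 cap_mask)
    {
            u32 total = 0;
            int i;

            for (i = 0; i < count; i++) {
                    if (!(e[i].mask & cap_mask)) {
                            e[i].skipped = 1;
                            continue;
                    }
                    total += e[i].size;
            }
            return total;
    }
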
/drivers/dma/ptdma/

D | ptdma-dmaengine.c
    360  dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask);    in pt_dmaengine_register()
    361  dma_cap_set(DMA_INTERRUPT, dma_dev->cap_mask);    in pt_dmaengine_register()
    367  dma_cap_set(DMA_PRIVATE, dma_dev->cap_mask);    in pt_dmaengine_register()

/drivers/dma/ppc4xx/

D | adma.c
    1400  if (dma_has_cap(cap, ref->chan->device->cap_mask)) {    in ppc440spe_async_tx_find_best_channel()
    3772  dma_cap_set(DMA_MEMCPY, adev->common.cap_mask);    in ppc440spe_adma_init_capabilities()
    3773  dma_cap_set(DMA_INTERRUPT, adev->common.cap_mask);    in ppc440spe_adma_init_capabilities()
    3774  dma_cap_set(DMA_PQ, adev->common.cap_mask);    in ppc440spe_adma_init_capabilities()
    3775  dma_cap_set(DMA_PQ_VAL, adev->common.cap_mask);    in ppc440spe_adma_init_capabilities()
    3776  dma_cap_set(DMA_XOR_VAL, adev->common.cap_mask);    in ppc440spe_adma_init_capabilities()
    3779  dma_cap_set(DMA_XOR, adev->common.cap_mask);    in ppc440spe_adma_init_capabilities()
    3780  dma_cap_set(DMA_PQ, adev->common.cap_mask);    in ppc440spe_adma_init_capabilities()
    3781  dma_cap_set(DMA_INTERRUPT, adev->common.cap_mask);    in ppc440spe_adma_init_capabilities()
    3782  adev->common.cap_mask = adev->common.cap_mask;    in ppc440spe_adma_init_capabilities()
    [all …]

/drivers/dma/dw-edma/

D | dw-edma-core.c
    785  dma_cap_zero(dma->cap_mask);    in dw_edma_channel_setup()
    786  dma_cap_set(DMA_SLAVE, dma->cap_mask);    in dw_edma_channel_setup()
    787  dma_cap_set(DMA_CYCLIC, dma->cap_mask);    in dw_edma_channel_setup()
    788  dma_cap_set(DMA_PRIVATE, dma->cap_mask);    in dw_edma_channel_setup()
    789  dma_cap_set(DMA_INTERLEAVE, dma->cap_mask);    in dw_edma_channel_setup()