Searched refs:xor_srcs (Results 1 – 5 of 5) sorted by relevance
/drivers/dma/ioat/
D  init.c  (all hits in ioat_xor_val_self_test())
    771  struct page *xor_srcs[IOAT_NUM_SRC_TEST];    (local)
    794  xor_srcs[src_idx] = alloc_page(GFP_KERNEL);
    795  if (!xor_srcs[src_idx]) {
    797  __free_page(xor_srcs[src_idx]);
    805  __free_page(xor_srcs[src_idx]);
    811  u8 *ptr = page_address(xor_srcs[src_idx]);
    842  dma_srcs[i] = dma_map_page(dev, xor_srcs[i], 0, PAGE_SIZE,
    905  xor_val_srcs[i] = xor_srcs[i];
   1027  __free_page(xor_srcs[src_idx]);
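
All of the ioat hits sit in one hardware self-test: a page is allocated per XOR source, stamped with a known byte pattern, mapped for the engine, and freed again on the error and cleanup paths (lines 794 through 1027 above). Below is a condensed, hypothetical sketch of that setup pattern, not the driver's actual code: the helper name, the num_srcs parameter and the unwind structure are assumptions for illustration, and descriptor submission plus result validation are left out entirely.

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/mm.h>
#include <linux/string.h>

/*
 * Sketch only: allocate one page per XOR source, fill each with a
 * distinct byte value so a wrong XOR result is easy to spot, then map
 * the pages for the DMA engine. Mirrors the shape of the hits above,
 * not the actual ioat code.
 */
static int xor_test_setup_srcs(struct device *dev, struct page **xor_srcs,
                               dma_addr_t *dma_srcs, int num_srcs)
{
        int i;

        for (i = 0; i < num_srcs; i++) {
                u8 *ptr;

                xor_srcs[i] = alloc_page(GFP_KERNEL);
                if (!xor_srcs[i])
                        goto err_free;

                /* distinct fill value per source page */
                ptr = page_address(xor_srcs[i]);
                memset(ptr, 1 << i, PAGE_SIZE);
        }

        for (i = 0; i < num_srcs; i++) {
                dma_srcs[i] = dma_map_page(dev, xor_srcs[i], 0, PAGE_SIZE,
                                           DMA_TO_DEVICE);
                if (dma_mapping_error(dev, dma_srcs[i]))
                        goto err_unmap;
        }
        return 0;

err_unmap:
        while (--i >= 0)
                dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE, DMA_TO_DEVICE);
        i = num_srcs;
err_free:
        while (--i >= 0)
                __free_page(xor_srcs[i]);
        return -ENOMEM;
}

The iop-adma and mv_xor results below follow the same allocate/fill/map/free shape, differing mainly in how the mappings are tracked.
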
/drivers/dma/
D  iop-adma.c  (all hits in iop_adma_xor_val_self_test())
    921  struct page *xor_srcs[IOP_ADMA_NUM_SRC_TEST];    (local)
    937  xor_srcs[src_idx] = alloc_page(GFP_KERNEL);
    938  if (!xor_srcs[src_idx]) {
    940  __free_page(xor_srcs[src_idx]);
    948  __free_page(xor_srcs[src_idx]);
    954  u8 *ptr = page_address(xor_srcs[src_idx]);
    979  dma_srcs[i] = dma_map_page(dma_chan->device->dev, xor_srcs[i],
   1018  zero_sum_srcs[i] = xor_srcs[i];
   1084  __free_page(xor_srcs[src_idx]);
D  mv_xor.c  (all hits in mv_chan_xor_self_test())
    884  struct page *xor_srcs[MV_XOR_NUM_SRC_TEST];    (local)
    897  xor_srcs[src_idx] = alloc_page(GFP_KERNEL);
    898  if (!xor_srcs[src_idx]) {
    900  __free_page(xor_srcs[src_idx]);
    908  __free_page(xor_srcs[src_idx]);
    914  u8 *ptr = page_address(xor_srcs[src_idx]);
    942  unmap->addr[i] = dma_map_page(dma_chan->device->dev, xor_srcs[i],
   1012  __free_page(xor_srcs[src_idx]);
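
mv_xor's variant of the same self-test differs in one detail visible at line 942: the mappings are recorded in a struct dmaengine_unmap_data rather than a plain dma_addr_t array, so a single dmaengine_unmap_put() releases every mapping at once. The following is a hypothetical sketch of that bookkeeping, assuming the generic dmaengine_get_unmap_data()/dmaengine_unmap_put() helpers; the wrapper name and its parameters are made up for illustration, and the XOR descriptor itself is not shown.

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/gfp.h>
#include <linux/mm.h>

/*
 * Sketch only: map num_srcs source pages plus one destination page and
 * record them in an unmap descriptor. On any failure, dmaengine_unmap_put()
 * unmaps whatever was counted so far and frees the descriptor.
 */
static struct dmaengine_unmap_data *
xor_test_map_srcs(struct dma_chan *chan, struct page **xor_srcs,
                  struct page *dest, int num_srcs)
{
        struct device *dev = chan->device->dev;
        struct dmaengine_unmap_data *unmap;
        int i;

        /* one extra slot for the destination page */
        unmap = dmaengine_get_unmap_data(dev, num_srcs + 1, GFP_KERNEL);
        if (!unmap)
                return NULL;
        unmap->len = PAGE_SIZE;

        for (i = 0; i < num_srcs; i++) {
                unmap->addr[i] = dma_map_page(dev, xor_srcs[i], 0, PAGE_SIZE,
                                              DMA_TO_DEVICE);
                if (dma_mapping_error(dev, unmap->addr[i]))
                        goto err;
                unmap->to_cnt++;
        }

        unmap->addr[num_srcs] = dma_map_page(dev, dest, 0, PAGE_SIZE,
                                             DMA_FROM_DEVICE);
        if (dma_mapping_error(dev, unmap->addr[num_srcs]))
                goto err;
        unmap->from_cnt++;

        return unmap;

err:
        /* only the mappings counted in to_cnt/from_cnt get unmapped */
        dmaengine_unmap_put(unmap);
        return NULL;
}
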
/drivers/md/
D  raid5.c
   1417  struct page **xor_srcs = to_addr_page(percpu, 0);    (local, in ops_run_compute5())
   1434  xor_srcs[count++] = sh->dev[i].page;    (in ops_run_compute5())
   1441  tx = async_memcpy(xor_dest, xor_srcs[0], 0, 0, STRIPE_SIZE, &submit);    (in ops_run_compute5())
   1443  tx = async_xor(xor_dest, xor_srcs, 0, count, STRIPE_SIZE, &submit);    (in ops_run_compute5())
   1681  struct page **xor_srcs = to_addr_page(percpu, 0);    (local, in ops_run_prexor5())
   1686  struct page *xor_dest = xor_srcs[count++] = sh->dev[pd_idx].page;    (in ops_run_prexor5())
   1696  xor_srcs[count++] = dev->orig_page;    (in ops_run_prexor5())
   1698  xor_srcs[count++] = dev->page;    (in ops_run_prexor5())
   1703  tx = async_xor(xor_dest, xor_srcs, 0, count, STRIPE_SIZE, &submit);    (in ops_run_prexor5())
   1851  struct page **xor_srcs;    (local, in ops_run_reconstruct5())
   [all …]
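
The raid5.c hits show the consumer side of the API: ops_run_compute5() and ops_run_prexor5() collect stripe pages into an xor_srcs array taken from the per-cpu scribble buffer (to_addr_page()) and hand them to async_xor(), falling back to async_memcpy() when only a single source remains (line 1441). A minimal sketch of that call pattern follows; the wrapper name, its parameters and the flag choice mirror what the hits suggest but are illustrative, not the md code itself.

#include <linux/async_tx.h>

/*
 * Sketch only: XOR "count" source pages into xor_dest. With
 * ASYNC_TX_XOR_ZERO_DST the destination is treated as zero rather than
 * being xored into, and a single source degenerates to a plain copy.
 * "done" and "ctx" are placeholder names for the completion callback.
 */
static struct dma_async_tx_descriptor *
xor_pages_into(struct page *xor_dest, struct page **xor_srcs, int count,
               size_t len, dma_async_tx_callback done, void *ctx,
               addr_conv_t *scribble)
{
        struct async_submit_ctl submit;

        init_async_submit(&submit, ASYNC_TX_FENCE | ASYNC_TX_XOR_ZERO_DST,
                          NULL, done, ctx, scribble);

        if (count == 1)
                return async_memcpy(xor_dest, xor_srcs[0], 0, 0, len, &submit);

        return async_xor(xor_dest, xor_srcs, 0, count, len, &submit);
}

The raid5-ppl.c hits below use the same async_xor() entry point in its simplest form, with a fixed two-element source array.
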
D  raid5-ppl.c  (all hits in ppl_xor())
    585  struct page *xor_srcs[] = { page1, page2 };    (local)
    589  tx = async_xor(page1, xor_srcs, 0, 2, size, &submit);