Searched refs:xor_srcs (Results 1 – 4 of 4) sorted by relevance
/linux/drivers/dma/
  mv_xor.c
    in mv_chan_xor_self_test():
       876  struct page *xor_srcs[MV_XOR_NUM_SRC_TEST];    (local)
       889  xor_srcs[src_idx] = alloc_page(GFP_KERNEL);
       890  if (!xor_srcs[src_idx]) {
       892  __free_page(xor_srcs[src_idx]);
       900  __free_page(xor_srcs[src_idx]);
       906  u8 *ptr = page_address(xor_srcs[src_idx]);
       934  unmap->addr[i] = dma_map_page(dma_chan->device->dev, xor_srcs[i],
      1004  __free_page(xor_srcs[src_idx]);
/linux/Documentation/crypto/
  async-tx-api.rst
       162  static void run_xor_copy_xor(struct page **xor_srcs,
       176  tx = async_xor(xor_dest, xor_srcs, 0, NDISKS, xor_len, &submit);
       184  tx = async_xor(xor_dest, xor_srcs, 0, NDISKS, xor_len, &submit);
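The async-tx-api.rst hit above is the in-tree documentation example for this pattern: an xor_srcs page array handed to async_xor() twice, with a copy step in between. Below is a minimal sketch of that xor -> copy -> xor chain built on the async_tx helpers from include/linux/async_tx.h; the NDISKS value, the flag choices, the completion handling, and the addr_conv scribble buffer are illustrative assumptions here, not a verbatim copy of the documentation example.

    #include <linux/async_tx.h>
    #include <linux/completion.h>

    #define NDISKS 4	/* illustrative source count, not taken from the excerpt */

    /* Hypothetical callback: signal the waiter when the last descriptor runs. */
    static void xcx_done(void *param)
    {
    	complete(param);
    }

    /*
     * Sketch of the xor -> copy -> xor chain suggested by the excerpt above.
     * Assumes xor_dest is not itself one of the xor_srcs pages, hence
     * ASYNC_TX_XOR_ZERO_DST so the synchronous fallback zeroes the destination
     * before accumulating; if the destination doubled as xor_srcs[0],
     * ASYNC_TX_XOR_DROP_DST would be the flag to use instead.
     */
    static void run_xor_copy_xor(struct page **xor_srcs, struct page *xor_dest,
    			     size_t xor_len, struct page *copy_src,
    			     struct page *copy_dest, size_t copy_len)
    {
    	struct dma_async_tx_descriptor *tx;
    	struct async_submit_ctl submit;
    	addr_conv_t addr_conv[NDISKS];	/* scratch for dma address conversion */
    	DECLARE_COMPLETION_ONSTACK(cmp);

    	/* First XOR of the NDISKS sources into xor_dest. */
    	init_async_submit(&submit, ASYNC_TX_XOR_ZERO_DST, NULL, NULL, NULL,
    			  addr_conv);
    	tx = async_xor(xor_dest, xor_srcs, 0, NDISKS, xor_len, &submit);

    	/* The copy depends on the first XOR completing. */
    	init_async_submit(&submit, 0, tx, NULL, NULL, addr_conv);
    	tx = async_memcpy(copy_dest, copy_src, 0, 0, copy_len, &submit);

    	/* Final XOR: chain on the copy, ack it, and request a callback. */
    	init_async_submit(&submit, ASYNC_TX_XOR_ZERO_DST | ASYNC_TX_ACK, tx,
    			  xcx_done, &cmp, addr_conv);
    	tx = async_xor(xor_dest, xor_srcs, 0, NDISKS, xor_len, &submit);

    	async_tx_issue_pending_all();
    	wait_for_completion(&cmp);
    }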
/linux/drivers/md/
  raid5.c
    in ops_run_compute5():
      1551  struct page **xor_srcs = to_addr_page(percpu, 0);    (local)
      1571  xor_srcs[count++] = sh->dev[i].page;
      1580  tx = async_memcpy(xor_dest, xor_srcs[0], off_dest, off_srcs[0],
      1583  tx = async_xor_offs(xor_dest, off_dest, xor_srcs, off_srcs, count,
    in ops_run_prexor5():
      1848  struct page **xor_srcs = to_addr_page(percpu, 0);    (local)
      1855  struct page *xor_dest = xor_srcs[count++] = sh->dev[pd_idx].page;
      1870  xor_srcs[count++] = dev->orig_page;
      1873  xor_srcs[count++] = dev->page;
      1879  tx = async_xor_offs(xor_dest, off_dest, xor_srcs, off_srcs, count,
    in ops_run_reconstruct5():
      2031  struct page **xor_srcs;    (local)
    [all …]
  raid5-ppl.c
    in ppl_xor():
       709  struct page *xor_srcs[] = { page1, page2 };    (local)
       713  tx = async_xor(page1, xor_srcs, 0, 2, size, &submit);
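The two md/ hits show the same array feeding both variants of the call: raid5.c builds xor_srcs from stripe pages and uses the offset-aware async_xor_offs() with a parallel off_srcs array, while raid5-ppl.c XORs just two pages in place. Below is a minimal sketch of that two-page, in-place case modelled on the ppl_xor() lines; the init_async_submit() flags and the synchronous wait are not part of the excerpt and are assumptions here (ASYNC_TX_XOR_DROP_DST is what the async_tx API requires when the destination is also the first entry in the source list).

    #include <linux/async_tx.h>

    /*
     * Sketch of an in-place two-source XOR (page1 ^= page2), modelled on the
     * raid5-ppl.c hit above.  The destination is also xor_srcs[0], so
     * ASYNC_TX_XOR_DROP_DST is required; ASYNC_TX_ACK plus the quiesce make
     * the call synchronous for a simple caller (both are assumptions here).
     */
    static void xor_two_pages(struct page *page1, struct page *page2, int size)
    {
    	struct async_submit_ctl submit;
    	struct dma_async_tx_descriptor *tx;
    	struct page *xor_srcs[] = { page1, page2 };

    	init_async_submit(&submit, ASYNC_TX_ACK | ASYNC_TX_XOR_DROP_DST,
    			  NULL, NULL, NULL, NULL);
    	tx = async_xor(page1, xor_srcs, 0, 2, size, &submit);

    	/* Wait for the descriptor (or the synchronous fallback) to finish. */
    	async_tx_quiesce(&tx);
    }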