Lines Matching refs:tf

233 struct dma_test_frame *tf = container_of(frame, typeof(*tf), frame); in dma_test_rx_callback() local
234 struct dma_test *dt = tf->dma_test; in dma_test_rx_callback()
237 dma_unmap_single(dma_dev, tf->frame.buffer_phy, DMA_TEST_FRAME_SIZE, in dma_test_rx_callback()
239 kfree(tf->data); in dma_test_rx_callback()
242 kfree(tf); in dma_test_rx_callback()
250 if (tf->frame.flags & RING_DESC_CRC_ERROR) in dma_test_rx_callback()
252 if (tf->frame.flags & RING_DESC_BUFFER_OVERRUN) in dma_test_rx_callback()
255 kfree(tf); in dma_test_rx_callback()
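
The matched lines above are only the statements in dma_test_rx_callback() that touch tf. A minimal sketch of how they likely fit together is below; the function signature, the way dma_dev is obtained, the handling of a canceled frame, and the counter names (crc_errors, buffer_overflow_errors) are assumptions filled in around the matched lines, not something the listing shows.

static void dma_test_rx_callback(struct tb_ring *ring, struct ring_frame *frame,
				 bool canceled)
{
	struct dma_test_frame *tf = container_of(frame, typeof(*tf), frame);
	struct dma_test *dt = tf->dma_test;
	struct device *dma_dev = tb_ring_dma_device(ring);

	/* The buffer is back in CPU hands: unmap it and free the data copy. */
	dma_unmap_single(dma_dev, tf->frame.buffer_phy, DMA_TEST_FRAME_SIZE,
			 DMA_FROM_DEVICE);
	kfree(tf->data);

	if (canceled) {
		kfree(tf);
		return;
	}

	/* Account for errors reported in the ring descriptor flags. */
	if (tf->frame.flags & RING_DESC_CRC_ERROR)
		dt->crc_errors++;
	if (tf->frame.flags & RING_DESC_BUFFER_OVERRUN)
		dt->buffer_overflow_errors++;

	kfree(tf);
}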
267 struct dma_test_frame *tf; in dma_test_submit_rx() local
270 tf = kzalloc(sizeof(*tf), GFP_KERNEL); in dma_test_submit_rx()
271 if (!tf) in dma_test_submit_rx()
274 tf->data = kzalloc(DMA_TEST_FRAME_SIZE, GFP_KERNEL); in dma_test_submit_rx()
275 if (!tf->data) { in dma_test_submit_rx()
276 kfree(tf); in dma_test_submit_rx()
280 dma_addr = dma_map_single(dma_dev, tf->data, DMA_TEST_FRAME_SIZE, in dma_test_submit_rx()
283 kfree(tf->data); in dma_test_submit_rx()
284 kfree(tf); in dma_test_submit_rx()
288 tf->frame.buffer_phy = dma_addr; in dma_test_submit_rx()
289 tf->frame.callback = dma_test_rx_callback; in dma_test_submit_rx()
290 tf->dma_test = dt; in dma_test_submit_rx()
291 INIT_LIST_HEAD(&tf->frame.list); in dma_test_submit_rx()
293 tb_ring_rx(dt->rx_ring, &tf->frame); in dma_test_submit_rx()
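
Taken together, the dma_test_submit_rx() fragments describe the usual streaming-DMA receive pattern: allocate a frame wrapper and a data buffer, map the buffer in the device-to-memory direction, fill in the ring_frame fields, and hand the frame to the RX ring. A hedged reconstruction follows; the npackets loop, the dma_mapping_error() check, the -ENOMEM returns, and the use of tb_ring_dma_device() are assumptions not visible in the matched lines.

static int dma_test_submit_rx(struct dma_test *dt, size_t npackets)
{
	struct device *dma_dev = tb_ring_dma_device(dt->rx_ring);
	int i;

	for (i = 0; i < npackets; i++) {
		struct dma_test_frame *tf;
		dma_addr_t dma_addr;

		tf = kzalloc(sizeof(*tf), GFP_KERNEL);
		if (!tf)
			return -ENOMEM;

		tf->data = kzalloc(DMA_TEST_FRAME_SIZE, GFP_KERNEL);
		if (!tf->data) {
			kfree(tf);
			return -ENOMEM;
		}

		/* Map the receive buffer for device-to-memory DMA. */
		dma_addr = dma_map_single(dma_dev, tf->data, DMA_TEST_FRAME_SIZE,
					  DMA_FROM_DEVICE);
		if (dma_mapping_error(dma_dev, dma_addr)) {
			kfree(tf->data);
			kfree(tf);
			return -ENOMEM;
		}

		tf->frame.buffer_phy = dma_addr;
		tf->frame.callback = dma_test_rx_callback;
		tf->dma_test = dt;
		INIT_LIST_HEAD(&tf->frame.list);

		/* Ownership of tf passes to the ring until the callback runs. */
		tb_ring_rx(dt->rx_ring, &tf->frame);
	}

	return 0;
}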
302 struct dma_test_frame *tf = container_of(frame, typeof(*tf), frame); in dma_test_tx_callback() local
303 struct dma_test *dt = tf->dma_test; in dma_test_tx_callback()
306 dma_unmap_single(dma_dev, tf->frame.buffer_phy, DMA_TEST_FRAME_SIZE, in dma_test_tx_callback()
308 kfree(tf->data); in dma_test_tx_callback()
309 kfree(tf); in dma_test_tx_callback()
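
The TX completion handler mirrors the RX one but in the memory-to-device direction: once the ring signals completion, the buffer is unmapped and both the data copy and the frame wrapper are freed. A short sketch, with the signature and the way dma_dev is obtained assumed:

static void dma_test_tx_callback(struct tb_ring *ring, struct ring_frame *frame,
				 bool canceled)
{
	struct dma_test_frame *tf = container_of(frame, typeof(*tf), frame);
	struct dma_test *dt = tf->dma_test;
	struct device *dma_dev = tb_ring_dma_device(dt->tx_ring);

	/* The device is done with the buffer; undo the mapping and free it. */
	dma_unmap_single(dma_dev, tf->frame.buffer_phy, DMA_TEST_FRAME_SIZE,
			 DMA_TO_DEVICE);
	kfree(tf->data);
	kfree(tf);
}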
318 struct dma_test_frame *tf; in dma_test_submit_tx() local
321 tf = kzalloc(sizeof(*tf), GFP_KERNEL); in dma_test_submit_tx()
322 if (!tf) in dma_test_submit_tx()
325 tf->frame.size = 0; /* means 4096 */ in dma_test_submit_tx()
326 tf->dma_test = dt; in dma_test_submit_tx()
328 tf->data = kmemdup(dma_test_pattern, DMA_TEST_FRAME_SIZE, GFP_KERNEL); in dma_test_submit_tx()
329 if (!tf->data) { in dma_test_submit_tx()
330 kfree(tf); in dma_test_submit_tx()
334 dma_addr = dma_map_single(dma_dev, tf->data, DMA_TEST_FRAME_SIZE, in dma_test_submit_tx()
337 kfree(tf->data); in dma_test_submit_tx()
338 kfree(tf); in dma_test_submit_tx()
342 tf->frame.buffer_phy = dma_addr; in dma_test_submit_tx()
343 tf->frame.callback = dma_test_tx_callback; in dma_test_submit_tx()
344 tf->frame.sof = DMA_TEST_PDF_FRAME_START; in dma_test_submit_tx()
345 tf->frame.eof = DMA_TEST_PDF_FRAME_END; in dma_test_submit_tx()
346 INIT_LIST_HEAD(&tf->frame.list); in dma_test_submit_tx()
352 tb_ring_tx(dt->tx_ring, &tf->frame); in dma_test_submit_tx()
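
The dma_test_submit_tx() fragments show the transmit side of the same pattern: duplicate the fixed test pattern with kmemdup(), map it for memory-to-device DMA, mark the frame with the start/end PDF values and a size of 0 (meaning the full 4096-byte frame), then enqueue it on the TX ring. The sketch below fills in an assumed npackets loop, error returns, and mapping-error check around the matched lines.

static int dma_test_submit_tx(struct dma_test *dt, size_t npackets)
{
	struct device *dma_dev = tb_ring_dma_device(dt->tx_ring);
	int i;

	for (i = 0; i < npackets; i++) {
		struct dma_test_frame *tf;
		dma_addr_t dma_addr;

		tf = kzalloc(sizeof(*tf), GFP_KERNEL);
		if (!tf)
			return -ENOMEM;

		tf->frame.size = 0; /* means 4096 */
		tf->dma_test = dt;

		/* Transmit a private copy of the fixed test pattern. */
		tf->data = kmemdup(dma_test_pattern, DMA_TEST_FRAME_SIZE, GFP_KERNEL);
		if (!tf->data) {
			kfree(tf);
			return -ENOMEM;
		}

		dma_addr = dma_map_single(dma_dev, tf->data, DMA_TEST_FRAME_SIZE,
					  DMA_TO_DEVICE);
		if (dma_mapping_error(dma_dev, dma_addr)) {
			kfree(tf->data);
			kfree(tf);
			return -ENOMEM;
		}

		tf->frame.buffer_phy = dma_addr;
		tf->frame.callback = dma_test_tx_callback;
		tf->frame.sof = DMA_TEST_PDF_FRAME_START;
		tf->frame.eof = DMA_TEST_PDF_FRAME_END;
		INIT_LIST_HEAD(&tf->frame.list);

		tb_ring_tx(dt->tx_ring, &tf->frame);
	}

	return 0;
}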