Lines matching refs: swdesc
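
All hits below are in the STM32 DMA3 dmaengine driver (apparently drivers/dma/stm32/stm32-dma3.c). The leading number on each line is the source line in that file; the trailing member/local/argument tags are the cross-referencer's classification of the swdesc identifier on that line.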

291 struct stm32_dma3_swdesc *swdesc; member
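
The member at line 291 is the channel's pointer to the in-flight descriptor (used as chan->swdesc throughout the listing). From the fields referenced here alone, the software descriptor can be reconstructed roughly as below; this is a sketch, and anything not referenced in this listing (field order, annotations) is an assumption.

    /* Sketch reconstructed from the references in this listing; field
     * order and anything not referenced here are assumptions. */
    struct stm32_dma3_lli {
        struct stm32_dma3_hwdesc *hwdesc;   /* CPU pointer to the linked-list item */
        dma_addr_t hwdesc_addr;             /* bus address used for CLBAR/CLLR */
    };

    struct stm32_dma3_swdesc {
        struct virt_dma_desc vdesc;         /* virt-dma bookkeeping (cookie, list node) */
        u32 ccr;                            /* cached CCR image written at channel start */
        bool cyclic;                        /* last CLLR wraps back to lli[0] */
        u32 lli_size;                       /* number of entries in lli[] */
        struct stm32_dma3_lli lli[];        /* flexible array, sized via struct_size() */
    };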
361 struct stm32_dma3_swdesc *swdesc) in stm32_dma3_chan_dump_hwdesc() argument
366 for (i = 0; i < swdesc->lli_size; i++) { in stm32_dma3_chan_dump_hwdesc()
367 hwdesc = swdesc->lli[i].hwdesc; in stm32_dma3_chan_dump_hwdesc()
370 dev_dbg(chan2dev(chan), "[%d]@%pad\n", i, &swdesc->lli[i].hwdesc_addr); in stm32_dma3_chan_dump_hwdesc()
379 if (swdesc->cyclic) { in stm32_dma3_chan_dump_hwdesc()
381 dev_dbg(chan2dev(chan), "-->[0]@%pad\n", &swdesc->lli[0].hwdesc_addr); in stm32_dma3_chan_dump_hwdesc()
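
stm32_dma3_chan_dump_hwdesc() (lines 361-381) is a debug walk over the LLI array; for a cyclic descriptor it also logs the wrap-around link to entry 0. A minimal sketch of the loop, assuming the struct above (the dev_dbg() dump of the individual hwdesc registers inside the loop is elided):

    static void dump_hwdesc_sketch(struct stm32_dma3_chan *chan,
                                   struct stm32_dma3_swdesc *swdesc)
    {
        int i;

        for (i = 0; i < swdesc->lli_size; i++)
            dev_dbg(chan2dev(chan), "[%d]@%pad\n", i, &swdesc->lli[i].hwdesc_addr);

        if (swdesc->cyclic)     /* chain wraps: last CLLR points at lli[0] */
            dev_dbg(chan2dev(chan), "-->[0]@%pad\n", &swdesc->lli[0].hwdesc_addr);
    }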
390 struct stm32_dma3_swdesc *swdesc; in stm32_dma3_chan_desc_alloc() local
403 swdesc = kzalloc(struct_size(swdesc, lli, count), GFP_NOWAIT); in stm32_dma3_chan_desc_alloc()
404 if (!swdesc) in stm32_dma3_chan_desc_alloc()
406 swdesc->lli_size = count; in stm32_dma3_chan_desc_alloc()
409 swdesc->lli[i].hwdesc = dma_pool_zalloc(chan->lli_pool, GFP_NOWAIT, in stm32_dma3_chan_desc_alloc()
410 &swdesc->lli[i].hwdesc_addr); in stm32_dma3_chan_desc_alloc()
411 if (!swdesc->lli[i].hwdesc) in stm32_dma3_chan_desc_alloc()
414 swdesc->ccr = 0; in stm32_dma3_chan_desc_alloc()
417 writel_relaxed(swdesc->lli[0].hwdesc_addr & CLBAR_LBA, in stm32_dma3_chan_desc_alloc()
421 swdesc->ccr &= ~CCR_LAP; in stm32_dma3_chan_desc_alloc()
423 return swdesc; in stm32_dma3_chan_desc_alloc()
428 dma_pool_free(chan->lli_pool, swdesc->lli[i].hwdesc, swdesc->lli[i].hwdesc_addr); in stm32_dma3_chan_desc_alloc()
429 kfree(swdesc); in stm32_dma3_chan_desc_alloc()
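
stm32_dma3_chan_desc_alloc() (lines 390-429) is a two-level allocation: one kzalloc() sized with struct_size() for the header plus count LLI slots, then one dma_pool_zalloc() per hardware item, which also yields the bus address later used for CLLR chaining. On failure it frees only the pool entries already allocated. A sketch of the pattern (the CLBAR write and CCR_LAP handling at lines 417-421 are omitted, and the unwind-loop shape and label name are assumptions):

    static struct stm32_dma3_swdesc *
    desc_alloc_sketch(struct stm32_dma3_chan *chan, u32 count)
    {
        struct stm32_dma3_swdesc *swdesc;
        u32 i;

        /* One allocation covers the header and count LLI slots. */
        swdesc = kzalloc(struct_size(swdesc, lli, count), GFP_NOWAIT);
        if (!swdesc)
            return NULL;
        swdesc->lli_size = count;

        /* Each hardware item comes from the per-channel DMA pool, which
         * also returns the bus address needed for CLLR chaining. */
        for (i = 0; i < count; i++) {
            swdesc->lli[i].hwdesc = dma_pool_zalloc(chan->lli_pool, GFP_NOWAIT,
                                                    &swdesc->lli[i].hwdesc_addr);
            if (!swdesc->lli[i].hwdesc)
                goto err_pool_free;
        }
        swdesc->ccr = 0;

        return swdesc;

    err_pool_free:
        /* Free only the pool entries that were actually allocated. */
        while (i--)
            dma_pool_free(chan->lli_pool, swdesc->lli[i].hwdesc,
                          swdesc->lli[i].hwdesc_addr);
        kfree(swdesc);
        return NULL;
    }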
435 struct stm32_dma3_swdesc *swdesc) in stm32_dma3_chan_desc_free() argument
439 for (i = 0; i < swdesc->lli_size; i++) in stm32_dma3_chan_desc_free()
440 dma_pool_free(chan->lli_pool, swdesc->lli[i].hwdesc, swdesc->lli[i].hwdesc_addr); in stm32_dma3_chan_desc_free()
442 kfree(swdesc); in stm32_dma3_chan_desc_free()
447 struct stm32_dma3_swdesc *swdesc = to_stm32_dma3_swdesc(vdesc); in stm32_dma3_chan_vdesc_free() local
450 stm32_dma3_chan_desc_free(chan, swdesc); in stm32_dma3_chan_vdesc_free()
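
stm32_dma3_chan_desc_free() (lines 435-442) is the exact mirror, and stm32_dma3_chan_vdesc_free() (lines 447-450) is the virt-dma free callback that recovers the swdesc from its embedded vdesc. A combined sketch; deriving chan via to_stm32_dma3_chan(vdesc->tx.chan) is an assumption:

    static void vdesc_free_sketch(struct virt_dma_desc *vdesc)
    {
        struct stm32_dma3_swdesc *swdesc = to_stm32_dma3_swdesc(vdesc);
        struct stm32_dma3_chan *chan = to_stm32_dma3_chan(vdesc->tx.chan);
        u32 i;

        /* Return every hardware item to the pool, then free the header. */
        for (i = 0; i < swdesc->lli_size; i++)
            dma_pool_free(chan->lli_pool, swdesc->lli[i].hwdesc,
                          swdesc->lli[i].hwdesc_addr);
        kfree(swdesc);
    }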
485 struct stm32_dma3_swdesc *swdesc, in stm32_dma3_chan_prep_hwdesc() argument
493 hwdesc = swdesc->lli[curr].hwdesc; in stm32_dma3_chan_prep_hwdesc()
502 next_lli = swdesc->lli[0].hwdesc_addr; in stm32_dma3_chan_prep_hwdesc()
506 next_lli = swdesc->lli[next].hwdesc_addr; in stm32_dma3_chan_prep_hwdesc()
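
stm32_dma3_chan_prep_hwdesc() (lines 485-506) fills one hardware item and picks its CLLR link target: the last item of a cyclic chain wraps to lli[0], any other item points at the following entry, and a terminal item leaves the link empty. A sketch of just that selection; the is_last flag and the curr/next index pair are assumed names inferred from lines 493-506:

    dma_addr_t next_lli = 0;
    struct stm32_dma3_hwdesc *hwdesc = swdesc->lli[curr].hwdesc;

    if (is_last) {
        if (swdesc->cyclic)
            next_lli = swdesc->lli[0].hwdesc_addr;   /* wrap for cyclic */
        /* else: chain terminates, link stays empty */
    } else {
        next_lli = swdesc->lli[next].hwdesc_addr;    /* chain to next item */
    }
    /* next_lli is then folded into hwdesc->cllr together with the
     * update-enable bits (not shown in this listing). */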
745 chan->swdesc = NULL; in stm32_dma3_chan_start()
750 chan->swdesc = to_stm32_dma3_swdesc(vdesc); in stm32_dma3_chan_start()
751 hwdesc = chan->swdesc->lli[0].hwdesc; in stm32_dma3_chan_start()
753 stm32_dma3_chan_dump_hwdesc(chan, chan->swdesc); in stm32_dma3_chan_start()
755 writel_relaxed(chan->swdesc->ccr, ddata->base + STM32_DMA3_CCR(id)); in stm32_dma3_chan_start()
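
In stm32_dma3_chan_start() (lines 745-755) the channel pops the next virtual descriptor, caches it as chan->swdesc, dumps it, and programs the cached CCR. A sketch of the sequence, assuming virt-dma's vchan_next_desc() fetches the descriptor (locking, the CCR_EN write and the other channel registers are elided):

    struct virt_dma_desc *vdesc = vchan_next_desc(&chan->vchan);

    if (!vdesc) {
        chan->swdesc = NULL;                /* nothing queued (line 745) */
        return;
    }

    chan->swdesc = to_stm32_dma3_swdesc(vdesc);
    hwdesc = chan->swdesc->lli[0].hwdesc;   /* first item of the chain */

    stm32_dma3_chan_dump_hwdesc(chan, chan->swdesc);

    writel_relaxed(chan->swdesc->ccr, ddata->base + STM32_DMA3_CCR(id));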
811 static int stm32_dma3_chan_get_curr_hwdesc(struct stm32_dma3_swdesc *swdesc, u32 cllr, u32 *residue) in stm32_dma3_chan_get_curr_hwdesc() argument
817 return swdesc->lli_size - 1; in stm32_dma3_chan_get_curr_hwdesc()
820 if (swdesc->cyclic && next_lli_offset == (swdesc->lli[0].hwdesc_addr & CLLR_LA)) in stm32_dma3_chan_get_curr_hwdesc()
821 return swdesc->lli_size - 1; in stm32_dma3_chan_get_curr_hwdesc()
824 for (i = swdesc->lli_size - 1; i > 0; i--) { in stm32_dma3_chan_get_curr_hwdesc()
825 *residue += FIELD_GET(CBR1_BNDT, swdesc->lli[i].hwdesc->cbr1); in stm32_dma3_chan_get_curr_hwdesc()
826 lli_offset = swdesc->lli[i].hwdesc_addr & CLLR_LA; in stm32_dma3_chan_get_curr_hwdesc()
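
stm32_dma3_chan_get_curr_hwdesc() (lines 811-826) maps the hardware's link register back to an index in lli[]. CLLR holds the address of the next item to be loaded, so a terminal link, or a cyclic wrap to lli[0], means the last item is current; otherwise the function walks backwards from the tail, accumulating the full byte count (CBR1.BNDT) of every not-yet-executed item into *residue until the link matches. A sketch under those assumptions (the terminal-link test and the error return are assumed):

    static int get_curr_hwdesc_sketch(struct stm32_dma3_swdesc *swdesc,
                                      u32 cllr, u32 *residue)
    {
        u32 i, lli_offset, next_lli_offset = cllr & CLLR_LA;

        /* Terminal link: the last item is the one executing. */
        if (!next_lli_offset)
            return swdesc->lli_size - 1;

        /* Cyclic wrap to lli[0]: again the last item is current. */
        if (swdesc->cyclic && next_lli_offset == (swdesc->lli[0].hwdesc_addr & CLLR_LA))
            return swdesc->lli_size - 1;

        /* Every item after the current one still has its whole byte
         * count (CBR1.BNDT) outstanding; accumulate while searching. */
        for (i = swdesc->lli_size - 1; i > 0; i--) {
            *residue += FIELD_GET(CBR1_BNDT, swdesc->lli[i].hwdesc->cbr1);
            lli_offset = swdesc->lli[i].hwdesc_addr & CLLR_LA;
            if (lli_offset == next_lli_offset)
                return i - 1;   /* CLLR points at item i, so i - 1 runs now */
        }

        return -EINVAL;
    }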
835 struct stm32_dma3_swdesc *swdesc, in stm32_dma3_chan_set_residue() argument
848 writel_relaxed(swdesc->ccr | CCR_SUSP, ddata->base + STM32_DMA3_CCR(chan->id)); in stm32_dma3_chan_set_residue()
854 writel_relaxed(swdesc->ccr, ddata->base + STM32_DMA3_CCR(chan->id)); in stm32_dma3_chan_set_residue()
879 writel_relaxed(swdesc->ccr, ddata->base + STM32_DMA3_CCR(chan->id)); in stm32_dma3_chan_set_residue()
887 ret = stm32_dma3_chan_get_curr_hwdesc(swdesc, cllr, &residue); in stm32_dma3_chan_set_residue()
895 hwdesc = swdesc->lli[curr_lli].hwdesc; in stm32_dma3_chan_set_residue()
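
stm32_dma3_chan_set_residue() (lines 835-895) brackets its register sampling with a suspend/resume of the channel: CCR is rewritten with CCR_SUSP set, the link register is read, CCR is restored, and the current item is then resolved via stm32_dma3_chan_get_curr_hwdesc(). A sketch of that bracket; the CLLR read, its macro name, and the suspend polling are assumptions:

    writel_relaxed(swdesc->ccr | CCR_SUSP, ddata->base + STM32_DMA3_CCR(chan->id));
    /* ... poll until the channel reports it is suspended ... */

    cllr = readl_relaxed(ddata->base + STM32_DMA3_CLLR(chan->id));   /* macro assumed */

    writel_relaxed(swdesc->ccr, ddata->base + STM32_DMA3_CCR(chan->id));  /* resume */

    ret = stm32_dma3_chan_get_curr_hwdesc(swdesc, cllr, &residue);
    if (ret >= 0)
        hwdesc = swdesc->lli[ret].hwdesc;   /* current item, for the CBR1 fix-up */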
969 if (!chan->swdesc) in stm32_dma3_chan_complete()
972 vchan_cookie_complete(&chan->swdesc->vdesc); in stm32_dma3_chan_complete()
973 chan->swdesc = NULL; in stm32_dma3_chan_complete()
995 if (chan->swdesc->cyclic) in stm32_dma3_chan_irq()
996 vchan_cyclic_callback(&chan->swdesc->vdesc); in stm32_dma3_chan_irq()
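
Lines 969-996 show the two completion paths driven from the interrupt handler: a cyclic descriptor fires vchan_cyclic_callback() every period and stays installed, while a one-shot descriptor is completed with vchan_cookie_complete() and chan->swdesc is cleared so the next queued descriptor can be started. A condensed sketch (status reads and the vchan spinlock are elided):

    if (chan->swdesc->cyclic) {
        /* Cyclic: notify the client each period, keep the descriptor. */
        vchan_cyclic_callback(&chan->swdesc->vdesc);
    } else {
        /* One-shot: complete the cookie, drop our reference, and let
         * the next queued descriptor be started. */
        vchan_cookie_complete(&chan->swdesc->vdesc);
        chan->swdesc = NULL;
    }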
1099 chan->swdesc = NULL; in stm32_dma3_free_chan_resources()
1150 struct stm32_dma3_swdesc *swdesc; in stm32_dma3_prep_dma_memcpy() local
1156 swdesc = stm32_dma3_chan_desc_alloc(chan, count); in stm32_dma3_prep_dma_memcpy()
1157 if (!swdesc) in stm32_dma3_prep_dma_memcpy()
1170 ret = stm32_dma3_chan_prep_hw(chan, DMA_MEM_TO_MEM, &swdesc->ccr, &ctr1, &ctr2, in stm32_dma3_prep_dma_memcpy()
1175 stm32_dma3_chan_prep_hwdesc(chan, swdesc, i, src + offset, dst + offset, next_size, in stm32_dma3_prep_dma_memcpy()
1180 swdesc->ccr |= CCR_USEIE | CCR_ULEIE | CCR_DTEIE; in stm32_dma3_prep_dma_memcpy()
1182 swdesc->ccr |= CCR_TCIE; in stm32_dma3_prep_dma_memcpy()
1184 swdesc->cyclic = false; in stm32_dma3_prep_dma_memcpy()
1186 return vchan_tx_prep(&chan->vchan, &swdesc->vdesc, flags); in stm32_dma3_prep_dma_memcpy()
1189 stm32_dma3_chan_desc_free(chan, swdesc); in stm32_dma3_prep_dma_memcpy()
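
stm32_dma3_prep_dma_memcpy() (lines 1150-1189) splits the copy into count chunks, prepares CCR/CTR1/CTR2 per chunk, and always arms the error interrupts (USEIE/ULEIE/DTEIE); TCIE is added only when the client asked for completion notification. A skeleton of the loop; max_block_size, the argument lists truncated in the listing, and the DMA_PREP_INTERRUPT test are assumptions:

    count = DIV_ROUND_UP(len, max_block_size);
    swdesc = stm32_dma3_chan_desc_alloc(chan, count);
    if (!swdesc)
        return NULL;

    for (i = 0, offset = 0; i < count; i++, offset += next_size) {
        next_size = min_t(size_t, len - offset, max_block_size);

        ret = stm32_dma3_chan_prep_hw(chan, DMA_MEM_TO_MEM, &swdesc->ccr,
                                      &ctr1, &ctr2, src + offset, dst + offset,
                                      next_size);
        if (ret)
            goto err_desc_free;

        stm32_dma3_chan_prep_hwdesc(chan, swdesc, i, src + offset, dst + offset,
                                    next_size, ctr1, ctr2, i == count - 1, false);
    }

    /* Errors always raise an interrupt; TC only if the client asked. */
    swdesc->ccr |= CCR_USEIE | CCR_ULEIE | CCR_DTEIE;
    if (flags & DMA_PREP_INTERRUPT)
        swdesc->ccr |= CCR_TCIE;
    swdesc->cyclic = false;

    return vchan_tx_prep(&chan->vchan, &swdesc->vdesc, flags);

    err_desc_free:
        stm32_dma3_chan_desc_free(chan, swdesc);
        return NULL;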
1201 struct stm32_dma3_swdesc *swdesc; in stm32_dma3_prep_slave_sg() local
1215 swdesc = stm32_dma3_chan_desc_alloc(chan, count); in stm32_dma3_prep_slave_sg()
1216 if (!swdesc) in stm32_dma3_prep_slave_sg()
1234 ret = stm32_dma3_chan_prep_hw(chan, dir, &swdesc->ccr, &ctr1, &ctr2, in stm32_dma3_prep_slave_sg()
1243 ret = stm32_dma3_chan_prep_hw(chan, dir, &swdesc->ccr, &ctr1, &ctr2, in stm32_dma3_prep_slave_sg()
1253 stm32_dma3_chan_prep_hwdesc(chan, swdesc, j, src, dst, chunk, in stm32_dma3_prep_slave_sg()
1263 swdesc->ccr |= CCR_USEIE | CCR_ULEIE | CCR_DTEIE; in stm32_dma3_prep_slave_sg()
1265 swdesc->ccr |= CCR_TCIE; in stm32_dma3_prep_slave_sg()
1267 swdesc->cyclic = false; in stm32_dma3_prep_slave_sg()
1269 return vchan_tx_prep(&chan->vchan, &swdesc->vdesc, flags); in stm32_dma3_prep_slave_sg()
1272 stm32_dma3_chan_desc_free(chan, swdesc); in stm32_dma3_prep_slave_sg()
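
stm32_dma3_prep_slave_sg() (lines 1201-1272) follows the same skeleton per scatterlist entry, except that stm32_dma3_chan_prep_hw() is reached through two call sites (lines 1234 and 1243), one per transfer direction, since source and destination swap roles. A sketch of that branch, merged here into one call for brevity; the sg/offset handling and the dma_config member name are assumptions:

    if (dir == DMA_MEM_TO_DEV) {
        src = sg_dma_address(sg) + offset;      /* memory side advances */
        dst = chan->dma_config.dst_addr;        /* fixed peripheral address */
    } else { /* DMA_DEV_TO_MEM */
        src = chan->dma_config.src_addr;        /* fixed peripheral address */
        dst = sg_dma_address(sg) + offset;
    }
    ret = stm32_dma3_chan_prep_hw(chan, dir, &swdesc->ccr, &ctr1, &ctr2,
                                  src, dst, chunk);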
1284 struct stm32_dma3_swdesc *swdesc; in stm32_dma3_prep_dma_cyclic() local
1300 swdesc = stm32_dma3_chan_desc_alloc(chan, count); in stm32_dma3_prep_dma_cyclic()
1301 if (!swdesc) in stm32_dma3_prep_dma_cyclic()
1308 ret = stm32_dma3_chan_prep_hw(chan, DMA_MEM_TO_DEV, &swdesc->ccr, &ctr1, &ctr2, in stm32_dma3_prep_dma_cyclic()
1314 ret = stm32_dma3_chan_prep_hw(chan, DMA_DEV_TO_MEM, &swdesc->ccr, &ctr1, &ctr2, in stm32_dma3_prep_dma_cyclic()
1333 stm32_dma3_chan_prep_hwdesc(chan, swdesc, i, src, dst, period_len, in stm32_dma3_prep_dma_cyclic()
1338 swdesc->ccr |= CCR_USEIE | CCR_ULEIE | CCR_DTEIE; in stm32_dma3_prep_dma_cyclic()
1340 swdesc->ccr |= CCR_TCIE; in stm32_dma3_prep_dma_cyclic()
1342 swdesc->cyclic = true; in stm32_dma3_prep_dma_cyclic()
1344 return vchan_tx_prep(&chan->vchan, &swdesc->vdesc, flags); in stm32_dma3_prep_dma_cyclic()
1347 stm32_dma3_chan_desc_free(chan, swdesc); in stm32_dma3_prep_dma_cyclic()
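
The cyclic variant (lines 1284-1347) keeps the same skeleton with one LLI per period, presumably addressed at buf_addr + i * period_len on the memory side, and its two stm32_dma3_chan_prep_hw() call sites (lines 1308 and 1314) mirror the slave_sg direction branch. The decisive difference is line 1342: swdesc->cyclic = true makes stm32_dma3_chan_prep_hwdesc() wrap the final link back to lli[0] (line 502) and steers the interrupt path to vchan_cyclic_callback() (line 996).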
1417 if (chan->swdesc) { in stm32_dma3_terminate_all()
1418 vchan_terminate_vdesc(&chan->swdesc->vdesc); in stm32_dma3_terminate_all()
1419 chan->swdesc = NULL; in stm32_dma3_terminate_all()
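
stm32_dma3_terminate_all() (lines 1417-1419) hands any in-flight descriptor back to virt-dma rather than freeing it directly, since the client may still hold references until synchronization; the remaining queued descriptors are reaped through the usual virt-dma helpers. A sketch (the channel-stop register writes and vchan locking are elided):

    if (chan->swdesc) {
        /* Hand the in-flight descriptor to virt-dma for deferred
         * freeing instead of freeing it under the lock. */
        vchan_terminate_vdesc(&chan->swdesc->vdesc);
        chan->swdesc = NULL;
    }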
1445 struct stm32_dma3_swdesc *swdesc = NULL; in stm32_dma3_tx_status() local
1461 swdesc = to_stm32_dma3_swdesc(vd); in stm32_dma3_tx_status()
1462 else if (chan->swdesc && chan->swdesc->vdesc.tx.cookie == cookie) in stm32_dma3_tx_status()
1463 swdesc = chan->swdesc; in stm32_dma3_tx_status()
1466 if (swdesc) in stm32_dma3_tx_status()
1467 stm32_dma3_chan_set_residue(chan, swdesc, txstate); in stm32_dma3_tx_status()
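
stm32_dma3_tx_status() (lines 1445-1467) resolves a cookie to a descriptor in two steps: first among the descriptors still queued on the vchan, then falling back to the in-flight chan->swdesc if its cookie matches; only then is the residue computed. A sketch assuming virt-dma's vchan_find_desc() (locking elided):

    enum dma_status status = dma_cookie_status(c, cookie, txstate);
    struct virt_dma_desc *vd;

    if (status == DMA_COMPLETE)
        return status;

    vd = vchan_find_desc(&chan->vchan, cookie);
    if (vd)
        swdesc = to_stm32_dma3_swdesc(vd);            /* still queued */
    else if (chan->swdesc && chan->swdesc->vdesc.tx.cookie == cookie)
        swdesc = chan->swdesc;                        /* currently running */

    if (swdesc)
        stm32_dma3_chan_set_residue(chan, swdesc, txstate);

    return status;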
1481 if (vchan_issue_pending(&chan->vchan) && !chan->swdesc) { in stm32_dma3_issue_pending()
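
stm32_dma3_issue_pending() (line 1481) is the standard virt-dma idiom: move submitted descriptors to the issued list and kick the hardware only if nothing is already in flight. A sketch:

    spin_lock_irqsave(&chan->vchan.lock, flags);
    if (vchan_issue_pending(&chan->vchan) && !chan->swdesc)
        stm32_dma3_chan_start(chan);
    spin_unlock_irqrestore(&chan->vchan.lock, flags);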