Lines matching "dma-coherent"
1 // SPDX-License-Identifier: GPL-2.0
2 // Copyright (C) 2024-2025 Arm Limited
3 // Arm DMA-350 driver
7 #include <linux/dma-mapping.h>
14 #include "virt-dma.h"
190 bool coherent;
194 struct dma_device dma;
226 desc->tsz = __ffs(len | dest | src | (1 << dch->tsz)); in d350_prep_memcpy()
227 desc->xsize = lower_16_bits(len >> desc->tsz); in d350_prep_memcpy()
228 desc->xsizehi = upper_16_bits(len >> desc->tsz); in d350_prep_memcpy()
230 cmd = desc->command; in d350_prep_memcpy()
235 cmd[1] = FIELD_PREP(CH_CTRL_TRANSIZE, desc->tsz) | in d350_prep_memcpy()
243 cmd[6] = FIELD_PREP(CH_XY_SRC, desc->xsize) | FIELD_PREP(CH_XY_DES, desc->xsize); in d350_prep_memcpy()
244 cmd[7] = FIELD_PREP(CH_XY_SRC, desc->xsizehi) | FIELD_PREP(CH_XY_DES, desc->xsizehi); in d350_prep_memcpy()
245 cmd[8] = dch->coherent ? TRANSCFG_WB : TRANSCFG_NC; in d350_prep_memcpy()
246 cmd[9] = dch->coherent ? TRANSCFG_WB : TRANSCFG_NC; in d350_prep_memcpy()
250 return vchan_tx_prep(&dch->vc, &desc->vd, flags); in d350_prep_memcpy()
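For context, a minimal client-side sketch of how a kernel user would reach d350_prep_memcpy() through the generic dmaengine API; the helper name example_dma_copy is hypothetical, the buffers are assumed to be DMA-mapped already, and error handling is trimmed:

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>

static int example_dma_copy(dma_addr_t dst, dma_addr_t src, size_t len)
{
	struct dma_async_tx_descriptor *tx;
	struct dma_chan *chan;
	dma_cap_mask_t mask;
	dma_cookie_t cookie;
	int ret = 0;

	dma_cap_zero(mask);
	dma_cap_set(DMA_MEMCPY, mask);

	/* Any controller advertising DMA_MEMCPY will do, including DMA-350 */
	chan = dma_request_chan_by_mask(&mask);
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	tx = dmaengine_prep_dma_memcpy(chan, dst, src, len, DMA_PREP_INTERRUPT);
	if (!tx) {
		ret = -ENOMEM;
		goto out;
	}

	cookie = dmaengine_submit(tx);
	ret = dma_submit_error(cookie);
	if (ret)
		goto out;

	/* Start the transfer and poll until the cookie completes */
	dma_async_issue_pending(chan);
	if (dma_sync_wait(chan, cookie) != DMA_COMPLETE)
		ret = -EIO;
out:
	dma_release_channel(chan);
	return ret;
}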
264 desc->tsz = __ffs(len | dest | (1 << dch->tsz)); in d350_prep_memset()
265 desc->xsize = lower_16_bits(len >> desc->tsz); in d350_prep_memset()
266 desc->xsizehi = upper_16_bits(len >> desc->tsz); in d350_prep_memset()
268 cmd = desc->command; in d350_prep_memset()
273 cmd[1] = FIELD_PREP(CH_CTRL_TRANSIZE, desc->tsz) | in d350_prep_memset()
279 cmd[4] = FIELD_PREP(CH_XY_DES, desc->xsize); in d350_prep_memset()
280 cmd[5] = FIELD_PREP(CH_XY_DES, desc->xsizehi); in d350_prep_memset()
281 cmd[6] = dch->coherent ? TRANSCFG_WB : TRANSCFG_NC; in d350_prep_memset()
286 return vchan_tx_prep(&dch->vc, &desc->vd, flags); in d350_prep_memset()
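The memset path is analogous on the client side: a DMA_MEMSET-capable channel is requested and the descriptor prepared with dmaengine_prep_dma_memset(); note from the probe fragment further down that the capability is only advertised when every channel reports wrap support (memset &= dch->has_wrap).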
294 spin_lock_irqsave(&dch->vc.lock, flags); in d350_pause()
295 if (dch->status == DMA_IN_PROGRESS) { in d350_pause()
296 writel_relaxed(CH_CMD_PAUSE, dch->base + CH_CMD); in d350_pause()
297 dch->status = DMA_PAUSED; in d350_pause()
299 spin_unlock_irqrestore(&dch->vc.lock, flags); in d350_pause()
309 spin_lock_irqsave(&dch->vc.lock, flags); in d350_resume()
310 if (dch->status == DMA_PAUSED) { in d350_resume()
311 writel_relaxed(CH_CMD_RESUME, dch->base + CH_CMD); in d350_resume()
312 dch->status = DMA_IN_PROGRESS; in d350_resume()
314 spin_unlock_irqrestore(&dch->vc.lock, flags); in d350_resume()
324 hi_new = readl_relaxed(dch->base + CH_XSIZEHI); in d350_get_residue()
327 xsize = readl_relaxed(dch->base + CH_XSIZE); in d350_get_residue()
328 hi_new = readl_relaxed(dch->base + CH_XSIZEHI); in d350_get_residue()
329 } while (xsizehi != hi_new && --retries); in d350_get_residue()
334 return res << dch->desc->tsz; in d350_get_residue()
343 spin_lock_irqsave(&dch->vc.lock, flags); in d350_terminate_all()
344 writel_relaxed(CH_CMD_STOP, dch->base + CH_CMD); in d350_terminate_all()
345 if (dch->desc) { in d350_terminate_all()
346 if (dch->status != DMA_ERROR) in d350_terminate_all()
347 vchan_terminate_vdesc(&dch->desc->vd); in d350_terminate_all()
348 dch->desc = NULL; in d350_terminate_all()
349 dch->status = DMA_COMPLETE; in d350_terminate_all()
351 vchan_get_all_descriptors(&dch->vc, &list); in d350_terminate_all()
352 list_splice_tail(&list, &dch->vc.desc_terminated); in d350_terminate_all()
353 spin_unlock_irqrestore(&dch->vc.lock, flags); in d350_terminate_all()
362 vchan_synchronize(&dch->vc); in d350_synchronize()
367 return ((u32)desc->xsizehi << 16 | desc->xsize) << desc->tsz; in d350_desc_bytes()
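As an illustrative example (numbers not taken from the source): for a 1 MiB copy where the alignment check yields tsz = 2 (32-bit transfers), len >> tsz = 0x40000, so xsize = lower_16_bits() = 0x0000 and xsizehi = upper_16_bits() = 0x0004; d350_desc_bytes() then reconstructs (0x0004 << 16 | 0x0000) << 2 = 0x100000 bytes, and d350_get_residue() rescales the remaining hardware XSIZE count to bytes with the same shift.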
381 spin_lock_irqsave(&dch->vc.lock, flags); in d350_tx_status()
382 if (cookie == dch->cookie) { in d350_tx_status()
383 status = dch->status; in d350_tx_status()
385 dch->residue = d350_get_residue(dch); in d350_tx_status()
386 residue = dch->residue; in d350_tx_status()
387 } else if ((vd = vchan_find_desc(&dch->vc, cookie))) { in d350_tx_status()
393 spin_unlock_irqrestore(&dch->vc.lock, flags); in d350_tx_status()
403 dch->desc = to_d350_desc(vchan_next_desc(&dch->vc)); in d350_start_next()
404 if (!dch->desc) in d350_start_next()
407 list_del(&dch->desc->vd.node); in d350_start_next()
408 dch->status = DMA_IN_PROGRESS; in d350_start_next()
409 dch->cookie = dch->desc->vd.tx.cookie; in d350_start_next()
410 dch->residue = d350_desc_bytes(dch->desc); in d350_start_next()
412 hdr = dch->desc->command[0]; in d350_start_next()
413 reg = &dch->desc->command[1]; in d350_start_next()
416 writel_relaxed(*reg++, dch->base + CH_INTREN); in d350_start_next()
418 writel_relaxed(*reg++, dch->base + CH_CTRL); in d350_start_next()
420 writel_relaxed(*reg++, dch->base + CH_SRCADDR); in d350_start_next()
422 writel_relaxed(*reg++, dch->base + CH_SRCADDRHI); in d350_start_next()
424 writel_relaxed(*reg++, dch->base + CH_DESADDR); in d350_start_next()
426 writel_relaxed(*reg++, dch->base + CH_DESADDRHI); in d350_start_next()
428 writel_relaxed(*reg++, dch->base + CH_XSIZE); in d350_start_next()
430 writel_relaxed(*reg++, dch->base + CH_XSIZEHI); in d350_start_next()
432 writel_relaxed(*reg++, dch->base + CH_SRCTRANSCFG); in d350_start_next()
434 writel_relaxed(*reg++, dch->base + CH_DESTRANSCFG); in d350_start_next()
436 writel_relaxed(*reg++, dch->base + CH_XADDRINC); in d350_start_next()
438 writel_relaxed(*reg++, dch->base + CH_FILLVAL); in d350_start_next()
440 writel_relaxed(*reg++, dch->base + CH_SRCTRIGINCFG); in d350_start_next()
442 writel_relaxed(*reg++, dch->base + CH_DESTRIGINCFG); in d350_start_next()
444 writel_relaxed(*reg++, dch->base + CH_AUTOCFG); in d350_start_next()
446 writel_relaxed(*reg++, dch->base + CH_LINKADDR); in d350_start_next()
448 writel_relaxed(*reg++, dch->base + CH_LINKADDRHI); in d350_start_next()
450 writel(CH_CMD_ENABLE, dch->base + CH_CMD); in d350_start_next()
458 spin_lock_irqsave(&dch->vc.lock, flags); in d350_issue_pending()
459 if (vchan_issue_pending(&dch->vc) && !dch->desc) in d350_issue_pending()
461 spin_unlock_irqrestore(&dch->vc.lock, flags); in d350_issue_pending()
467 struct device *dev = dch->vc.chan.device->dev; in d350_irq()
468 struct virt_dma_desc *vd = &dch->desc->vd; in d350_irq()
471 ch_status = readl(dch->base + CH_STATUS); in d350_irq()
476 u32 errinfo = readl_relaxed(dch->base + CH_ERRINFO); in d350_irq()
479 vd->tx_result.result = DMA_TRANS_READ_FAILED; in d350_irq()
481 vd->tx_result.result = DMA_TRANS_WRITE_FAILED; in d350_irq()
483 vd->tx_result.result = DMA_TRANS_ABORTED; in d350_irq()
485 vd->tx_result.residue = d350_get_residue(dch); in d350_irq()
489 writel_relaxed(ch_status, dch->base + CH_STATUS); in d350_irq()
491 spin_lock(&dch->vc.lock); in d350_irq()
494 dch->status = DMA_COMPLETE; in d350_irq()
495 dch->residue = 0; in d350_irq()
498 dch->status = DMA_ERROR; in d350_irq()
499 dch->residue = vd->tx_result.residue; in d350_irq()
501 spin_unlock(&dch->vc.lock); in d350_irq()
509 int ret = request_irq(dch->irq, d350_irq, IRQF_SHARED, in d350_alloc_chan_resources()
510 dev_name(&dch->vc.chan.dev->device), dch); in d350_alloc_chan_resources()
512 writel_relaxed(CH_INTREN_DONE | CH_INTREN_ERR, dch->base + CH_INTREN); in d350_alloc_chan_resources()
521 writel_relaxed(0, dch->base + CH_INTREN); in d350_free_chan_resources()
522 free_irq(dch->irq, dch); in d350_free_chan_resources()
523 vchan_free_chan_resources(&dch->vc); in d350_free_chan_resources()
528 struct device *dev = &pdev->dev; in d350_probe()
533 bool coherent, memset; in d350_probe()
544 return dev_err_probe(dev, -ENODEV, "Not a DMA-350!"); in d350_probe()
552 coherent = device_get_dma_attr(dev) == DEV_DMA_COHERENT; in d350_probe()
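On DT-based systems device_get_dma_attr() typically reports DEV_DMA_COHERENT when the controller node carries the "dma-coherent" property (the term this listing matches); the resulting flag selects the write-back TRANSCFG values in the prep routines above and the inner-shareable, write-back link attributes programmed below.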
556 return -ENOMEM; in d350_probe()
558 dmac->nchan = nchan; in d350_probe()
561 dmac->nreq = FIELD_GET(DMA_CFG_NUM_TRIGGER_IN, reg); in d350_probe()
563 dev_dbg(dev, "DMA-350 r%dp%d with %d channels, %d requests\n", r, p, dmac->nchan, dmac->nreq); in d350_probe()
565 dmac->dma.dev = dev; in d350_probe()
567 dmac->dma.src_addr_widths |= BIT(i); in d350_probe()
568 dmac->dma.dst_addr_widths |= BIT(i); in d350_probe()
570 dmac->dma.directions = BIT(DMA_MEM_TO_MEM); in d350_probe()
571 dmac->dma.descriptor_reuse = true; in d350_probe()
572 dmac->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in d350_probe()
573 dmac->dma.device_alloc_chan_resources = d350_alloc_chan_resources; in d350_probe()
574 dmac->dma.device_free_chan_resources = d350_free_chan_resources; in d350_probe()
575 dma_cap_set(DMA_MEMCPY, dmac->dma.cap_mask); in d350_probe()
576 dmac->dma.device_prep_dma_memcpy = d350_prep_memcpy; in d350_probe()
577 dmac->dma.device_pause = d350_pause; in d350_probe()
578 dmac->dma.device_resume = d350_resume; in d350_probe()
579 dmac->dma.device_terminate_all = d350_terminate_all; in d350_probe()
580 dmac->dma.device_synchronize = d350_synchronize; in d350_probe()
581 dmac->dma.device_tx_status = d350_tx_status; in d350_probe()
582 dmac->dma.device_issue_pending = d350_issue_pending; in d350_probe()
583 INIT_LIST_HEAD(&dmac->dma.channels); in d350_probe()
585 /* Would be nice to have per-channel caps for this... */ in d350_probe()
588 struct d350_chan *dch = &dmac->channels[i]; in d350_probe()
590 dch->base = base + DMACH(i); in d350_probe()
591 writel_relaxed(CH_CMD_CLEAR, dch->base + CH_CMD); in d350_probe()
593 reg = readl_relaxed(dch->base + CH_BUILDCFG1); in d350_probe()
598 dch->irq = platform_get_irq(pdev, i); in d350_probe()
599 if (dch->irq < 0) in d350_probe()
600 return dev_err_probe(dev, dch->irq, in d350_probe()
603 dch->has_wrap = FIELD_GET(CH_CFG_HAS_WRAP, reg); in d350_probe()
604 dch->has_trig = FIELD_GET(CH_CFG_HAS_TRIGIN, reg) & in d350_probe()
608 memset &= dch->has_wrap; in d350_probe()
610 reg = readl_relaxed(dch->base + CH_BUILDCFG0); in d350_probe()
611 dch->tsz = FIELD_GET(CH_CFG_DATA_WIDTH, reg); in d350_probe()
613 reg = FIELD_PREP(CH_LINK_SHAREATTR, coherent ? SHAREATTR_ISH : SHAREATTR_OSH); in d350_probe()
614 reg |= FIELD_PREP(CH_LINK_MEMATTR, coherent ? MEMATTR_WB : MEMATTR_NC); in d350_probe()
615 writel_relaxed(reg, dch->base + CH_LINKATTR); in d350_probe()
617 dch->vc.desc_free = d350_desc_free; in d350_probe()
618 vchan_init(&dch->vc, &dmac->dma); in d350_probe()
622 dma_cap_set(DMA_MEMSET, dmac->dma.cap_mask); in d350_probe()
623 dmac->dma.device_prep_dma_memset = d350_prep_memset; in d350_probe()
628 ret = dma_async_device_register(&dmac->dma); in d350_probe()
630 return dev_err_probe(dev, ret, "Failed to register DMA device\n"); in d350_probe()
639 dma_async_device_unregister(&dmac->dma); in d350_remove()
643 { .compatible = "arm,dma-350" },
650 .name = "arm-dma350",
659 MODULE_DESCRIPTION("Arm DMA-350 driver");