Lines Matching +full:enable +full:- +full:mask

1 // SPDX-License-Identifier: GPL-2.0-only
9 #include <linux/dma-mapping.h>
19 return param == chan->device->dev; in catpt_dma_filter()
33 dma_cap_mask_t mask; in catpt_dma_request_config_chan() local
36 dma_cap_zero(mask); in catpt_dma_request_config_chan()
37 dma_cap_set(DMA_MEMCPY, mask); in catpt_dma_request_config_chan()
39 chan = dma_request_channel(mask, catpt_dma_filter, cdev->dev); in catpt_dma_request_config_chan()
41 dev_err(cdev->dev, "request channel failed\n"); in catpt_dma_request_config_chan()
42 return ERR_PTR(-ENODEV); in catpt_dma_request_config_chan()
54 dev_err(cdev->dev, "slave config failed: %d\n", ret); in catpt_dma_request_config_chan()
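
The matches above come from catpt_dma_filter() and catpt_dma_request_config_chan(): a dmaengine capability mask is built for DMA_MEMCPY, a filter callback accepts only channels whose controller device matches the one passed as the filter parameter, and the channel is then given a slave configuration. A minimal sketch of that generic dmaengine pattern follows; the config values and function names are illustrative, not the driver's own.

#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/err.h>

/* Accept only channels provided by the DMA controller whose
 * struct device was passed as the filter parameter.
 */
static bool my_dma_filter(struct dma_chan *chan, void *param)
{
	return param == chan->device->dev;
}

static struct dma_chan *my_request_memcpy_chan(struct device *dev,
					       struct device *dmac_dev)
{
	struct dma_slave_config config = {
		/* illustrative values; the real driver sets its own */
		.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
		.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
	};
	struct dma_chan *chan;
	dma_cap_mask_t mask;
	int ret;

	dma_cap_zero(mask);
	dma_cap_set(DMA_MEMCPY, mask);

	chan = dma_request_channel(mask, my_dma_filter, dmac_dev);
	if (!chan) {
		dev_err(dev, "request channel failed\n");
		return ERR_PTR(-ENODEV);
	}

	ret = dmaengine_slave_config(chan, &config);
	if (ret) {
		dev_err(dev, "slave config failed: %d\n", ret);
		dma_release_channel(chan);
		return ERR_PTR(ret);
	}

	return chan;
}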
73 dev_err(cdev->dev, "prep dma memcpy failed\n"); in catpt_dma_memcpy()
74 return -EIO; in catpt_dma_memcpy()
77 /* enable demand mode for dma channel */ in catpt_dma_memcpy()
79 CATPT_HMDC_HDDA(CATPT_DMA_DEVID, chan->chan_id), in catpt_dma_memcpy()
80 CATPT_HMDC_HDDA(CATPT_DMA_DEVID, chan->chan_id)); in catpt_dma_memcpy()
84 dev_err(cdev->dev, "submit tx failed: %d\n", ret); in catpt_dma_memcpy()
89 ret = (status == DMA_COMPLETE) ? 0 : -EPROTO; in catpt_dma_memcpy()
94 CATPT_HMDC_HDDA(CATPT_DMA_DEVID, chan->chan_id), 0); in catpt_dma_memcpy()
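
catpt_dma_memcpy() wraps the standard dmaengine transfer sequence, prepare a memcpy descriptor, submit it, kick the engine, wait for completion, with the vendor-specific demand-mode bits toggled in CATPT_HMDC around the transfer. A hedged sketch of just the generic part of that sequence, blocking on the cookie with dma_sync_wait():

#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/errno.h>

/* Synchronous memcpy over a previously requested DMA_MEMCPY channel.
 * dst and src are DMA addresses already visible to the controller.
 */
static int my_dma_memcpy(struct device *dev, struct dma_chan *chan,
			 dma_addr_t dst, dma_addr_t src, size_t size)
{
	struct dma_async_tx_descriptor *desc;
	enum dma_status status;
	dma_cookie_t cookie;

	desc = dmaengine_prep_dma_memcpy(chan, dst, src, size, DMA_CTRL_ACK);
	if (!desc) {
		dev_err(dev, "prep dma memcpy failed\n");
		return -EIO;
	}

	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie)) {
		dev_err(dev, "submit tx failed\n");
		return -EIO;
	}

	dma_async_issue_pending(chan);

	/* Busy-wait (with an internal timeout) for the descriptor. */
	status = dma_sync_wait(chan, cookie);

	return status == DMA_COMPLETE ? 0 : -EPROTO;
}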
120 dmac = devm_kzalloc(cdev->dev, sizeof(*dmac), GFP_KERNEL); in catpt_dmac_probe()
122 return -ENOMEM; in catpt_dmac_probe()
124 dmac->regs = cdev->lpe_ba + cdev->spec->host_dma_offset[CATPT_DMA_DEVID]; in catpt_dmac_probe()
125 dmac->dev = cdev->dev; in catpt_dmac_probe()
126 dmac->irq = cdev->irq; in catpt_dmac_probe()
128 ret = dma_coerce_mask_and_coherent(cdev->dev, DMA_BIT_MASK(31)); in catpt_dmac_probe()
139 cdev->dmac = dmac; in catpt_dmac_probe()
151 dw_dma_remove(cdev->dmac); in catpt_dmac_remove()
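
catpt_dmac_probe() and catpt_dmac_remove() manage the DSP's embedded DesignWare DMA controller: a chip descriptor is devm-allocated, pointed at the controller's MMIO window inside the LPE BAR, the device's DMA mask is coerced, and the controller is registered with dw_dma_probe()/dw_dma_remove(). A small sketch of the allocation and mask setup, assuming the struct dw_dma_chip layout from <linux/dma/dw.h>; the lpe_ba and dma_off parameters are hypothetical.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dma/dw.h>
#include <linux/err.h>
#include <linux/slab.h>

/* Register the DSP-embedded DesignWare DMA controller.
 * lpe_ba is the ioremapped LPE BAR, dma_off the controller offset
 * within it (both hypothetical parameters here).
 */
static struct dw_dma_chip *my_dmac_probe(struct device *dev,
					 void __iomem *lpe_ba,
					 u32 dma_off, int irq)
{
	struct dw_dma_chip *dmac;
	int ret;

	dmac = devm_kzalloc(dev, sizeof(*dmac), GFP_KERNEL);
	if (!dmac)
		return ERR_PTR(-ENOMEM);

	dmac->regs = lpe_ba + dma_off;
	dmac->dev = dev;
	dmac->irq = irq;

	/* The fragment above coerces a 31-bit DMA mask for this engine. */
	ret = dma_coerce_mask_and_coherent(dev, DMA_BIT_MASK(31));
	if (ret)
		return ERR_PTR(ret);

	ret = dw_dma_probe(dmac);
	if (ret)
		return ERR_PTR(ret);

	return dmac;
}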
155 unsigned long mask, unsigned long new) in catpt_dsp_set_srampge() argument
158 u32 off = sram->start; in catpt_dsp_set_srampge()
159 unsigned long b = __ffs(mask); in catpt_dsp_set_srampge()
161 old = catpt_readl_pci(cdev, VDRTCTL0) & mask; in catpt_dsp_set_srampge()
162 dev_dbg(cdev->dev, "SRAMPGE [0x%08lx] 0x%08lx -> 0x%08lx", in catpt_dsp_set_srampge()
163 mask, old, new); in catpt_dsp_set_srampge()
168 catpt_updatel_pci(cdev, VDRTCTL0, mask, new); in catpt_dsp_set_srampge()
173 * Dummy read as the very first access after block enable in catpt_dsp_set_srampge()
176 for_each_clear_bit_from(b, &new, fls_long(mask)) { in catpt_dsp_set_srampge()
181 dev_dbg(cdev->dev, "sanitize block %ld: off 0x%08x\n", in catpt_dsp_set_srampge()
182 b - __ffs(mask), off); in catpt_dsp_set_srampge()
183 memcpy_fromio(buf, cdev->lpe_ba + off, sizeof(buf)); in catpt_dsp_set_srampge()
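
catpt_dsp_set_srampge() writes the new power-gating bits into VDRTCTL0 and then performs a dummy MMIO read from every block that has just been powered on, that is, every clear bit in the new value, because the first access after enabling a block must be a read. The iteration uses __ffs(), fls_long() and for_each_clear_bit_from() from <linux/bitops.h>. A hedged sketch of just that bit-walking pattern, with the block size and base address made up for illustration:

#include <linux/bitops.h>
#include <linux/io.h>
#include <linux/types.h>

#define MY_BLOCK_SIZE	0x8000	/* illustrative SRAM block size */

/* Touch every SRAM block that is enabled (bit clear) in 'new' within
 * the span of 'mask'; 'base' is the ioremapped start of the SRAM
 * region (both hypothetical here).
 */
static void my_sanitize_enabled_blocks(void __iomem *base,
				       unsigned long mask,
				       unsigned long new)
{
	unsigned long b = __ffs(mask);
	u8 buf[64];
	u32 off;

	/* Walk the clear bits of 'new' inside the mask's bit range. */
	for_each_clear_bit_from(b, &new, fls_long(mask)) {
		off = (b - __ffs(mask)) * MY_BLOCK_SIZE;
		/* Dummy read: very first access after block enable. */
		memcpy_fromio(buf, base + off, sizeof(buf));
	}
}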
190 unsigned long mask) in catpt_dsp_update_srampge() argument
196 for (res = sram->child; res; res = res->sibling) { in catpt_dsp_update_srampge()
199 h = (res->end - sram->start) / CATPT_MEMBLOCK_SIZE; in catpt_dsp_update_srampge()
200 l = (res->start - sram->start) / CATPT_MEMBLOCK_SIZE; in catpt_dsp_update_srampge()
204 /* shift to the mask's start bit and invert it, as ON = b0 */ in catpt_dsp_update_srampge()
205 new = ~(new << __ffs(mask)) & mask; in catpt_dsp_update_srampge()
210 catpt_dsp_set_srampge(cdev, sram, mask, new); in catpt_dsp_update_srampge()
212 /* enable core clock gating */ in catpt_dsp_update_srampge()
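
catpt_dsp_update_srampge() derives the requested power-gating value from the resource tree: every claimed child region of the SRAM resource maps to a run of block bits that are set in 'new', and the result is shifted to the mask's position and inverted because a cleared bit means the block is powered on. A short sketch of that resource-to-bitmask conversion, using hypothetical names and block size:

#include <linux/bitops.h>
#include <linux/ioport.h>

#define MY_BLOCK_SIZE	0x8000	/* illustrative SRAM block size */

/* Build the SRAMPGE value for 'sram': bits for blocks backing any
 * claimed child resource end up cleared (ON = b0) within 'mask'.
 */
static unsigned long my_srampge_value(struct resource *sram,
				      unsigned long mask)
{
	struct resource *res;
	unsigned long new = 0;

	for (res = sram->child; res; res = res->sibling) {
		unsigned long h, l;

		h = (res->end - sram->start) / MY_BLOCK_SIZE;
		l = (res->start - sram->start) / MY_BLOCK_SIZE;
		new |= GENMASK(h, l);
	}

	/* Shift to the mask's start bit and invert, as ON = b0. */
	return ~(new << __ffs(mask)) & mask;
}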
241 void lpt_dsp_pll_shutdown(struct catpt_dev *cdev, bool enable) in lpt_dsp_pll_shutdown() argument
245 val = enable ? LPT_VDRTCTL0_APLLSE : 0; in lpt_dsp_pll_shutdown()
249 void wpt_dsp_pll_shutdown(struct catpt_dev *cdev, bool enable) in wpt_dsp_pll_shutdown() argument
253 val = enable ? WPT_VDRTCTL2_APLLSE : 0; in wpt_dsp_pll_shutdown()
259 u32 mask, reg, val; in catpt_dsp_select_lpclock() local
262 mutex_lock(&cdev->clk_mutex); in catpt_dsp_select_lpclock()
266 dev_dbg(cdev->dev, "LPCS [0x%08lx] 0x%08x -> 0x%08x", in catpt_dsp_select_lpclock()
270 mutex_unlock(&cdev->clk_mutex); in catpt_dsp_select_lpclock()
280 dev_warn(cdev->dev, "await WAITI timeout\n"); in catpt_dsp_select_lpclock()
281 /* no signal - only high clock selection allowed */ in catpt_dsp_select_lpclock()
283 mutex_unlock(&cdev->clk_mutex); in catpt_dsp_select_lpclock()
293 dev_warn(cdev->dev, "clock change still in progress\n"); in catpt_dsp_select_lpclock()
297 mask = CATPT_CS_LPCS | CATPT_CS_DCS; in catpt_dsp_select_lpclock()
298 catpt_updatel_shim(cdev, CS1, mask, val); in catpt_dsp_select_lpclock()
304 dev_warn(cdev->dev, "clock change still in progress\n"); in catpt_dsp_select_lpclock()
307 cdev->spec->pll_shutdown(cdev, lp); in catpt_dsp_select_lpclock()
309 mutex_unlock(&cdev->clk_mutex); in catpt_dsp_select_lpclock()
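
catpt_dsp_select_lpclock() serializes clock switches with clk_mutex, optionally waits for the DSP to report idle (WAITI), flips CATPT_CS_LPCS and CATPT_CS_DCS in the CS1 shim register, and polls until the "clock change in progress" status clears before invoking the platform-specific PLL shutdown callback. The driver uses its own shim poll helpers; a generic sketch of the poll-until-bit-clears step with readl_poll_timeout() and a made-up status bit:

#include <linux/bits.h>
#include <linux/device.h>
#include <linux/io.h>
#include <linux/iopoll.h>

#define MY_CS_DCS	BIT(4)	/* hypothetical "clock change busy" bit */

/* Wait (up to 10 ms, checking every 50 us) until the hardware reports
 * that the requested clock switch has completed.
 */
static int my_wait_clock_settled(struct device *dev, void __iomem *cs_reg)
{
	u32 reg;
	int ret;

	ret = readl_poll_timeout(cs_reg, reg, !(reg & MY_CS_DCS),
				 50, 10 * USEC_PER_MSEC);
	if (ret)
		dev_warn(dev, "clock change still in progress\n");

	return ret;
}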
317 list_for_each_entry(stream, &cdev->stream_list, node) in catpt_dsp_update_lpclock()
318 if (stream->prepared) in catpt_dsp_update_lpclock()
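
catpt_dsp_update_lpclock() walks the driver's stream list and keeps the high clock selected while any stream is prepared; only when none is does it drop to the low-power clock. A tiny sketch of that decision, assuming a driver-owned list whose locking is handled elsewhere:

#include <linux/list.h>
#include <linux/types.h>

struct my_stream {
	struct list_head node;
	bool prepared;
};

/* Return true when no stream on 'streams' is prepared, i.e. the DSP
 * may switch to its low-power clock.
 */
static bool my_lpclock_allowed(struct list_head *streams)
{
	struct my_stream *stream;

	list_for_each_entry(stream, streams, node)
		if (stream->prepared)
			return false;

	return true;
}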
359 u32 mask, val; in catpt_dsp_power_down() local
375 mask = CATPT_VDRTCTL2_CGEALL & (~CATPT_VDRTCTL2_DCLCGE); in catpt_dsp_power_down()
376 val = mask & (~CATPT_VDRTCTL2_DTCGE); in catpt_dsp_power_down()
377 catpt_updatel_pci(cdev, VDRTCTL2, mask, val); in catpt_dsp_power_down()
378 /* enable DTCGE separately */ in catpt_dsp_power_down()
383 catpt_dsp_set_srampge(cdev, &cdev->dram, cdev->spec->dram_mask, in catpt_dsp_power_down()
384 cdev->spec->dram_mask); in catpt_dsp_power_down()
385 catpt_dsp_set_srampge(cdev, &cdev->iram, cdev->spec->iram_mask, in catpt_dsp_power_down()
386 cdev->spec->iram_mask); in catpt_dsp_power_down()
387 mask = cdev->spec->d3srampgd_bit | cdev->spec->d3pgd_bit; in catpt_dsp_power_down()
388 catpt_updatel_pci(cdev, VDRTCTL0, mask, cdev->spec->d3pgd_bit); in catpt_dsp_power_down()
394 /* enable core clock gating */ in catpt_dsp_power_down()
404 u32 mask, val; in catpt_dsp_power_up() local
410 mask = CATPT_VDRTCTL2_CGEALL & (~CATPT_VDRTCTL2_DCLCGE); in catpt_dsp_power_up()
411 val = mask & (~CATPT_VDRTCTL2_DTCGE); in catpt_dsp_power_up()
412 catpt_updatel_pci(cdev, VDRTCTL2, mask, val); in catpt_dsp_power_up()
417 mask = cdev->spec->d3srampgd_bit | cdev->spec->d3pgd_bit; in catpt_dsp_power_up()
418 catpt_updatel_pci(cdev, VDRTCTL0, mask, mask); in catpt_dsp_power_up()
419 catpt_dsp_set_srampge(cdev, &cdev->dram, cdev->spec->dram_mask, 0); in catpt_dsp_power_up()
420 catpt_dsp_set_srampge(cdev, &cdev->iram, cdev->spec->iram_mask, 0); in catpt_dsp_power_up()
432 /* enable core clock gating */ in catpt_dsp_power_up()
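
Both catpt_dsp_power_down() and catpt_dsp_power_up() are sequences of masked register updates: disable dynamic clock gating (DCLCGE), gate or ungate the SRAM banks via SRAMPGE, toggle the D3 power-gating bits in VDRTCTL0, then re-enable clock gating. The catpt_updatel_pci()/catpt_updatel_shim() helpers they rely on are read-modify-write wrappers; a generic MMIO version of that idiom, with a hypothetical name, looks roughly like this:

#include <linux/io.h>
#include <linux/types.h>

/* Read-modify-write: clear the bits covered by 'mask', then set the
 * bits of 'val' that fall inside the mask.
 */
static void my_updatel(void __iomem *reg, u32 mask, u32 val)
{
	u32 tmp;

	tmp = readl(reg);
	tmp &= ~mask;
	tmp |= val & mask;
	writel(tmp, reg);
}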
468 dump_size = resource_size(&cdev->dram); in catpt_coredump()
469 dump_size += resource_size(&cdev->iram); in catpt_coredump()
476 return -ENOMEM; in catpt_coredump()
481 hdr->magic = CATPT_DUMP_MAGIC; in catpt_coredump()
482 hdr->core_id = cdev->spec->core_id; in catpt_coredump()
483 hdr->section_id = CATPT_DUMP_SECTION_ID_FILE; in catpt_coredump()
484 hdr->size = dump_size - sizeof(*hdr); in catpt_coredump()
487 info = cdev->ipc.config.fw_info; in catpt_coredump()
492 info = strnchr(info, eof - info, ' '); in catpt_coredump()
498 memcpy(pos, info, min_t(u32, eof - info, CATPT_DUMP_HASH_SIZE)); in catpt_coredump()
502 hdr->magic = CATPT_DUMP_MAGIC; in catpt_coredump()
503 hdr->core_id = cdev->spec->core_id; in catpt_coredump()
504 hdr->section_id = CATPT_DUMP_SECTION_ID_IRAM; in catpt_coredump()
505 hdr->size = resource_size(&cdev->iram); in catpt_coredump()
508 memcpy_fromio(pos, cdev->lpe_ba + cdev->iram.start, hdr->size); in catpt_coredump()
509 pos += hdr->size; in catpt_coredump()
512 hdr->magic = CATPT_DUMP_MAGIC; in catpt_coredump()
513 hdr->core_id = cdev->spec->core_id; in catpt_coredump()
514 hdr->section_id = CATPT_DUMP_SECTION_ID_DRAM; in catpt_coredump()
515 hdr->size = resource_size(&cdev->dram); in catpt_coredump()
518 memcpy_fromio(pos, cdev->lpe_ba + cdev->dram.start, hdr->size); in catpt_coredump()
519 pos += hdr->size; in catpt_coredump()
522 hdr->magic = CATPT_DUMP_MAGIC; in catpt_coredump()
523 hdr->core_id = cdev->spec->core_id; in catpt_coredump()
524 hdr->section_id = CATPT_DUMP_SECTION_ID_REGS; in catpt_coredump()
525 hdr->size = regs_size; in catpt_coredump()
542 dev_coredumpv(cdev->dev, dump, dump_size, GFP_KERNEL); in catpt_coredump()
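
catpt_coredump() sizes the dump as IRAM plus DRAM plus the register block plus per-section headers, vzalloc()s a single buffer, fills in a file header carrying a hash taken from the firmware info string, copies IRAM, DRAM and the registers out with memcpy_fromio(), and finally hands the buffer to dev_coredumpv(), which takes ownership and exposes it through devcoredump. A compact sketch of that build-a-buffer-and-hand-it-off flow for one region, with an invented header layout:

#include <linux/devcoredump.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/io.h>
#include <linux/vmalloc.h>

struct my_dump_hdr {
	u32 magic;
	u32 section_id;
	u32 size;
} __packed;

/* Capture one iomem region into a freshly allocated dump buffer and
 * pass it to devcoredump; dev_coredumpv() frees the buffer itself.
 */
static int my_coredump_region(struct device *dev, void __iomem *base,
			      size_t region_size, u32 section_id)
{
	struct my_dump_hdr *hdr;
	size_t dump_size = sizeof(*hdr) + region_size;
	u8 *dump, *pos;

	dump = vzalloc(dump_size);
	if (!dump)
		return -ENOMEM;

	hdr = (struct my_dump_hdr *)dump;
	hdr->magic = 0x434f5245;	/* arbitrary marker */
	hdr->section_id = section_id;
	hdr->size = region_size;

	pos = dump + sizeof(*hdr);
	memcpy_fromio(pos, base, region_size);

	/* Ownership of 'dump' passes to the devcoredump core here. */
	dev_coredumpv(dev, dump, dump_size, GFP_KERNEL);
	return 0;
}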