Lines Matching +full:udma +full:- +full:c
1 /*-
3 Copyright (C) 2015 Annapurna Labs Ltd.
10 found at http://www.gnu.org/licenses/gpl-2.0.html
38 * @file al_hal_udma_config.c
55 al_reg_write32(&axi_regs->cfg_1, axi->axi_timeout); in al_udma_axi_set()
57 reg = al_reg_read32(&axi_regs->cfg_2); in al_udma_axi_set()
59 reg |= axi->arb_promotion; in al_udma_axi_set()
60 al_reg_write32(&axi_regs->cfg_2, reg); in al_udma_axi_set()
62 reg = al_reg_read32(&axi_regs->endian_cfg); in al_udma_axi_set()
63 if (axi->swap_8_bytes == AL_TRUE) in al_udma_axi_set()
68 if (axi->swap_s2m_data == AL_TRUE) in al_udma_axi_set()
73 if (axi->swap_s2m_desc == AL_TRUE) in al_udma_axi_set()
78 if (axi->swap_m2s_data == AL_TRUE) in al_udma_axi_set()
83 if (axi->swap_m2s_desc == AL_TRUE) in al_udma_axi_set()
88 al_reg_write32(&axi_regs->endian_cfg, reg); in al_udma_axi_set()
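Every setter excerpted from this file follows the same read-modify-write idiom over 32-bit configuration registers: read with al_reg_read32(), clear the field's mask, OR in the shifted and masked new value, then write back with al_reg_write32(). Below is a minimal, self-contained sketch of that idiom; the register struct, mask, and shift are invented for illustration and are not taken from the HAL headers.

#include <stdint.h>

/* Hypothetical register block and field layout, for illustration only. */
struct sketch_cfg_regs {
	uint32_t cfg;
};
#define SKETCH_CFG_FIELD_MASK	0x0000ff00
#define SKETCH_CFG_FIELD_SHIFT	8

/* Stand-ins for the HAL's MMIO accessors, so the sketch compiles on its own. */
static uint32_t sketch_reg_read32(uint32_t *reg) { return *reg; }
static void sketch_reg_write32(uint32_t *reg, uint32_t val) { *reg = val; }

static void sketch_field_set(struct sketch_cfg_regs *regs, uint32_t val)
{
	uint32_t reg = sketch_reg_read32(&regs->cfg);

	reg &= ~SKETCH_CFG_FIELD_MASK;					/* clear old value */
	reg |= (val << SKETCH_CFG_FIELD_SHIFT) & SKETCH_CFG_FIELD_MASK;	/* merge new value */
	sketch_reg_write32(&regs->cfg, reg);
}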
92 /* Configure UDMA AXI M2S configuration */
101 reg |= m2s_sm->id & UDMA_AXI_M2S_COMP_WR_CFG_1_AWID_MASK; in al_udma_m2s_axi_sm_set()
103 reg |= (m2s_sm->cache_type << in al_udma_m2s_axi_sm_set()
107 reg |= (m2s_sm->burst << UDMA_AXI_M2S_COMP_WR_CFG_1_AWBURST_SHIFT) & in al_udma_m2s_axi_sm_set()
113 reg |= m2s_sm->used_ext & UDMA_AXI_M2S_COMP_WR_CFG_2_AWUSER_MASK; in al_udma_m2s_axi_sm_set()
115 reg |= (m2s_sm->bus_size << in al_udma_m2s_axi_sm_set()
119 reg |= (m2s_sm->qos << UDMA_AXI_M2S_COMP_WR_CFG_2_AWQOS_SHIFT) & in al_udma_m2s_axi_sm_set()
122 reg |= (m2s_sm->prot << UDMA_AXI_M2S_COMP_WR_CFG_2_AWPROT_SHIFT) & in al_udma_m2s_axi_sm_set()
128 reg |= m2s_sm->max_beats & in al_udma_m2s_axi_sm_set()
135 /** Configure UDMA AXI M2S configuration */
136 int al_udma_m2s_axi_set(struct al_udma *udma, in al_udma_m2s_axi_set() argument
141 al_udma_m2s_axi_sm_set(&axi_m2s->comp_write, in al_udma_m2s_axi_set()
142 &udma->udma_regs->m2s.axi_m2s.comp_wr_cfg_1, in al_udma_m2s_axi_set()
143 &udma->udma_regs->m2s.axi_m2s.comp_wr_cfg_2, in al_udma_m2s_axi_set()
144 &udma->udma_regs->m2s.axi_m2s.desc_wr_cfg_1); in al_udma_m2s_axi_set()
146 al_udma_m2s_axi_sm_set(&axi_m2s->data_read, in al_udma_m2s_axi_set()
147 &udma->udma_regs->m2s.axi_m2s.data_rd_cfg_1, in al_udma_m2s_axi_set()
148 &udma->udma_regs->m2s.axi_m2s.data_rd_cfg_2, in al_udma_m2s_axi_set()
149 &udma->udma_regs->m2s.axi_m2s.data_rd_cfg); in al_udma_m2s_axi_set()
151 al_udma_m2s_axi_sm_set(&axi_m2s->desc_read, in al_udma_m2s_axi_set()
152 &udma->udma_regs->m2s.axi_m2s.desc_rd_cfg_1, in al_udma_m2s_axi_set()
153 &udma->udma_regs->m2s.axi_m2s.desc_rd_cfg_2, in al_udma_m2s_axi_set()
154 &udma->udma_regs->m2s.axi_m2s.desc_rd_cfg_3); in al_udma_m2s_axi_set()
156 reg = al_reg_read32(&udma->udma_regs->m2s.axi_m2s.data_rd_cfg); in al_udma_m2s_axi_set()
157 if (axi_m2s->break_on_max_boundary == AL_TRUE) in al_udma_m2s_axi_set()
161 al_reg_write32(&udma->udma_regs->m2s.axi_m2s.data_rd_cfg, reg); in al_udma_m2s_axi_set()
163 reg = al_reg_read32(&udma->udma_regs->m2s.axi_m2s.desc_wr_cfg_1); in al_udma_m2s_axi_set()
165 reg |= (axi_m2s->min_axi_beats << in al_udma_m2s_axi_set()
168 al_reg_write32(&udma->udma_regs->m2s.axi_m2s.desc_wr_cfg_1, reg); in al_udma_m2s_axi_set()
170 reg = al_reg_read32(&udma->udma_regs->m2s.axi_m2s.ostand_cfg); in al_udma_m2s_axi_set()
172 reg |= axi_m2s->ostand_max_data_read & in al_udma_m2s_axi_set()
175 reg |= (axi_m2s->ostand_max_desc_read << in al_udma_m2s_axi_set()
179 reg |= (axi_m2s->ostand_max_comp_req << in al_udma_m2s_axi_set()
183 reg |= (axi_m2s->ostand_max_comp_write << in al_udma_m2s_axi_set()
186 al_reg_write32(&udma->udma_regs->m2s.axi_m2s.ostand_cfg, reg); in al_udma_m2s_axi_set()
198 reg |= s2m_sm->id & UDMA_AXI_S2M_COMP_WR_CFG_1_AWID_MASK; in al_udma_s2m_axi_sm_set()
200 reg |= (s2m_sm->cache_type << in al_udma_s2m_axi_sm_set()
204 reg |= (s2m_sm->burst << UDMA_AXI_S2M_COMP_WR_CFG_1_AWBURST_SHIFT) & in al_udma_s2m_axi_sm_set()
210 reg |= s2m_sm->used_ext & UDMA_AXI_S2M_COMP_WR_CFG_2_AWUSER_MASK; in al_udma_s2m_axi_sm_set()
212 reg |= (s2m_sm->bus_size << UDMA_AXI_S2M_COMP_WR_CFG_2_AWSIZE_SHIFT) & in al_udma_s2m_axi_sm_set()
215 reg |= (s2m_sm->qos << UDMA_AXI_S2M_COMP_WR_CFG_2_AWQOS_SHIFT) & in al_udma_s2m_axi_sm_set()
218 reg |= (s2m_sm->prot << UDMA_AXI_S2M_COMP_WR_CFG_2_AWPROT_SHIFT) & in al_udma_s2m_axi_sm_set()
224 reg |= s2m_sm->max_beats & in al_udma_s2m_axi_sm_set()
231 /** Configure UDMA AXI S2M configuration */
232 int al_udma_s2m_axi_set(struct al_udma *udma, in al_udma_s2m_axi_set() argument
238 al_udma_s2m_axi_sm_set(&axi_s2m->data_write, in al_udma_s2m_axi_set()
239 &udma->udma_regs->s2m.axi_s2m.data_wr_cfg_1, in al_udma_s2m_axi_set()
240 &udma->udma_regs->s2m.axi_s2m.data_wr_cfg_2, in al_udma_s2m_axi_set()
241 &udma->udma_regs->s2m.axi_s2m.data_wr_cfg); in al_udma_s2m_axi_set()
243 al_udma_s2m_axi_sm_set(&axi_s2m->desc_read, in al_udma_s2m_axi_set()
244 &udma->udma_regs->s2m.axi_s2m.desc_rd_cfg_4, in al_udma_s2m_axi_set()
245 &udma->udma_regs->s2m.axi_s2m.desc_rd_cfg_5, in al_udma_s2m_axi_set()
246 &udma->udma_regs->s2m.axi_s2m.desc_rd_cfg_3); in al_udma_s2m_axi_set()
248 al_udma_s2m_axi_sm_set(&axi_s2m->comp_write, in al_udma_s2m_axi_set()
249 &udma->udma_regs->s2m.axi_s2m.comp_wr_cfg_1, in al_udma_s2m_axi_set()
250 &udma->udma_regs->s2m.axi_s2m.comp_wr_cfg_2, in al_udma_s2m_axi_set()
251 &udma->udma_regs->s2m.axi_s2m.desc_wr_cfg_1); in al_udma_s2m_axi_set()
253 reg = al_reg_read32(&udma->udma_regs->s2m.axi_s2m.desc_rd_cfg_3); in al_udma_s2m_axi_set()
254 if (axi_s2m->break_on_max_boundary == AL_TRUE) in al_udma_s2m_axi_set()
258 al_reg_write32(&udma->udma_regs->s2m.axi_s2m.desc_rd_cfg_3, reg); in al_udma_s2m_axi_set()
260 reg = al_reg_read32(&udma->udma_regs->s2m.axi_s2m.desc_wr_cfg_1); in al_udma_s2m_axi_set()
262 reg |= (axi_s2m->min_axi_beats << in al_udma_s2m_axi_set()
265 al_reg_write32(&udma->udma_regs->s2m.axi_s2m.desc_wr_cfg_1, reg); in al_udma_s2m_axi_set()
267 reg = al_reg_read32(&udma->udma_regs->s2m.axi_s2m.ostand_cfg_rd); in al_udma_s2m_axi_set()
269 reg |= axi_s2m->ostand_max_desc_read & in al_udma_s2m_axi_set()
273 reg |= (axi_s2m->ack_fifo_depth << in al_udma_s2m_axi_set()
277 al_reg_write32(&udma->udma_regs->s2m.axi_s2m.ostand_cfg_rd, reg); in al_udma_s2m_axi_set()
279 reg = al_reg_read32(&udma->udma_regs->s2m.axi_s2m.ostand_cfg_wr); in al_udma_s2m_axi_set()
281 reg |= axi_s2m->ostand_max_data_req & in al_udma_s2m_axi_set()
284 reg |= (axi_s2m->ostand_max_data_write << in al_udma_s2m_axi_set()
288 reg |= (axi_s2m->ostand_max_comp_req << in al_udma_s2m_axi_set()
292 reg |= (axi_s2m->ostand_max_comp_write << in al_udma_s2m_axi_set()
295 al_reg_write32(&udma->udma_regs->s2m.axi_s2m.ostand_cfg_wr, reg); in al_udma_s2m_axi_set()
300 int al_udma_m2s_packet_size_cfg_set(struct al_udma *udma, in al_udma_m2s_packet_size_cfg_set() argument
303 uint32_t reg = al_reg_read32(&udma->udma_regs->m2s.m2s.cfg_len); in al_udma_m2s_packet_size_cfg_set()
306 al_assert(udma->type == UDMA_TX); in al_udma_m2s_packet_size_cfg_set()
308 if (conf->encode_64k_as_zero == AL_TRUE) in al_udma_m2s_packet_size_cfg_set()
311 if (conf->max_pkt_size > max_supported_size) { in al_udma_m2s_packet_size_cfg_set()
312 al_err("udma [%s]: requested max_pkt_size (0x%x) exceeds the " in al_udma_m2s_packet_size_cfg_set()
313 "supported limit (0x%x)\n", udma->name, in al_udma_m2s_packet_size_cfg_set()
314 conf->max_pkt_size, max_supported_size); in al_udma_m2s_packet_size_cfg_set()
315 return -EINVAL; in al_udma_m2s_packet_size_cfg_set()
319 if (conf->encode_64k_as_zero == AL_TRUE) in al_udma_m2s_packet_size_cfg_set()
325 reg |= conf->max_pkt_size; in al_udma_m2s_packet_size_cfg_set()
327 al_reg_write32(&udma->udma_regs->m2s.m2s.cfg_len, reg); in al_udma_m2s_packet_size_cfg_set()
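al_udma_m2s_packet_size_cfg_set() validates conf->max_pkt_size against the supported limit (larger when encode_64k_as_zero is set, in which case a length of zero encodes a 64KB packet) and returns -EINVAL on overflow. A usage sketch follows; the struct tag al_udma_m2s_pkt_len_conf and the header name are assumptions not shown in the excerpt, and the values are placeholders.

/* Usage sketch; struct tag and header name are assumptions. */
#include "al_hal_udma_config.h"

static int tx_pkt_len_setup(struct al_udma *tx_udma)
{
	struct al_udma_m2s_pkt_len_conf conf = {
		.encode_64k_as_zero = AL_TRUE,	/* a length of 0 encodes 64KB */
		.max_pkt_size = 0xffff,
	};

	/* Returns -EINVAL if max_pkt_size exceeds the supported limit. */
	return al_udma_m2s_packet_size_cfg_set(tx_udma, &conf);
}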
331 /** Report Error - to be used for abort */
332 void al_udma_err_report(struct al_udma *udma __attribute__((__unused__))) in al_udma_err_report() argument
337 /** Statistics - TBD */
338 void al_udma_stats_get(struct al_udma *udma __attribute__((__unused__))) in al_udma_stats_get() argument
343 /** Configure UDMA M2S descriptor prefetch */
344 int al_udma_m2s_pref_set(struct al_udma *udma, in al_udma_m2s_pref_set() argument
349 reg = al_reg_read32(&udma->udma_regs->m2s.m2s_rd.desc_pref_cfg_1); in al_udma_m2s_pref_set()
351 reg |= conf->desc_fifo_depth; in al_udma_m2s_pref_set()
352 al_reg_write32(&udma->udma_regs->m2s.m2s_rd.desc_pref_cfg_1, reg); in al_udma_m2s_pref_set()
354 reg = al_reg_read32(&udma->udma_regs->m2s.m2s_rd.desc_pref_cfg_2); in al_udma_m2s_pref_set()
356 if (conf->sch_mode == SRR) in al_udma_m2s_pref_set()
358 else if (conf->sch_mode == STRICT) in al_udma_m2s_pref_set()
361 al_err("udma [%s]: requested descriptor prefetch arbiter " in al_udma_m2s_pref_set()
362 "mode (%d) is invalid\n", udma->name, conf->sch_mode); in al_udma_m2s_pref_set()
363 return -EINVAL; in al_udma_m2s_pref_set()
366 reg |= conf->max_desc_per_packet & in al_udma_m2s_pref_set()
368 al_reg_write32(&udma->udma_regs->m2s.m2s_rd.desc_pref_cfg_2, reg); in al_udma_m2s_pref_set()
370 reg = al_reg_read32(&udma->udma_regs->m2s.m2s_rd.desc_pref_cfg_3); in al_udma_m2s_pref_set()
372 reg |= conf->min_burst_below_thr & in al_udma_m2s_pref_set()
376 reg |= (conf->min_burst_above_thr << in al_udma_m2s_pref_set()
381 reg |= (conf->pref_thr << in al_udma_m2s_pref_set()
385 al_reg_write32(&udma->udma_regs->m2s.m2s_rd.desc_pref_cfg_3, reg); in al_udma_m2s_pref_set()
387 reg = al_reg_read32(&udma->udma_regs->m2s.m2s_rd.data_cfg); in al_udma_m2s_pref_set()
389 reg |= conf->data_fifo_depth & in al_udma_m2s_pref_set()
393 reg |= (conf->max_pkt_limit in al_udma_m2s_pref_set()
396 al_reg_write32(&udma->udma_regs->m2s.m2s_rd.data_cfg, reg); in al_udma_m2s_pref_set()
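al_udma_m2s_pref_set() programs the descriptor prefetch FIFO depth, the prefetch arbiter mode (SRR or STRICT; anything else returns -EINVAL), per-packet descriptor limit, burst thresholds, and the data FIFO limits. A usage sketch follows; the conf struct tag and header name are assumptions, the field names come from the excerpt above, and the numeric values are placeholders.

/* Usage sketch; struct tag assumed, values are placeholders. */
#include "al_hal_udma_config.h"

static int tx_pref_setup(struct al_udma *tx_udma)
{
	struct al_udma_m2s_desc_pref_conf conf = {
		.desc_fifo_depth = 128,
		.sch_mode = SRR,		/* SRR or STRICT; anything else is -EINVAL */
		.max_desc_per_packet = 8,
		.min_burst_below_thr = 4,
		.min_burst_above_thr = 8,
		.pref_thr = 32,
		.data_fifo_depth = 512,
		.max_pkt_limit = 4,
	};

	return al_udma_m2s_pref_set(tx_udma, &conf);
}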
401 /** Get the M2S UDMA descriptor prefetch */ 
402 int al_udma_m2s_pref_get(struct al_udma *udma, in al_udma_m2s_pref_get() argument
407 reg = al_reg_read32(&udma->udma_regs->m2s.m2s_rd.desc_pref_cfg_1); in al_udma_m2s_pref_get()
408 conf->desc_fifo_depth = in al_udma_m2s_pref_get()
412 reg = al_reg_read32(&udma->udma_regs->m2s.m2s_rd.desc_pref_cfg_2); in al_udma_m2s_pref_get()
414 conf->sch_mode = SRR; in al_udma_m2s_pref_get()
416 conf->sch_mode = STRICT; in al_udma_m2s_pref_get()
417 conf->max_desc_per_packet = in al_udma_m2s_pref_get()
422 reg = al_reg_read32(&udma->udma_regs->m2s.m2s_rd.desc_pref_cfg_3); in al_udma_m2s_pref_get()
424 conf->min_burst_below_thr = in al_udma_m2s_pref_get()
429 conf->min_burst_above_thr = in al_udma_m2s_pref_get()
434 conf->pref_thr = AL_REG_FIELD_GET(reg, in al_udma_m2s_pref_get()
441 int al_udma_m2s_max_descs_set(struct al_udma *udma, uint8_t max_descs) in al_udma_m2s_max_descs_set() argument
458 al_reg_write32_masked(&udma->udma_regs->m2s.m2s_rd.desc_pref_cfg_2, in al_udma_m2s_max_descs_set()
462 al_reg_write32_masked(&udma->udma_regs->m2s.m2s_rd.desc_pref_cfg_3, in al_udma_m2s_max_descs_set()
472 int al_udma_s2m_max_descs_set(struct al_udma *udma, uint8_t max_descs) in al_udma_s2m_max_descs_set() argument
488 al_reg_write32_masked(&udma->udma_regs->s2m.s2m_rd.desc_pref_cfg_3, in al_udma_s2m_max_descs_set()
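Both max_descs setters have their full signatures visible above (struct al_udma *, uint8_t) and apply masked writes to the prefetch configuration registers. A small sketch capping outstanding descriptors on both directions of a UDMA pair; the TX/RX pairing and error handling are illustrative.

/* Sketch: cap outstanding descriptors on both directions of a UDMA pair. */
#include "al_hal_udma_config.h"	/* header name assumed */

static int udma_limit_descs(struct al_udma *tx_udma, struct al_udma *rx_udma,
			    uint8_t max_descs)
{
	int rc;

	rc = al_udma_m2s_max_descs_set(tx_udma, max_descs);
	if (rc != 0)
		return rc;

	return al_udma_s2m_max_descs_set(rx_udma, max_descs);
}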
497 int al_udma_s2m_full_line_write_set(struct al_udma *udma, al_bool enable) in al_udma_s2m_full_line_write_set() argument
503 al_info("udma [%s]: full line write enabled\n", udma->name); in al_udma_s2m_full_line_write_set()
506 al_reg_write32_masked(&udma->udma_regs->s2m.s2m_wr.data_cfg_2, in al_udma_s2m_full_line_write_set()
512 /** Configure S2M UDMA descriptor prefetch */
513 int al_udma_s2m_pref_set(struct al_udma *udma, in al_udma_s2m_pref_set() argument
518 reg = al_reg_read32(&udma->udma_regs->s2m.s2m_rd.desc_pref_cfg_1); in al_udma_s2m_pref_set()
520 reg |= conf->desc_fifo_depth; in al_udma_s2m_pref_set()
521 al_reg_write32(&udma->udma_regs->s2m.s2m_rd.desc_pref_cfg_1, reg); in al_udma_s2m_pref_set()
523 reg = al_reg_read32(&udma->udma_regs->s2m.s2m_rd.desc_pref_cfg_2); in al_udma_s2m_pref_set()
525 if (conf->sch_mode == SRR) in al_udma_s2m_pref_set()
527 else if (conf->sch_mode == STRICT) in al_udma_s2m_pref_set()
530 al_err("udma [%s]: requested descriptor prefetch arbiter " in al_udma_s2m_pref_set()
531 "mode (%d) is invalid\n", udma->name, conf->sch_mode); in al_udma_s2m_pref_set()
532 return -EINVAL; in al_udma_s2m_pref_set()
534 if (conf->q_promotion == AL_TRUE) in al_udma_s2m_pref_set()
539 if (conf->force_promotion == AL_TRUE) in al_udma_s2m_pref_set()
544 if (conf->en_pref_prediction == AL_TRUE) in al_udma_s2m_pref_set()
550 reg |= (conf->promotion_th in al_udma_s2m_pref_set()
554 al_reg_write32(&udma->udma_regs->s2m.s2m_rd.desc_pref_cfg_2, reg); in al_udma_s2m_pref_set()
556 reg = al_reg_read32(&udma->udma_regs->s2m.s2m_rd.desc_pref_cfg_3); in al_udma_s2m_pref_set()
558 reg |= (conf->pref_thr << UDMA_S2M_RD_DESC_PREF_CFG_3_PREF_THR_SHIFT) & in al_udma_s2m_pref_set()
562 reg |= conf->min_burst_below_thr & in al_udma_s2m_pref_set()
566 reg |= (conf->min_burst_above_thr << in al_udma_s2m_pref_set()
570 al_reg_write32(&udma->udma_regs->s2m.s2m_rd.desc_pref_cfg_3, reg); in al_udma_s2m_pref_set()
572 reg = al_reg_read32(&udma->udma_regs->s2m.s2m_rd.desc_pref_cfg_4); in al_udma_s2m_pref_set()
574 reg |= conf->a_full_thr & UDMA_S2M_RD_DESC_PREF_CFG_4_A_FULL_THR_MASK; in al_udma_s2m_pref_set()
575 al_reg_write32(&udma->udma_regs->s2m.s2m_rd.desc_pref_cfg_4, reg); in al_udma_s2m_pref_set()
581 /* Configure S2M UDMA data write */
582 int al_udma_s2m_data_write_set(struct al_udma *udma, in al_udma_s2m_data_write_set() argument
587 reg = al_reg_read32(&udma->udma_regs->s2m.s2m_wr.data_cfg_1); in al_udma_s2m_data_write_set()
589 reg |= conf->data_fifo_depth & in al_udma_s2m_data_write_set()
592 reg |= (conf->max_pkt_limit << in al_udma_s2m_data_write_set()
596 reg |= (conf->fifo_margin << in al_udma_s2m_data_write_set()
599 al_reg_write32(&udma->udma_regs->s2m.s2m_wr.data_cfg_1, reg); in al_udma_s2m_data_write_set()
601 reg = al_reg_read32(&udma->udma_regs->s2m.s2m_wr.data_cfg_2); in al_udma_s2m_data_write_set()
603 reg |= conf->desc_wait_timer & in al_udma_s2m_data_write_set()
610 reg |= conf->flags & in al_udma_s2m_data_write_set()
616 al_reg_write32(&udma->udma_regs->s2m.s2m_wr.data_cfg_2, reg); in al_udma_s2m_data_write_set()
621 /* Configure S2M UDMA completion */
622 int al_udma_s2m_completion_set(struct al_udma *udma, in al_udma_s2m_completion_set() argument
625 uint32_t reg = al_reg_read32(&udma->udma_regs->s2m.s2m_comp.cfg_1c); in al_udma_s2m_completion_set()
627 reg |= conf->desc_size & UDMA_S2M_COMP_CFG_1C_DESC_SIZE_MASK; in al_udma_s2m_completion_set()
628 if (conf->cnt_words == AL_TRUE) in al_udma_s2m_completion_set()
632 if (conf->q_promotion == AL_TRUE) in al_udma_s2m_completion_set()
636 if (conf->force_rr == AL_TRUE) in al_udma_s2m_completion_set()
641 reg |= (conf->q_free_min << UDMA_S2M_COMP_CFG_1C_Q_FREE_MIN_SHIFT) & in al_udma_s2m_completion_set()
643 al_reg_write32(&udma->udma_regs->s2m.s2m_comp.cfg_1c, reg); in al_udma_s2m_completion_set()
645 reg = al_reg_read32(&udma->udma_regs->s2m.s2m_comp.cfg_2c); in al_udma_s2m_completion_set()
647 reg |= conf->comp_fifo_depth in al_udma_s2m_completion_set()
650 reg |= (conf->unack_fifo_depth in al_udma_s2m_completion_set()
653 al_reg_write32(&udma->udma_regs->s2m.s2m_comp.cfg_2c, reg); in al_udma_s2m_completion_set()
655 al_reg_write32(&udma->udma_regs->s2m.s2m_comp.cfg_application_ack, in al_udma_s2m_completion_set()
656 conf->timeout); in al_udma_s2m_completion_set()
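al_udma_s2m_completion_set() programs the completion descriptor size, word counting, queue promotion and forced round-robin, the minimum free-queue threshold, the completion and unacknowledged FIFO depths, and the application-ack timeout. A usage sketch follows; the struct tag is an assumption, field names follow the excerpt, and the values are placeholders.

/* Usage sketch; struct tag assumed, values are placeholders. */
#include "al_hal_udma_config.h"

static int rx_completion_setup(struct al_udma *rx_udma)
{
	struct al_udma_s2m_completion_conf conf = {
		.desc_size = 2,
		.cnt_words = AL_FALSE,
		.q_promotion = AL_TRUE,
		.force_rr = AL_FALSE,
		.q_free_min = 64,
		.comp_fifo_depth = 32,
		.unack_fifo_depth = 16,
		.timeout = 1000,	/* written to cfg_application_ack */
	};

	return al_udma_s2m_completion_set(rx_udma, &conf);
}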
660 /** Configure the M2S UDMA scheduling mode */
661 int al_udma_m2s_sc_set(struct al_udma *udma, in al_udma_m2s_sc_set() argument
664 uint32_t reg = al_reg_read32(&udma->udma_regs->m2s.m2s_dwrr.cfg_sched); in al_udma_m2s_sc_set()
666 if (sched->enable_dwrr == AL_TRUE) in al_udma_m2s_sc_set()
671 if (sched->pkt_mode == AL_TRUE) in al_udma_m2s_sc_set()
677 reg |= sched->weight << UDMA_M2S_DWRR_CFG_SCHED_WEIGHT_INC_SHIFT; in al_udma_m2s_sc_set()
679 reg |= sched->inc_factor << UDMA_M2S_DWRR_CFG_SCHED_INC_FACTOR_SHIFT; in al_udma_m2s_sc_set()
680 al_reg_write32(&udma->udma_regs->m2s.m2s_dwrr.cfg_sched, reg); in al_udma_m2s_sc_set()
682 reg = al_reg_read32(&udma->udma_regs->m2s.m2s_dwrr.ctrl_deficit_cnt); in al_udma_m2s_sc_set()
684 reg |= sched->deficit_init_val; in al_udma_m2s_sc_set()
685 al_reg_write32(&udma->udma_regs->m2s.m2s_dwrr.ctrl_deficit_cnt, reg); in al_udma_m2s_sc_set()
690 /** Configure the M2S UDMA rate limitation */
691 int al_udma_m2s_rlimit_set(struct al_udma *udma, in al_udma_m2s_rlimit_set() argument
695 &udma->udma_regs->m2s.m2s_rate_limiter.gen_cfg); in al_udma_m2s_rlimit_set()
697 if (mode->pkt_mode_en == AL_TRUE) in al_udma_m2s_rlimit_set()
702 reg |= mode->short_cycle_sz & in al_udma_m2s_rlimit_set()
704 al_reg_write32(&udma->udma_regs->m2s.m2s_rate_limiter.gen_cfg, reg); in al_udma_m2s_rlimit_set()
706 reg = al_reg_read32(&udma->udma_regs->m2s.m2s_rate_limiter.ctrl_token); in al_udma_m2s_rlimit_set()
708 reg |= mode->token_init_val & in al_udma_m2s_rlimit_set()
710 al_reg_write32(&udma->udma_regs->m2s.m2s_rate_limiter.ctrl_token, reg); in al_udma_m2s_rlimit_set()
715 int al_udma_m2s_rlimit_reset(struct al_udma *udma) in al_udma_m2s_rlimit_reset() argument
718 &udma->udma_regs->m2s.m2s_rate_limiter.ctrl_cycle_cnt); in al_udma_m2s_rlimit_reset()
720 al_reg_write32(&udma->udma_regs->m2s.m2s_rate_limiter.ctrl_cycle_cnt, in al_udma_m2s_rlimit_reset()
729 uint32_t reg = al_reg_read32(&regs->cfg_1s); in al_udma_common_rlimit_set()
734 reg |= conf->max_burst_sz & in al_udma_common_rlimit_set()
736 al_reg_write32(&regs->cfg_1s, reg); in al_udma_common_rlimit_set()
738 reg = al_reg_read32(&regs->cfg_cycle); in al_udma_common_rlimit_set()
740 reg |= conf->long_cycle_sz & in al_udma_common_rlimit_set()
742 al_reg_write32(&regs->cfg_cycle, reg); in al_udma_common_rlimit_set()
744 reg = al_reg_read32(&regs->cfg_token_size_1); in al_udma_common_rlimit_set()
746 reg |= conf->long_cycle & in al_udma_common_rlimit_set()
748 al_reg_write32(&regs->cfg_token_size_1, reg); in al_udma_common_rlimit_set()
750 reg = al_reg_read32(&regs->cfg_token_size_2); in al_udma_common_rlimit_set()
752 reg |= conf->short_cycle & in al_udma_common_rlimit_set()
754 al_reg_write32(&regs->cfg_token_size_2, reg); in al_udma_common_rlimit_set()
756 reg = al_reg_read32(&regs->mask); in al_udma_common_rlimit_set()
757 reg &= ~0xf; /* only bits 0-3 defined */ in al_udma_common_rlimit_set()
758 reg |= conf->mask & 0xf; in al_udma_common_rlimit_set()
759 al_reg_write32(&regs->mask, reg); in al_udma_common_rlimit_set()
771 reg = al_reg_read32(&regs->cfg_1s); in al_udma_common_rlimit_act()
773 al_reg_write32(&regs->cfg_1s, reg); in al_udma_common_rlimit_act()
776 reg = al_reg_read32(&regs->cfg_1s); in al_udma_common_rlimit_act()
778 al_reg_write32(&regs->cfg_1s, reg); in al_udma_common_rlimit_act()
781 reg = al_reg_read32(&regs->sw_ctrl); in al_udma_common_rlimit_act()
783 al_reg_write32(&regs->sw_ctrl, reg); in al_udma_common_rlimit_act()
786 return -EINVAL; in al_udma_common_rlimit_act()
792 int al_udma_m2s_strm_rlimit_set(struct al_udma *udma, in al_udma_m2s_strm_rlimit_set() argument
796 &udma->udma_regs->m2s.m2s_stream_rate_limiter.rlimit; in al_udma_m2s_strm_rlimit_set()
801 int al_udma_m2s_strm_rlimit_act(struct al_udma *udma, in al_udma_m2s_strm_rlimit_act() argument
805 &udma->udma_regs->m2s.m2s_stream_rate_limiter.rlimit; in al_udma_m2s_strm_rlimit_act()
807 if (al_udma_common_rlimit_act(rlimit_regs, act) == -EINVAL) { in al_udma_m2s_strm_rlimit_act()
808 al_err("udma [%s]: udma stream rate limit invalid action " in al_udma_m2s_strm_rlimit_act()
809 "(%d)\n", udma->name, act); in al_udma_m2s_strm_rlimit_act()
810 return -EINVAL; in al_udma_m2s_strm_rlimit_act()
815 /** Configure the M2S UDMA Q rate limitation */
819 struct udma_rlimit_common *rlimit_regs = &udma_q->q_regs->m2s_q.rlimit; in al_udma_m2s_q_rlimit_set()
827 struct udma_rlimit_common *rlimit_regs = &udma_q->q_regs->m2s_q.rlimit; in al_udma_m2s_q_rlimit_act()
829 if (al_udma_common_rlimit_act(rlimit_regs, act) == -EINVAL) { in al_udma_m2s_q_rlimit_act()
830 al_err("udma [%s %d]: udma queue rate limit invalid action " in al_udma_m2s_q_rlimit_act()
832 udma_q->udma->name, udma_q->qid, act); in al_udma_m2s_q_rlimit_act()
833 return -EINVAL; in al_udma_m2s_q_rlimit_act()
838 /** Configure the M2S UDMA Q scheduling mode */
842 uint32_t reg = al_reg_read32(&udma_q->q_regs->m2s_q.dwrr_cfg_1); in al_udma_m2s_q_sc_set()
845 reg |= conf->max_deficit_cnt_sz & in al_udma_m2s_q_sc_set()
847 if (conf->strict == AL_TRUE) in al_udma_m2s_q_sc_set()
851 al_reg_write32(&udma_q->q_regs->m2s_q.dwrr_cfg_1, reg); in al_udma_m2s_q_sc_set()
853 reg = al_reg_read32(&udma_q->q_regs->m2s_q.dwrr_cfg_2); in al_udma_m2s_q_sc_set()
855 reg |= (conf->axi_qos << UDMA_M2S_Q_DWRR_CFG_2_Q_QOS_SHIFT) & in al_udma_m2s_q_sc_set()
858 reg |= conf->q_qos & UDMA_M2S_Q_DWRR_CFG_2_Q_QOS_MASK; in al_udma_m2s_q_sc_set()
859 al_reg_write32(&udma_q->q_regs->m2s_q.dwrr_cfg_2, reg); in al_udma_m2s_q_sc_set()
861 reg = al_reg_read32(&udma_q->q_regs->m2s_q.dwrr_cfg_3); in al_udma_m2s_q_sc_set()
863 reg |= conf->weight & UDMA_M2S_Q_DWRR_CFG_3_WEIGHT_MASK; in al_udma_m2s_q_sc_set()
864 al_reg_write32(&udma_q->q_regs->m2s_q.dwrr_cfg_3, reg); in al_udma_m2s_q_sc_set()
871 uint32_t reg = al_reg_read32(&udma_q->q_regs->m2s_q.dwrr_cfg_1); in al_udma_m2s_q_sc_pause()
877 al_reg_write32(&udma_q->q_regs->m2s_q.dwrr_cfg_1, reg); in al_udma_m2s_q_sc_pause()
884 uint32_t reg = al_reg_read32(&udma_q->q_regs->m2s_q.dwrr_sw_ctrl); in al_udma_m2s_q_sc_reset()
887 al_reg_write32(&udma_q->q_regs->m2s_q.dwrr_sw_ctrl, reg); in al_udma_m2s_q_sc_reset()
892 /** M2S UDMA completion and application timeouts */
893 int al_udma_m2s_comp_timeouts_set(struct al_udma *udma, in al_udma_m2s_comp_timeouts_set() argument
896 uint32_t reg = al_reg_read32(&udma->udma_regs->m2s.m2s_comp.cfg_1c); in al_udma_m2s_comp_timeouts_set()
898 if (conf->sch_mode == SRR) in al_udma_m2s_comp_timeouts_set()
900 else if (conf->sch_mode == STRICT) in al_udma_m2s_comp_timeouts_set()
903 al_err("udma [%s]: requested completion descriptor prefetch " in al_udma_m2s_comp_timeouts_set()
905 udma->name, conf->sch_mode); in al_udma_m2s_comp_timeouts_set()
906 return -EINVAL; in al_udma_m2s_comp_timeouts_set()
908 if (conf->enable_q_promotion == AL_TRUE) in al_udma_m2s_comp_timeouts_set()
914 conf->comp_fifo_depth << UDMA_M2S_COMP_CFG_1C_COMP_FIFO_DEPTH_SHIFT; in al_udma_m2s_comp_timeouts_set()
917 reg |= conf->unack_fifo_depth in al_udma_m2s_comp_timeouts_set()
919 al_reg_write32(&udma->udma_regs->m2s.m2s_comp.cfg_1c, reg); in al_udma_m2s_comp_timeouts_set()
921 al_reg_write32(&udma->udma_regs->m2s.m2s_comp.cfg_coal in al_udma_m2s_comp_timeouts_set()
922 , conf->coal_timeout); in al_udma_m2s_comp_timeouts_set()
924 reg = al_reg_read32(&udma->udma_regs->m2s.m2s_comp.cfg_application_ack); in al_udma_m2s_comp_timeouts_set()
926 reg |= conf->app_timeout << UDMA_M2S_COMP_CFG_APPLICATION_ACK_TOUT_SHIFT; in al_udma_m2s_comp_timeouts_set()
927 al_reg_write32(&udma->udma_regs->m2s.m2s_comp.cfg_application_ack, reg); in al_udma_m2s_comp_timeouts_set()
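al_udma_m2s_comp_timeouts_set() configures the completion scheduler mode (SRR or STRICT), queue promotion, completion and unacknowledged FIFO depths, the coalescing timeout, and the application-ack timeout, with a matching getter shown below it. A usage sketch follows; the struct tag is an assumption and the values are placeholders.

/* Usage sketch; struct tag assumed, values are placeholders. */
#include "al_hal_udma_config.h"

static int tx_comp_timeouts_setup(struct al_udma *tx_udma)
{
	struct al_udma_m2s_comp_timeouts conf = {
		.sch_mode = STRICT,		/* SRR or STRICT; anything else is -EINVAL */
		.enable_q_promotion = AL_FALSE,
		.comp_fifo_depth = 32,
		.unack_fifo_depth = 16,
		.coal_timeout = 0,		/* written to cfg_coal */
		.app_timeout = 100,		/* written to cfg_application_ack */
	};

	return al_udma_m2s_comp_timeouts_set(tx_udma, &conf);
}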
931 int al_udma_m2s_comp_timeouts_get(struct al_udma *udma, in al_udma_m2s_comp_timeouts_get() argument
934 uint32_t reg = al_reg_read32(&udma->udma_regs->m2s.m2s_comp.cfg_1c); in al_udma_m2s_comp_timeouts_get()
937 conf->sch_mode = SRR; in al_udma_m2s_comp_timeouts_get()
939 conf->sch_mode = STRICT; in al_udma_m2s_comp_timeouts_get()
942 conf->enable_q_promotion = AL_TRUE; in al_udma_m2s_comp_timeouts_get()
944 conf->enable_q_promotion = AL_FALSE; in al_udma_m2s_comp_timeouts_get()
946 conf->comp_fifo_depth = in al_udma_m2s_comp_timeouts_get()
950 conf->unack_fifo_depth = in al_udma_m2s_comp_timeouts_get()
955 conf->coal_timeout = al_reg_read32( in al_udma_m2s_comp_timeouts_get()
956 &udma->udma_regs->m2s.m2s_comp.cfg_coal); in al_udma_m2s_comp_timeouts_get()
959 &udma->udma_regs->m2s.m2s_comp.cfg_application_ack); in al_udma_m2s_comp_timeouts_get()
961 conf->app_timeout = in al_udma_m2s_comp_timeouts_get()
970 * S2M UDMA configure no descriptors behaviour
972 int al_udma_s2m_no_desc_cfg_set(struct al_udma *udma, al_bool drop_packet, al_bool gen_interrupt, u… in al_udma_s2m_no_desc_cfg_set() argument
976 reg = al_reg_read32(&udma->udma_regs->s2m.s2m_wr.data_cfg_2); in al_udma_s2m_no_desc_cfg_set()
979 …udma [%s]: setting timeout to 0 will cause the udma to wait forever instead of dropping the packet… in al_udma_s2m_no_desc_cfg_set()
980 return -EINVAL; in al_udma_s2m_no_desc_cfg_set()
995 al_reg_write32(&udma->udma_regs->s2m.s2m_wr.data_cfg_2, reg); in al_udma_s2m_no_desc_cfg_set()
1000 /* S2M UDMA configure a queue's completion update */
1003 uint32_t reg = al_reg_read32(&udma_q->q_regs->s2m_q.comp_cfg); in al_udma_s2m_q_compl_updade_config()
1010 al_reg_write32(&udma_q->q_regs->s2m_q.comp_cfg, reg); in al_udma_s2m_q_compl_updade_config()
1015 /* S2M UDMA configure a queue's completion descriptors coalescing */
1019 uint32_t reg = al_reg_read32(&udma_q->q_regs->s2m_q.comp_cfg); in al_udma_s2m_q_compl_coal_config()
1026 al_reg_write32(&udma_q->q_regs->s2m_q.comp_cfg, reg); in al_udma_s2m_q_compl_coal_config()
1028 al_reg_write32(&udma_q->q_regs->s2m_q.comp_cfg_2, coal_timeout); in al_udma_s2m_q_compl_coal_config()
1032 /* S2M UDMA configure completion descriptors write burst parameters */
1033 int al_udma_s2m_compl_desc_burst_config(struct al_udma *udma, uint16_t in al_udma_s2m_compl_desc_burst_config() argument
1039 return -EINVAL; in al_udma_s2m_compl_desc_burst_config()
1044 al_reg_write32_masked(&udma->udma_regs->s2m.axi_s2m.desc_wr_cfg_1, in al_udma_s2m_compl_desc_burst_config()
1052 /* S2M UDMA configure a queue's completion descriptors header split */
1056 uint32_t reg = al_reg_read32(&udma_q->q_regs->s2m_q.pkt_cfg); in al_udma_s2m_q_compl_hdr_split_config()
1070 al_reg_write32(&udma_q->q_regs->s2m_q.pkt_cfg, reg); in al_udma_s2m_q_compl_hdr_split_config()
1075 /* S2M UDMA per queue completion configuration */
1079 uint32_t reg = al_reg_read32(&udma_q->q_regs->s2m_q.comp_cfg); in al_udma_s2m_q_comp_set()
1080 if (conf->en_comp_ring_update == AL_TRUE) in al_udma_s2m_q_comp_set()
1085 if (conf->dis_comp_coal == AL_TRUE) in al_udma_s2m_q_comp_set()
1090 al_reg_write32(&udma_q->q_regs->s2m_q.comp_cfg, reg); in al_udma_s2m_q_comp_set()
1092 al_reg_write32(&udma_q->q_regs->s2m_q.comp_cfg_2, conf->comp_timer); in al_udma_s2m_q_comp_set()
1094 reg = al_reg_read32(&udma_q->q_regs->s2m_q.pkt_cfg); in al_udma_s2m_q_comp_set()
1097 reg |= conf->hdr_split_size & UDMA_S2M_Q_PKT_CFG_HDR_SPLIT_SIZE_MASK; in al_udma_s2m_q_comp_set()
1098 if (conf->force_hdr_split == AL_TRUE) in al_udma_s2m_q_comp_set()
1102 if (conf->en_hdr_split == AL_TRUE) in al_udma_s2m_q_comp_set()
1107 al_reg_write32(&udma_q->q_regs->s2m_q.pkt_cfg, reg); in al_udma_s2m_q_comp_set()
1109 reg = al_reg_read32(&udma_q->q_regs->s2m_q.qos_cfg); in al_udma_s2m_q_comp_set()
1111 reg |= conf->q_qos & UDMA_S2M_QOS_CFG_Q_QOS_MASK; in al_udma_s2m_q_comp_set()
1112 al_reg_write32(&udma_q->q_regs->s2m_q.qos_cfg, reg); in al_udma_s2m_q_comp_set()
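al_udma_s2m_q_comp_set() gathers per-queue completion settings in one call: ring update enable, coalescing disable, the completion timer, header-split size and enables, and the queue QoS. A usage sketch follows; the queue handle type, the conf struct tag, and the return type are assumptions (the function signature is not visible in the excerpt), field names follow the excerpt, and values are placeholders.

/* Usage sketch; queue handle type, struct tag and signature are assumptions. */
#include "al_hal_udma_config.h"

static int rx_queue_comp_setup(struct al_udma_q *rx_q)
{
	struct al_udma_s2m_q_comp_conf conf = {
		.en_comp_ring_update = AL_TRUE,
		.dis_comp_coal = AL_FALSE,
		.comp_timer = 100,		/* written to comp_cfg_2 */
		.en_hdr_split = AL_FALSE,
		.force_hdr_split = AL_FALSE,
		.hdr_split_size = 0,
		.q_qos = 0,
	};

	return al_udma_s2m_q_comp_set(rx_q, &conf);
}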
1117 /* UDMA Target-ID control configuration per queue */
1129 /* Target-ID TX DESC EN */ in al_udma_gen_tgtid_conf_queue_set()
1130 al_reg_write32_masked(&unit_regs->gen.tgtid.cfg_tgtid_0, in al_udma_gen_tgtid_conf_queue_set()
1131 (conf->tx_q_conf[qid].desc_en << qid) << in al_udma_gen_tgtid_conf_queue_set()
1133 (conf->tx_q_conf[qid].desc_en << qid) << in al_udma_gen_tgtid_conf_queue_set()
1136 /* Target-ID TX QUEUE EN */ in al_udma_gen_tgtid_conf_queue_set()
1137 al_reg_write32_masked(&unit_regs->gen.tgtid.cfg_tgtid_0, in al_udma_gen_tgtid_conf_queue_set()
1138 (conf->tx_q_conf[qid].queue_en << qid) << in al_udma_gen_tgtid_conf_queue_set()
1140 (conf->tx_q_conf[qid].queue_en << qid) << in al_udma_gen_tgtid_conf_queue_set()
1143 /* Target-ID RX DESC EN */ in al_udma_gen_tgtid_conf_queue_set()
1144 al_reg_write32_masked(&unit_regs->gen.tgtid.cfg_tgtid_0, in al_udma_gen_tgtid_conf_queue_set()
1145 (conf->rx_q_conf[qid].desc_en << qid) << in al_udma_gen_tgtid_conf_queue_set()
1147 (conf->rx_q_conf[qid].desc_en << qid) << in al_udma_gen_tgtid_conf_queue_set()
1150 /* Target-ID RX QUEUE EN */ in al_udma_gen_tgtid_conf_queue_set()
1151 al_reg_write32_masked(&unit_regs->gen.tgtid.cfg_tgtid_0, in al_udma_gen_tgtid_conf_queue_set()
1152 (conf->rx_q_conf[qid].queue_en << qid) << in al_udma_gen_tgtid_conf_queue_set()
1154 (conf->rx_q_conf[qid].queue_en << qid) << in al_udma_gen_tgtid_conf_queue_set()
1160 tx_tgtid_reg = &unit_regs->gen.tgtid.cfg_tgtid_1; in al_udma_gen_tgtid_conf_queue_set()
1161 rx_tgtid_reg = &unit_regs->gen.tgtid.cfg_tgtid_3; in al_udma_gen_tgtid_conf_queue_set()
1162 tx_tgtaddr_reg = &unit_regs->gen.tgtaddr.cfg_tgtaddr_0; in al_udma_gen_tgtid_conf_queue_set()
1163 rx_tgtaddr_reg = &unit_regs->gen.tgtaddr.cfg_tgtaddr_2; in al_udma_gen_tgtid_conf_queue_set()
1167 tx_tgtid_reg = &unit_regs->gen.tgtid.cfg_tgtid_2; in al_udma_gen_tgtid_conf_queue_set()
1168 rx_tgtid_reg = &unit_regs->gen.tgtid.cfg_tgtid_4; in al_udma_gen_tgtid_conf_queue_set()
1169 tx_tgtaddr_reg = &unit_regs->gen.tgtaddr.cfg_tgtaddr_1; in al_udma_gen_tgtid_conf_queue_set()
1170 rx_tgtaddr_reg = &unit_regs->gen.tgtaddr.cfg_tgtaddr_3; in al_udma_gen_tgtid_conf_queue_set()
1179 conf->tx_q_conf[qid].tgtid << UDMA_GEN_TGTID_CFG_TGTID_SHIFT(qid)); in al_udma_gen_tgtid_conf_queue_set()
1183 conf->rx_q_conf[qid].tgtid << UDMA_GEN_TGTID_CFG_TGTID_SHIFT(qid)); in al_udma_gen_tgtid_conf_queue_set()
1188 conf->tx_q_conf[qid].tgtaddr << UDMA_GEN_TGTADDR_CFG_SHIFT(qid)); in al_udma_gen_tgtid_conf_queue_set()
1192 conf->rx_q_conf[qid].tgtaddr << UDMA_GEN_TGTADDR_CFG_SHIFT(qid)); in al_udma_gen_tgtid_conf_queue_set()
1196 /* UDMA Target-ID control configuration */
1207 /* UDMA Target-ID MSIX control configuration */
1213 &unit_regs->gen.tgtid.cfg_tgtid_0, in al_udma_gen_tgtid_msix_conf_set()
1216 (conf->access_en ? UDMA_GEN_TGTID_CFG_TGTID_0_MSIX_TGTID_ACCESS_EN : 0) | in al_udma_gen_tgtid_msix_conf_set()
1217 (conf->sel ? UDMA_GEN_TGTID_CFG_TGTID_0_MSIX_TGTID_SEL : 0)); in al_udma_gen_tgtid_msix_conf_set()