// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2020-2022, Linaro Limited
 */

#include "dpu_kms.h"
#include "dpu_hw_catalog.h"
#include "dpu_hwio.h"
#include "dpu_hw_mdss.h"
#include "dpu_hw_dsc.h"

#define DSC_COMMON_MODE 0x000
#define DSC_ENC 0x004
#define DSC_PICTURE 0x008
#define DSC_SLICE 0x00C
#define DSC_CHUNK_SIZE 0x010
#define DSC_DELAY 0x014
#define DSC_SCALE_INITIAL 0x018
#define DSC_SCALE_DEC_INTERVAL 0x01C
#define DSC_SCALE_INC_INTERVAL 0x020
#define DSC_FIRST_LINE_BPG_OFFSET 0x024
#define DSC_BPG_OFFSET 0x028
#define DSC_DSC_OFFSET 0x02C
#define DSC_FLATNESS 0x030
#define DSC_RC_MODEL_SIZE 0x034
#define DSC_RC 0x038
#define DSC_RC_BUF_THRESH 0x03C
#define DSC_RANGE_MIN_QP 0x074
#define DSC_RANGE_MAX_QP 0x0B0
#define DSC_RANGE_BPG_OFFSET 0x0EC

static void dpu_hw_dsc_disable(struct dpu_hw_dsc *dsc)
{
	struct dpu_hw_blk_reg_map *c = &dsc->hw;

	DPU_REG_WRITE(c, DSC_COMMON_MODE, 0);
}

static void dpu_hw_dsc_config(struct dpu_hw_dsc *hw_dsc,
			      struct drm_dsc_config *dsc,
			      u32 mode,
			      u32 initial_lines)
{
	struct dpu_hw_blk_reg_map *c = &hw_dsc->hw;
	u32 data;
	u32 slice_last_group_size;
	u32 det_thresh_flatness;
	bool is_cmd_mode = !(mode & DSC_MODE_VIDEO);

	DPU_REG_WRITE(c, DSC_COMMON_MODE, mode);

	if (is_cmd_mode)
		initial_lines += 1;

	slice_last_group_size = 3 - (dsc->slice_width % 3);
	data = (initial_lines << 20);
	data |= ((slice_last_group_size - 1) << 18);
	/* bpp is 6.4 format, the 4 LSBs are the fractional part */
	data |= (dsc->bits_per_pixel << 8);
	data |= (dsc->block_pred_enable << 7);
	data |= (dsc->line_buf_depth << 3);
	data |= (dsc->simple_422 << 2);
	data |= (dsc->convert_rgb << 1);
	data |= dsc->bits_per_component;

	DPU_REG_WRITE(c, DSC_ENC, data);

	data = dsc->pic_width << 16;
	data |= dsc->pic_height;
	DPU_REG_WRITE(c, DSC_PICTURE, data);

	data = dsc->slice_width << 16;
	data |= dsc->slice_height;
	DPU_REG_WRITE(c, DSC_SLICE, data);

	data = dsc->slice_chunk_size << 16;
	DPU_REG_WRITE(c, DSC_CHUNK_SIZE, data);

	data = dsc->initial_dec_delay << 16;
	data |= dsc->initial_xmit_delay;
	DPU_REG_WRITE(c, DSC_DELAY, data);

	data = dsc->initial_scale_value;
	DPU_REG_WRITE(c, DSC_SCALE_INITIAL, data);

	data = dsc->scale_decrement_interval;
	DPU_REG_WRITE(c, DSC_SCALE_DEC_INTERVAL, data);

	data = dsc->scale_increment_interval;
	DPU_REG_WRITE(c, DSC_SCALE_INC_INTERVAL, data);

	data = dsc->first_line_bpg_offset;
	DPU_REG_WRITE(c, DSC_FIRST_LINE_BPG_OFFSET, data);

	data = dsc->nfl_bpg_offset << 16;
	data |= dsc->slice_bpg_offset;
	DPU_REG_WRITE(c, DSC_BPG_OFFSET, data);

	data = dsc->initial_offset << 16;
	data |= dsc->final_offset;
	DPU_REG_WRITE(c, DSC_DSC_OFFSET, data);

	det_thresh_flatness = 7 + 2 * (dsc->bits_per_component - 8);
	data = det_thresh_flatness << 10;
	data |= dsc->flatness_max_qp << 5;
	data |= dsc->flatness_min_qp;
	DPU_REG_WRITE(c, DSC_FLATNESS, data);

	data = dsc->rc_model_size;
	DPU_REG_WRITE(c, DSC_RC_MODEL_SIZE, data);

	data = dsc->rc_tgt_offset_low << 18;
	data |= dsc->rc_tgt_offset_high << 14;
	data |= dsc->rc_quant_incr_limit1 << 9;
	data |= dsc->rc_quant_incr_limit0 << 4;
	data |= dsc->rc_edge_factor;
	DPU_REG_WRITE(c, DSC_RC, data);
}
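/*
 * Program the rate-control threshold registers: the RC buffer thresholds
 * first, then the per-range min QP, max QP and bpg offset values taken
 * from the drm_dsc_config RC range parameters. Each register block is a
 * fixed array of 32-bit registers, stepped through 4 bytes at a time.
 */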
static void dpu_hw_dsc_config_thresh(struct dpu_hw_dsc *hw_dsc,
				     struct drm_dsc_config *dsc)
{
	struct drm_dsc_rc_range_parameters *rc = dsc->rc_range_params;
	struct dpu_hw_blk_reg_map *c = &hw_dsc->hw;
	u32 off;
	int i;

	off = DSC_RC_BUF_THRESH;
	for (i = 0; i < DSC_NUM_BUF_RANGES - 1; i++) {
		DPU_REG_WRITE(c, off, dsc->rc_buf_thresh[i]);
		off += 4;
	}

	off = DSC_RANGE_MIN_QP;
	for (i = 0; i < DSC_NUM_BUF_RANGES; i++) {
		DPU_REG_WRITE(c, off, rc[i].range_min_qp);
		off += 4;
	}

	off = DSC_RANGE_MAX_QP;
	for (i = 0; i < DSC_NUM_BUF_RANGES; i++) {
		DPU_REG_WRITE(c, off, rc[i].range_max_qp);
		off += 4;
	}

	off = DSC_RANGE_BPG_OFFSET;
	for (i = 0; i < DSC_NUM_BUF_RANGES; i++) {
		DPU_REG_WRITE(c, off, rc[i].range_bpg_offset);
		off += 4;
	}
}

static struct dpu_dsc_cfg *_dsc_offset(enum dpu_dsc dsc,
				       const struct dpu_mdss_cfg *m,
				       void __iomem *addr,
				       struct dpu_hw_blk_reg_map *b)
{
	int i;

	for (i = 0; i < m->dsc_count; i++) {
		if (dsc == m->dsc[i].id) {
			b->blk_addr = addr + m->dsc[i].base;
			b->log_mask = DPU_DBG_MASK_DSC;
			return &m->dsc[i];
		}
	}

	return NULL;
}

static void _setup_dsc_ops(struct dpu_hw_dsc_ops *ops,
			   unsigned long cap)
{
	ops->dsc_disable = dpu_hw_dsc_disable;
	ops->dsc_config = dpu_hw_dsc_config;
	ops->dsc_config_thresh = dpu_hw_dsc_config_thresh;
}

struct dpu_hw_dsc *dpu_hw_dsc_init(enum dpu_dsc idx, void __iomem *addr,
				   const struct dpu_mdss_cfg *m)
{
	struct dpu_hw_dsc *c;
	struct dpu_dsc_cfg *cfg;

	c = kzalloc(sizeof(*c), GFP_KERNEL);
	if (!c)
		return ERR_PTR(-ENOMEM);

	cfg = _dsc_offset(idx, m, addr, &c->hw);
	if (IS_ERR_OR_NULL(cfg)) {
		kfree(c);
		return ERR_PTR(-EINVAL);
	}

	c->idx = idx;
	c->caps = cfg;
	_setup_dsc_ops(&c->ops, c->caps->features);

	return c;
}

void dpu_hw_dsc_destroy(struct dpu_hw_dsc *dsc)
{
	kfree(dsc);
}