// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2015 MediaTek Inc.
 */

#include <drm/drm_blend.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_framebuffer.h>

#include <linux/clk.h>
#include <linux/component.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/soc/mediatek/mtk-cmdq.h>

#include "mtk_crtc.h"
#include "mtk_ddp_comp.h"
#include "mtk_disp_drv.h"
#include "mtk_drm_drv.h"

#define DISP_REG_OVL_INTEN			0x0004
#define OVL_FME_CPL_INT				BIT(1)
#define DISP_REG_OVL_INTSTA			0x0008
#define DISP_REG_OVL_EN				0x000c
#define DISP_REG_OVL_RST			0x0014
#define DISP_REG_OVL_ROI_SIZE			0x0020
#define DISP_REG_OVL_DATAPATH_CON		0x0024
#define OVL_LAYER_SMI_ID_EN			BIT(0)
#define OVL_BGCLR_SEL_IN			BIT(2)
#define OVL_LAYER_AFBC_EN(n)			BIT(4+n)
#define DISP_REG_OVL_ROI_BGCLR			0x0028
#define DISP_REG_OVL_SRC_CON			0x002c
#define DISP_REG_OVL_CON(n)			(0x0030 + 0x20 * (n))
#define DISP_REG_OVL_SRC_SIZE(n)		(0x0038 + 0x20 * (n))
#define DISP_REG_OVL_OFFSET(n)			(0x003c + 0x20 * (n))
#define DISP_REG_OVL_PITCH_MSB(n)		(0x0040 + 0x20 * (n))
#define OVL_PITCH_MSB_2ND_SUBBUF		BIT(16)
#define DISP_REG_OVL_PITCH(n)			(0x0044 + 0x20 * (n))
#define OVL_CONST_BLEND				BIT(28)
#define DISP_REG_OVL_RDMA_CTRL(n)		(0x00c0 + 0x20 * (n))
#define DISP_REG_OVL_RDMA_GMC(n)		(0x00c8 + 0x20 * (n))
#define DISP_REG_OVL_ADDR_MT2701		0x0040
#define DISP_REG_OVL_CLRFMT_EXT			0x02d0
#define OVL_CON_CLRFMT_BIT_DEPTH_MASK(n)	(GENMASK(1, 0) << (4 * (n)))
#define OVL_CON_CLRFMT_BIT_DEPTH(depth, n)	((depth) << (4 * (n)))
#define OVL_CON_CLRFMT_8_BIT			(0)
#define OVL_CON_CLRFMT_10_BIT			(1)
#define DISP_REG_OVL_ADDR_MT8173		0x0f40
#define DISP_REG_OVL_ADDR(ovl, n)		((ovl)->data->addr + 0x20 * (n))
#define DISP_REG_OVL_HDR_ADDR(ovl, n)		((ovl)->data->addr + 0x20 * (n) + 0x04)
#define DISP_REG_OVL_HDR_PITCH(ovl, n)		((ovl)->data->addr + 0x20 * (n) + 0x08)

#define GMC_THRESHOLD_BITS	16
#define GMC_THRESHOLD_HIGH	((1 << GMC_THRESHOLD_BITS) / 4)
#define GMC_THRESHOLD_LOW	((1 << GMC_THRESHOLD_BITS) / 8)

#define OVL_CON_CLRFMT_MAN	BIT(23)
#define OVL_CON_BYTE_SWAP	BIT(24)

/* OVL_CON_RGB_SWAP works only if OVL_CON_CLRFMT_MAN is enabled */
#define OVL_CON_RGB_SWAP	BIT(25)

#define OVL_CON_CLRFMT_RGB	(1 << 12)
#define OVL_CON_CLRFMT_ARGB8888	(2 << 12)
#define OVL_CON_CLRFMT_RGBA8888	(3 << 12)
#define OVL_CON_CLRFMT_ABGR8888	(OVL_CON_CLRFMT_ARGB8888 | OVL_CON_BYTE_SWAP)
#define OVL_CON_CLRFMT_BGRA8888	(OVL_CON_CLRFMT_RGBA8888 | OVL_CON_BYTE_SWAP)
#define OVL_CON_CLRFMT_UYVY	(4 << 12)
#define OVL_CON_CLRFMT_YUYV	(5 << 12)
#define OVL_CON_MTX_YUV_TO_RGB	(6 << 16)
#define OVL_CON_CLRFMT_PARGB8888	((3 << 12) | OVL_CON_CLRFMT_MAN)
#define OVL_CON_CLRFMT_PABGR8888	(OVL_CON_CLRFMT_PARGB8888 | OVL_CON_RGB_SWAP)
#define OVL_CON_CLRFMT_PBGRA8888	(OVL_CON_CLRFMT_PARGB8888 | OVL_CON_BYTE_SWAP)
#define OVL_CON_CLRFMT_PRGBA8888	(OVL_CON_CLRFMT_PABGR8888 | OVL_CON_BYTE_SWAP)
#define OVL_CON_CLRFMT_RGB565(ovl)	((ovl)->data->fmt_rgb565_is_0 ? \
					0 : OVL_CON_CLRFMT_RGB)
#define OVL_CON_CLRFMT_RGB888(ovl)	((ovl)->data->fmt_rgb565_is_0 ? \
					OVL_CON_CLRFMT_RGB : 0)
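
/*
 * Note: the RGB565/RGB888 CLRFMT encodings are swapped between SoC generations.
 * When fmt_rgb565_is_0 is set in the driver data (every SoC below except
 * MT2701), RGB565 is encoded as 0 and RGB888 as 1; otherwise the values are
 * reversed. The two helper macros above hide that difference.
 */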
#define OVL_CON_AEN		BIT(8)
#define OVL_CON_ALPHA		0xff
#define OVL_CON_VIRT_FLIP	BIT(9)
#define OVL_CON_HORZ_FLIP	BIT(10)

#define OVL_COLOR_ALPHA		GENMASK(31, 24)

static inline bool is_10bit_rgb(u32 fmt)
{
	switch (fmt) {
	case DRM_FORMAT_XRGB2101010:
	case DRM_FORMAT_ARGB2101010:
	case DRM_FORMAT_RGBX1010102:
	case DRM_FORMAT_RGBA1010102:
	case DRM_FORMAT_XBGR2101010:
	case DRM_FORMAT_ABGR2101010:
	case DRM_FORMAT_BGRX1010102:
	case DRM_FORMAT_BGRA1010102:
		return true;
	}
	return false;
}

static const u32 mt8173_formats[] = {
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_BGRX8888,
	DRM_FORMAT_BGRA8888,
	DRM_FORMAT_ABGR8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_RGB888,
	DRM_FORMAT_BGR888,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_UYVY,
	DRM_FORMAT_YUYV,
};

static const u32 mt8195_formats[] = {
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_XRGB2101010,
	DRM_FORMAT_ARGB2101010,
	DRM_FORMAT_BGRX8888,
	DRM_FORMAT_BGRA8888,
	DRM_FORMAT_BGRX1010102,
	DRM_FORMAT_BGRA1010102,
	DRM_FORMAT_ABGR8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_XBGR2101010,
	DRM_FORMAT_ABGR2101010,
	DRM_FORMAT_RGBX8888,
	DRM_FORMAT_RGBA8888,
	DRM_FORMAT_RGBX1010102,
	DRM_FORMAT_RGBA1010102,
	DRM_FORMAT_RGB888,
	DRM_FORMAT_BGR888,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_UYVY,
	DRM_FORMAT_YUYV,
};

struct mtk_disp_ovl_data {
	unsigned int addr;
	unsigned int gmc_bits;
	unsigned int layer_nr;
	bool fmt_rgb565_is_0;
	bool smi_id_en;
	bool supports_afbc;
	const u32 blend_modes;
	const u32 *formats;
	size_t num_formats;
	bool supports_clrfmt_ext;
};

/*
 * struct mtk_disp_ovl - DISP_OVL driver structure
 * @crtc: associated crtc to report vblank events to
 * @data: platform data
 */
struct mtk_disp_ovl {
	struct drm_crtc *crtc;
	struct clk *clk;
	void __iomem *regs;
	struct cmdq_client_reg cmdq_reg;
	const struct mtk_disp_ovl_data *data;
	void (*vblank_cb)(void *data);
	void *vblank_cb_data;
};

static irqreturn_t mtk_disp_ovl_irq_handler(int irq, void *dev_id)
{
	struct mtk_disp_ovl *priv = dev_id;

	/* Clear frame completion interrupt */
	writel(0x0, priv->regs + DISP_REG_OVL_INTSTA);

	if (!priv->vblank_cb)
		return IRQ_NONE;

	priv->vblank_cb(priv->vblank_cb_data);

	return IRQ_HANDLED;
}

void mtk_ovl_register_vblank_cb(struct device *dev,
				void (*vblank_cb)(void *),
				void *vblank_cb_data)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);

	ovl->vblank_cb = vblank_cb;
	ovl->vblank_cb_data = vblank_cb_data;
}

void mtk_ovl_unregister_vblank_cb(struct device *dev)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);

	ovl->vblank_cb = NULL;
	ovl->vblank_cb_data = NULL;
}

void mtk_ovl_enable_vblank(struct device *dev)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);

	writel(0x0, ovl->regs + DISP_REG_OVL_INTSTA);
	writel_relaxed(OVL_FME_CPL_INT, ovl->regs + DISP_REG_OVL_INTEN);
}

void mtk_ovl_disable_vblank(struct device *dev)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);

	writel_relaxed(0x0, ovl->regs + DISP_REG_OVL_INTEN);
}
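
/*
 * Getters for the per-SoC OVL capabilities. The blend_modes bitmask and the
 * format list are consumed by the plane initialization code; in particular,
 * blend_modes ends up in drm_plane_create_blend_mode_property() via
 * mtk_plane_init() (see the comment in mtk_ovl_fmt_convert() below).
 */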
u32 mtk_ovl_get_blend_modes(struct device *dev)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);

	return ovl->data->blend_modes;
}

const u32 *mtk_ovl_get_formats(struct device *dev)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);

	return ovl->data->formats;
}

size_t mtk_ovl_get_num_formats(struct device *dev)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);

	return ovl->data->num_formats;
}

int mtk_ovl_clk_enable(struct device *dev)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);

	return clk_prepare_enable(ovl->clk);
}

void mtk_ovl_clk_disable(struct device *dev)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);

	clk_disable_unprepare(ovl->clk);
}

void mtk_ovl_start(struct device *dev)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);

	if (ovl->data->smi_id_en) {
		unsigned int reg;

		reg = readl(ovl->regs + DISP_REG_OVL_DATAPATH_CON);
		reg = reg | OVL_LAYER_SMI_ID_EN;
		writel_relaxed(reg, ovl->regs + DISP_REG_OVL_DATAPATH_CON);
	}
	writel_relaxed(0x1, ovl->regs + DISP_REG_OVL_EN);
}

void mtk_ovl_stop(struct device *dev)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);

	writel_relaxed(0x0, ovl->regs + DISP_REG_OVL_EN);
	if (ovl->data->smi_id_en) {
		unsigned int reg;

		reg = readl(ovl->regs + DISP_REG_OVL_DATAPATH_CON);
		reg = reg & ~OVL_LAYER_SMI_ID_EN;
		writel_relaxed(reg, ovl->regs + DISP_REG_OVL_DATAPATH_CON);
	}
}

static void mtk_ovl_set_afbc(struct mtk_disp_ovl *ovl, struct cmdq_pkt *cmdq_pkt,
			     int idx, bool enabled)
{
	mtk_ddp_write_mask(cmdq_pkt, enabled ? OVL_LAYER_AFBC_EN(idx) : 0,
			   &ovl->cmdq_reg, ovl->regs,
			   DISP_REG_OVL_DATAPATH_CON, OVL_LAYER_AFBC_EN(idx));
}

static void mtk_ovl_set_bit_depth(struct device *dev, int idx, u32 format,
				  struct cmdq_pkt *cmdq_pkt)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);
	unsigned int bit_depth = OVL_CON_CLRFMT_8_BIT;

	if (!ovl->data->supports_clrfmt_ext)
		return;

	if (is_10bit_rgb(format))
		bit_depth = OVL_CON_CLRFMT_10_BIT;

	mtk_ddp_write_mask(cmdq_pkt, OVL_CON_CLRFMT_BIT_DEPTH(bit_depth, idx),
			   &ovl->cmdq_reg, ovl->regs, DISP_REG_OVL_CLRFMT_EXT,
			   OVL_CON_CLRFMT_BIT_DEPTH_MASK(idx));
}

void mtk_ovl_config(struct device *dev, unsigned int w,
		    unsigned int h, unsigned int vrefresh,
		    unsigned int bpc, struct cmdq_pkt *cmdq_pkt)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);

	if (w != 0 && h != 0)
		mtk_ddp_write_relaxed(cmdq_pkt, h << 16 | w, &ovl->cmdq_reg, ovl->regs,
				      DISP_REG_OVL_ROI_SIZE);

	/*
	 * The background color must be opaque black (ARGB),
	 * otherwise the alpha blending will have no effect
	 */
	mtk_ddp_write_relaxed(cmdq_pkt, OVL_COLOR_ALPHA, &ovl->cmdq_reg,
			      ovl->regs, DISP_REG_OVL_ROI_BGCLR);

	mtk_ddp_write(cmdq_pkt, 0x1, &ovl->cmdq_reg, ovl->regs, DISP_REG_OVL_RST);
	mtk_ddp_write(cmdq_pkt, 0x0, &ovl->cmdq_reg, ovl->regs, DISP_REG_OVL_RST);
}

unsigned int mtk_ovl_layer_nr(struct device *dev)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);

	return ovl->data->layer_nr;
}

unsigned int mtk_ovl_supported_rotations(struct device *dev)
{
	return DRM_MODE_ROTATE_0 | DRM_MODE_ROTATE_180 |
	       DRM_MODE_REFLECT_X | DRM_MODE_REFLECT_Y;
}

int mtk_ovl_layer_check(struct device *dev, unsigned int idx,
			struct mtk_plane_state *mtk_state)
{
	struct drm_plane_state *state = &mtk_state->base;

	/* check if any unsupported rotation is set */
	if (state->rotation & ~mtk_ovl_supported_rotations(dev))
		return -EINVAL;

	/*
	 * TODO: Rotating/reflecting YUV buffers is not supported at this time.
	 *	 Only RGB[AX] variants are supported.
	 * Since DRM_MODE_ROTATE_0 means "no rotation", we should not
	 * reject layers with this property.
	 */
	if (state->fb->format->is_yuv && (state->rotation & ~DRM_MODE_ROTATE_0))
		return -EINVAL;

	return 0;
}

void mtk_ovl_layer_on(struct device *dev, unsigned int idx,
		      struct cmdq_pkt *cmdq_pkt)
{
	unsigned int gmc_thrshd_l;
	unsigned int gmc_thrshd_h;
	unsigned int gmc_value;
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);

	mtk_ddp_write(cmdq_pkt, 0x1, &ovl->cmdq_reg, ovl->regs,
		      DISP_REG_OVL_RDMA_CTRL(idx));
	gmc_thrshd_l = GMC_THRESHOLD_LOW >>
		       (GMC_THRESHOLD_BITS - ovl->data->gmc_bits);
	gmc_thrshd_h = GMC_THRESHOLD_HIGH >>
		       (GMC_THRESHOLD_BITS - ovl->data->gmc_bits);
	if (ovl->data->gmc_bits == 10)
		gmc_value = gmc_thrshd_h | gmc_thrshd_h << 16;
	else
		gmc_value = gmc_thrshd_l | gmc_thrshd_l << 8 |
			    gmc_thrshd_h << 16 | gmc_thrshd_h << 24;
	mtk_ddp_write(cmdq_pkt, gmc_value,
		      &ovl->cmdq_reg, ovl->regs, DISP_REG_OVL_RDMA_GMC(idx));
	mtk_ddp_write_mask(cmdq_pkt, BIT(idx), &ovl->cmdq_reg, ovl->regs,
			   DISP_REG_OVL_SRC_CON, BIT(idx));
}

void mtk_ovl_layer_off(struct device *dev, unsigned int idx,
		       struct cmdq_pkt *cmdq_pkt)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);

	mtk_ddp_write_mask(cmdq_pkt, 0, &ovl->cmdq_reg, ovl->regs,
			   DISP_REG_OVL_SRC_CON, BIT(idx));
	mtk_ddp_write(cmdq_pkt, 0, &ovl->cmdq_reg, ovl->regs,
		      DISP_REG_OVL_RDMA_CTRL(idx));
}

static unsigned int mtk_ovl_fmt_convert(struct mtk_disp_ovl *ovl,
					struct mtk_plane_state *state)
{
	unsigned int fmt = state->pending.format;
	unsigned int blend_mode = DRM_MODE_BLEND_COVERAGE;

	/*
	 * Platforms where OVL_CON_CLRFMT_MAN is defined in the hardware data
	 * sheet support premultiplied color formats such as
	 * OVL_CON_CLRFMT_PARGB8888.
	 *
	 * Check blend_modes in the driver data to see whether premultiplied
	 * mode is supported. If it is not, fall back to coverage mode and the
	 * color formats that go with it.
	 *
	 * DRM currently assumes that alpha is premultiplied by default, so the
	 * blend_modes bitmask must include BIT(DRM_MODE_BLEND_PREMULTI).
	 * Otherwise, mtk_plane_init() gets an error back from
	 * drm_plane_create_blend_mode_property() and
	 * state->base.pixel_blend_mode must not be used.
	 */
	if (ovl->data->blend_modes & BIT(DRM_MODE_BLEND_PREMULTI))
		blend_mode = state->base.pixel_blend_mode;
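
	/*
	 * The CLRFMT encodings below do not distinguish 8-bit from 10-bit RGB
	 * variants: the 10-bit formats reuse the 8-bit values here, and the
	 * extra bit depth is programmed separately through
	 * DISP_REG_OVL_CLRFMT_EXT in mtk_ovl_set_bit_depth(). Unknown formats
	 * fall back to the RGB565 encoding via the default label.
	 */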
	switch (fmt) {
	default:
	case DRM_FORMAT_RGB565:
		return OVL_CON_CLRFMT_RGB565(ovl);
	case DRM_FORMAT_BGR565:
		return OVL_CON_CLRFMT_RGB565(ovl) | OVL_CON_BYTE_SWAP;
	case DRM_FORMAT_RGB888:
		return OVL_CON_CLRFMT_RGB888(ovl);
	case DRM_FORMAT_BGR888:
		return OVL_CON_CLRFMT_RGB888(ovl) | OVL_CON_BYTE_SWAP;
	case DRM_FORMAT_RGBX8888:
	case DRM_FORMAT_RGBA8888:
	case DRM_FORMAT_RGBX1010102:
	case DRM_FORMAT_RGBA1010102:
		return blend_mode == DRM_MODE_BLEND_COVERAGE ?
		       OVL_CON_CLRFMT_RGBA8888 :
		       OVL_CON_CLRFMT_PRGBA8888;
	case DRM_FORMAT_BGRX8888:
	case DRM_FORMAT_BGRA8888:
	case DRM_FORMAT_BGRX1010102:
	case DRM_FORMAT_BGRA1010102:
		return blend_mode == DRM_MODE_BLEND_COVERAGE ?
		       OVL_CON_CLRFMT_BGRA8888 :
		       OVL_CON_CLRFMT_PBGRA8888;
	case DRM_FORMAT_XRGB8888:
	case DRM_FORMAT_ARGB8888:
	case DRM_FORMAT_XRGB2101010:
	case DRM_FORMAT_ARGB2101010:
		return blend_mode == DRM_MODE_BLEND_COVERAGE ?
		       OVL_CON_CLRFMT_ARGB8888 :
		       OVL_CON_CLRFMT_PARGB8888;
	case DRM_FORMAT_XBGR8888:
	case DRM_FORMAT_ABGR8888:
	case DRM_FORMAT_XBGR2101010:
	case DRM_FORMAT_ABGR2101010:
		return blend_mode == DRM_MODE_BLEND_COVERAGE ?
		       OVL_CON_CLRFMT_ABGR8888 :
		       OVL_CON_CLRFMT_PABGR8888;
	case DRM_FORMAT_UYVY:
		return OVL_CON_CLRFMT_UYVY | OVL_CON_MTX_YUV_TO_RGB;
	case DRM_FORMAT_YUYV:
		return OVL_CON_CLRFMT_YUYV | OVL_CON_MTX_YUV_TO_RGB;
	}
}

void mtk_ovl_layer_config(struct device *dev, unsigned int idx,
			  struct mtk_plane_state *state,
			  struct cmdq_pkt *cmdq_pkt)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);
	struct mtk_plane_pending_state *pending = &state->pending;
	unsigned int addr = pending->addr;
	unsigned int hdr_addr = pending->hdr_addr;
	unsigned int pitch = pending->pitch;
	unsigned int hdr_pitch = pending->hdr_pitch;
	unsigned int fmt = pending->format;
	unsigned int offset = (pending->y << 16) | pending->x;
	unsigned int src_size = (pending->height << 16) | pending->width;
	unsigned int blend_mode = state->base.pixel_blend_mode;
	unsigned int ignore_pixel_alpha = 0;
	unsigned int con;
	bool is_afbc = pending->modifier != DRM_FORMAT_MOD_LINEAR;
	union overlay_pitch {
		struct split_pitch {
			u16 lsb;
			u16 msb;
		} split_pitch;
		u32 pitch;
	} overlay_pitch;

	overlay_pitch.pitch = pitch;

	if (!pending->enable) {
		mtk_ovl_layer_off(dev, idx, cmdq_pkt);
		return;
	}

	con = mtk_ovl_fmt_convert(ovl, state);
	if (state->base.fb) {
		con |= state->base.alpha & OVL_CON_ALPHA;

		/*
		 * On SoCs that support blend_modes, always enable alpha blending.
		 * On SoCs that do not, enable alpha blending only when has_alpha is set.
		 */
		if (blend_mode || state->base.fb->format->has_alpha)
			con |= OVL_CON_AEN;

		/*
		 * Although the alpha channel can be ignored, CONST_BLD must be enabled
		 * for XRGB formats, otherwise OVL still reads the alpha value from
		 * memory. For RGB888-like formats, CONST_BLD makes no difference to
		 * the result, so !has_alpha is used as the condition.
		 */
		if (blend_mode == DRM_MODE_BLEND_PIXEL_NONE || !state->base.fb->format->has_alpha)
			ignore_pixel_alpha = OVL_CONST_BLEND;
	}
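
	/*
	 * When a layer is reflected, the matching flip bit is set below and the
	 * base address is moved to the start of the last line (REFLECT_Y) or to
	 * the end of the first line (REFLECT_X) to match the flipped scan
	 * direction.
	 */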
	if (pending->rotation & DRM_MODE_REFLECT_Y) {
		con |= OVL_CON_VIRT_FLIP;
		addr += (pending->height - 1) * pending->pitch;
	}

	if (pending->rotation & DRM_MODE_REFLECT_X) {
		con |= OVL_CON_HORZ_FLIP;
		addr += pending->pitch - 1;
	}

	if (ovl->data->supports_afbc)
		mtk_ovl_set_afbc(ovl, cmdq_pkt, idx, is_afbc);

	mtk_ddp_write_relaxed(cmdq_pkt, con, &ovl->cmdq_reg, ovl->regs,
			      DISP_REG_OVL_CON(idx));
	mtk_ddp_write_relaxed(cmdq_pkt, overlay_pitch.split_pitch.lsb | ignore_pixel_alpha,
			      &ovl->cmdq_reg, ovl->regs, DISP_REG_OVL_PITCH(idx));
	mtk_ddp_write_relaxed(cmdq_pkt, src_size, &ovl->cmdq_reg, ovl->regs,
			      DISP_REG_OVL_SRC_SIZE(idx));
	mtk_ddp_write_relaxed(cmdq_pkt, offset, &ovl->cmdq_reg, ovl->regs,
			      DISP_REG_OVL_OFFSET(idx));
	mtk_ddp_write_relaxed(cmdq_pkt, addr, &ovl->cmdq_reg, ovl->regs,
			      DISP_REG_OVL_ADDR(ovl, idx));

	if (is_afbc) {
		mtk_ddp_write_relaxed(cmdq_pkt, hdr_addr, &ovl->cmdq_reg, ovl->regs,
				      DISP_REG_OVL_HDR_ADDR(ovl, idx));
		mtk_ddp_write_relaxed(cmdq_pkt,
				      OVL_PITCH_MSB_2ND_SUBBUF | overlay_pitch.split_pitch.msb,
				      &ovl->cmdq_reg, ovl->regs, DISP_REG_OVL_PITCH_MSB(idx));
		mtk_ddp_write_relaxed(cmdq_pkt, hdr_pitch, &ovl->cmdq_reg, ovl->regs,
				      DISP_REG_OVL_HDR_PITCH(ovl, idx));
	} else {
		mtk_ddp_write_relaxed(cmdq_pkt,
				      overlay_pitch.split_pitch.msb,
				      &ovl->cmdq_reg, ovl->regs, DISP_REG_OVL_PITCH_MSB(idx));
	}

	mtk_ovl_set_bit_depth(dev, idx, fmt, cmdq_pkt);
	mtk_ovl_layer_on(dev, idx, cmdq_pkt);
}

void mtk_ovl_bgclr_in_on(struct device *dev)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);
	unsigned int reg;

	reg = readl(ovl->regs + DISP_REG_OVL_DATAPATH_CON);
	reg = reg | OVL_BGCLR_SEL_IN;
	writel(reg, ovl->regs + DISP_REG_OVL_DATAPATH_CON);
}

void mtk_ovl_bgclr_in_off(struct device *dev)
{
	struct mtk_disp_ovl *ovl = dev_get_drvdata(dev);
	unsigned int reg;

	reg = readl(ovl->regs + DISP_REG_OVL_DATAPATH_CON);
	reg = reg & ~OVL_BGCLR_SEL_IN;
	writel(reg, ovl->regs + DISP_REG_OVL_DATAPATH_CON);
}

static int mtk_disp_ovl_bind(struct device *dev, struct device *master,
			     void *data)
{
	return 0;
}

static void mtk_disp_ovl_unbind(struct device *dev, struct device *master,
				void *data)
{
}

static const struct component_ops mtk_disp_ovl_component_ops = {
	.bind	= mtk_disp_ovl_bind,
	.unbind	= mtk_disp_ovl_unbind,
};

static int mtk_disp_ovl_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct mtk_disp_ovl *priv;
	struct resource *res;
	int irq;
	int ret;

	priv = devm_kzalloc(dev, sizeof(*priv), GFP_KERNEL);
	if (!priv)
		return -ENOMEM;

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	priv->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(priv->clk))
		return dev_err_probe(dev, PTR_ERR(priv->clk),
				     "failed to get ovl clk\n");

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	priv->regs = devm_ioremap_resource(dev, res);
	if (IS_ERR(priv->regs))
		return dev_err_probe(dev, PTR_ERR(priv->regs),
				     "failed to ioremap ovl\n");
#if IS_REACHABLE(CONFIG_MTK_CMDQ)
	ret = cmdq_dev_get_client_reg(dev, &priv->cmdq_reg, 0);
	if (ret)
		dev_dbg(dev, "get mediatek,gce-client-reg fail!\n");
#endif
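
	/*
	 * A missing GCE client register is not fatal: the mtk_ddp_write*()
	 * helpers only go through CMDQ when a cmdq_pkt is supplied and
	 * otherwise fall back to plain MMIO writes, hence the debug-level
	 * message above.
	 */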

	priv->data = of_device_get_match_data(dev);
	platform_set_drvdata(pdev, priv);

	ret = devm_request_irq(dev, irq, mtk_disp_ovl_irq_handler,
			       IRQF_TRIGGER_NONE, dev_name(dev), priv);
	if (ret < 0)
		return dev_err_probe(dev, ret, "Failed to request irq %d\n", irq);

	pm_runtime_enable(dev);

	ret = component_add(dev, &mtk_disp_ovl_component_ops);
	if (ret) {
		pm_runtime_disable(dev);
		return dev_err_probe(dev, ret, "Failed to add component\n");
	}

	return 0;
}

static void mtk_disp_ovl_remove(struct platform_device *pdev)
{
	component_del(&pdev->dev, &mtk_disp_ovl_component_ops);
	pm_runtime_disable(&pdev->dev);
}

static const struct mtk_disp_ovl_data mt2701_ovl_driver_data = {
	.addr = DISP_REG_OVL_ADDR_MT2701,
	.gmc_bits = 8,
	.layer_nr = 4,
	.fmt_rgb565_is_0 = false,
	.formats = mt8173_formats,
	.num_formats = ARRAY_SIZE(mt8173_formats),
};

static const struct mtk_disp_ovl_data mt8173_ovl_driver_data = {
	.addr = DISP_REG_OVL_ADDR_MT8173,
	.gmc_bits = 8,
	.layer_nr = 4,
	.fmt_rgb565_is_0 = true,
	.formats = mt8173_formats,
	.num_formats = ARRAY_SIZE(mt8173_formats),
};

static const struct mtk_disp_ovl_data mt8183_ovl_driver_data = {
	.addr = DISP_REG_OVL_ADDR_MT8173,
	.gmc_bits = 10,
	.layer_nr = 4,
	.fmt_rgb565_is_0 = true,
	.formats = mt8173_formats,
	.num_formats = ARRAY_SIZE(mt8173_formats),
};

static const struct mtk_disp_ovl_data mt8183_ovl_2l_driver_data = {
	.addr = DISP_REG_OVL_ADDR_MT8173,
	.gmc_bits = 10,
	.layer_nr = 2,
	.fmt_rgb565_is_0 = true,
	.formats = mt8173_formats,
	.num_formats = ARRAY_SIZE(mt8173_formats),
};

static const struct mtk_disp_ovl_data mt8192_ovl_driver_data = {
	.addr = DISP_REG_OVL_ADDR_MT8173,
	.gmc_bits = 10,
	.layer_nr = 4,
	.fmt_rgb565_is_0 = true,
	.smi_id_en = true,
	.blend_modes = BIT(DRM_MODE_BLEND_PREMULTI) |
		       BIT(DRM_MODE_BLEND_COVERAGE) |
		       BIT(DRM_MODE_BLEND_PIXEL_NONE),
	.formats = mt8173_formats,
	.num_formats = ARRAY_SIZE(mt8173_formats),
};

static const struct mtk_disp_ovl_data mt8192_ovl_2l_driver_data = {
	.addr = DISP_REG_OVL_ADDR_MT8173,
	.gmc_bits = 10,
	.layer_nr = 2,
	.fmt_rgb565_is_0 = true,
	.smi_id_en = true,
	.blend_modes = BIT(DRM_MODE_BLEND_PREMULTI) |
		       BIT(DRM_MODE_BLEND_COVERAGE) |
		       BIT(DRM_MODE_BLEND_PIXEL_NONE),
	.formats = mt8173_formats,
	.num_formats = ARRAY_SIZE(mt8173_formats),
};

static const struct mtk_disp_ovl_data mt8195_ovl_driver_data = {
	.addr = DISP_REG_OVL_ADDR_MT8173,
	.gmc_bits = 10,
	.layer_nr = 4,
	.fmt_rgb565_is_0 = true,
	.smi_id_en = true,
	.supports_afbc = true,
	.blend_modes = BIT(DRM_MODE_BLEND_PREMULTI) |
		       BIT(DRM_MODE_BLEND_COVERAGE) |
		       BIT(DRM_MODE_BLEND_PIXEL_NONE),
	.formats = mt8195_formats,
	.num_formats = ARRAY_SIZE(mt8195_formats),
	.supports_clrfmt_ext = true,
};

static const struct of_device_id mtk_disp_ovl_driver_dt_match[] = {
	{ .compatible = "mediatek,mt2701-disp-ovl",
	  .data = &mt2701_ovl_driver_data},
	{ .compatible = "mediatek,mt8173-disp-ovl",
	  .data = &mt8173_ovl_driver_data},
	{ .compatible = "mediatek,mt8183-disp-ovl",
	  .data = &mt8183_ovl_driver_data},
	{ .compatible = "mediatek,mt8183-disp-ovl-2l",
	  .data = &mt8183_ovl_2l_driver_data},
"mediatek,mt8192-disp-ovl", 738 .data = &mt8192_ovl_driver_data}, 739 { .compatible = "mediatek,mt8192-disp-ovl-2l", 740 .data = &mt8192_ovl_2l_driver_data}, 741 { .compatible = "mediatek,mt8195-disp-ovl", 742 .data = &mt8195_ovl_driver_data}, 743 {}, 744 }; 745 MODULE_DEVICE_TABLE(of, mtk_disp_ovl_driver_dt_match); 746 747 struct platform_driver mtk_disp_ovl_driver = { 748 .probe = mtk_disp_ovl_probe, 749 .remove_new = mtk_disp_ovl_remove, 750 .driver = { 751 .name = "mediatek-disp-ovl", 752 .of_match_table = mtk_disp_ovl_driver_dt_match, 753 }, 754 }; 755