1aaa36a97SAlex Deucher /* 2aaa36a97SAlex Deucher * Copyright 2014 Advanced Micro Devices, Inc. 3aaa36a97SAlex Deucher * 4aaa36a97SAlex Deucher * Permission is hereby granted, free of charge, to any person obtaining a 5aaa36a97SAlex Deucher * copy of this software and associated documentation files (the "Software"), 6aaa36a97SAlex Deucher * to deal in the Software without restriction, including without limitation 7aaa36a97SAlex Deucher * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8aaa36a97SAlex Deucher * and/or sell copies of the Software, and to permit persons to whom the 9aaa36a97SAlex Deucher * Software is furnished to do so, subject to the following conditions: 10aaa36a97SAlex Deucher * 11aaa36a97SAlex Deucher * The above copyright notice and this permission notice shall be included in 12aaa36a97SAlex Deucher * all copies or substantial portions of the Software. 13aaa36a97SAlex Deucher * 14aaa36a97SAlex Deucher * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15aaa36a97SAlex Deucher * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16aaa36a97SAlex Deucher * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 17aaa36a97SAlex Deucher * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR 18aaa36a97SAlex Deucher * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 19aaa36a97SAlex Deucher * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 20aaa36a97SAlex Deucher * OTHER DEALINGS IN THE SOFTWARE. 
21aaa36a97SAlex Deucher * 22aaa36a97SAlex Deucher */ 2347b757fbSSam Ravnborg 24d9501844SJani Nikula #include <drm/drm_edid.h> 2547b757fbSSam Ravnborg #include <drm/drm_fourcc.h> 26973ad627SThomas Zimmermann #include <drm/drm_modeset_helper.h> 27973ad627SThomas Zimmermann #include <drm/drm_modeset_helper_vtables.h> 2847b757fbSSam Ravnborg #include <drm/drm_vblank.h> 2947b757fbSSam Ravnborg 30aaa36a97SAlex Deucher #include "amdgpu.h" 31aaa36a97SAlex Deucher #include "amdgpu_pm.h" 32aaa36a97SAlex Deucher #include "amdgpu_i2c.h" 33aaa36a97SAlex Deucher #include "vid.h" 34aaa36a97SAlex Deucher #include "atom.h" 35aaa36a97SAlex Deucher #include "amdgpu_atombios.h" 36aaa36a97SAlex Deucher #include "atombios_crtc.h" 37aaa36a97SAlex Deucher #include "atombios_encoders.h" 38aaa36a97SAlex Deucher #include "amdgpu_pll.h" 39aaa36a97SAlex Deucher #include "amdgpu_connectors.h" 405df58525SHuang Rui #include "amdgpu_display.h" 41356aee30SBaoyou Xie #include "dce_v11_0.h" 42aaa36a97SAlex Deucher 43aaa36a97SAlex Deucher #include "dce/dce_11_0_d.h" 44aaa36a97SAlex Deucher #include "dce/dce_11_0_sh_mask.h" 45aaa36a97SAlex Deucher #include "dce/dce_11_0_enum.h" 46aaa36a97SAlex Deucher #include "oss/oss_3_0_d.h" 47aaa36a97SAlex Deucher #include "oss/oss_3_0_sh_mask.h" 48aaa36a97SAlex Deucher #include "gmc/gmc_8_1_d.h" 49aaa36a97SAlex Deucher #include "gmc/gmc_8_1_sh_mask.h" 50aaa36a97SAlex Deucher 51091aec0bSAndrey Grodzovsky #include "ivsrcid/ivsrcid_vislands30.h" 52091aec0bSAndrey Grodzovsky 53aaa36a97SAlex Deucher static void dce_v11_0_set_display_funcs(struct amdgpu_device *adev); 54aaa36a97SAlex Deucher static void dce_v11_0_set_irq_funcs(struct amdgpu_device *adev); 55aeaf3e6cSQiang Ma static void dce_v11_0_hpd_int_ack(struct amdgpu_device *adev, int hpd); 56aaa36a97SAlex Deucher 57aaa36a97SAlex Deucher static const u32 crtc_offsets[] = 58aaa36a97SAlex Deucher { 59aaa36a97SAlex Deucher CRTC0_REGISTER_OFFSET, 60aaa36a97SAlex Deucher CRTC1_REGISTER_OFFSET, 61aaa36a97SAlex Deucher 
CRTC2_REGISTER_OFFSET, 62aaa36a97SAlex Deucher CRTC3_REGISTER_OFFSET, 63aaa36a97SAlex Deucher CRTC4_REGISTER_OFFSET, 64aaa36a97SAlex Deucher CRTC5_REGISTER_OFFSET, 65aaa36a97SAlex Deucher CRTC6_REGISTER_OFFSET 66aaa36a97SAlex Deucher }; 67aaa36a97SAlex Deucher 68aaa36a97SAlex Deucher static const u32 hpd_offsets[] = 69aaa36a97SAlex Deucher { 70aaa36a97SAlex Deucher HPD0_REGISTER_OFFSET, 71aaa36a97SAlex Deucher HPD1_REGISTER_OFFSET, 72aaa36a97SAlex Deucher HPD2_REGISTER_OFFSET, 73aaa36a97SAlex Deucher HPD3_REGISTER_OFFSET, 74aaa36a97SAlex Deucher HPD4_REGISTER_OFFSET, 75aaa36a97SAlex Deucher HPD5_REGISTER_OFFSET 76aaa36a97SAlex Deucher }; 77aaa36a97SAlex Deucher 78aaa36a97SAlex Deucher static const uint32_t dig_offsets[] = { 79aaa36a97SAlex Deucher DIG0_REGISTER_OFFSET, 80aaa36a97SAlex Deucher DIG1_REGISTER_OFFSET, 81aaa36a97SAlex Deucher DIG2_REGISTER_OFFSET, 82aaa36a97SAlex Deucher DIG3_REGISTER_OFFSET, 83aaa36a97SAlex Deucher DIG4_REGISTER_OFFSET, 84aaa36a97SAlex Deucher DIG5_REGISTER_OFFSET, 85aaa36a97SAlex Deucher DIG6_REGISTER_OFFSET, 86aaa36a97SAlex Deucher DIG7_REGISTER_OFFSET, 87aaa36a97SAlex Deucher DIG8_REGISTER_OFFSET 88aaa36a97SAlex Deucher }; 89aaa36a97SAlex Deucher 90aaa36a97SAlex Deucher static const struct { 91aaa36a97SAlex Deucher uint32_t reg; 92aaa36a97SAlex Deucher uint32_t vblank; 93aaa36a97SAlex Deucher uint32_t vline; 94aaa36a97SAlex Deucher uint32_t hpd; 95aaa36a97SAlex Deucher 96aaa36a97SAlex Deucher } interrupt_status_offsets[] = { { 97aaa36a97SAlex Deucher .reg = mmDISP_INTERRUPT_STATUS, 98aaa36a97SAlex Deucher .vblank = DISP_INTERRUPT_STATUS__LB_D1_VBLANK_INTERRUPT_MASK, 99aaa36a97SAlex Deucher .vline = DISP_INTERRUPT_STATUS__LB_D1_VLINE_INTERRUPT_MASK, 100aaa36a97SAlex Deucher .hpd = DISP_INTERRUPT_STATUS__DC_HPD1_INTERRUPT_MASK 101aaa36a97SAlex Deucher }, { 102aaa36a97SAlex Deucher .reg = mmDISP_INTERRUPT_STATUS_CONTINUE, 103aaa36a97SAlex Deucher .vblank = DISP_INTERRUPT_STATUS_CONTINUE__LB_D2_VBLANK_INTERRUPT_MASK, 104aaa36a97SAlex 
Deucher .vline = DISP_INTERRUPT_STATUS_CONTINUE__LB_D2_VLINE_INTERRUPT_MASK, 105aaa36a97SAlex Deucher .hpd = DISP_INTERRUPT_STATUS_CONTINUE__DC_HPD2_INTERRUPT_MASK 106aaa36a97SAlex Deucher }, { 107aaa36a97SAlex Deucher .reg = mmDISP_INTERRUPT_STATUS_CONTINUE2, 108aaa36a97SAlex Deucher .vblank = DISP_INTERRUPT_STATUS_CONTINUE2__LB_D3_VBLANK_INTERRUPT_MASK, 109aaa36a97SAlex Deucher .vline = DISP_INTERRUPT_STATUS_CONTINUE2__LB_D3_VLINE_INTERRUPT_MASK, 110aaa36a97SAlex Deucher .hpd = DISP_INTERRUPT_STATUS_CONTINUE2__DC_HPD3_INTERRUPT_MASK 111aaa36a97SAlex Deucher }, { 112aaa36a97SAlex Deucher .reg = mmDISP_INTERRUPT_STATUS_CONTINUE3, 113aaa36a97SAlex Deucher .vblank = DISP_INTERRUPT_STATUS_CONTINUE3__LB_D4_VBLANK_INTERRUPT_MASK, 114aaa36a97SAlex Deucher .vline = DISP_INTERRUPT_STATUS_CONTINUE3__LB_D4_VLINE_INTERRUPT_MASK, 115aaa36a97SAlex Deucher .hpd = DISP_INTERRUPT_STATUS_CONTINUE3__DC_HPD4_INTERRUPT_MASK 116aaa36a97SAlex Deucher }, { 117aaa36a97SAlex Deucher .reg = mmDISP_INTERRUPT_STATUS_CONTINUE4, 118aaa36a97SAlex Deucher .vblank = DISP_INTERRUPT_STATUS_CONTINUE4__LB_D5_VBLANK_INTERRUPT_MASK, 119aaa36a97SAlex Deucher .vline = DISP_INTERRUPT_STATUS_CONTINUE4__LB_D5_VLINE_INTERRUPT_MASK, 120aaa36a97SAlex Deucher .hpd = DISP_INTERRUPT_STATUS_CONTINUE4__DC_HPD5_INTERRUPT_MASK 121aaa36a97SAlex Deucher }, { 122aaa36a97SAlex Deucher .reg = mmDISP_INTERRUPT_STATUS_CONTINUE5, 123aaa36a97SAlex Deucher .vblank = DISP_INTERRUPT_STATUS_CONTINUE5__LB_D6_VBLANK_INTERRUPT_MASK, 124aaa36a97SAlex Deucher .vline = DISP_INTERRUPT_STATUS_CONTINUE5__LB_D6_VLINE_INTERRUPT_MASK, 125aaa36a97SAlex Deucher .hpd = DISP_INTERRUPT_STATUS_CONTINUE5__DC_HPD6_INTERRUPT_MASK 126aaa36a97SAlex Deucher } }; 127aaa36a97SAlex Deucher 128aaa36a97SAlex Deucher static const u32 cz_golden_settings_a11[] = 129aaa36a97SAlex Deucher { 130aaa36a97SAlex Deucher mmCRTC_DOUBLE_BUFFER_CONTROL, 0x00010101, 0x00010000, 131aaa36a97SAlex Deucher mmFBC_MISC, 0x1f311fff, 0x14300000, 132aaa36a97SAlex Deucher }; 
133aaa36a97SAlex Deucher 1345732a94fSAlex Deucher static const u32 cz_mgcg_cgcg_init[] = 1355732a94fSAlex Deucher { 1365732a94fSAlex Deucher mmXDMA_CLOCK_GATING_CNTL, 0xffffffff, 0x00000100, 1375732a94fSAlex Deucher mmXDMA_MEM_POWER_CNTL, 0x00000101, 0x00000000, 1385732a94fSAlex Deucher }; 1395732a94fSAlex Deucher 140fa2f9befSSamuel Li static const u32 stoney_golden_settings_a11[] = 141fa2f9befSSamuel Li { 142fa2f9befSSamuel Li mmCRTC_DOUBLE_BUFFER_CONTROL, 0x00010101, 0x00010000, 143fa2f9befSSamuel Li mmFBC_MISC, 0x1f311fff, 0x14302000, 144fa2f9befSSamuel Li }; 145fa2f9befSSamuel Li 1462cc0c0b5SFlora Cui static const u32 polaris11_golden_settings_a11[] = 14760909285SFlora Cui { 14860909285SFlora Cui mmDCI_CLK_CNTL, 0x00000080, 0x00000000, 14960909285SFlora Cui mmFBC_DEBUG_COMP, 0x000000f0, 0x00000070, 15060909285SFlora Cui mmFBC_DEBUG1, 0xffffffff, 0x00000008, 151b9934878SFlora Cui mmFBC_MISC, 0x9f313fff, 0x14302008, 15260909285SFlora Cui mmHDMI_CONTROL, 0x313f031f, 0x00000011, 15360909285SFlora Cui }; 15460909285SFlora Cui 1552cc0c0b5SFlora Cui static const u32 polaris10_golden_settings_a11[] = 15660909285SFlora Cui { 15760909285SFlora Cui mmDCI_CLK_CNTL, 0x00000080, 0x00000000, 15860909285SFlora Cui mmFBC_DEBUG_COMP, 0x000000f0, 0x00000070, 159d4ab989fSFlora Cui mmFBC_MISC, 0x9f313fff, 0x14302008, 16060909285SFlora Cui mmHDMI_CONTROL, 0x313f031f, 0x00000011, 16160909285SFlora Cui }; 162fa2f9befSSamuel Li 163aaa36a97SAlex Deucher static void dce_v11_0_init_golden_registers(struct amdgpu_device *adev) 164aaa36a97SAlex Deucher { 165aaa36a97SAlex Deucher switch (adev->asic_type) { 166aaa36a97SAlex Deucher case CHIP_CARRIZO: 1679c3f2b54SAlex Deucher amdgpu_device_program_register_sequence(adev, 1685732a94fSAlex Deucher cz_mgcg_cgcg_init, 169c47b41a7SChristian König ARRAY_SIZE(cz_mgcg_cgcg_init)); 1709c3f2b54SAlex Deucher amdgpu_device_program_register_sequence(adev, 171aaa36a97SAlex Deucher cz_golden_settings_a11, 172c47b41a7SChristian König 
ARRAY_SIZE(cz_golden_settings_a11)); 173aaa36a97SAlex Deucher break; 174fa2f9befSSamuel Li case CHIP_STONEY: 1759c3f2b54SAlex Deucher amdgpu_device_program_register_sequence(adev, 176fa2f9befSSamuel Li stoney_golden_settings_a11, 177c47b41a7SChristian König ARRAY_SIZE(stoney_golden_settings_a11)); 178fa2f9befSSamuel Li break; 1792cc0c0b5SFlora Cui case CHIP_POLARIS11: 180c4642a47SJunwei Zhang case CHIP_POLARIS12: 1819c3f2b54SAlex Deucher amdgpu_device_program_register_sequence(adev, 1822cc0c0b5SFlora Cui polaris11_golden_settings_a11, 183c47b41a7SChristian König ARRAY_SIZE(polaris11_golden_settings_a11)); 18460909285SFlora Cui break; 1852cc0c0b5SFlora Cui case CHIP_POLARIS10: 186221adb21SAlex Deucher case CHIP_VEGAM: 1879c3f2b54SAlex Deucher amdgpu_device_program_register_sequence(adev, 1882cc0c0b5SFlora Cui polaris10_golden_settings_a11, 189c47b41a7SChristian König ARRAY_SIZE(polaris10_golden_settings_a11)); 19060909285SFlora Cui break; 191aaa36a97SAlex Deucher default: 192aaa36a97SAlex Deucher break; 193aaa36a97SAlex Deucher } 194aaa36a97SAlex Deucher } 195aaa36a97SAlex Deucher 196aaa36a97SAlex Deucher static u32 dce_v11_0_audio_endpt_rreg(struct amdgpu_device *adev, 197aaa36a97SAlex Deucher u32 block_offset, u32 reg) 198aaa36a97SAlex Deucher { 199aaa36a97SAlex Deucher unsigned long flags; 200aaa36a97SAlex Deucher u32 r; 201aaa36a97SAlex Deucher 202aaa36a97SAlex Deucher spin_lock_irqsave(&adev->audio_endpt_idx_lock, flags); 203aaa36a97SAlex Deucher WREG32(mmAZALIA_F0_CODEC_ENDPOINT_INDEX + block_offset, reg); 204aaa36a97SAlex Deucher r = RREG32(mmAZALIA_F0_CODEC_ENDPOINT_DATA + block_offset); 205aaa36a97SAlex Deucher spin_unlock_irqrestore(&adev->audio_endpt_idx_lock, flags); 206aaa36a97SAlex Deucher 207aaa36a97SAlex Deucher return r; 208aaa36a97SAlex Deucher } 209aaa36a97SAlex Deucher 210aaa36a97SAlex Deucher static void dce_v11_0_audio_endpt_wreg(struct amdgpu_device *adev, 211aaa36a97SAlex Deucher u32 block_offset, u32 reg, u32 v) 212aaa36a97SAlex Deucher { 
213aaa36a97SAlex Deucher unsigned long flags; 214aaa36a97SAlex Deucher 215aaa36a97SAlex Deucher spin_lock_irqsave(&adev->audio_endpt_idx_lock, flags); 216aaa36a97SAlex Deucher WREG32(mmAZALIA_F0_CODEC_ENDPOINT_INDEX + block_offset, reg); 217aaa36a97SAlex Deucher WREG32(mmAZALIA_F0_CODEC_ENDPOINT_DATA + block_offset, v); 218aaa36a97SAlex Deucher spin_unlock_irqrestore(&adev->audio_endpt_idx_lock, flags); 219aaa36a97SAlex Deucher } 220aaa36a97SAlex Deucher 221aaa36a97SAlex Deucher static u32 dce_v11_0_vblank_get_counter(struct amdgpu_device *adev, int crtc) 222aaa36a97SAlex Deucher { 22315c3277fSTom St Denis if (crtc < 0 || crtc >= adev->mode_info.num_crtc) 224aaa36a97SAlex Deucher return 0; 225aaa36a97SAlex Deucher else 226aaa36a97SAlex Deucher return RREG32(mmCRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]); 227aaa36a97SAlex Deucher } 228aaa36a97SAlex Deucher 229f6c7aba4SMichel Dänzer static void dce_v11_0_pageflip_interrupt_init(struct amdgpu_device *adev) 230f6c7aba4SMichel Dänzer { 231f6c7aba4SMichel Dänzer unsigned i; 232f6c7aba4SMichel Dänzer 233f6c7aba4SMichel Dänzer /* Enable pflip interrupts */ 234f6c7aba4SMichel Dänzer for (i = 0; i < adev->mode_info.num_crtc; i++) 235f6c7aba4SMichel Dänzer amdgpu_irq_get(adev, &adev->pageflip_irq, i); 236f6c7aba4SMichel Dänzer } 237f6c7aba4SMichel Dänzer 238f6c7aba4SMichel Dänzer static void dce_v11_0_pageflip_interrupt_fini(struct amdgpu_device *adev) 239f6c7aba4SMichel Dänzer { 240f6c7aba4SMichel Dänzer unsigned i; 241f6c7aba4SMichel Dänzer 242f6c7aba4SMichel Dänzer /* Disable pflip interrupts */ 243f6c7aba4SMichel Dänzer for (i = 0; i < adev->mode_info.num_crtc; i++) 244f6c7aba4SMichel Dänzer amdgpu_irq_put(adev, &adev->pageflip_irq, i); 245f6c7aba4SMichel Dänzer } 246f6c7aba4SMichel Dänzer 247aaa36a97SAlex Deucher /** 248aaa36a97SAlex Deucher * dce_v11_0_page_flip - pageflip callback. 
249aaa36a97SAlex Deucher * 250aaa36a97SAlex Deucher * @adev: amdgpu_device pointer 251aaa36a97SAlex Deucher * @crtc_id: crtc to cleanup pageflip on 252aaa36a97SAlex Deucher * @crtc_base: new address of the crtc (GPU MC address) 253c4403754SLee Jones * @async: asynchronous flip 254aaa36a97SAlex Deucher * 255ce055fe3SAlex Deucher * Triggers the actual pageflip by updating the primary 256ce055fe3SAlex Deucher * surface base address. 257aaa36a97SAlex Deucher */ 258aaa36a97SAlex Deucher static void dce_v11_0_page_flip(struct amdgpu_device *adev, 259cb9e59d7SAlex Deucher int crtc_id, u64 crtc_base, bool async) 260aaa36a97SAlex Deucher { 261aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = adev->mode_info.crtcs[crtc_id]; 262965ebe3dSMichel Dänzer struct drm_framebuffer *fb = amdgpu_crtc->base.primary->fb; 263cb9e59d7SAlex Deucher u32 tmp; 264aaa36a97SAlex Deucher 2657359ee63SAlex Deucher /* flip immediate for async, default is vsync */ 266cb9e59d7SAlex Deucher tmp = RREG32(mmGRPH_FLIP_CONTROL + amdgpu_crtc->crtc_offset); 267cb9e59d7SAlex Deucher tmp = REG_SET_FIELD(tmp, GRPH_FLIP_CONTROL, 2687359ee63SAlex Deucher GRPH_SURFACE_UPDATE_IMMEDIATE_EN, async ? 
1 : 0); 269cb9e59d7SAlex Deucher WREG32(mmGRPH_FLIP_CONTROL + amdgpu_crtc->crtc_offset, tmp); 270965ebe3dSMichel Dänzer /* update pitch */ 271965ebe3dSMichel Dänzer WREG32(mmGRPH_PITCH + amdgpu_crtc->crtc_offset, 272965ebe3dSMichel Dänzer fb->pitches[0] / fb->format->cpp[0]); 273aaa36a97SAlex Deucher /* update the scanout addresses */ 274aaa36a97SAlex Deucher WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset, 275aaa36a97SAlex Deucher upper_32_bits(crtc_base)); 276ce055fe3SAlex Deucher /* writing to the low address triggers the update */ 277aaa36a97SAlex Deucher WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset, 278aaa36a97SAlex Deucher lower_32_bits(crtc_base)); 279ce055fe3SAlex Deucher /* post the write */ 280ce055fe3SAlex Deucher RREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset); 281aaa36a97SAlex Deucher } 282aaa36a97SAlex Deucher 283aaa36a97SAlex Deucher static int dce_v11_0_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc, 284aaa36a97SAlex Deucher u32 *vbl, u32 *position) 285aaa36a97SAlex Deucher { 286aaa36a97SAlex Deucher if ((crtc < 0) || (crtc >= adev->mode_info.num_crtc)) 287aaa36a97SAlex Deucher return -EINVAL; 288aaa36a97SAlex Deucher 289aaa36a97SAlex Deucher *vbl = RREG32(mmCRTC_V_BLANK_START_END + crtc_offsets[crtc]); 290aaa36a97SAlex Deucher *position = RREG32(mmCRTC_STATUS_POSITION + crtc_offsets[crtc]); 291aaa36a97SAlex Deucher 292aaa36a97SAlex Deucher return 0; 293aaa36a97SAlex Deucher } 294aaa36a97SAlex Deucher 295aaa36a97SAlex Deucher /** 296aaa36a97SAlex Deucher * dce_v11_0_hpd_sense - hpd sense callback. 297aaa36a97SAlex Deucher * 298aaa36a97SAlex Deucher * @adev: amdgpu_device pointer 299aaa36a97SAlex Deucher * @hpd: hpd (hotplug detect) pin 300aaa36a97SAlex Deucher * 301aaa36a97SAlex Deucher * Checks if a digital monitor is connected (evergreen+). 302aaa36a97SAlex Deucher * Returns true if connected, false if not connected. 
303aaa36a97SAlex Deucher */ 304aaa36a97SAlex Deucher static bool dce_v11_0_hpd_sense(struct amdgpu_device *adev, 305aaa36a97SAlex Deucher enum amdgpu_hpd_id hpd) 306aaa36a97SAlex Deucher { 307aaa36a97SAlex Deucher bool connected = false; 308aaa36a97SAlex Deucher 309d2486d25SAlex Deucher if (hpd >= adev->mode_info.num_hpd) 310aaa36a97SAlex Deucher return connected; 311aaa36a97SAlex Deucher 312d2486d25SAlex Deucher if (RREG32(mmDC_HPD_INT_STATUS + hpd_offsets[hpd]) & 313aaa36a97SAlex Deucher DC_HPD_INT_STATUS__DC_HPD_SENSE_MASK) 314aaa36a97SAlex Deucher connected = true; 315aaa36a97SAlex Deucher 316aaa36a97SAlex Deucher return connected; 317aaa36a97SAlex Deucher } 318aaa36a97SAlex Deucher 319aaa36a97SAlex Deucher /** 320aaa36a97SAlex Deucher * dce_v11_0_hpd_set_polarity - hpd set polarity callback. 321aaa36a97SAlex Deucher * 322aaa36a97SAlex Deucher * @adev: amdgpu_device pointer 323aaa36a97SAlex Deucher * @hpd: hpd (hotplug detect) pin 324aaa36a97SAlex Deucher * 325aaa36a97SAlex Deucher * Set the polarity of the hpd pin (evergreen+). 
326aaa36a97SAlex Deucher */ 327aaa36a97SAlex Deucher static void dce_v11_0_hpd_set_polarity(struct amdgpu_device *adev, 328aaa36a97SAlex Deucher enum amdgpu_hpd_id hpd) 329aaa36a97SAlex Deucher { 330aaa36a97SAlex Deucher u32 tmp; 331aaa36a97SAlex Deucher bool connected = dce_v11_0_hpd_sense(adev, hpd); 332aaa36a97SAlex Deucher 333d2486d25SAlex Deucher if (hpd >= adev->mode_info.num_hpd) 334aaa36a97SAlex Deucher return; 335aaa36a97SAlex Deucher 336d2486d25SAlex Deucher tmp = RREG32(mmDC_HPD_INT_CONTROL + hpd_offsets[hpd]); 337aaa36a97SAlex Deucher if (connected) 338aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, DC_HPD_INT_CONTROL, DC_HPD_INT_POLARITY, 0); 339aaa36a97SAlex Deucher else 340aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, DC_HPD_INT_CONTROL, DC_HPD_INT_POLARITY, 1); 341d2486d25SAlex Deucher WREG32(mmDC_HPD_INT_CONTROL + hpd_offsets[hpd], tmp); 342aaa36a97SAlex Deucher } 343aaa36a97SAlex Deucher 344aaa36a97SAlex Deucher /** 345aaa36a97SAlex Deucher * dce_v11_0_hpd_init - hpd setup callback. 346aaa36a97SAlex Deucher * 347aaa36a97SAlex Deucher * @adev: amdgpu_device pointer 348aaa36a97SAlex Deucher * 349aaa36a97SAlex Deucher * Setup the hpd pins used by the card (evergreen+). 350aaa36a97SAlex Deucher * Enable the pin, set the polarity, and enable the hpd interrupts. 
351aaa36a97SAlex Deucher */ 352aaa36a97SAlex Deucher static void dce_v11_0_hpd_init(struct amdgpu_device *adev) 353aaa36a97SAlex Deucher { 3544a580877SLuben Tuikov struct drm_device *dev = adev_to_drm(adev); 355aaa36a97SAlex Deucher struct drm_connector *connector; 356f8d2d39eSLyude Paul struct drm_connector_list_iter iter; 357aaa36a97SAlex Deucher u32 tmp; 358aaa36a97SAlex Deucher 359f8d2d39eSLyude Paul drm_connector_list_iter_begin(dev, &iter); 360f8d2d39eSLyude Paul drm_for_each_connector_iter(connector, &iter) { 361aaa36a97SAlex Deucher struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 362aaa36a97SAlex Deucher 363d2486d25SAlex Deucher if (amdgpu_connector->hpd.hpd >= adev->mode_info.num_hpd) 364aaa36a97SAlex Deucher continue; 365aaa36a97SAlex Deucher 3663a9d993eSAlex Deucher if (connector->connector_type == DRM_MODE_CONNECTOR_eDP || 3673a9d993eSAlex Deucher connector->connector_type == DRM_MODE_CONNECTOR_LVDS) { 3683a9d993eSAlex Deucher /* don't try to enable hpd on eDP or LVDS avoid breaking the 3693a9d993eSAlex Deucher * aux dp channel on imac and help (but not completely fix) 3703a9d993eSAlex Deucher * https://bugzilla.redhat.com/show_bug.cgi?id=726143 3713a9d993eSAlex Deucher * also avoid interrupt storms during dpms. 
3723a9d993eSAlex Deucher */ 373d2486d25SAlex Deucher tmp = RREG32(mmDC_HPD_INT_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]); 3743a9d993eSAlex Deucher tmp = REG_SET_FIELD(tmp, DC_HPD_INT_CONTROL, DC_HPD_INT_EN, 0); 375d2486d25SAlex Deucher WREG32(mmDC_HPD_INT_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp); 3763a9d993eSAlex Deucher continue; 3773a9d993eSAlex Deucher } 3783a9d993eSAlex Deucher 379d2486d25SAlex Deucher tmp = RREG32(mmDC_HPD_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]); 380aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, DC_HPD_CONTROL, DC_HPD_EN, 1); 381d2486d25SAlex Deucher WREG32(mmDC_HPD_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp); 382aaa36a97SAlex Deucher 383d2486d25SAlex Deucher tmp = RREG32(mmDC_HPD_TOGGLE_FILT_CNTL + hpd_offsets[amdgpu_connector->hpd.hpd]); 384aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, DC_HPD_TOGGLE_FILT_CNTL, 385aaa36a97SAlex Deucher DC_HPD_CONNECT_INT_DELAY, 386aaa36a97SAlex Deucher AMDGPU_HPD_CONNECT_INT_DELAY_IN_MS); 387aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, DC_HPD_TOGGLE_FILT_CNTL, 388aaa36a97SAlex Deucher DC_HPD_DISCONNECT_INT_DELAY, 389aaa36a97SAlex Deucher AMDGPU_HPD_DISCONNECT_INT_DELAY_IN_MS); 390d2486d25SAlex Deucher WREG32(mmDC_HPD_TOGGLE_FILT_CNTL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp); 391aaa36a97SAlex Deucher 392aeaf3e6cSQiang Ma dce_v11_0_hpd_int_ack(adev, amdgpu_connector->hpd.hpd); 393aaa36a97SAlex Deucher dce_v11_0_hpd_set_polarity(adev, amdgpu_connector->hpd.hpd); 394aaa36a97SAlex Deucher amdgpu_irq_get(adev, &adev->hpd_irq, amdgpu_connector->hpd.hpd); 395aaa36a97SAlex Deucher } 396f8d2d39eSLyude Paul drm_connector_list_iter_end(&iter); 397aaa36a97SAlex Deucher } 398aaa36a97SAlex Deucher 399aaa36a97SAlex Deucher /** 400aaa36a97SAlex Deucher * dce_v11_0_hpd_fini - hpd tear down callback. 
401aaa36a97SAlex Deucher * 402aaa36a97SAlex Deucher * @adev: amdgpu_device pointer 403aaa36a97SAlex Deucher * 404aaa36a97SAlex Deucher * Tear down the hpd pins used by the card (evergreen+). 405aaa36a97SAlex Deucher * Disable the hpd interrupts. 406aaa36a97SAlex Deucher */ 407aaa36a97SAlex Deucher static void dce_v11_0_hpd_fini(struct amdgpu_device *adev) 408aaa36a97SAlex Deucher { 4094a580877SLuben Tuikov struct drm_device *dev = adev_to_drm(adev); 410aaa36a97SAlex Deucher struct drm_connector *connector; 411f8d2d39eSLyude Paul struct drm_connector_list_iter iter; 412aaa36a97SAlex Deucher u32 tmp; 413aaa36a97SAlex Deucher 414f8d2d39eSLyude Paul drm_connector_list_iter_begin(dev, &iter); 415f8d2d39eSLyude Paul drm_for_each_connector_iter(connector, &iter) { 416aaa36a97SAlex Deucher struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 417aaa36a97SAlex Deucher 418d2486d25SAlex Deucher if (amdgpu_connector->hpd.hpd >= adev->mode_info.num_hpd) 419aaa36a97SAlex Deucher continue; 420aaa36a97SAlex Deucher 421d2486d25SAlex Deucher tmp = RREG32(mmDC_HPD_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]); 422aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, DC_HPD_CONTROL, DC_HPD_EN, 0); 423d2486d25SAlex Deucher WREG32(mmDC_HPD_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp); 424aaa36a97SAlex Deucher 425aaa36a97SAlex Deucher amdgpu_irq_put(adev, &adev->hpd_irq, amdgpu_connector->hpd.hpd); 426aaa36a97SAlex Deucher } 427f8d2d39eSLyude Paul drm_connector_list_iter_end(&iter); 428aaa36a97SAlex Deucher } 429aaa36a97SAlex Deucher 430aaa36a97SAlex Deucher static u32 dce_v11_0_hpd_get_gpio_reg(struct amdgpu_device *adev) 431aaa36a97SAlex Deucher { 432aaa36a97SAlex Deucher return mmDC_GPIO_HPD_A; 433aaa36a97SAlex Deucher } 434aaa36a97SAlex Deucher 435aaa36a97SAlex Deucher static bool dce_v11_0_is_display_hung(struct amdgpu_device *adev) 436aaa36a97SAlex Deucher { 437aaa36a97SAlex Deucher u32 crtc_hung = 0; 438aaa36a97SAlex Deucher u32 crtc_status[6]; 
439aaa36a97SAlex Deucher u32 i, j, tmp; 440aaa36a97SAlex Deucher 441aaa36a97SAlex Deucher for (i = 0; i < adev->mode_info.num_crtc; i++) { 442aaa36a97SAlex Deucher tmp = RREG32(mmCRTC_CONTROL + crtc_offsets[i]); 443aaa36a97SAlex Deucher if (REG_GET_FIELD(tmp, CRTC_CONTROL, CRTC_MASTER_EN)) { 444aaa36a97SAlex Deucher crtc_status[i] = RREG32(mmCRTC_STATUS_HV_COUNT + crtc_offsets[i]); 445aaa36a97SAlex Deucher crtc_hung |= (1 << i); 446aaa36a97SAlex Deucher } 447aaa36a97SAlex Deucher } 448aaa36a97SAlex Deucher 449aaa36a97SAlex Deucher for (j = 0; j < 10; j++) { 450aaa36a97SAlex Deucher for (i = 0; i < adev->mode_info.num_crtc; i++) { 451aaa36a97SAlex Deucher if (crtc_hung & (1 << i)) { 452aaa36a97SAlex Deucher tmp = RREG32(mmCRTC_STATUS_HV_COUNT + crtc_offsets[i]); 453aaa36a97SAlex Deucher if (tmp != crtc_status[i]) 454aaa36a97SAlex Deucher crtc_hung &= ~(1 << i); 455aaa36a97SAlex Deucher } 456aaa36a97SAlex Deucher } 457aaa36a97SAlex Deucher if (crtc_hung == 0) 458aaa36a97SAlex Deucher return false; 459aaa36a97SAlex Deucher udelay(100); 460aaa36a97SAlex Deucher } 461aaa36a97SAlex Deucher 462aaa36a97SAlex Deucher return true; 463aaa36a97SAlex Deucher } 464aaa36a97SAlex Deucher 465aaa36a97SAlex Deucher static void dce_v11_0_set_vga_render_state(struct amdgpu_device *adev, 466aaa36a97SAlex Deucher bool render) 467aaa36a97SAlex Deucher { 468aaa36a97SAlex Deucher u32 tmp; 469aaa36a97SAlex Deucher 470aaa36a97SAlex Deucher /* Lockout access through VGA aperture*/ 471aaa36a97SAlex Deucher tmp = RREG32(mmVGA_HDP_CONTROL); 472aaa36a97SAlex Deucher if (render) 473aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, VGA_HDP_CONTROL, VGA_MEMORY_DISABLE, 0); 474aaa36a97SAlex Deucher else 475aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, VGA_HDP_CONTROL, VGA_MEMORY_DISABLE, 1); 476aaa36a97SAlex Deucher WREG32(mmVGA_HDP_CONTROL, tmp); 477aaa36a97SAlex Deucher 478aaa36a97SAlex Deucher /* disable VGA render */ 479aaa36a97SAlex Deucher tmp = RREG32(mmVGA_RENDER_CONTROL); 480aaa36a97SAlex 
Deucher if (render) 481aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, VGA_RENDER_CONTROL, VGA_VSTATUS_CNTL, 1); 482aaa36a97SAlex Deucher else 483aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, VGA_RENDER_CONTROL, VGA_VSTATUS_CNTL, 0); 484aaa36a97SAlex Deucher WREG32(mmVGA_RENDER_CONTROL, tmp); 485aaa36a97SAlex Deucher } 486aaa36a97SAlex Deucher 48783c9b025SEmily Deng static int dce_v11_0_get_num_crtc (struct amdgpu_device *adev) 48883c9b025SEmily Deng { 48983c9b025SEmily Deng int num_crtc = 0; 49083c9b025SEmily Deng 49183c9b025SEmily Deng switch (adev->asic_type) { 49283c9b025SEmily Deng case CHIP_CARRIZO: 49383c9b025SEmily Deng num_crtc = 3; 49483c9b025SEmily Deng break; 49583c9b025SEmily Deng case CHIP_STONEY: 49683c9b025SEmily Deng num_crtc = 2; 49783c9b025SEmily Deng break; 49883c9b025SEmily Deng case CHIP_POLARIS10: 499221adb21SAlex Deucher case CHIP_VEGAM: 50083c9b025SEmily Deng num_crtc = 6; 50183c9b025SEmily Deng break; 50283c9b025SEmily Deng case CHIP_POLARIS11: 503c4642a47SJunwei Zhang case CHIP_POLARIS12: 50483c9b025SEmily Deng num_crtc = 5; 50583c9b025SEmily Deng break; 50683c9b025SEmily Deng default: 50783c9b025SEmily Deng num_crtc = 0; 50883c9b025SEmily Deng } 50983c9b025SEmily Deng return num_crtc; 51083c9b025SEmily Deng } 51183c9b025SEmily Deng 51283c9b025SEmily Deng void dce_v11_0_disable_dce(struct amdgpu_device *adev) 51383c9b025SEmily Deng { 51483c9b025SEmily Deng /*Disable VGA render and enabled crtc, if has DCE engine*/ 51583c9b025SEmily Deng if (amdgpu_atombios_has_dce_engine_info(adev)) { 51683c9b025SEmily Deng u32 tmp; 51783c9b025SEmily Deng int crtc_enabled, i; 51883c9b025SEmily Deng 51983c9b025SEmily Deng dce_v11_0_set_vga_render_state(adev, false); 52083c9b025SEmily Deng 52183c9b025SEmily Deng /*Disable crtc*/ 52283c9b025SEmily Deng for (i = 0; i < dce_v11_0_get_num_crtc(adev); i++) { 52383c9b025SEmily Deng crtc_enabled = REG_GET_FIELD(RREG32(mmCRTC_CONTROL + crtc_offsets[i]), 52483c9b025SEmily Deng CRTC_CONTROL, CRTC_MASTER_EN); 
52583c9b025SEmily Deng if (crtc_enabled) { 52683c9b025SEmily Deng WREG32(mmCRTC_UPDATE_LOCK + crtc_offsets[i], 1); 52783c9b025SEmily Deng tmp = RREG32(mmCRTC_CONTROL + crtc_offsets[i]); 52883c9b025SEmily Deng tmp = REG_SET_FIELD(tmp, CRTC_CONTROL, CRTC_MASTER_EN, 0); 52983c9b025SEmily Deng WREG32(mmCRTC_CONTROL + crtc_offsets[i], tmp); 53083c9b025SEmily Deng WREG32(mmCRTC_UPDATE_LOCK + crtc_offsets[i], 0); 53183c9b025SEmily Deng } 53283c9b025SEmily Deng } 53383c9b025SEmily Deng } 53483c9b025SEmily Deng } 53583c9b025SEmily Deng 536aaa36a97SAlex Deucher static void dce_v11_0_program_fmt(struct drm_encoder *encoder) 537aaa36a97SAlex Deucher { 538aaa36a97SAlex Deucher struct drm_device *dev = encoder->dev; 5391348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 540aaa36a97SAlex Deucher struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 541aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); 542aaa36a97SAlex Deucher struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 543aaa36a97SAlex Deucher int bpc = 0; 544aaa36a97SAlex Deucher u32 tmp = 0; 545aaa36a97SAlex Deucher enum amdgpu_connector_dither dither = AMDGPU_FMT_DITHER_DISABLE; 546aaa36a97SAlex Deucher 547aaa36a97SAlex Deucher if (connector) { 548aaa36a97SAlex Deucher struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 549aaa36a97SAlex Deucher bpc = amdgpu_connector_get_monitor_bpc(connector); 550aaa36a97SAlex Deucher dither = amdgpu_connector->dither; 551aaa36a97SAlex Deucher } 552aaa36a97SAlex Deucher 553aaa36a97SAlex Deucher /* LVDS/eDP FMT is set up by atom */ 554aaa36a97SAlex Deucher if (amdgpu_encoder->devices & ATOM_DEVICE_LCD_SUPPORT) 555aaa36a97SAlex Deucher return; 556aaa36a97SAlex Deucher 557aaa36a97SAlex Deucher /* not needed for analog */ 558aaa36a97SAlex Deucher if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) || 559aaa36a97SAlex Deucher (amdgpu_encoder->encoder_id 
== ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2)) 560aaa36a97SAlex Deucher return; 561aaa36a97SAlex Deucher 562aaa36a97SAlex Deucher if (bpc == 0) 563aaa36a97SAlex Deucher return; 564aaa36a97SAlex Deucher 565aaa36a97SAlex Deucher switch (bpc) { 566aaa36a97SAlex Deucher case 6: 567aaa36a97SAlex Deucher if (dither == AMDGPU_FMT_DITHER_ENABLE) { 568aaa36a97SAlex Deucher /* XXX sort out optimal dither settings */ 569aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_FRAME_RANDOM_ENABLE, 1); 570aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_HIGHPASS_RANDOM_ENABLE, 1); 571aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_SPATIAL_DITHER_EN, 1); 572aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_SPATIAL_DITHER_DEPTH, 0); 573aaa36a97SAlex Deucher } else { 574aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_TRUNCATE_EN, 1); 575aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_TRUNCATE_DEPTH, 0); 576aaa36a97SAlex Deucher } 577aaa36a97SAlex Deucher break; 578aaa36a97SAlex Deucher case 8: 579aaa36a97SAlex Deucher if (dither == AMDGPU_FMT_DITHER_ENABLE) { 580aaa36a97SAlex Deucher /* XXX sort out optimal dither settings */ 581aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_FRAME_RANDOM_ENABLE, 1); 582aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_HIGHPASS_RANDOM_ENABLE, 1); 583aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_RGB_RANDOM_ENABLE, 1); 584aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_SPATIAL_DITHER_EN, 1); 585aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_SPATIAL_DITHER_DEPTH, 1); 586aaa36a97SAlex Deucher } else { 587aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_TRUNCATE_EN, 1); 588aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_TRUNCATE_DEPTH, 
1); 589aaa36a97SAlex Deucher } 590aaa36a97SAlex Deucher break; 591aaa36a97SAlex Deucher case 10: 592aaa36a97SAlex Deucher if (dither == AMDGPU_FMT_DITHER_ENABLE) { 593aaa36a97SAlex Deucher /* XXX sort out optimal dither settings */ 594aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_FRAME_RANDOM_ENABLE, 1); 595aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_HIGHPASS_RANDOM_ENABLE, 1); 596aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_RGB_RANDOM_ENABLE, 1); 597aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_SPATIAL_DITHER_EN, 1); 598aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_SPATIAL_DITHER_DEPTH, 2); 599aaa36a97SAlex Deucher } else { 600aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_TRUNCATE_EN, 1); 601aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, FMT_BIT_DEPTH_CONTROL, FMT_TRUNCATE_DEPTH, 2); 602aaa36a97SAlex Deucher } 603aaa36a97SAlex Deucher break; 604aaa36a97SAlex Deucher default: 605aaa36a97SAlex Deucher /* not needed */ 606aaa36a97SAlex Deucher break; 607aaa36a97SAlex Deucher } 608aaa36a97SAlex Deucher 609aaa36a97SAlex Deucher WREG32(mmFMT_BIT_DEPTH_CONTROL + amdgpu_crtc->crtc_offset, tmp); 610aaa36a97SAlex Deucher } 611aaa36a97SAlex Deucher 612aaa36a97SAlex Deucher 613aaa36a97SAlex Deucher /* display watermark setup */ 614aaa36a97SAlex Deucher /** 615aaa36a97SAlex Deucher * dce_v11_0_line_buffer_adjust - Set up the line buffer 616aaa36a97SAlex Deucher * 617aaa36a97SAlex Deucher * @adev: amdgpu_device pointer 618aaa36a97SAlex Deucher * @amdgpu_crtc: the selected display controller 619aaa36a97SAlex Deucher * @mode: the current display mode on the selected display 620aaa36a97SAlex Deucher * controller 621aaa36a97SAlex Deucher * 622aaa36a97SAlex Deucher * Setup up the line buffer allocation for 623aaa36a97SAlex Deucher * the selected display controller (CIK). 
 * Returns the line buffer size in pixels.
 */
static u32 dce_v11_0_line_buffer_adjust(struct amdgpu_device *adev,
					struct amdgpu_crtc *amdgpu_crtc,
					struct drm_display_mode *mode)
{
	u32 tmp, buffer_alloc, i, mem_cfg;
	u32 pipe_offset = amdgpu_crtc->crtc_id;
	/*
	 * Line Buffer Setup
	 * There are 6 line buffers, one for each display controllers.
	 * There are 3 partitions per LB. Select the number of partitions
	 * to enable based on the display width.  For display widths larger
	 * than 4096, you need use to use 2 display controllers and combine
	 * them using the stereo blender.
	 */
	if (amdgpu_crtc->base.enabled && mode) {
		if (mode->crtc_hdisplay < 1920) {
			mem_cfg = 1;
			buffer_alloc = 2;
		} else if (mode->crtc_hdisplay < 2560) {
			mem_cfg = 2;
			buffer_alloc = 2;
		} else if (mode->crtc_hdisplay < 4096) {
			mem_cfg = 0;
			/* APUs get fewer DMIF buffers than dGPUs */
			buffer_alloc = (adev->flags & AMD_IS_APU) ? 2 : 4;
		} else {
			DRM_DEBUG_KMS("Mode too big for LB!\n");
			mem_cfg = 0;
			buffer_alloc = (adev->flags & AMD_IS_APU) ? 2 : 4;
		}
	} else {
		/* CRTC disabled: minimal LB config and no DMIF buffers */
		mem_cfg = 1;
		buffer_alloc = 0;
	}

	tmp = RREG32(mmLB_MEMORY_CTRL + amdgpu_crtc->crtc_offset);
	tmp = REG_SET_FIELD(tmp, LB_MEMORY_CTRL, LB_MEMORY_CONFIG, mem_cfg);
	WREG32(mmLB_MEMORY_CTRL + amdgpu_crtc->crtc_offset, tmp);

	tmp = RREG32(mmPIPE0_DMIF_BUFFER_CONTROL + pipe_offset);
	tmp = REG_SET_FIELD(tmp, PIPE0_DMIF_BUFFER_CONTROL, DMIF_BUFFERS_ALLOCATED, buffer_alloc);
	WREG32(mmPIPE0_DMIF_BUFFER_CONTROL + pipe_offset, tmp);

	/* wait (bounded by adev->usec_timeout) for the allocation to complete */
	for (i = 0; i < adev->usec_timeout; i++) {
		tmp = RREG32(mmPIPE0_DMIF_BUFFER_CONTROL + pipe_offset);
		if (REG_GET_FIELD(tmp, PIPE0_DMIF_BUFFER_CONTROL, DMIF_BUFFERS_ALLOCATION_COMPLETED))
			break;
		udelay(1);
	}

	if (amdgpu_crtc->base.enabled && mode) {
		/* translate the chosen memory config back into an LB size in pixels */
		switch (mem_cfg) {
		case 0:
		default:
			return 4096 * 2;
		case 1:
			return 1920 * 2;
		case 2:
			return 2560 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}

/**
 * cik_get_number_of_dram_channels - get the number of dram channels
 *
 * @adev: amdgpu_device pointer
 * Look up the number of video ram channels (CIK).
 * Used for display watermark bandwidth calculations
 * Returns the number of dram channels
 */
static u32 cik_get_number_of_dram_channels(struct amdgpu_device *adev)
{
	u32 tmp = RREG32(mmMC_SHARED_CHMAP);

	/* NOOFCHAN is an encoded field, not a raw channel count */
	switch (REG_GET_FIELD(tmp, MC_SHARED_CHMAP, NOOFCHAN)) {
	case 0:
	default:
		return 1;
	case 1:
		return 2;
	case 2:
		return 4;
	case 3:
		return 8;
	case 4:
		return 3;
	case 5:
		return 6;
	case 6:
		return 10;
	case 7:
		return 12;
	case 8:
		return 16;
	}
}

/* Inputs to the display watermark calculations below. */
struct dce10_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk; /* bandwidth per dram data pin in kHz */
	u32 sclk; /* engine clock in kHz */
	u32 disp_clk; /* display clock in kHz */
	u32 src_width; /* viewport width */
	u32 active_time; /* active display time in ns */
	u32 blank_time; /* blank time in ns */
	bool interlaced; /* mode is interlaced */
	fixed20_12 vsc; /* vertical scale ratio */
	u32 num_heads; /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size; /* line buffer allocated to pipe */
	u32 vtaps; /* vertical scaler taps */
};

/**
 * dce_v11_0_dram_bandwidth - get the dram bandwidth
 *
 * @wm: watermark calculation data
 *
 * Calculate the raw dram bandwidth (CIK).
 * Used for display watermark bandwidth calculations
 * Returns the dram bandwidth in MBytes/s
 */
static u32 dce_v11_0_dram_bandwidth(struct dce10_wm_params *wm)
{
	/* Calculate raw DRAM Bandwidth */
	fixed20_12 dram_efficiency; /* 0.7 */
	fixed20_12 yclk, dram_channels, bandwidth;
	fixed20_12 a;

	/* yclk is in kHz; divide by 1000 to work in MHz */
	a.full = dfixed_const(1000);
	yclk.full = dfixed_const(wm->yclk);
	yclk.full = dfixed_div(yclk, a);
	/* 4 bytes per channel per clock */
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
	a.full = dfixed_const(10);
	dram_efficiency.full = dfixed_const(7);
	dram_efficiency.full = dfixed_div(dram_efficiency, a);
	bandwidth.full = dfixed_mul(dram_channels, yclk);
	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);

	return dfixed_trunc(bandwidth);
}

/**
 * dce_v11_0_dram_bandwidth_for_display - get the dram bandwidth for display
Deucher * 775aaa36a97SAlex Deucher * @wm: watermark calculation data 776aaa36a97SAlex Deucher * 777aaa36a97SAlex Deucher * Calculate the dram bandwidth used for display (CIK). 778aaa36a97SAlex Deucher * Used for display watermark bandwidth calculations 779aaa36a97SAlex Deucher * Returns the dram bandwidth for display in MBytes/s 780aaa36a97SAlex Deucher */ 781aaa36a97SAlex Deucher static u32 dce_v11_0_dram_bandwidth_for_display(struct dce10_wm_params *wm) 782aaa36a97SAlex Deucher { 783aaa36a97SAlex Deucher /* Calculate DRAM Bandwidth and the part allocated to display. */ 784aaa36a97SAlex Deucher fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */ 785aaa36a97SAlex Deucher fixed20_12 yclk, dram_channels, bandwidth; 786aaa36a97SAlex Deucher fixed20_12 a; 787aaa36a97SAlex Deucher 788aaa36a97SAlex Deucher a.full = dfixed_const(1000); 789aaa36a97SAlex Deucher yclk.full = dfixed_const(wm->yclk); 790aaa36a97SAlex Deucher yclk.full = dfixed_div(yclk, a); 791aaa36a97SAlex Deucher dram_channels.full = dfixed_const(wm->dram_channels * 4); 792aaa36a97SAlex Deucher a.full = dfixed_const(10); 793aaa36a97SAlex Deucher disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */ 794aaa36a97SAlex Deucher disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a); 795aaa36a97SAlex Deucher bandwidth.full = dfixed_mul(dram_channels, yclk); 796aaa36a97SAlex Deucher bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation); 797aaa36a97SAlex Deucher 798aaa36a97SAlex Deucher return dfixed_trunc(bandwidth); 799aaa36a97SAlex Deucher } 800aaa36a97SAlex Deucher 801aaa36a97SAlex Deucher /** 802aaa36a97SAlex Deucher * dce_v11_0_data_return_bandwidth - get the data return bandwidth 803aaa36a97SAlex Deucher * 804aaa36a97SAlex Deucher * @wm: watermark calculation data 805aaa36a97SAlex Deucher * 806aaa36a97SAlex Deucher * Calculate the data return bandwidth used for display (CIK). 
807aaa36a97SAlex Deucher * Used for display watermark bandwidth calculations 808aaa36a97SAlex Deucher * Returns the data return bandwidth in MBytes/s 809aaa36a97SAlex Deucher */ 810aaa36a97SAlex Deucher static u32 dce_v11_0_data_return_bandwidth(struct dce10_wm_params *wm) 811aaa36a97SAlex Deucher { 812aaa36a97SAlex Deucher /* Calculate the display Data return Bandwidth */ 813aaa36a97SAlex Deucher fixed20_12 return_efficiency; /* 0.8 */ 814aaa36a97SAlex Deucher fixed20_12 sclk, bandwidth; 815aaa36a97SAlex Deucher fixed20_12 a; 816aaa36a97SAlex Deucher 817aaa36a97SAlex Deucher a.full = dfixed_const(1000); 818aaa36a97SAlex Deucher sclk.full = dfixed_const(wm->sclk); 819aaa36a97SAlex Deucher sclk.full = dfixed_div(sclk, a); 820aaa36a97SAlex Deucher a.full = dfixed_const(10); 821aaa36a97SAlex Deucher return_efficiency.full = dfixed_const(8); 822aaa36a97SAlex Deucher return_efficiency.full = dfixed_div(return_efficiency, a); 823aaa36a97SAlex Deucher a.full = dfixed_const(32); 824aaa36a97SAlex Deucher bandwidth.full = dfixed_mul(a, sclk); 825aaa36a97SAlex Deucher bandwidth.full = dfixed_mul(bandwidth, return_efficiency); 826aaa36a97SAlex Deucher 827aaa36a97SAlex Deucher return dfixed_trunc(bandwidth); 828aaa36a97SAlex Deucher } 829aaa36a97SAlex Deucher 830aaa36a97SAlex Deucher /** 831aaa36a97SAlex Deucher * dce_v11_0_dmif_request_bandwidth - get the dmif bandwidth 832aaa36a97SAlex Deucher * 833aaa36a97SAlex Deucher * @wm: watermark calculation data 834aaa36a97SAlex Deucher * 835aaa36a97SAlex Deucher * Calculate the dmif bandwidth used for display (CIK). 
836aaa36a97SAlex Deucher * Used for display watermark bandwidth calculations 837aaa36a97SAlex Deucher * Returns the dmif bandwidth in MBytes/s 838aaa36a97SAlex Deucher */ 839aaa36a97SAlex Deucher static u32 dce_v11_0_dmif_request_bandwidth(struct dce10_wm_params *wm) 840aaa36a97SAlex Deucher { 841aaa36a97SAlex Deucher /* Calculate the DMIF Request Bandwidth */ 842aaa36a97SAlex Deucher fixed20_12 disp_clk_request_efficiency; /* 0.8 */ 843aaa36a97SAlex Deucher fixed20_12 disp_clk, bandwidth; 844aaa36a97SAlex Deucher fixed20_12 a, b; 845aaa36a97SAlex Deucher 846aaa36a97SAlex Deucher a.full = dfixed_const(1000); 847aaa36a97SAlex Deucher disp_clk.full = dfixed_const(wm->disp_clk); 848aaa36a97SAlex Deucher disp_clk.full = dfixed_div(disp_clk, a); 849aaa36a97SAlex Deucher a.full = dfixed_const(32); 850aaa36a97SAlex Deucher b.full = dfixed_mul(a, disp_clk); 851aaa36a97SAlex Deucher 852aaa36a97SAlex Deucher a.full = dfixed_const(10); 853aaa36a97SAlex Deucher disp_clk_request_efficiency.full = dfixed_const(8); 854aaa36a97SAlex Deucher disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a); 855aaa36a97SAlex Deucher 856aaa36a97SAlex Deucher bandwidth.full = dfixed_mul(b, disp_clk_request_efficiency); 857aaa36a97SAlex Deucher 858aaa36a97SAlex Deucher return dfixed_trunc(bandwidth); 859aaa36a97SAlex Deucher } 860aaa36a97SAlex Deucher 861aaa36a97SAlex Deucher /** 862aaa36a97SAlex Deucher * dce_v11_0_available_bandwidth - get the min available bandwidth 863aaa36a97SAlex Deucher * 864aaa36a97SAlex Deucher * @wm: watermark calculation data 865aaa36a97SAlex Deucher * 866aaa36a97SAlex Deucher * Calculate the min available bandwidth used for display (CIK). 
 * Used for display watermark bandwidth calculations
 * Returns the min available bandwidth in MBytes/s
 */
static u32 dce_v11_0_available_bandwidth(struct dce10_wm_params *wm)
{
	/* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
	u32 dram_bandwidth = dce_v11_0_dram_bandwidth(wm);
	u32 data_return_bandwidth = dce_v11_0_data_return_bandwidth(wm);
	u32 dmif_req_bandwidth = dce_v11_0_dmif_request_bandwidth(wm);

	/* the tightest of the three limits is what display can actually get */
	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
}

/**
 * dce_v11_0_average_bandwidth - get the average available bandwidth
 *
 * @wm: watermark calculation data
 *
 * Calculate the average available bandwidth used for display (CIK).
 * Used for display watermark bandwidth calculations
 * Returns the average available bandwidth in MBytes/s
 */
static u32 dce_v11_0_average_bandwidth(struct dce10_wm_params *wm)
{
	/* Calculate the display mode Average Bandwidth
	 * DisplayMode should contain the source and destination dimensions,
	 * timing, etc.
	 */
	fixed20_12 bpp;
	fixed20_12 line_time;
	fixed20_12 src_width;
	fixed20_12 bandwidth;
	fixed20_12 a;

	/* line time is in ns; divide by 1000 to work in us */
	a.full = dfixed_const(1000);
	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
	line_time.full = dfixed_div(line_time, a);
	bpp.full = dfixed_const(wm->bytes_per_pixel);
	src_width.full = dfixed_const(wm->src_width);
	/* bytes fetched per line, scaled by the vertical scale ratio */
	bandwidth.full = dfixed_mul(src_width, bpp);
	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
	bandwidth.full = dfixed_div(bandwidth, line_time);

	return dfixed_trunc(bandwidth);
}

/**
 * dce_v11_0_latency_watermark - get the latency watermark
 *
 * @wm: watermark calculation data
 *
 * Calculate the latency watermark (CIK).
 * Used for display watermark bandwidth calculations
 * Returns the latency watermark in ns
 */
static u32 dce_v11_0_latency_watermark(struct dce10_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	/* NOTE(review): if available_bandwidth is 0 the divisions below
	 * divide by zero; presumably the inputs guarantee non-zero
	 * bandwidth — confirm against callers.
	 */
	u32 available_bandwidth = dce_v11_0_available_bandwidth(wm);
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	u32 tmp, dmif_size = 12288;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* heavier vertical scaling / more taps require buffering more
	 * source lines per destination line
	 */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* LB fill rate: capped by the per-head available bandwidth, the
	 * DMIF size over the latency window, and dispclk * bytes/pixel
	 */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);
	tmp = div_u64((u64) dmif_size * (u64) wm->disp_clk, mc_latency + 512);
	tmp = min(dfixed_trunc(a), tmp);

	lb_fill_bw = min(tmp, wm->disp_clk * wm->bytes_per_pixel / 1000);

	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if the LB cannot be refilled within the active period, the extra
	 * fill time is added to the latency watermark
	 */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}

/**
 * dce_v11_0_average_bandwidth_vs_dram_bandwidth_for_display - check
 * average and available dram bandwidth
 *
 * @wm: watermark calculation data
 *
 * Check if the display average bandwidth fits in the display
 * dram bandwidth (CIK).
 * Used for display watermark bandwidth calculations
 * Returns true if the display fits, false if not.
982aaa36a97SAlex Deucher */ 983aaa36a97SAlex Deucher static bool dce_v11_0_average_bandwidth_vs_dram_bandwidth_for_display(struct dce10_wm_params *wm) 984aaa36a97SAlex Deucher { 985aaa36a97SAlex Deucher if (dce_v11_0_average_bandwidth(wm) <= 986aaa36a97SAlex Deucher (dce_v11_0_dram_bandwidth_for_display(wm) / wm->num_heads)) 987aaa36a97SAlex Deucher return true; 988aaa36a97SAlex Deucher else 989aaa36a97SAlex Deucher return false; 990aaa36a97SAlex Deucher } 991aaa36a97SAlex Deucher 992aaa36a97SAlex Deucher /** 993aaa36a97SAlex Deucher * dce_v11_0_average_bandwidth_vs_available_bandwidth - check 994aaa36a97SAlex Deucher * average and available bandwidth 995aaa36a97SAlex Deucher * 996aaa36a97SAlex Deucher * @wm: watermark calculation data 997aaa36a97SAlex Deucher * 998aaa36a97SAlex Deucher * Check if the display average bandwidth fits in the display 999aaa36a97SAlex Deucher * available bandwidth (CIK). 1000aaa36a97SAlex Deucher * Used for display watermark bandwidth calculations 1001aaa36a97SAlex Deucher * Returns true if the display fits, false if not. 1002aaa36a97SAlex Deucher */ 1003aaa36a97SAlex Deucher static bool dce_v11_0_average_bandwidth_vs_available_bandwidth(struct dce10_wm_params *wm) 1004aaa36a97SAlex Deucher { 1005aaa36a97SAlex Deucher if (dce_v11_0_average_bandwidth(wm) <= 1006aaa36a97SAlex Deucher (dce_v11_0_available_bandwidth(wm) / wm->num_heads)) 1007aaa36a97SAlex Deucher return true; 1008aaa36a97SAlex Deucher else 1009aaa36a97SAlex Deucher return false; 1010aaa36a97SAlex Deucher } 1011aaa36a97SAlex Deucher 1012aaa36a97SAlex Deucher /** 1013aaa36a97SAlex Deucher * dce_v11_0_check_latency_hiding - check latency hiding 1014aaa36a97SAlex Deucher * 1015aaa36a97SAlex Deucher * @wm: watermark calculation data 1016aaa36a97SAlex Deucher * 1017aaa36a97SAlex Deucher * Check latency hiding (CIK). 1018aaa36a97SAlex Deucher * Used for display watermark bandwidth calculations 1019aaa36a97SAlex Deucher * Returns true if the display fits, false if not. 
1020aaa36a97SAlex Deucher */ 1021aaa36a97SAlex Deucher static bool dce_v11_0_check_latency_hiding(struct dce10_wm_params *wm) 1022aaa36a97SAlex Deucher { 1023aaa36a97SAlex Deucher u32 lb_partitions = wm->lb_size / wm->src_width; 1024aaa36a97SAlex Deucher u32 line_time = wm->active_time + wm->blank_time; 1025aaa36a97SAlex Deucher u32 latency_tolerant_lines; 1026aaa36a97SAlex Deucher u32 latency_hiding; 1027aaa36a97SAlex Deucher fixed20_12 a; 1028aaa36a97SAlex Deucher 1029aaa36a97SAlex Deucher a.full = dfixed_const(1); 1030aaa36a97SAlex Deucher if (wm->vsc.full > a.full) 1031aaa36a97SAlex Deucher latency_tolerant_lines = 1; 1032aaa36a97SAlex Deucher else { 1033aaa36a97SAlex Deucher if (lb_partitions <= (wm->vtaps + 1)) 1034aaa36a97SAlex Deucher latency_tolerant_lines = 1; 1035aaa36a97SAlex Deucher else 1036aaa36a97SAlex Deucher latency_tolerant_lines = 2; 1037aaa36a97SAlex Deucher } 1038aaa36a97SAlex Deucher 1039aaa36a97SAlex Deucher latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time); 1040aaa36a97SAlex Deucher 1041aaa36a97SAlex Deucher if (dce_v11_0_latency_watermark(wm) <= latency_hiding) 1042aaa36a97SAlex Deucher return true; 1043aaa36a97SAlex Deucher else 1044aaa36a97SAlex Deucher return false; 1045aaa36a97SAlex Deucher } 1046aaa36a97SAlex Deucher 1047aaa36a97SAlex Deucher /** 1048aaa36a97SAlex Deucher * dce_v11_0_program_watermarks - program display watermarks 1049aaa36a97SAlex Deucher * 1050aaa36a97SAlex Deucher * @adev: amdgpu_device pointer 1051aaa36a97SAlex Deucher * @amdgpu_crtc: the selected display controller 1052aaa36a97SAlex Deucher * @lb_size: line buffer size 1053aaa36a97SAlex Deucher * @num_heads: number of display controllers in use 1054aaa36a97SAlex Deucher * 1055aaa36a97SAlex Deucher * Calculate and program the display watermarks for the 1056aaa36a97SAlex Deucher * selected display controller (CIK). 
 */
static void dce_v11_0_program_watermarks(struct amdgpu_device *adev,
					 struct amdgpu_crtc *amdgpu_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &amdgpu_crtc->base.mode;
	struct dce10_wm_params wm_low, wm_high;
	u32 active_time;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 tmp, wm_mask, lb_vblank_lead_lines = 0;

	if (amdgpu_crtc->base.enabled && num_heads && mode) {
		/* active/line time in ns; clock is in kHz */
		active_time = (u32) div_u64((u64)mode->crtc_hdisplay * 1000000,
					    (u32)mode->clock);
		line_time = (u32) div_u64((u64)mode->crtc_htotal * 1000000,
					  (u32)mode->clock);
		/* line_time is written to a 16-bit register field below */
		line_time = min_t(u32, line_time, 65535);

		/* watermark for high clocks */
		if (adev->pm.dpm_enabled) {
			wm_high.yclk =
				amdgpu_dpm_get_mclk(adev, false) * 10;
			wm_high.sclk =
				amdgpu_dpm_get_sclk(adev, false) * 10;
		} else {
			wm_high.yclk = adev->pm.current_mclk * 10;
			wm_high.sclk = adev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = active_time;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = amdgpu_crtc->vsc;
		wm_high.vtaps = 1;
		if (amdgpu_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = cik_get_number_of_dram_channels(adev);
		wm_high.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min_t(u32, dce_v11_0_latency_watermark(&wm_high), 65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!dce_v11_0_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !dce_v11_0_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !dce_v11_0_check_latency_hiding(&wm_high) ||
		    (adev->mode_info.disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority to high\n");
		}

		/* watermark for low clocks */
		if (adev->pm.dpm_enabled) {
			wm_low.yclk =
				amdgpu_dpm_get_mclk(adev, true) * 10;
			wm_low.sclk =
				amdgpu_dpm_get_sclk(adev, true) * 10;
		} else {
			wm_low.yclk = adev->pm.current_mclk * 10;
			wm_low.sclk = adev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = active_time;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = amdgpu_crtc->vsc;
		wm_low.vtaps = 1;
		if (amdgpu_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = cik_get_number_of_dram_channels(adev);
		wm_low.num_heads = num_heads;

		/* set for low clocks */
		latency_watermark_b = min_t(u32, dce_v11_0_latency_watermark(&wm_low), 65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!dce_v11_0_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !dce_v11_0_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !dce_v11_0_check_latency_hiding(&wm_low) ||
		    (adev->mode_info.disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority to high\n");
		}
		lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
	}

	/* select wm A */
	wm_mask = RREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset);
	tmp = REG_SET_FIELD(wm_mask, DPG_WATERMARK_MASK_CONTROL, URGENCY_WATERMARK_MASK, 1);
	WREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset, tmp);
	tmp = RREG32(mmDPG_PIPE_URGENCY_CONTROL + amdgpu_crtc->crtc_offset);
	tmp = REG_SET_FIELD(tmp, DPG_PIPE_URGENCY_CONTROL, URGENCY_LOW_WATERMARK, latency_watermark_a);
	tmp = REG_SET_FIELD(tmp, DPG_PIPE_URGENCY_CONTROL, URGENCY_HIGH_WATERMARK, line_time);
	WREG32(mmDPG_PIPE_URGENCY_CONTROL + amdgpu_crtc->crtc_offset, tmp);
	/* select wm B */
	tmp = REG_SET_FIELD(wm_mask, DPG_WATERMARK_MASK_CONTROL, URGENCY_WATERMARK_MASK, 2);
	WREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset, tmp);
	tmp = RREG32(mmDPG_PIPE_URGENCY_CONTROL + amdgpu_crtc->crtc_offset);
	tmp = REG_SET_FIELD(tmp, DPG_PIPE_URGENCY_CONTROL, URGENCY_LOW_WATERMARK, latency_watermark_b);
	tmp = REG_SET_FIELD(tmp, DPG_PIPE_URGENCY_CONTROL, URGENCY_HIGH_WATERMARK, line_time);
	WREG32(mmDPG_PIPE_URGENCY_CONTROL + amdgpu_crtc->crtc_offset, tmp);
	/* restore original selection */
	WREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset, wm_mask);

	/* save values for DPM */
	amdgpu_crtc->line_time = line_time;
	amdgpu_crtc->wm_high = latency_watermark_a;
	amdgpu_crtc->wm_low = latency_watermark_b;
	/* Save number of lines the linebuffer leads before the scanout */
	amdgpu_crtc->lb_vblank_lead_lines = lb_vblank_lead_lines;
}

/**
 * dce_v11_0_bandwidth_update - program display watermarks
 *
 * @adev: amdgpu_device pointer
 *
 * Calculate and program the display watermarks and line
 * buffer allocation (CIK).
1189aaa36a97SAlex Deucher */ 1190aaa36a97SAlex Deucher static void dce_v11_0_bandwidth_update(struct amdgpu_device *adev) 1191aaa36a97SAlex Deucher { 1192aaa36a97SAlex Deucher struct drm_display_mode *mode = NULL; 1193aaa36a97SAlex Deucher u32 num_heads = 0, lb_size; 1194aaa36a97SAlex Deucher int i; 1195aaa36a97SAlex Deucher 1196166140fbSSamuel Li amdgpu_display_update_priority(adev); 1197aaa36a97SAlex Deucher 1198aaa36a97SAlex Deucher for (i = 0; i < adev->mode_info.num_crtc; i++) { 1199aaa36a97SAlex Deucher if (adev->mode_info.crtcs[i]->base.enabled) 1200aaa36a97SAlex Deucher num_heads++; 1201aaa36a97SAlex Deucher } 1202aaa36a97SAlex Deucher for (i = 0; i < adev->mode_info.num_crtc; i++) { 1203aaa36a97SAlex Deucher mode = &adev->mode_info.crtcs[i]->base.mode; 1204aaa36a97SAlex Deucher lb_size = dce_v11_0_line_buffer_adjust(adev, adev->mode_info.crtcs[i], mode); 1205aaa36a97SAlex Deucher dce_v11_0_program_watermarks(adev, adev->mode_info.crtcs[i], 1206aaa36a97SAlex Deucher lb_size, num_heads); 1207aaa36a97SAlex Deucher } 1208aaa36a97SAlex Deucher } 1209aaa36a97SAlex Deucher 1210aaa36a97SAlex Deucher static void dce_v11_0_audio_get_connected_pins(struct amdgpu_device *adev) 1211aaa36a97SAlex Deucher { 1212aaa36a97SAlex Deucher int i; 1213aaa36a97SAlex Deucher u32 offset, tmp; 1214aaa36a97SAlex Deucher 1215aaa36a97SAlex Deucher for (i = 0; i < adev->mode_info.audio.num_pins; i++) { 1216aaa36a97SAlex Deucher offset = adev->mode_info.audio.pin[i].offset; 1217aaa36a97SAlex Deucher tmp = RREG32_AUDIO_ENDPT(offset, 1218aaa36a97SAlex Deucher ixAZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT); 1219aaa36a97SAlex Deucher if (((tmp & 1220aaa36a97SAlex Deucher AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT__PORT_CONNECTIVITY_MASK) >> 1221aaa36a97SAlex Deucher AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT__PORT_CONNECTIVITY__SHIFT) == 1) 1222aaa36a97SAlex Deucher adev->mode_info.audio.pin[i].connected = false; 1223aaa36a97SAlex Deucher else 
1224aaa36a97SAlex Deucher adev->mode_info.audio.pin[i].connected = true; 1225aaa36a97SAlex Deucher } 1226aaa36a97SAlex Deucher } 1227aaa36a97SAlex Deucher 1228aaa36a97SAlex Deucher static struct amdgpu_audio_pin *dce_v11_0_audio_get_pin(struct amdgpu_device *adev) 1229aaa36a97SAlex Deucher { 1230aaa36a97SAlex Deucher int i; 1231aaa36a97SAlex Deucher 1232aaa36a97SAlex Deucher dce_v11_0_audio_get_connected_pins(adev); 1233aaa36a97SAlex Deucher 1234aaa36a97SAlex Deucher for (i = 0; i < adev->mode_info.audio.num_pins; i++) { 1235aaa36a97SAlex Deucher if (adev->mode_info.audio.pin[i].connected) 1236aaa36a97SAlex Deucher return &adev->mode_info.audio.pin[i]; 1237aaa36a97SAlex Deucher } 1238aaa36a97SAlex Deucher DRM_ERROR("No connected audio pins found!\n"); 1239aaa36a97SAlex Deucher return NULL; 1240aaa36a97SAlex Deucher } 1241aaa36a97SAlex Deucher 1242aaa36a97SAlex Deucher static void dce_v11_0_afmt_audio_select_pin(struct drm_encoder *encoder) 1243aaa36a97SAlex Deucher { 12441348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(encoder->dev); 1245aaa36a97SAlex Deucher struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1246aaa36a97SAlex Deucher struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 1247aaa36a97SAlex Deucher u32 tmp; 1248aaa36a97SAlex Deucher 1249aaa36a97SAlex Deucher if (!dig || !dig->afmt || !dig->afmt->pin) 1250aaa36a97SAlex Deucher return; 1251aaa36a97SAlex Deucher 1252aaa36a97SAlex Deucher tmp = RREG32(mmAFMT_AUDIO_SRC_CONTROL + dig->afmt->offset); 1253aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AFMT_AUDIO_SRC_CONTROL, AFMT_AUDIO_SRC_SELECT, dig->afmt->pin->id); 1254aaa36a97SAlex Deucher WREG32(mmAFMT_AUDIO_SRC_CONTROL + dig->afmt->offset, tmp); 1255aaa36a97SAlex Deucher } 1256aaa36a97SAlex Deucher 1257aaa36a97SAlex Deucher static void dce_v11_0_audio_write_latency_fields(struct drm_encoder *encoder, 1258aaa36a97SAlex Deucher struct drm_display_mode *mode) 1259aaa36a97SAlex Deucher { 1260f8d2d39eSLyude Paul 
struct drm_device *dev = encoder->dev; 12611348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 1262aaa36a97SAlex Deucher struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1263aaa36a97SAlex Deucher struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 1264aaa36a97SAlex Deucher struct drm_connector *connector; 1265f8d2d39eSLyude Paul struct drm_connector_list_iter iter; 1266aaa36a97SAlex Deucher struct amdgpu_connector *amdgpu_connector = NULL; 1267aaa36a97SAlex Deucher u32 tmp; 1268aaa36a97SAlex Deucher int interlace = 0; 1269aaa36a97SAlex Deucher 1270aaa36a97SAlex Deucher if (!dig || !dig->afmt || !dig->afmt->pin) 1271aaa36a97SAlex Deucher return; 1272aaa36a97SAlex Deucher 1273f8d2d39eSLyude Paul drm_connector_list_iter_begin(dev, &iter); 1274f8d2d39eSLyude Paul drm_for_each_connector_iter(connector, &iter) { 1275aaa36a97SAlex Deucher if (connector->encoder == encoder) { 1276aaa36a97SAlex Deucher amdgpu_connector = to_amdgpu_connector(connector); 1277aaa36a97SAlex Deucher break; 1278aaa36a97SAlex Deucher } 1279aaa36a97SAlex Deucher } 1280f8d2d39eSLyude Paul drm_connector_list_iter_end(&iter); 1281aaa36a97SAlex Deucher 1282aaa36a97SAlex Deucher if (!amdgpu_connector) { 1283aaa36a97SAlex Deucher DRM_ERROR("Couldn't find encoder's connector\n"); 1284aaa36a97SAlex Deucher return; 1285aaa36a97SAlex Deucher } 1286aaa36a97SAlex Deucher 1287aaa36a97SAlex Deucher if (mode->flags & DRM_MODE_FLAG_INTERLACE) 1288aaa36a97SAlex Deucher interlace = 1; 1289aaa36a97SAlex Deucher if (connector->latency_present[interlace]) { 1290aaa36a97SAlex Deucher tmp = REG_SET_FIELD(0, AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_LIPSYNC, 1291aaa36a97SAlex Deucher VIDEO_LIPSYNC, connector->video_latency[interlace]); 1292aaa36a97SAlex Deucher tmp = REG_SET_FIELD(0, AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_LIPSYNC, 1293aaa36a97SAlex Deucher AUDIO_LIPSYNC, connector->audio_latency[interlace]); 1294aaa36a97SAlex Deucher } else { 1295aaa36a97SAlex Deucher tmp = 
REG_SET_FIELD(0, AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_LIPSYNC, 1296aaa36a97SAlex Deucher VIDEO_LIPSYNC, 0); 1297aaa36a97SAlex Deucher tmp = REG_SET_FIELD(0, AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_LIPSYNC, 1298aaa36a97SAlex Deucher AUDIO_LIPSYNC, 0); 1299aaa36a97SAlex Deucher } 1300aaa36a97SAlex Deucher WREG32_AUDIO_ENDPT(dig->afmt->pin->offset, 1301aaa36a97SAlex Deucher ixAZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_LIPSYNC, tmp); 1302aaa36a97SAlex Deucher } 1303aaa36a97SAlex Deucher 1304aaa36a97SAlex Deucher static void dce_v11_0_audio_write_speaker_allocation(struct drm_encoder *encoder) 1305aaa36a97SAlex Deucher { 1306f8d2d39eSLyude Paul struct drm_device *dev = encoder->dev; 13071348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 1308aaa36a97SAlex Deucher struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1309aaa36a97SAlex Deucher struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 1310aaa36a97SAlex Deucher struct drm_connector *connector; 1311f8d2d39eSLyude Paul struct drm_connector_list_iter iter; 1312aaa36a97SAlex Deucher struct amdgpu_connector *amdgpu_connector = NULL; 1313aaa36a97SAlex Deucher u32 tmp; 1314aaa36a97SAlex Deucher u8 *sadb = NULL; 1315aaa36a97SAlex Deucher int sad_count; 1316aaa36a97SAlex Deucher 1317aaa36a97SAlex Deucher if (!dig || !dig->afmt || !dig->afmt->pin) 1318aaa36a97SAlex Deucher return; 1319aaa36a97SAlex Deucher 1320f8d2d39eSLyude Paul drm_connector_list_iter_begin(dev, &iter); 1321f8d2d39eSLyude Paul drm_for_each_connector_iter(connector, &iter) { 1322aaa36a97SAlex Deucher if (connector->encoder == encoder) { 1323aaa36a97SAlex Deucher amdgpu_connector = to_amdgpu_connector(connector); 1324aaa36a97SAlex Deucher break; 1325aaa36a97SAlex Deucher } 1326aaa36a97SAlex Deucher } 1327f8d2d39eSLyude Paul drm_connector_list_iter_end(&iter); 1328aaa36a97SAlex Deucher 1329aaa36a97SAlex Deucher if (!amdgpu_connector) { 1330aaa36a97SAlex Deucher DRM_ERROR("Couldn't find encoder's connector\n"); 
1331aaa36a97SAlex Deucher return; 1332aaa36a97SAlex Deucher } 1333aaa36a97SAlex Deucher 133442505ab1SJani Nikula sad_count = drm_edid_to_speaker_allocation(amdgpu_connector->edid, &sadb); 1335aaa36a97SAlex Deucher if (sad_count < 0) { 1336aaa36a97SAlex Deucher DRM_ERROR("Couldn't read Speaker Allocation Data Block: %d\n", sad_count); 1337aaa36a97SAlex Deucher sad_count = 0; 1338aaa36a97SAlex Deucher } 1339aaa36a97SAlex Deucher 1340aaa36a97SAlex Deucher /* program the speaker allocation */ 1341aaa36a97SAlex Deucher tmp = RREG32_AUDIO_ENDPT(dig->afmt->pin->offset, 1342aaa36a97SAlex Deucher ixAZALIA_F0_CODEC_PIN_CONTROL_CHANNEL_SPEAKER); 1343aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AZALIA_F0_CODEC_PIN_CONTROL_CHANNEL_SPEAKER, 1344aaa36a97SAlex Deucher DP_CONNECTION, 0); 1345aaa36a97SAlex Deucher /* set HDMI mode */ 1346aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AZALIA_F0_CODEC_PIN_CONTROL_CHANNEL_SPEAKER, 1347aaa36a97SAlex Deucher HDMI_CONNECTION, 1); 1348aaa36a97SAlex Deucher if (sad_count) 1349aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AZALIA_F0_CODEC_PIN_CONTROL_CHANNEL_SPEAKER, 1350aaa36a97SAlex Deucher SPEAKER_ALLOCATION, sadb[0]); 1351aaa36a97SAlex Deucher else 1352aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AZALIA_F0_CODEC_PIN_CONTROL_CHANNEL_SPEAKER, 1353aaa36a97SAlex Deucher SPEAKER_ALLOCATION, 5); /* stereo */ 1354aaa36a97SAlex Deucher WREG32_AUDIO_ENDPT(dig->afmt->pin->offset, 1355aaa36a97SAlex Deucher ixAZALIA_F0_CODEC_PIN_CONTROL_CHANNEL_SPEAKER, tmp); 1356aaa36a97SAlex Deucher 1357aaa36a97SAlex Deucher kfree(sadb); 1358aaa36a97SAlex Deucher } 1359aaa36a97SAlex Deucher 1360aaa36a97SAlex Deucher static void dce_v11_0_audio_write_sad_regs(struct drm_encoder *encoder) 1361aaa36a97SAlex Deucher { 1362f8d2d39eSLyude Paul struct drm_device *dev = encoder->dev; 13631348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 1364aaa36a97SAlex Deucher struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1365aaa36a97SAlex 
Deucher struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 1366aaa36a97SAlex Deucher struct drm_connector *connector; 1367f8d2d39eSLyude Paul struct drm_connector_list_iter iter; 1368aaa36a97SAlex Deucher struct amdgpu_connector *amdgpu_connector = NULL; 1369aaa36a97SAlex Deucher struct cea_sad *sads; 1370aaa36a97SAlex Deucher int i, sad_count; 1371aaa36a97SAlex Deucher 1372aaa36a97SAlex Deucher static const u16 eld_reg_to_type[][2] = { 1373aaa36a97SAlex Deucher { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR0, HDMI_AUDIO_CODING_TYPE_PCM }, 1374aaa36a97SAlex Deucher { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR1, HDMI_AUDIO_CODING_TYPE_AC3 }, 1375aaa36a97SAlex Deucher { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR2, HDMI_AUDIO_CODING_TYPE_MPEG1 }, 1376aaa36a97SAlex Deucher { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR3, HDMI_AUDIO_CODING_TYPE_MP3 }, 1377aaa36a97SAlex Deucher { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR4, HDMI_AUDIO_CODING_TYPE_MPEG2 }, 1378aaa36a97SAlex Deucher { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR5, HDMI_AUDIO_CODING_TYPE_AAC_LC }, 1379aaa36a97SAlex Deucher { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR6, HDMI_AUDIO_CODING_TYPE_DTS }, 1380aaa36a97SAlex Deucher { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR7, HDMI_AUDIO_CODING_TYPE_ATRAC }, 1381aaa36a97SAlex Deucher { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR9, HDMI_AUDIO_CODING_TYPE_EAC3 }, 1382aaa36a97SAlex Deucher { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR10, HDMI_AUDIO_CODING_TYPE_DTS_HD }, 1383aaa36a97SAlex Deucher { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR11, HDMI_AUDIO_CODING_TYPE_MLP }, 1384aaa36a97SAlex Deucher { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR13, HDMI_AUDIO_CODING_TYPE_WMA_PRO }, 1385aaa36a97SAlex Deucher }; 1386aaa36a97SAlex Deucher 1387aaa36a97SAlex Deucher if (!dig || !dig->afmt || !dig->afmt->pin) 1388aaa36a97SAlex Deucher return; 1389aaa36a97SAlex Deucher 1390f8d2d39eSLyude Paul 
drm_connector_list_iter_begin(dev, &iter); 1391f8d2d39eSLyude Paul drm_for_each_connector_iter(connector, &iter) { 1392aaa36a97SAlex Deucher if (connector->encoder == encoder) { 1393aaa36a97SAlex Deucher amdgpu_connector = to_amdgpu_connector(connector); 1394aaa36a97SAlex Deucher break; 1395aaa36a97SAlex Deucher } 1396aaa36a97SAlex Deucher } 1397f8d2d39eSLyude Paul drm_connector_list_iter_end(&iter); 1398aaa36a97SAlex Deucher 1399aaa36a97SAlex Deucher if (!amdgpu_connector) { 1400aaa36a97SAlex Deucher DRM_ERROR("Couldn't find encoder's connector\n"); 1401aaa36a97SAlex Deucher return; 1402aaa36a97SAlex Deucher } 1403aaa36a97SAlex Deucher 140442505ab1SJani Nikula sad_count = drm_edid_to_sad(amdgpu_connector->edid, &sads); 1405ae2a3495SJean Delvare if (sad_count < 0) 1406aaa36a97SAlex Deucher DRM_ERROR("Couldn't read SADs: %d\n", sad_count); 1407ae2a3495SJean Delvare if (sad_count <= 0) 1408aaa36a97SAlex Deucher return; 1409aaa36a97SAlex Deucher BUG_ON(!sads); 1410aaa36a97SAlex Deucher 1411aaa36a97SAlex Deucher for (i = 0; i < ARRAY_SIZE(eld_reg_to_type); i++) { 1412aaa36a97SAlex Deucher u32 tmp = 0; 1413aaa36a97SAlex Deucher u8 stereo_freqs = 0; 1414aaa36a97SAlex Deucher int max_channels = -1; 1415aaa36a97SAlex Deucher int j; 1416aaa36a97SAlex Deucher 1417aaa36a97SAlex Deucher for (j = 0; j < sad_count; j++) { 1418aaa36a97SAlex Deucher struct cea_sad *sad = &sads[j]; 1419aaa36a97SAlex Deucher 1420aaa36a97SAlex Deucher if (sad->format == eld_reg_to_type[i][1]) { 1421aaa36a97SAlex Deucher if (sad->channels > max_channels) { 1422aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR0, 1423aaa36a97SAlex Deucher MAX_CHANNELS, sad->channels); 1424aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR0, 1425aaa36a97SAlex Deucher DESCRIPTOR_BYTE_2, sad->byte2); 1426aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR0, 1427aaa36a97SAlex Deucher 
SUPPORTED_FREQUENCIES, sad->freq); 1428aaa36a97SAlex Deucher max_channels = sad->channels; 1429aaa36a97SAlex Deucher } 1430aaa36a97SAlex Deucher 1431aaa36a97SAlex Deucher if (sad->format == HDMI_AUDIO_CODING_TYPE_PCM) 1432aaa36a97SAlex Deucher stereo_freqs |= sad->freq; 1433aaa36a97SAlex Deucher else 1434aaa36a97SAlex Deucher break; 1435aaa36a97SAlex Deucher } 1436aaa36a97SAlex Deucher } 1437aaa36a97SAlex Deucher 1438aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR0, 1439aaa36a97SAlex Deucher SUPPORTED_FREQUENCIES_STEREO, stereo_freqs); 1440aaa36a97SAlex Deucher WREG32_AUDIO_ENDPT(dig->afmt->pin->offset, eld_reg_to_type[i][0], tmp); 1441aaa36a97SAlex Deucher } 1442aaa36a97SAlex Deucher 1443aaa36a97SAlex Deucher kfree(sads); 1444aaa36a97SAlex Deucher } 1445aaa36a97SAlex Deucher 1446aaa36a97SAlex Deucher static void dce_v11_0_audio_enable(struct amdgpu_device *adev, 1447aaa36a97SAlex Deucher struct amdgpu_audio_pin *pin, 1448aaa36a97SAlex Deucher bool enable) 1449aaa36a97SAlex Deucher { 1450aaa36a97SAlex Deucher if (!pin) 1451aaa36a97SAlex Deucher return; 1452aaa36a97SAlex Deucher 1453aaa36a97SAlex Deucher WREG32_AUDIO_ENDPT(pin->offset, ixAZALIA_F0_CODEC_PIN_CONTROL_HOT_PLUG_CONTROL, 1454aaa36a97SAlex Deucher enable ? 
AZALIA_F0_CODEC_PIN_CONTROL_HOT_PLUG_CONTROL__AUDIO_ENABLED_MASK : 0); 1455aaa36a97SAlex Deucher } 1456aaa36a97SAlex Deucher 1457aaa36a97SAlex Deucher static const u32 pin_offsets[] = 1458aaa36a97SAlex Deucher { 1459aaa36a97SAlex Deucher AUD0_REGISTER_OFFSET, 1460aaa36a97SAlex Deucher AUD1_REGISTER_OFFSET, 1461aaa36a97SAlex Deucher AUD2_REGISTER_OFFSET, 1462aaa36a97SAlex Deucher AUD3_REGISTER_OFFSET, 1463aaa36a97SAlex Deucher AUD4_REGISTER_OFFSET, 1464aaa36a97SAlex Deucher AUD5_REGISTER_OFFSET, 1465aaa36a97SAlex Deucher AUD6_REGISTER_OFFSET, 146667b1fcc9SAlex Deucher AUD7_REGISTER_OFFSET, 1467aaa36a97SAlex Deucher }; 1468aaa36a97SAlex Deucher 1469aaa36a97SAlex Deucher static int dce_v11_0_audio_init(struct amdgpu_device *adev) 1470aaa36a97SAlex Deucher { 1471aaa36a97SAlex Deucher int i; 1472aaa36a97SAlex Deucher 1473aaa36a97SAlex Deucher if (!amdgpu_audio) 1474aaa36a97SAlex Deucher return 0; 1475aaa36a97SAlex Deucher 1476aaa36a97SAlex Deucher adev->mode_info.audio.enabled = true; 1477aaa36a97SAlex Deucher 1478d525eb8dSAlex Deucher switch (adev->asic_type) { 1479d525eb8dSAlex Deucher case CHIP_CARRIZO: 1480d525eb8dSAlex Deucher case CHIP_STONEY: 1481aaa36a97SAlex Deucher adev->mode_info.audio.num_pins = 7; 1482d525eb8dSAlex Deucher break; 14832cc0c0b5SFlora Cui case CHIP_POLARIS10: 1484221adb21SAlex Deucher case CHIP_VEGAM: 1485d525eb8dSAlex Deucher adev->mode_info.audio.num_pins = 8; 1486d525eb8dSAlex Deucher break; 14872cc0c0b5SFlora Cui case CHIP_POLARIS11: 1488c4642a47SJunwei Zhang case CHIP_POLARIS12: 1489d525eb8dSAlex Deucher adev->mode_info.audio.num_pins = 6; 1490d525eb8dSAlex Deucher break; 1491d525eb8dSAlex Deucher default: 1492d525eb8dSAlex Deucher return -EINVAL; 1493d525eb8dSAlex Deucher } 1494aaa36a97SAlex Deucher 1495aaa36a97SAlex Deucher for (i = 0; i < adev->mode_info.audio.num_pins; i++) { 1496aaa36a97SAlex Deucher adev->mode_info.audio.pin[i].channels = -1; 1497aaa36a97SAlex Deucher adev->mode_info.audio.pin[i].rate = -1; 1498aaa36a97SAlex Deucher 
adev->mode_info.audio.pin[i].bits_per_sample = -1; 1499aaa36a97SAlex Deucher adev->mode_info.audio.pin[i].status_bits = 0; 1500aaa36a97SAlex Deucher adev->mode_info.audio.pin[i].category_code = 0; 1501aaa36a97SAlex Deucher adev->mode_info.audio.pin[i].connected = false; 1502aaa36a97SAlex Deucher adev->mode_info.audio.pin[i].offset = pin_offsets[i]; 1503aaa36a97SAlex Deucher adev->mode_info.audio.pin[i].id = i; 1504aaa36a97SAlex Deucher /* disable audio. it will be set up later */ 1505aaa36a97SAlex Deucher /* XXX remove once we switch to ip funcs */ 1506aaa36a97SAlex Deucher dce_v11_0_audio_enable(adev, &adev->mode_info.audio.pin[i], false); 1507aaa36a97SAlex Deucher } 1508aaa36a97SAlex Deucher 1509aaa36a97SAlex Deucher return 0; 1510aaa36a97SAlex Deucher } 1511aaa36a97SAlex Deucher 1512aaa36a97SAlex Deucher static void dce_v11_0_audio_fini(struct amdgpu_device *adev) 1513aaa36a97SAlex Deucher { 1514aaa36a97SAlex Deucher int i; 1515aaa36a97SAlex Deucher 151629f646dfSTom St Denis if (!amdgpu_audio) 151729f646dfSTom St Denis return; 151829f646dfSTom St Denis 1519aaa36a97SAlex Deucher if (!adev->mode_info.audio.enabled) 1520aaa36a97SAlex Deucher return; 1521aaa36a97SAlex Deucher 1522aaa36a97SAlex Deucher for (i = 0; i < adev->mode_info.audio.num_pins; i++) 1523aaa36a97SAlex Deucher dce_v11_0_audio_enable(adev, &adev->mode_info.audio.pin[i], false); 1524aaa36a97SAlex Deucher 1525aaa36a97SAlex Deucher adev->mode_info.audio.enabled = false; 1526aaa36a97SAlex Deucher } 1527aaa36a97SAlex Deucher 1528aaa36a97SAlex Deucher /* 1529aaa36a97SAlex Deucher * update the N and CTS parameters for a given pixel clock rate 1530aaa36a97SAlex Deucher */ 1531aaa36a97SAlex Deucher static void dce_v11_0_afmt_update_ACR(struct drm_encoder *encoder, uint32_t clock) 1532aaa36a97SAlex Deucher { 1533aaa36a97SAlex Deucher struct drm_device *dev = encoder->dev; 15341348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 1535aaa36a97SAlex Deucher struct amdgpu_afmt_acr acr = 
amdgpu_afmt_acr(clock); 1536aaa36a97SAlex Deucher struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1537aaa36a97SAlex Deucher struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 1538aaa36a97SAlex Deucher u32 tmp; 1539aaa36a97SAlex Deucher 1540aaa36a97SAlex Deucher tmp = RREG32(mmHDMI_ACR_32_0 + dig->afmt->offset); 1541aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_ACR_32_0, HDMI_ACR_CTS_32, acr.cts_32khz); 1542aaa36a97SAlex Deucher WREG32(mmHDMI_ACR_32_0 + dig->afmt->offset, tmp); 1543aaa36a97SAlex Deucher tmp = RREG32(mmHDMI_ACR_32_1 + dig->afmt->offset); 1544aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_ACR_32_1, HDMI_ACR_N_32, acr.n_32khz); 1545aaa36a97SAlex Deucher WREG32(mmHDMI_ACR_32_1 + dig->afmt->offset, tmp); 1546aaa36a97SAlex Deucher 1547aaa36a97SAlex Deucher tmp = RREG32(mmHDMI_ACR_44_0 + dig->afmt->offset); 1548aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_ACR_44_0, HDMI_ACR_CTS_44, acr.cts_44_1khz); 1549aaa36a97SAlex Deucher WREG32(mmHDMI_ACR_44_0 + dig->afmt->offset, tmp); 1550aaa36a97SAlex Deucher tmp = RREG32(mmHDMI_ACR_44_1 + dig->afmt->offset); 1551aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_ACR_44_1, HDMI_ACR_N_44, acr.n_44_1khz); 1552aaa36a97SAlex Deucher WREG32(mmHDMI_ACR_44_1 + dig->afmt->offset, tmp); 1553aaa36a97SAlex Deucher 1554aaa36a97SAlex Deucher tmp = RREG32(mmHDMI_ACR_48_0 + dig->afmt->offset); 1555aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_ACR_48_0, HDMI_ACR_CTS_48, acr.cts_48khz); 1556aaa36a97SAlex Deucher WREG32(mmHDMI_ACR_48_0 + dig->afmt->offset, tmp); 1557aaa36a97SAlex Deucher tmp = RREG32(mmHDMI_ACR_48_1 + dig->afmt->offset); 1558aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_ACR_48_1, HDMI_ACR_N_48, acr.n_48khz); 1559aaa36a97SAlex Deucher WREG32(mmHDMI_ACR_48_1 + dig->afmt->offset, tmp); 1560aaa36a97SAlex Deucher 1561aaa36a97SAlex Deucher } 1562aaa36a97SAlex Deucher 1563aaa36a97SAlex Deucher /* 1564aaa36a97SAlex Deucher * build a HDMI Video Info Frame 
1565aaa36a97SAlex Deucher */ 1566aaa36a97SAlex Deucher static void dce_v11_0_afmt_update_avi_infoframe(struct drm_encoder *encoder, 1567aaa36a97SAlex Deucher void *buffer, size_t size) 1568aaa36a97SAlex Deucher { 1569aaa36a97SAlex Deucher struct drm_device *dev = encoder->dev; 15701348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 1571aaa36a97SAlex Deucher struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1572aaa36a97SAlex Deucher struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 1573aaa36a97SAlex Deucher uint8_t *frame = buffer + 3; 1574aaa36a97SAlex Deucher uint8_t *header = buffer; 1575aaa36a97SAlex Deucher 1576aaa36a97SAlex Deucher WREG32(mmAFMT_AVI_INFO0 + dig->afmt->offset, 1577aaa36a97SAlex Deucher frame[0x0] | (frame[0x1] << 8) | (frame[0x2] << 16) | (frame[0x3] << 24)); 1578aaa36a97SAlex Deucher WREG32(mmAFMT_AVI_INFO1 + dig->afmt->offset, 1579aaa36a97SAlex Deucher frame[0x4] | (frame[0x5] << 8) | (frame[0x6] << 16) | (frame[0x7] << 24)); 1580aaa36a97SAlex Deucher WREG32(mmAFMT_AVI_INFO2 + dig->afmt->offset, 1581aaa36a97SAlex Deucher frame[0x8] | (frame[0x9] << 8) | (frame[0xA] << 16) | (frame[0xB] << 24)); 1582aaa36a97SAlex Deucher WREG32(mmAFMT_AVI_INFO3 + dig->afmt->offset, 1583aaa36a97SAlex Deucher frame[0xC] | (frame[0xD] << 8) | (header[1] << 24)); 1584aaa36a97SAlex Deucher } 1585aaa36a97SAlex Deucher 1586aaa36a97SAlex Deucher static void dce_v11_0_audio_set_dto(struct drm_encoder *encoder, u32 clock) 1587aaa36a97SAlex Deucher { 1588aaa36a97SAlex Deucher struct drm_device *dev = encoder->dev; 15891348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 1590aaa36a97SAlex Deucher struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1591aaa36a97SAlex Deucher struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 1592aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); 1593aaa36a97SAlex Deucher u32 dto_phase = 24 * 1000; 
1594aaa36a97SAlex Deucher u32 dto_modulo = clock; 1595aaa36a97SAlex Deucher u32 tmp; 1596aaa36a97SAlex Deucher 1597aaa36a97SAlex Deucher if (!dig || !dig->afmt) 1598aaa36a97SAlex Deucher return; 1599aaa36a97SAlex Deucher 1600aaa36a97SAlex Deucher /* XXX two dtos; generally use dto0 for hdmi */ 1601aaa36a97SAlex Deucher /* Express [24MHz / target pixel clock] as an exact rational 1602aaa36a97SAlex Deucher * number (coefficient of two integer numbers. DCCG_AUDIO_DTOx_PHASE 1603aaa36a97SAlex Deucher * is the numerator, DCCG_AUDIO_DTOx_MODULE is the denominator 1604aaa36a97SAlex Deucher */ 1605aaa36a97SAlex Deucher tmp = RREG32(mmDCCG_AUDIO_DTO_SOURCE); 1606aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, DCCG_AUDIO_DTO_SOURCE, DCCG_AUDIO_DTO0_SOURCE_SEL, 1607aaa36a97SAlex Deucher amdgpu_crtc->crtc_id); 1608aaa36a97SAlex Deucher WREG32(mmDCCG_AUDIO_DTO_SOURCE, tmp); 1609aaa36a97SAlex Deucher WREG32(mmDCCG_AUDIO_DTO0_PHASE, dto_phase); 1610aaa36a97SAlex Deucher WREG32(mmDCCG_AUDIO_DTO0_MODULE, dto_modulo); 1611aaa36a97SAlex Deucher } 1612aaa36a97SAlex Deucher 1613aaa36a97SAlex Deucher /* 1614aaa36a97SAlex Deucher * update the info frames with the data from the current display mode 1615aaa36a97SAlex Deucher */ 1616aaa36a97SAlex Deucher static void dce_v11_0_afmt_setmode(struct drm_encoder *encoder, 1617aaa36a97SAlex Deucher struct drm_display_mode *mode) 1618aaa36a97SAlex Deucher { 1619aaa36a97SAlex Deucher struct drm_device *dev = encoder->dev; 16201348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 1621aaa36a97SAlex Deucher struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1622aaa36a97SAlex Deucher struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 1623aaa36a97SAlex Deucher struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1624aaa36a97SAlex Deucher u8 buffer[HDMI_INFOFRAME_HEADER_SIZE + HDMI_AVI_INFOFRAME_SIZE]; 1625aaa36a97SAlex Deucher struct hdmi_avi_infoframe frame; 1626aaa36a97SAlex Deucher 
ssize_t err; 1627aaa36a97SAlex Deucher u32 tmp; 1628aaa36a97SAlex Deucher int bpc = 8; 1629aaa36a97SAlex Deucher 1630aaa36a97SAlex Deucher if (!dig || !dig->afmt) 1631aaa36a97SAlex Deucher return; 1632aaa36a97SAlex Deucher 1633aaa36a97SAlex Deucher /* Silent, r600_hdmi_enable will raise WARN for us */ 1634aaa36a97SAlex Deucher if (!dig->afmt->enabled) 1635aaa36a97SAlex Deucher return; 1636aaa36a97SAlex Deucher 1637aaa36a97SAlex Deucher /* hdmi deep color mode general control packets setup, if bpc > 8 */ 1638aaa36a97SAlex Deucher if (encoder->crtc) { 1639aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); 1640aaa36a97SAlex Deucher bpc = amdgpu_crtc->bpc; 1641aaa36a97SAlex Deucher } 1642aaa36a97SAlex Deucher 1643aaa36a97SAlex Deucher /* disable audio prior to setting up hw */ 1644aaa36a97SAlex Deucher dig->afmt->pin = dce_v11_0_audio_get_pin(adev); 1645aaa36a97SAlex Deucher dce_v11_0_audio_enable(adev, dig->afmt->pin, false); 1646aaa36a97SAlex Deucher 1647aaa36a97SAlex Deucher dce_v11_0_audio_set_dto(encoder, mode->clock); 1648aaa36a97SAlex Deucher 1649aaa36a97SAlex Deucher tmp = RREG32(mmHDMI_VBI_PACKET_CONTROL + dig->afmt->offset); 1650aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_VBI_PACKET_CONTROL, HDMI_NULL_SEND, 1); 1651aaa36a97SAlex Deucher WREG32(mmHDMI_VBI_PACKET_CONTROL + dig->afmt->offset, tmp); /* send null packets when required */ 1652aaa36a97SAlex Deucher 1653aaa36a97SAlex Deucher WREG32(mmAFMT_AUDIO_CRC_CONTROL + dig->afmt->offset, 0x1000); 1654aaa36a97SAlex Deucher 1655aaa36a97SAlex Deucher tmp = RREG32(mmHDMI_CONTROL + dig->afmt->offset); 1656aaa36a97SAlex Deucher switch (bpc) { 1657aaa36a97SAlex Deucher case 0: 1658aaa36a97SAlex Deucher case 6: 1659aaa36a97SAlex Deucher case 8: 1660aaa36a97SAlex Deucher case 16: 1661aaa36a97SAlex Deucher default: 1662aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_CONTROL, HDMI_DEEP_COLOR_ENABLE, 0); 1663aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_CONTROL, 
HDMI_DEEP_COLOR_DEPTH, 0); 1664aaa36a97SAlex Deucher DRM_DEBUG("%s: Disabling hdmi deep color for %d bpc.\n", 1665aaa36a97SAlex Deucher connector->name, bpc); 1666aaa36a97SAlex Deucher break; 1667aaa36a97SAlex Deucher case 10: 1668aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_CONTROL, HDMI_DEEP_COLOR_ENABLE, 1); 1669aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_CONTROL, HDMI_DEEP_COLOR_DEPTH, 1); 1670aaa36a97SAlex Deucher DRM_DEBUG("%s: Enabling hdmi deep color 30 for 10 bpc.\n", 1671aaa36a97SAlex Deucher connector->name); 1672aaa36a97SAlex Deucher break; 1673aaa36a97SAlex Deucher case 12: 1674aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_CONTROL, HDMI_DEEP_COLOR_ENABLE, 1); 1675aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_CONTROL, HDMI_DEEP_COLOR_DEPTH, 2); 1676aaa36a97SAlex Deucher DRM_DEBUG("%s: Enabling hdmi deep color 36 for 12 bpc.\n", 1677aaa36a97SAlex Deucher connector->name); 1678aaa36a97SAlex Deucher break; 1679aaa36a97SAlex Deucher } 1680aaa36a97SAlex Deucher WREG32(mmHDMI_CONTROL + dig->afmt->offset, tmp); 1681aaa36a97SAlex Deucher 1682aaa36a97SAlex Deucher tmp = RREG32(mmHDMI_VBI_PACKET_CONTROL + dig->afmt->offset); 1683aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_VBI_PACKET_CONTROL, HDMI_NULL_SEND, 1); /* send null packets when required */ 1684aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_VBI_PACKET_CONTROL, HDMI_GC_SEND, 1); /* send general control packets */ 1685aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_VBI_PACKET_CONTROL, HDMI_GC_CONT, 1); /* send general control packets every frame */ 1686aaa36a97SAlex Deucher WREG32(mmHDMI_VBI_PACKET_CONTROL + dig->afmt->offset, tmp); 1687aaa36a97SAlex Deucher 1688aaa36a97SAlex Deucher tmp = RREG32(mmHDMI_INFOFRAME_CONTROL0 + dig->afmt->offset); 1689aaa36a97SAlex Deucher /* enable audio info frames (frames won't be set until audio is enabled) */ 1690aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_INFOFRAME_CONTROL0, HDMI_AUDIO_INFO_SEND, 1); 1691aaa36a97SAlex 
Deucher /* required for audio info values to be updated */ 1692aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_INFOFRAME_CONTROL0, HDMI_AUDIO_INFO_CONT, 1); 1693aaa36a97SAlex Deucher WREG32(mmHDMI_INFOFRAME_CONTROL0 + dig->afmt->offset, tmp); 1694aaa36a97SAlex Deucher 1695aaa36a97SAlex Deucher tmp = RREG32(mmAFMT_INFOFRAME_CONTROL0 + dig->afmt->offset); 1696aaa36a97SAlex Deucher /* required for audio info values to be updated */ 1697aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AFMT_INFOFRAME_CONTROL0, AFMT_AUDIO_INFO_UPDATE, 1); 1698aaa36a97SAlex Deucher WREG32(mmAFMT_INFOFRAME_CONTROL0 + dig->afmt->offset, tmp); 1699aaa36a97SAlex Deucher 1700aaa36a97SAlex Deucher tmp = RREG32(mmHDMI_INFOFRAME_CONTROL1 + dig->afmt->offset); 1701aaa36a97SAlex Deucher /* anything other than 0 */ 1702aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_INFOFRAME_CONTROL1, HDMI_AUDIO_INFO_LINE, 2); 1703aaa36a97SAlex Deucher WREG32(mmHDMI_INFOFRAME_CONTROL1 + dig->afmt->offset, tmp); 1704aaa36a97SAlex Deucher 1705aaa36a97SAlex Deucher WREG32(mmHDMI_GC + dig->afmt->offset, 0); /* unset HDMI_GC_AVMUTE */ 1706aaa36a97SAlex Deucher 1707aaa36a97SAlex Deucher tmp = RREG32(mmHDMI_AUDIO_PACKET_CONTROL + dig->afmt->offset); 1708aaa36a97SAlex Deucher /* set the default audio delay */ 1709aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_AUDIO_PACKET_CONTROL, HDMI_AUDIO_DELAY_EN, 1); 1710aaa36a97SAlex Deucher /* should be suffient for all audio modes and small enough for all hblanks */ 1711aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_AUDIO_PACKET_CONTROL, HDMI_AUDIO_PACKETS_PER_LINE, 3); 1712aaa36a97SAlex Deucher WREG32(mmHDMI_AUDIO_PACKET_CONTROL + dig->afmt->offset, tmp); 1713aaa36a97SAlex Deucher 1714aaa36a97SAlex Deucher tmp = RREG32(mmAFMT_AUDIO_PACKET_CONTROL + dig->afmt->offset); 1715aaa36a97SAlex Deucher /* allow 60958 channel status fields to be updated */ 1716aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AFMT_AUDIO_PACKET_CONTROL, AFMT_60958_CS_UPDATE, 1); 
1717aaa36a97SAlex Deucher WREG32(mmAFMT_AUDIO_PACKET_CONTROL + dig->afmt->offset, tmp); 1718aaa36a97SAlex Deucher 1719aaa36a97SAlex Deucher tmp = RREG32(mmHDMI_ACR_PACKET_CONTROL + dig->afmt->offset); 1720aaa36a97SAlex Deucher if (bpc > 8) 1721aaa36a97SAlex Deucher /* clear SW CTS value */ 1722aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_ACR_PACKET_CONTROL, HDMI_ACR_SOURCE, 0); 1723aaa36a97SAlex Deucher else 1724aaa36a97SAlex Deucher /* select SW CTS value */ 1725aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_ACR_PACKET_CONTROL, HDMI_ACR_SOURCE, 1); 1726aaa36a97SAlex Deucher /* allow hw to sent ACR packets when required */ 1727aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_ACR_PACKET_CONTROL, HDMI_ACR_AUTO_SEND, 1); 1728aaa36a97SAlex Deucher WREG32(mmHDMI_ACR_PACKET_CONTROL + dig->afmt->offset, tmp); 1729aaa36a97SAlex Deucher 1730aaa36a97SAlex Deucher dce_v11_0_afmt_update_ACR(encoder, mode->clock); 1731aaa36a97SAlex Deucher 1732aaa36a97SAlex Deucher tmp = RREG32(mmAFMT_60958_0 + dig->afmt->offset); 1733aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AFMT_60958_0, AFMT_60958_CS_CHANNEL_NUMBER_L, 1); 1734aaa36a97SAlex Deucher WREG32(mmAFMT_60958_0 + dig->afmt->offset, tmp); 1735aaa36a97SAlex Deucher 1736aaa36a97SAlex Deucher tmp = RREG32(mmAFMT_60958_1 + dig->afmt->offset); 1737aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AFMT_60958_1, AFMT_60958_CS_CHANNEL_NUMBER_R, 2); 1738aaa36a97SAlex Deucher WREG32(mmAFMT_60958_1 + dig->afmt->offset, tmp); 1739aaa36a97SAlex Deucher 1740aaa36a97SAlex Deucher tmp = RREG32(mmAFMT_60958_2 + dig->afmt->offset); 1741aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AFMT_60958_2, AFMT_60958_CS_CHANNEL_NUMBER_2, 3); 1742aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AFMT_60958_2, AFMT_60958_CS_CHANNEL_NUMBER_3, 4); 1743aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AFMT_60958_2, AFMT_60958_CS_CHANNEL_NUMBER_4, 5); 1744aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AFMT_60958_2, AFMT_60958_CS_CHANNEL_NUMBER_5, 6); 
1745aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AFMT_60958_2, AFMT_60958_CS_CHANNEL_NUMBER_6, 7); 1746aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AFMT_60958_2, AFMT_60958_CS_CHANNEL_NUMBER_7, 8); 1747aaa36a97SAlex Deucher WREG32(mmAFMT_60958_2 + dig->afmt->offset, tmp); 1748aaa36a97SAlex Deucher 1749aaa36a97SAlex Deucher dce_v11_0_audio_write_speaker_allocation(encoder); 1750aaa36a97SAlex Deucher 1751aaa36a97SAlex Deucher WREG32(mmAFMT_AUDIO_PACKET_CONTROL2 + dig->afmt->offset, 1752aaa36a97SAlex Deucher (0xff << AFMT_AUDIO_PACKET_CONTROL2__AFMT_AUDIO_CHANNEL_ENABLE__SHIFT)); 1753aaa36a97SAlex Deucher 1754aaa36a97SAlex Deucher dce_v11_0_afmt_audio_select_pin(encoder); 1755aaa36a97SAlex Deucher dce_v11_0_audio_write_sad_regs(encoder); 1756aaa36a97SAlex Deucher dce_v11_0_audio_write_latency_fields(encoder, mode); 1757aaa36a97SAlex Deucher 175813d0add3SVille Syrjälä err = drm_hdmi_avi_infoframe_from_display_mode(&frame, connector, mode); 1759aaa36a97SAlex Deucher if (err < 0) { 1760aaa36a97SAlex Deucher DRM_ERROR("failed to setup AVI infoframe: %zd\n", err); 1761aaa36a97SAlex Deucher return; 1762aaa36a97SAlex Deucher } 1763aaa36a97SAlex Deucher 1764aaa36a97SAlex Deucher err = hdmi_avi_infoframe_pack(&frame, buffer, sizeof(buffer)); 1765aaa36a97SAlex Deucher if (err < 0) { 1766aaa36a97SAlex Deucher DRM_ERROR("failed to pack AVI infoframe: %zd\n", err); 1767aaa36a97SAlex Deucher return; 1768aaa36a97SAlex Deucher } 1769aaa36a97SAlex Deucher 1770aaa36a97SAlex Deucher dce_v11_0_afmt_update_avi_infoframe(encoder, buffer, sizeof(buffer)); 1771aaa36a97SAlex Deucher 1772aaa36a97SAlex Deucher tmp = RREG32(mmHDMI_INFOFRAME_CONTROL0 + dig->afmt->offset); 1773aaa36a97SAlex Deucher /* enable AVI info frames */ 1774aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_INFOFRAME_CONTROL0, HDMI_AVI_INFO_SEND, 1); 1775aaa36a97SAlex Deucher /* required for audio info values to be updated */ 1776aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_INFOFRAME_CONTROL0, 
HDMI_AVI_INFO_CONT, 1); 1777aaa36a97SAlex Deucher WREG32(mmHDMI_INFOFRAME_CONTROL0 + dig->afmt->offset, tmp); 1778aaa36a97SAlex Deucher 1779aaa36a97SAlex Deucher tmp = RREG32(mmHDMI_INFOFRAME_CONTROL1 + dig->afmt->offset); 1780aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, HDMI_INFOFRAME_CONTROL1, HDMI_AVI_INFO_LINE, 2); 1781aaa36a97SAlex Deucher WREG32(mmHDMI_INFOFRAME_CONTROL1 + dig->afmt->offset, tmp); 1782aaa36a97SAlex Deucher 1783aaa36a97SAlex Deucher tmp = RREG32(mmAFMT_AUDIO_PACKET_CONTROL + dig->afmt->offset); 1784aaa36a97SAlex Deucher /* send audio packets */ 1785aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, AFMT_AUDIO_PACKET_CONTROL, AFMT_AUDIO_SAMPLE_SEND, 1); 1786aaa36a97SAlex Deucher WREG32(mmAFMT_AUDIO_PACKET_CONTROL + dig->afmt->offset, tmp); 1787aaa36a97SAlex Deucher 1788aaa36a97SAlex Deucher WREG32(mmAFMT_RAMP_CONTROL0 + dig->afmt->offset, 0x00FFFFFF); 1789aaa36a97SAlex Deucher WREG32(mmAFMT_RAMP_CONTROL1 + dig->afmt->offset, 0x007FFFFF); 1790aaa36a97SAlex Deucher WREG32(mmAFMT_RAMP_CONTROL2 + dig->afmt->offset, 0x00000001); 1791aaa36a97SAlex Deucher WREG32(mmAFMT_RAMP_CONTROL3 + dig->afmt->offset, 0x00000001); 1792aaa36a97SAlex Deucher 1793aaa36a97SAlex Deucher /* enable audio after to setting up hw */ 1794aaa36a97SAlex Deucher dce_v11_0_audio_enable(adev, dig->afmt->pin, true); 1795aaa36a97SAlex Deucher } 1796aaa36a97SAlex Deucher 1797aaa36a97SAlex Deucher static void dce_v11_0_afmt_enable(struct drm_encoder *encoder, bool enable) 1798aaa36a97SAlex Deucher { 1799aaa36a97SAlex Deucher struct drm_device *dev = encoder->dev; 18001348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 1801aaa36a97SAlex Deucher struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1802aaa36a97SAlex Deucher struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 1803aaa36a97SAlex Deucher 1804aaa36a97SAlex Deucher if (!dig || !dig->afmt) 1805aaa36a97SAlex Deucher return; 1806aaa36a97SAlex Deucher 1807aaa36a97SAlex Deucher /* Silent, 
r600_hdmi_enable will raise WARN for us */ 1808aaa36a97SAlex Deucher if (enable && dig->afmt->enabled) 1809aaa36a97SAlex Deucher return; 1810aaa36a97SAlex Deucher if (!enable && !dig->afmt->enabled) 1811aaa36a97SAlex Deucher return; 1812aaa36a97SAlex Deucher 1813aaa36a97SAlex Deucher if (!enable && dig->afmt->pin) { 1814aaa36a97SAlex Deucher dce_v11_0_audio_enable(adev, dig->afmt->pin, false); 1815aaa36a97SAlex Deucher dig->afmt->pin = NULL; 1816aaa36a97SAlex Deucher } 1817aaa36a97SAlex Deucher 1818aaa36a97SAlex Deucher dig->afmt->enabled = enable; 1819aaa36a97SAlex Deucher 1820aaa36a97SAlex Deucher DRM_DEBUG("%sabling AFMT interface @ 0x%04X for encoder 0x%x\n", 1821aaa36a97SAlex Deucher enable ? "En" : "Dis", dig->afmt->offset, amdgpu_encoder->encoder_id); 1822aaa36a97SAlex Deucher } 1823aaa36a97SAlex Deucher 1824041ab0a4STom St Denis static int dce_v11_0_afmt_init(struct amdgpu_device *adev) 1825aaa36a97SAlex Deucher { 1826aaa36a97SAlex Deucher int i; 1827aaa36a97SAlex Deucher 1828aaa36a97SAlex Deucher for (i = 0; i < adev->mode_info.num_dig; i++) 1829aaa36a97SAlex Deucher adev->mode_info.afmt[i] = NULL; 1830aaa36a97SAlex Deucher 1831aaa36a97SAlex Deucher /* DCE11 has audio blocks tied to DIG encoders */ 1832aaa36a97SAlex Deucher for (i = 0; i < adev->mode_info.num_dig; i++) { 1833aaa36a97SAlex Deucher adev->mode_info.afmt[i] = kzalloc(sizeof(struct amdgpu_afmt), GFP_KERNEL); 1834aaa36a97SAlex Deucher if (adev->mode_info.afmt[i]) { 1835aaa36a97SAlex Deucher adev->mode_info.afmt[i]->offset = dig_offsets[i]; 1836aaa36a97SAlex Deucher adev->mode_info.afmt[i]->id = i; 1837041ab0a4STom St Denis } else { 1838041ab0a4STom St Denis int j; 1839041ab0a4STom St Denis for (j = 0; j < i; j++) { 1840041ab0a4STom St Denis kfree(adev->mode_info.afmt[j]); 1841041ab0a4STom St Denis adev->mode_info.afmt[j] = NULL; 1842041ab0a4STom St Denis } 1843041ab0a4STom St Denis return -ENOMEM; 1844aaa36a97SAlex Deucher } 1845aaa36a97SAlex Deucher } 1846041ab0a4STom St Denis return 0; 
1847aaa36a97SAlex Deucher } 1848aaa36a97SAlex Deucher 1849aaa36a97SAlex Deucher static void dce_v11_0_afmt_fini(struct amdgpu_device *adev) 1850aaa36a97SAlex Deucher { 1851aaa36a97SAlex Deucher int i; 1852aaa36a97SAlex Deucher 1853aaa36a97SAlex Deucher for (i = 0; i < adev->mode_info.num_dig; i++) { 1854aaa36a97SAlex Deucher kfree(adev->mode_info.afmt[i]); 1855aaa36a97SAlex Deucher adev->mode_info.afmt[i] = NULL; 1856aaa36a97SAlex Deucher } 1857aaa36a97SAlex Deucher } 1858aaa36a97SAlex Deucher 1859aaa36a97SAlex Deucher static const u32 vga_control_regs[6] = 1860aaa36a97SAlex Deucher { 1861aaa36a97SAlex Deucher mmD1VGA_CONTROL, 1862aaa36a97SAlex Deucher mmD2VGA_CONTROL, 1863aaa36a97SAlex Deucher mmD3VGA_CONTROL, 1864aaa36a97SAlex Deucher mmD4VGA_CONTROL, 1865aaa36a97SAlex Deucher mmD5VGA_CONTROL, 1866aaa36a97SAlex Deucher mmD6VGA_CONTROL, 1867aaa36a97SAlex Deucher }; 1868aaa36a97SAlex Deucher 1869aaa36a97SAlex Deucher static void dce_v11_0_vga_enable(struct drm_crtc *crtc, bool enable) 1870aaa36a97SAlex Deucher { 1871aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 1872aaa36a97SAlex Deucher struct drm_device *dev = crtc->dev; 18731348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 1874aaa36a97SAlex Deucher u32 vga_control; 1875aaa36a97SAlex Deucher 1876aaa36a97SAlex Deucher vga_control = RREG32(vga_control_regs[amdgpu_crtc->crtc_id]) & ~1; 1877aaa36a97SAlex Deucher if (enable) 1878aaa36a97SAlex Deucher WREG32(vga_control_regs[amdgpu_crtc->crtc_id], vga_control | 1); 1879aaa36a97SAlex Deucher else 1880aaa36a97SAlex Deucher WREG32(vga_control_regs[amdgpu_crtc->crtc_id], vga_control); 1881aaa36a97SAlex Deucher } 1882aaa36a97SAlex Deucher 1883aaa36a97SAlex Deucher static void dce_v11_0_grph_enable(struct drm_crtc *crtc, bool enable) 1884aaa36a97SAlex Deucher { 1885aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 1886aaa36a97SAlex Deucher struct drm_device *dev = crtc->dev; 18871348969aSLuben 
Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 1888aaa36a97SAlex Deucher 1889aaa36a97SAlex Deucher if (enable) 1890aaa36a97SAlex Deucher WREG32(mmGRPH_ENABLE + amdgpu_crtc->crtc_offset, 1); 1891aaa36a97SAlex Deucher else 1892aaa36a97SAlex Deucher WREG32(mmGRPH_ENABLE + amdgpu_crtc->crtc_offset, 0); 1893aaa36a97SAlex Deucher } 1894aaa36a97SAlex Deucher 1895aaa36a97SAlex Deucher static int dce_v11_0_crtc_do_set_base(struct drm_crtc *crtc, 1896aaa36a97SAlex Deucher struct drm_framebuffer *fb, 1897aaa36a97SAlex Deucher int x, int y, int atomic) 1898aaa36a97SAlex Deucher { 1899aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 1900aaa36a97SAlex Deucher struct drm_device *dev = crtc->dev; 19011348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 1902aaa36a97SAlex Deucher struct drm_framebuffer *target_fb; 1903aaa36a97SAlex Deucher struct drm_gem_object *obj; 1904765e7fbfSChristian König struct amdgpu_bo *abo; 1905aaa36a97SAlex Deucher uint64_t fb_location, tiling_flags; 1906aaa36a97SAlex Deucher uint32_t fb_format, fb_pitch_pixels; 1907aaa36a97SAlex Deucher u32 fb_swap = REG_SET_FIELD(0, GRPH_SWAP_CNTL, GRPH_ENDIAN_SWAP, ENDIAN_NONE); 1908fbd76d59SMarek Olšák u32 pipe_config; 1909aaa36a97SAlex Deucher u32 tmp, viewport_w, viewport_h; 1910aaa36a97SAlex Deucher int r; 1911aaa36a97SAlex Deucher bool bypass_lut = false; 1912aaa36a97SAlex Deucher 1913aaa36a97SAlex Deucher /* no fb bound */ 1914aaa36a97SAlex Deucher if (!atomic && !crtc->primary->fb) { 1915aaa36a97SAlex Deucher DRM_DEBUG_KMS("No FB bound\n"); 1916aaa36a97SAlex Deucher return 0; 1917aaa36a97SAlex Deucher } 1918aaa36a97SAlex Deucher 1919e68d14ddSDaniel Stone if (atomic) 1920aaa36a97SAlex Deucher target_fb = fb; 1921e68d14ddSDaniel Stone else 1922aaa36a97SAlex Deucher target_fb = crtc->primary->fb; 1923aaa36a97SAlex Deucher 1924aaa36a97SAlex Deucher /* If atomic, assume fb object is pinned & idle & fenced and 1925aaa36a97SAlex Deucher * just update base pointers 
1926aaa36a97SAlex Deucher */ 1927e68d14ddSDaniel Stone obj = target_fb->obj[0]; 1928765e7fbfSChristian König abo = gem_to_amdgpu_bo(obj); 1929765e7fbfSChristian König r = amdgpu_bo_reserve(abo, false); 1930aaa36a97SAlex Deucher if (unlikely(r != 0)) 1931aaa36a97SAlex Deucher return r; 1932aaa36a97SAlex Deucher 19337b7c6c81SJunwei Zhang if (!atomic) { 19347b7c6c81SJunwei Zhang r = amdgpu_bo_pin(abo, AMDGPU_GEM_DOMAIN_VRAM); 1935aaa36a97SAlex Deucher if (unlikely(r != 0)) { 1936765e7fbfSChristian König amdgpu_bo_unreserve(abo); 1937aaa36a97SAlex Deucher return -EINVAL; 1938aaa36a97SAlex Deucher } 1939aaa36a97SAlex Deucher } 19407b7c6c81SJunwei Zhang fb_location = amdgpu_bo_gpu_offset(abo); 1941aaa36a97SAlex Deucher 1942765e7fbfSChristian König amdgpu_bo_get_tiling_flags(abo, &tiling_flags); 1943765e7fbfSChristian König amdgpu_bo_unreserve(abo); 1944aaa36a97SAlex Deucher 1945fbd76d59SMarek Olšák pipe_config = AMDGPU_TILING_GET(tiling_flags, PIPE_CONFIG); 1946fbd76d59SMarek Olšák 1947438b74a5SVille Syrjälä switch (target_fb->format->format) { 1948aaa36a97SAlex Deucher case DRM_FORMAT_C8: 1949aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(0, GRPH_CONTROL, GRPH_DEPTH, 0); 1950aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_FORMAT, 0); 1951aaa36a97SAlex Deucher break; 1952aaa36a97SAlex Deucher case DRM_FORMAT_XRGB4444: 1953aaa36a97SAlex Deucher case DRM_FORMAT_ARGB4444: 1954aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(0, GRPH_CONTROL, GRPH_DEPTH, 1); 1955aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_FORMAT, 2); 1956aaa36a97SAlex Deucher #ifdef __BIG_ENDIAN 1957aaa36a97SAlex Deucher fb_swap = REG_SET_FIELD(fb_swap, GRPH_SWAP_CNTL, GRPH_ENDIAN_SWAP, 1958aaa36a97SAlex Deucher ENDIAN_8IN16); 1959aaa36a97SAlex Deucher #endif 1960aaa36a97SAlex Deucher break; 1961aaa36a97SAlex Deucher case DRM_FORMAT_XRGB1555: 1962aaa36a97SAlex Deucher case DRM_FORMAT_ARGB1555: 1963aaa36a97SAlex Deucher fb_format = 
REG_SET_FIELD(0, GRPH_CONTROL, GRPH_DEPTH, 1); 1964aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_FORMAT, 0); 1965aaa36a97SAlex Deucher #ifdef __BIG_ENDIAN 1966aaa36a97SAlex Deucher fb_swap = REG_SET_FIELD(fb_swap, GRPH_SWAP_CNTL, GRPH_ENDIAN_SWAP, 1967aaa36a97SAlex Deucher ENDIAN_8IN16); 1968aaa36a97SAlex Deucher #endif 1969aaa36a97SAlex Deucher break; 1970aaa36a97SAlex Deucher case DRM_FORMAT_BGRX5551: 1971aaa36a97SAlex Deucher case DRM_FORMAT_BGRA5551: 1972aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(0, GRPH_CONTROL, GRPH_DEPTH, 1); 1973aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_FORMAT, 5); 1974aaa36a97SAlex Deucher #ifdef __BIG_ENDIAN 1975aaa36a97SAlex Deucher fb_swap = REG_SET_FIELD(fb_swap, GRPH_SWAP_CNTL, GRPH_ENDIAN_SWAP, 1976aaa36a97SAlex Deucher ENDIAN_8IN16); 1977aaa36a97SAlex Deucher #endif 1978aaa36a97SAlex Deucher break; 1979aaa36a97SAlex Deucher case DRM_FORMAT_RGB565: 1980aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(0, GRPH_CONTROL, GRPH_DEPTH, 1); 1981aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_FORMAT, 1); 1982aaa36a97SAlex Deucher #ifdef __BIG_ENDIAN 1983aaa36a97SAlex Deucher fb_swap = REG_SET_FIELD(fb_swap, GRPH_SWAP_CNTL, GRPH_ENDIAN_SWAP, 1984aaa36a97SAlex Deucher ENDIAN_8IN16); 1985aaa36a97SAlex Deucher #endif 1986aaa36a97SAlex Deucher break; 1987aaa36a97SAlex Deucher case DRM_FORMAT_XRGB8888: 1988aaa36a97SAlex Deucher case DRM_FORMAT_ARGB8888: 1989aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(0, GRPH_CONTROL, GRPH_DEPTH, 2); 1990aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_FORMAT, 0); 1991aaa36a97SAlex Deucher #ifdef __BIG_ENDIAN 1992aaa36a97SAlex Deucher fb_swap = REG_SET_FIELD(fb_swap, GRPH_SWAP_CNTL, GRPH_ENDIAN_SWAP, 1993aaa36a97SAlex Deucher ENDIAN_8IN32); 1994aaa36a97SAlex Deucher #endif 1995aaa36a97SAlex Deucher break; 1996aaa36a97SAlex Deucher case DRM_FORMAT_XRGB2101010: 1997aaa36a97SAlex 
Deucher case DRM_FORMAT_ARGB2101010: 1998aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(0, GRPH_CONTROL, GRPH_DEPTH, 2); 1999aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_FORMAT, 1); 2000aaa36a97SAlex Deucher #ifdef __BIG_ENDIAN 2001aaa36a97SAlex Deucher fb_swap = REG_SET_FIELD(fb_swap, GRPH_SWAP_CNTL, GRPH_ENDIAN_SWAP, 2002aaa36a97SAlex Deucher ENDIAN_8IN32); 2003aaa36a97SAlex Deucher #endif 2004aaa36a97SAlex Deucher /* Greater 8 bpc fb needs to bypass hw-lut to retain precision */ 2005aaa36a97SAlex Deucher bypass_lut = true; 2006aaa36a97SAlex Deucher break; 2007aaa36a97SAlex Deucher case DRM_FORMAT_BGRX1010102: 2008aaa36a97SAlex Deucher case DRM_FORMAT_BGRA1010102: 2009aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(0, GRPH_CONTROL, GRPH_DEPTH, 2); 2010aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_FORMAT, 4); 2011aaa36a97SAlex Deucher #ifdef __BIG_ENDIAN 2012aaa36a97SAlex Deucher fb_swap = REG_SET_FIELD(fb_swap, GRPH_SWAP_CNTL, GRPH_ENDIAN_SWAP, 2013aaa36a97SAlex Deucher ENDIAN_8IN32); 2014aaa36a97SAlex Deucher #endif 2015aaa36a97SAlex Deucher /* Greater 8 bpc fb needs to bypass hw-lut to retain precision */ 2016aaa36a97SAlex Deucher bypass_lut = true; 2017aaa36a97SAlex Deucher break; 201800ecc6e6SMauro Rossi case DRM_FORMAT_XBGR8888: 201900ecc6e6SMauro Rossi case DRM_FORMAT_ABGR8888: 202000ecc6e6SMauro Rossi fb_format = REG_SET_FIELD(0, GRPH_CONTROL, GRPH_DEPTH, 2); 202100ecc6e6SMauro Rossi fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_FORMAT, 0); 202200ecc6e6SMauro Rossi fb_swap = REG_SET_FIELD(fb_swap, GRPH_SWAP_CNTL, GRPH_RED_CROSSBAR, 2); 202300ecc6e6SMauro Rossi fb_swap = REG_SET_FIELD(fb_swap, GRPH_SWAP_CNTL, GRPH_BLUE_CROSSBAR, 2); 202400ecc6e6SMauro Rossi #ifdef __BIG_ENDIAN 202500ecc6e6SMauro Rossi fb_swap = REG_SET_FIELD(fb_swap, GRPH_SWAP_CNTL, GRPH_ENDIAN_SWAP, 202600ecc6e6SMauro Rossi ENDIAN_8IN32); 202700ecc6e6SMauro Rossi #endif 202800ecc6e6SMauro Rossi break; 
2029aaa36a97SAlex Deucher default: 203092f1d09cSSakari Ailus DRM_ERROR("Unsupported screen format %p4cc\n", 203192f1d09cSSakari Ailus &target_fb->format->format); 2032aaa36a97SAlex Deucher return -EINVAL; 2033aaa36a97SAlex Deucher } 2034aaa36a97SAlex Deucher 2035fbd76d59SMarek Olšák if (AMDGPU_TILING_GET(tiling_flags, ARRAY_MODE) == ARRAY_2D_TILED_THIN1) { 2036fbd76d59SMarek Olšák unsigned bankw, bankh, mtaspect, tile_split, num_banks; 2037aaa36a97SAlex Deucher 2038fbd76d59SMarek Olšák bankw = AMDGPU_TILING_GET(tiling_flags, BANK_WIDTH); 2039fbd76d59SMarek Olšák bankh = AMDGPU_TILING_GET(tiling_flags, BANK_HEIGHT); 2040fbd76d59SMarek Olšák mtaspect = AMDGPU_TILING_GET(tiling_flags, MACRO_TILE_ASPECT); 2041fbd76d59SMarek Olšák tile_split = AMDGPU_TILING_GET(tiling_flags, TILE_SPLIT); 2042fbd76d59SMarek Olšák num_banks = AMDGPU_TILING_GET(tiling_flags, NUM_BANKS); 2043aaa36a97SAlex Deucher 2044aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_NUM_BANKS, num_banks); 2045aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_ARRAY_MODE, 2046aaa36a97SAlex Deucher ARRAY_2D_TILED_THIN1); 2047aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_TILE_SPLIT, 2048aaa36a97SAlex Deucher tile_split); 2049aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_BANK_WIDTH, bankw); 2050aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_BANK_HEIGHT, bankh); 2051aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_MACRO_TILE_ASPECT, 2052aaa36a97SAlex Deucher mtaspect); 2053aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_MICRO_TILE_MODE, 2054aaa36a97SAlex Deucher ADDR_SURF_MICRO_TILING_DISPLAY); 2055fbd76d59SMarek Olšák } else if (AMDGPU_TILING_GET(tiling_flags, ARRAY_MODE) == ARRAY_1D_TILED_THIN1) { 2056aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_ARRAY_MODE, 
2057aaa36a97SAlex Deucher ARRAY_1D_TILED_THIN1); 2058aaa36a97SAlex Deucher } 2059aaa36a97SAlex Deucher 2060aaa36a97SAlex Deucher fb_format = REG_SET_FIELD(fb_format, GRPH_CONTROL, GRPH_PIPE_CONFIG, 2061aaa36a97SAlex Deucher pipe_config); 2062aaa36a97SAlex Deucher 2063aaa36a97SAlex Deucher dce_v11_0_vga_enable(crtc, false); 2064aaa36a97SAlex Deucher 2065cb9e59d7SAlex Deucher /* Make sure surface address is updated at vertical blank rather than 2066cb9e59d7SAlex Deucher * horizontal blank 2067cb9e59d7SAlex Deucher */ 2068cb9e59d7SAlex Deucher tmp = RREG32(mmGRPH_FLIP_CONTROL + amdgpu_crtc->crtc_offset); 2069cb9e59d7SAlex Deucher tmp = REG_SET_FIELD(tmp, GRPH_FLIP_CONTROL, 2070cb9e59d7SAlex Deucher GRPH_SURFACE_UPDATE_H_RETRACE_EN, 0); 2071cb9e59d7SAlex Deucher WREG32(mmGRPH_FLIP_CONTROL + amdgpu_crtc->crtc_offset, tmp); 2072cb9e59d7SAlex Deucher 2073aaa36a97SAlex Deucher WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset, 2074aaa36a97SAlex Deucher upper_32_bits(fb_location)); 2075aaa36a97SAlex Deucher WREG32(mmGRPH_SECONDARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset, 2076aaa36a97SAlex Deucher upper_32_bits(fb_location)); 2077aaa36a97SAlex Deucher WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset, 2078aaa36a97SAlex Deucher (u32)fb_location & GRPH_PRIMARY_SURFACE_ADDRESS__GRPH_PRIMARY_SURFACE_ADDRESS_MASK); 2079aaa36a97SAlex Deucher WREG32(mmGRPH_SECONDARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset, 2080aaa36a97SAlex Deucher (u32) fb_location & GRPH_SECONDARY_SURFACE_ADDRESS__GRPH_SECONDARY_SURFACE_ADDRESS_MASK); 2081aaa36a97SAlex Deucher WREG32(mmGRPH_CONTROL + amdgpu_crtc->crtc_offset, fb_format); 2082aaa36a97SAlex Deucher WREG32(mmGRPH_SWAP_CNTL + amdgpu_crtc->crtc_offset, fb_swap); 2083aaa36a97SAlex Deucher 2084aaa36a97SAlex Deucher /* 2085aaa36a97SAlex Deucher * The LUT only has 256 slots for indexing by a 8 bpc fb. 
Bypass the LUT 2086aaa36a97SAlex Deucher * for > 8 bpc scanout to avoid truncation of fb indices to 8 msb's, to 2087aaa36a97SAlex Deucher * retain the full precision throughout the pipeline. 2088aaa36a97SAlex Deucher */ 2089aaa36a97SAlex Deucher tmp = RREG32(mmGRPH_LUT_10BIT_BYPASS + amdgpu_crtc->crtc_offset); 2090aaa36a97SAlex Deucher if (bypass_lut) 2091aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, GRPH_LUT_10BIT_BYPASS, GRPH_LUT_10BIT_BYPASS_EN, 1); 2092aaa36a97SAlex Deucher else 2093aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, GRPH_LUT_10BIT_BYPASS, GRPH_LUT_10BIT_BYPASS_EN, 0); 2094aaa36a97SAlex Deucher WREG32(mmGRPH_LUT_10BIT_BYPASS + amdgpu_crtc->crtc_offset, tmp); 2095aaa36a97SAlex Deucher 2096aaa36a97SAlex Deucher if (bypass_lut) 2097aaa36a97SAlex Deucher DRM_DEBUG_KMS("Bypassing hardware LUT due to 10 bit fb scanout.\n"); 2098aaa36a97SAlex Deucher 2099aaa36a97SAlex Deucher WREG32(mmGRPH_SURFACE_OFFSET_X + amdgpu_crtc->crtc_offset, 0); 2100aaa36a97SAlex Deucher WREG32(mmGRPH_SURFACE_OFFSET_Y + amdgpu_crtc->crtc_offset, 0); 2101aaa36a97SAlex Deucher WREG32(mmGRPH_X_START + amdgpu_crtc->crtc_offset, 0); 2102aaa36a97SAlex Deucher WREG32(mmGRPH_Y_START + amdgpu_crtc->crtc_offset, 0); 2103aaa36a97SAlex Deucher WREG32(mmGRPH_X_END + amdgpu_crtc->crtc_offset, target_fb->width); 2104aaa36a97SAlex Deucher WREG32(mmGRPH_Y_END + amdgpu_crtc->crtc_offset, target_fb->height); 2105aaa36a97SAlex Deucher 2106272725c7SVille Syrjälä fb_pitch_pixels = target_fb->pitches[0] / target_fb->format->cpp[0]; 2107aaa36a97SAlex Deucher WREG32(mmGRPH_PITCH + amdgpu_crtc->crtc_offset, fb_pitch_pixels); 2108aaa36a97SAlex Deucher 2109aaa36a97SAlex Deucher dce_v11_0_grph_enable(crtc, true); 2110aaa36a97SAlex Deucher 2111aaa36a97SAlex Deucher WREG32(mmLB_DESKTOP_HEIGHT + amdgpu_crtc->crtc_offset, 2112aaa36a97SAlex Deucher target_fb->height); 2113aaa36a97SAlex Deucher 2114aaa36a97SAlex Deucher x &= ~3; 2115aaa36a97SAlex Deucher y &= ~1; 2116aaa36a97SAlex Deucher WREG32(mmVIEWPORT_START + 
amdgpu_crtc->crtc_offset, 2117aaa36a97SAlex Deucher (x << 16) | y); 2118aaa36a97SAlex Deucher viewport_w = crtc->mode.hdisplay; 2119aaa36a97SAlex Deucher viewport_h = (crtc->mode.vdisplay + 1) & ~1; 2120aaa36a97SAlex Deucher WREG32(mmVIEWPORT_SIZE + amdgpu_crtc->crtc_offset, 2121aaa36a97SAlex Deucher (viewport_w << 16) | viewport_h); 2122aaa36a97SAlex Deucher 21233fd4b751SMichel Dänzer /* set pageflip to happen anywhere in vblank interval */ 21243fd4b751SMichel Dänzer WREG32(mmCRTC_MASTER_UPDATE_MODE + amdgpu_crtc->crtc_offset, 0); 2125aaa36a97SAlex Deucher 2126aaa36a97SAlex Deucher if (!atomic && fb && fb != crtc->primary->fb) { 2127e68d14ddSDaniel Stone abo = gem_to_amdgpu_bo(fb->obj[0]); 2128c81a1a74SMichel Dänzer r = amdgpu_bo_reserve(abo, true); 2129aaa36a97SAlex Deucher if (unlikely(r != 0)) 2130aaa36a97SAlex Deucher return r; 2131765e7fbfSChristian König amdgpu_bo_unpin(abo); 2132765e7fbfSChristian König amdgpu_bo_unreserve(abo); 2133aaa36a97SAlex Deucher } 2134aaa36a97SAlex Deucher 2135aaa36a97SAlex Deucher /* Bytes per pixel may have changed */ 2136aaa36a97SAlex Deucher dce_v11_0_bandwidth_update(adev); 2137aaa36a97SAlex Deucher 2138aaa36a97SAlex Deucher return 0; 2139aaa36a97SAlex Deucher } 2140aaa36a97SAlex Deucher 2141aaa36a97SAlex Deucher static void dce_v11_0_set_interleave(struct drm_crtc *crtc, 2142aaa36a97SAlex Deucher struct drm_display_mode *mode) 2143aaa36a97SAlex Deucher { 2144aaa36a97SAlex Deucher struct drm_device *dev = crtc->dev; 21451348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 2146aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 2147aaa36a97SAlex Deucher u32 tmp; 2148aaa36a97SAlex Deucher 2149aaa36a97SAlex Deucher tmp = RREG32(mmLB_DATA_FORMAT + amdgpu_crtc->crtc_offset); 2150aaa36a97SAlex Deucher if (mode->flags & DRM_MODE_FLAG_INTERLACE) 2151aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, LB_DATA_FORMAT, INTERLEAVE_EN, 1); 2152aaa36a97SAlex Deucher else 2153aaa36a97SAlex Deucher tmp = 
REG_SET_FIELD(tmp, LB_DATA_FORMAT, INTERLEAVE_EN, 0); 2154aaa36a97SAlex Deucher WREG32(mmLB_DATA_FORMAT + amdgpu_crtc->crtc_offset, tmp); 2155aaa36a97SAlex Deucher } 2156aaa36a97SAlex Deucher 2157aaa36a97SAlex Deucher static void dce_v11_0_crtc_load_lut(struct drm_crtc *crtc) 2158aaa36a97SAlex Deucher { 2159aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 2160aaa36a97SAlex Deucher struct drm_device *dev = crtc->dev; 21611348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 216276dd3cd8SPeter Rosin u16 *r, *g, *b; 2163aaa36a97SAlex Deucher int i; 2164aaa36a97SAlex Deucher u32 tmp; 2165aaa36a97SAlex Deucher 2166aaa36a97SAlex Deucher DRM_DEBUG_KMS("%d\n", amdgpu_crtc->crtc_id); 2167aaa36a97SAlex Deucher 2168aaa36a97SAlex Deucher tmp = RREG32(mmINPUT_CSC_CONTROL + amdgpu_crtc->crtc_offset); 2169aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, INPUT_CSC_CONTROL, INPUT_CSC_GRPH_MODE, 0); 2170aaa36a97SAlex Deucher WREG32(mmINPUT_CSC_CONTROL + amdgpu_crtc->crtc_offset, tmp); 2171aaa36a97SAlex Deucher 2172aaa36a97SAlex Deucher tmp = RREG32(mmPRESCALE_GRPH_CONTROL + amdgpu_crtc->crtc_offset); 2173aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, PRESCALE_GRPH_CONTROL, GRPH_PRESCALE_BYPASS, 1); 2174aaa36a97SAlex Deucher WREG32(mmPRESCALE_GRPH_CONTROL + amdgpu_crtc->crtc_offset, tmp); 2175aaa36a97SAlex Deucher 2176aaa36a97SAlex Deucher tmp = RREG32(mmINPUT_GAMMA_CONTROL + amdgpu_crtc->crtc_offset); 2177aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, INPUT_GAMMA_CONTROL, GRPH_INPUT_GAMMA_MODE, 0); 2178aaa36a97SAlex Deucher WREG32(mmINPUT_GAMMA_CONTROL + amdgpu_crtc->crtc_offset, tmp); 2179aaa36a97SAlex Deucher 2180aaa36a97SAlex Deucher WREG32(mmDC_LUT_CONTROL + amdgpu_crtc->crtc_offset, 0); 2181aaa36a97SAlex Deucher 2182aaa36a97SAlex Deucher WREG32(mmDC_LUT_BLACK_OFFSET_BLUE + amdgpu_crtc->crtc_offset, 0); 2183aaa36a97SAlex Deucher WREG32(mmDC_LUT_BLACK_OFFSET_GREEN + amdgpu_crtc->crtc_offset, 0); 2184aaa36a97SAlex Deucher 
WREG32(mmDC_LUT_BLACK_OFFSET_RED + amdgpu_crtc->crtc_offset, 0); 2185aaa36a97SAlex Deucher 2186aaa36a97SAlex Deucher WREG32(mmDC_LUT_WHITE_OFFSET_BLUE + amdgpu_crtc->crtc_offset, 0xffff); 2187aaa36a97SAlex Deucher WREG32(mmDC_LUT_WHITE_OFFSET_GREEN + amdgpu_crtc->crtc_offset, 0xffff); 2188aaa36a97SAlex Deucher WREG32(mmDC_LUT_WHITE_OFFSET_RED + amdgpu_crtc->crtc_offset, 0xffff); 2189aaa36a97SAlex Deucher 2190aaa36a97SAlex Deucher WREG32(mmDC_LUT_RW_MODE + amdgpu_crtc->crtc_offset, 0); 2191aaa36a97SAlex Deucher WREG32(mmDC_LUT_WRITE_EN_MASK + amdgpu_crtc->crtc_offset, 0x00000007); 2192aaa36a97SAlex Deucher 2193aaa36a97SAlex Deucher WREG32(mmDC_LUT_RW_INDEX + amdgpu_crtc->crtc_offset, 0); 219476dd3cd8SPeter Rosin r = crtc->gamma_store; 219576dd3cd8SPeter Rosin g = r + crtc->gamma_size; 219676dd3cd8SPeter Rosin b = g + crtc->gamma_size; 2197aaa36a97SAlex Deucher for (i = 0; i < 256; i++) { 2198aaa36a97SAlex Deucher WREG32(mmDC_LUT_30_COLOR + amdgpu_crtc->crtc_offset, 219976dd3cd8SPeter Rosin ((*r++ & 0xffc0) << 14) | 220076dd3cd8SPeter Rosin ((*g++ & 0xffc0) << 4) | 220176dd3cd8SPeter Rosin (*b++ >> 6)); 2202aaa36a97SAlex Deucher } 2203aaa36a97SAlex Deucher 2204aaa36a97SAlex Deucher tmp = RREG32(mmDEGAMMA_CONTROL + amdgpu_crtc->crtc_offset); 2205aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, DEGAMMA_CONTROL, GRPH_DEGAMMA_MODE, 0); 2206aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, DEGAMMA_CONTROL, CURSOR_DEGAMMA_MODE, 0); 2207aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, DEGAMMA_CONTROL, CURSOR2_DEGAMMA_MODE, 0); 2208aaa36a97SAlex Deucher WREG32(mmDEGAMMA_CONTROL + amdgpu_crtc->crtc_offset, tmp); 2209aaa36a97SAlex Deucher 2210aaa36a97SAlex Deucher tmp = RREG32(mmGAMUT_REMAP_CONTROL + amdgpu_crtc->crtc_offset); 2211aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, GAMUT_REMAP_CONTROL, GRPH_GAMUT_REMAP_MODE, 0); 2212aaa36a97SAlex Deucher WREG32(mmGAMUT_REMAP_CONTROL + amdgpu_crtc->crtc_offset, tmp); 2213aaa36a97SAlex Deucher 2214aaa36a97SAlex Deucher tmp = 
RREG32(mmREGAMMA_CONTROL + amdgpu_crtc->crtc_offset); 2215aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, REGAMMA_CONTROL, GRPH_REGAMMA_MODE, 0); 2216aaa36a97SAlex Deucher WREG32(mmREGAMMA_CONTROL + amdgpu_crtc->crtc_offset, tmp); 2217aaa36a97SAlex Deucher 2218aaa36a97SAlex Deucher tmp = RREG32(mmOUTPUT_CSC_CONTROL + amdgpu_crtc->crtc_offset); 2219aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, OUTPUT_CSC_CONTROL, OUTPUT_CSC_GRPH_MODE, 0); 2220aaa36a97SAlex Deucher WREG32(mmOUTPUT_CSC_CONTROL + amdgpu_crtc->crtc_offset, tmp); 2221aaa36a97SAlex Deucher 2222aaa36a97SAlex Deucher /* XXX match this to the depth of the crtc fmt block, move to modeset? */ 2223aaa36a97SAlex Deucher WREG32(mmDENORM_CONTROL + amdgpu_crtc->crtc_offset, 0); 2224aaa36a97SAlex Deucher /* XXX this only needs to be programmed once per crtc at startup, 2225aaa36a97SAlex Deucher * not sure where the best place for it is 2226aaa36a97SAlex Deucher */ 2227aaa36a97SAlex Deucher tmp = RREG32(mmALPHA_CONTROL + amdgpu_crtc->crtc_offset); 2228aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, ALPHA_CONTROL, CURSOR_ALPHA_BLND_ENA, 1); 2229aaa36a97SAlex Deucher WREG32(mmALPHA_CONTROL + amdgpu_crtc->crtc_offset, tmp); 2230aaa36a97SAlex Deucher } 2231aaa36a97SAlex Deucher 2232aaa36a97SAlex Deucher static int dce_v11_0_pick_dig_encoder(struct drm_encoder *encoder) 2233aaa36a97SAlex Deucher { 2234aaa36a97SAlex Deucher struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 2235aaa36a97SAlex Deucher struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 2236aaa36a97SAlex Deucher 2237aaa36a97SAlex Deucher switch (amdgpu_encoder->encoder_id) { 2238aaa36a97SAlex Deucher case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 2239aaa36a97SAlex Deucher if (dig->linkb) 2240aaa36a97SAlex Deucher return 1; 2241aaa36a97SAlex Deucher else 2242aaa36a97SAlex Deucher return 0; 2243aaa36a97SAlex Deucher case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 2244aaa36a97SAlex Deucher if (dig->linkb) 2245aaa36a97SAlex Deucher return 3; 
2246aaa36a97SAlex Deucher else 2247aaa36a97SAlex Deucher return 2; 2248aaa36a97SAlex Deucher case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 2249aaa36a97SAlex Deucher if (dig->linkb) 2250aaa36a97SAlex Deucher return 5; 2251aaa36a97SAlex Deucher else 2252aaa36a97SAlex Deucher return 4; 2253aaa36a97SAlex Deucher case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 2254aaa36a97SAlex Deucher return 6; 2255aaa36a97SAlex Deucher default: 2256aaa36a97SAlex Deucher DRM_ERROR("invalid encoder_id: 0x%x\n", amdgpu_encoder->encoder_id); 2257aaa36a97SAlex Deucher return 0; 2258aaa36a97SAlex Deucher } 2259aaa36a97SAlex Deucher } 2260aaa36a97SAlex Deucher 2261aaa36a97SAlex Deucher /** 2262aaa36a97SAlex Deucher * dce_v11_0_pick_pll - Allocate a PPLL for use by the crtc. 2263aaa36a97SAlex Deucher * 2264aaa36a97SAlex Deucher * @crtc: drm crtc 2265aaa36a97SAlex Deucher * 2266aaa36a97SAlex Deucher * Returns the PPLL (Pixel PLL) to be used by the crtc. For DP monitors 2267aaa36a97SAlex Deucher * a single PPLL can be used for all DP crtcs/encoders. For non-DP 2268aaa36a97SAlex Deucher * monitors a dedicated PPLL must be used. If a particular board has 2269aaa36a97SAlex Deucher * an external DP PLL, return ATOM_PPLL_INVALID to skip PLL programming 2270aaa36a97SAlex Deucher * as there is no need to program the PLL itself. If we are not able to 2271aaa36a97SAlex Deucher * allocate a PLL, return ATOM_PPLL_INVALID to skip PLL programming to 2272aaa36a97SAlex Deucher * avoid messing up an existing monitor. 
2273aaa36a97SAlex Deucher * 2274aaa36a97SAlex Deucher * Asic specific PLL information 2275aaa36a97SAlex Deucher * 2276aaa36a97SAlex Deucher * DCE 10.x 2277aaa36a97SAlex Deucher * Tonga 2278aaa36a97SAlex Deucher * - PPLL1, PPLL2 are available for all UNIPHY (both DP and non-DP) 2279aaa36a97SAlex Deucher * CI 2280aaa36a97SAlex Deucher * - PPLL0, PPLL1, PPLL2 are available for all UNIPHY (both DP and non-DP) and DAC 2281aaa36a97SAlex Deucher * 2282aaa36a97SAlex Deucher */ 2283aaa36a97SAlex Deucher static u32 dce_v11_0_pick_pll(struct drm_crtc *crtc) 2284aaa36a97SAlex Deucher { 2285aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 2286aaa36a97SAlex Deucher struct drm_device *dev = crtc->dev; 22871348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 2288aaa36a97SAlex Deucher u32 pll_in_use; 2289aaa36a97SAlex Deucher int pll; 2290aaa36a97SAlex Deucher 22912cc0c0b5SFlora Cui if ((adev->asic_type == CHIP_POLARIS10) || 2292c4642a47SJunwei Zhang (adev->asic_type == CHIP_POLARIS11) || 2293221adb21SAlex Deucher (adev->asic_type == CHIP_POLARIS12) || 2294221adb21SAlex Deucher (adev->asic_type == CHIP_VEGAM)) { 2295927a81c9SAlex Deucher struct amdgpu_encoder *amdgpu_encoder = 2296927a81c9SAlex Deucher to_amdgpu_encoder(amdgpu_crtc->encoder); 2297927a81c9SAlex Deucher struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 2298927a81c9SAlex Deucher 2299927a81c9SAlex Deucher if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(amdgpu_crtc->encoder))) 2300927a81c9SAlex Deucher return ATOM_DP_DTO; 2301927a81c9SAlex Deucher 2302927a81c9SAlex Deucher switch (amdgpu_encoder->encoder_id) { 2303927a81c9SAlex Deucher case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 2304927a81c9SAlex Deucher if (dig->linkb) 2305927a81c9SAlex Deucher return ATOM_COMBOPHY_PLL1; 2306927a81c9SAlex Deucher else 2307927a81c9SAlex Deucher return ATOM_COMBOPHY_PLL0; 2308927a81c9SAlex Deucher case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 2309927a81c9SAlex Deucher if 
(dig->linkb) 2310927a81c9SAlex Deucher return ATOM_COMBOPHY_PLL3; 2311927a81c9SAlex Deucher else 2312927a81c9SAlex Deucher return ATOM_COMBOPHY_PLL2; 2313927a81c9SAlex Deucher case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 2314927a81c9SAlex Deucher if (dig->linkb) 2315927a81c9SAlex Deucher return ATOM_COMBOPHY_PLL5; 2316927a81c9SAlex Deucher else 2317927a81c9SAlex Deucher return ATOM_COMBOPHY_PLL4; 2318927a81c9SAlex Deucher default: 2319927a81c9SAlex Deucher DRM_ERROR("invalid encoder_id: 0x%x\n", amdgpu_encoder->encoder_id); 2320927a81c9SAlex Deucher return ATOM_PPLL_INVALID; 2321927a81c9SAlex Deucher } 2322927a81c9SAlex Deucher } 2323927a81c9SAlex Deucher 2324aaa36a97SAlex Deucher if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(amdgpu_crtc->encoder))) { 2325aaa36a97SAlex Deucher if (adev->clock.dp_extclk) 2326aaa36a97SAlex Deucher /* skip PPLL programming if using ext clock */ 2327aaa36a97SAlex Deucher return ATOM_PPLL_INVALID; 2328aaa36a97SAlex Deucher else { 2329aaa36a97SAlex Deucher /* use the same PPLL for all DP monitors */ 2330aaa36a97SAlex Deucher pll = amdgpu_pll_get_shared_dp_ppll(crtc); 2331aaa36a97SAlex Deucher if (pll != ATOM_PPLL_INVALID) 2332aaa36a97SAlex Deucher return pll; 2333aaa36a97SAlex Deucher } 2334aaa36a97SAlex Deucher } else { 2335aaa36a97SAlex Deucher /* use the same PPLL for all monitors with the same clock */ 2336aaa36a97SAlex Deucher pll = amdgpu_pll_get_shared_nondp_ppll(crtc); 2337aaa36a97SAlex Deucher if (pll != ATOM_PPLL_INVALID) 2338aaa36a97SAlex Deucher return pll; 2339aaa36a97SAlex Deucher } 2340aaa36a97SAlex Deucher 2341aaa36a97SAlex Deucher /* XXX need to determine what plls are available on each DCE11 part */ 2342aaa36a97SAlex Deucher pll_in_use = amdgpu_pll_get_use_mask(crtc); 2343e1ad2d53SAlex Deucher if (adev->flags & AMD_IS_APU) { 2344aaa36a97SAlex Deucher if (!(pll_in_use & (1 << ATOM_PPLL1))) 2345aaa36a97SAlex Deucher return ATOM_PPLL1; 2346aaa36a97SAlex Deucher if (!(pll_in_use & (1 << ATOM_PPLL0))) 
2347aaa36a97SAlex Deucher return ATOM_PPLL0; 2348aaa36a97SAlex Deucher DRM_ERROR("unable to allocate a PPLL\n"); 2349aaa36a97SAlex Deucher return ATOM_PPLL_INVALID; 2350aaa36a97SAlex Deucher } else { 2351aaa36a97SAlex Deucher if (!(pll_in_use & (1 << ATOM_PPLL2))) 2352aaa36a97SAlex Deucher return ATOM_PPLL2; 2353aaa36a97SAlex Deucher if (!(pll_in_use & (1 << ATOM_PPLL1))) 2354aaa36a97SAlex Deucher return ATOM_PPLL1; 2355aaa36a97SAlex Deucher if (!(pll_in_use & (1 << ATOM_PPLL0))) 2356aaa36a97SAlex Deucher return ATOM_PPLL0; 2357aaa36a97SAlex Deucher DRM_ERROR("unable to allocate a PPLL\n"); 2358aaa36a97SAlex Deucher return ATOM_PPLL_INVALID; 2359aaa36a97SAlex Deucher } 2360aaa36a97SAlex Deucher return ATOM_PPLL_INVALID; 2361aaa36a97SAlex Deucher } 2362aaa36a97SAlex Deucher 2363aaa36a97SAlex Deucher static void dce_v11_0_lock_cursor(struct drm_crtc *crtc, bool lock) 2364aaa36a97SAlex Deucher { 23651348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(crtc->dev); 2366aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 2367aaa36a97SAlex Deucher uint32_t cur_lock; 2368aaa36a97SAlex Deucher 2369aaa36a97SAlex Deucher cur_lock = RREG32(mmCUR_UPDATE + amdgpu_crtc->crtc_offset); 2370aaa36a97SAlex Deucher if (lock) 2371aaa36a97SAlex Deucher cur_lock = REG_SET_FIELD(cur_lock, CUR_UPDATE, CURSOR_UPDATE_LOCK, 1); 2372aaa36a97SAlex Deucher else 2373aaa36a97SAlex Deucher cur_lock = REG_SET_FIELD(cur_lock, CUR_UPDATE, CURSOR_UPDATE_LOCK, 0); 2374aaa36a97SAlex Deucher WREG32(mmCUR_UPDATE + amdgpu_crtc->crtc_offset, cur_lock); 2375aaa36a97SAlex Deucher } 2376aaa36a97SAlex Deucher 2377aaa36a97SAlex Deucher static void dce_v11_0_hide_cursor(struct drm_crtc *crtc) 2378aaa36a97SAlex Deucher { 2379aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 23801348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(crtc->dev); 2381aaa36a97SAlex Deucher u32 tmp; 2382aaa36a97SAlex Deucher 238346e840edSHawking Zhang tmp = 
RREG32(mmCUR_CONTROL + amdgpu_crtc->crtc_offset); 2384aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, CUR_CONTROL, CURSOR_EN, 0); 238546e840edSHawking Zhang WREG32(mmCUR_CONTROL + amdgpu_crtc->crtc_offset, tmp); 2386aaa36a97SAlex Deucher } 2387aaa36a97SAlex Deucher 2388aaa36a97SAlex Deucher static void dce_v11_0_show_cursor(struct drm_crtc *crtc) 2389aaa36a97SAlex Deucher { 2390aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 23911348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(crtc->dev); 2392aaa36a97SAlex Deucher u32 tmp; 2393aaa36a97SAlex Deucher 2394ec9353dcSAlex Deucher WREG32(mmCUR_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset, 2395ec9353dcSAlex Deucher upper_32_bits(amdgpu_crtc->cursor_addr)); 2396ec9353dcSAlex Deucher WREG32(mmCUR_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset, 2397ec9353dcSAlex Deucher lower_32_bits(amdgpu_crtc->cursor_addr)); 2398ec9353dcSAlex Deucher 239946e840edSHawking Zhang tmp = RREG32(mmCUR_CONTROL + amdgpu_crtc->crtc_offset); 2400aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, CUR_CONTROL, CURSOR_EN, 1); 2401aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, CUR_CONTROL, CURSOR_MODE, 2); 240246e840edSHawking Zhang WREG32(mmCUR_CONTROL + amdgpu_crtc->crtc_offset, tmp); 2403aaa36a97SAlex Deucher } 2404aaa36a97SAlex Deucher 2405d8ee89c6SAlex Deucher static int dce_v11_0_cursor_move_locked(struct drm_crtc *crtc, 2406aaa36a97SAlex Deucher int x, int y) 2407aaa36a97SAlex Deucher { 2408aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 24091348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(crtc->dev); 2410aaa36a97SAlex Deucher int xorigin = 0, yorigin = 0; 2411aaa36a97SAlex Deucher 24128e57ec61SMichel Dänzer amdgpu_crtc->cursor_x = x; 24138e57ec61SMichel Dänzer amdgpu_crtc->cursor_y = y; 24148e57ec61SMichel Dänzer 2415aaa36a97SAlex Deucher /* avivo cursor are offset into the total surface */ 2416aaa36a97SAlex Deucher x += crtc->x; 2417aaa36a97SAlex Deucher y += 
crtc->y; 2418aaa36a97SAlex Deucher DRM_DEBUG("x %d y %d c->x %d c->y %d\n", x, y, crtc->x, crtc->y); 2419aaa36a97SAlex Deucher 2420aaa36a97SAlex Deucher if (x < 0) { 2421aaa36a97SAlex Deucher xorigin = min(-x, amdgpu_crtc->max_cursor_width - 1); 2422aaa36a97SAlex Deucher x = 0; 2423aaa36a97SAlex Deucher } 2424aaa36a97SAlex Deucher if (y < 0) { 2425aaa36a97SAlex Deucher yorigin = min(-y, amdgpu_crtc->max_cursor_height - 1); 2426aaa36a97SAlex Deucher y = 0; 2427aaa36a97SAlex Deucher } 2428aaa36a97SAlex Deucher 2429aaa36a97SAlex Deucher WREG32(mmCUR_POSITION + amdgpu_crtc->crtc_offset, (x << 16) | y); 2430aaa36a97SAlex Deucher WREG32(mmCUR_HOT_SPOT + amdgpu_crtc->crtc_offset, (xorigin << 16) | yorigin); 243169bcc0b7SMichel Dänzer WREG32(mmCUR_SIZE + amdgpu_crtc->crtc_offset, 243269bcc0b7SMichel Dänzer ((amdgpu_crtc->cursor_width - 1) << 16) | (amdgpu_crtc->cursor_height - 1)); 2433d8ee89c6SAlex Deucher 2434aaa36a97SAlex Deucher return 0; 2435aaa36a97SAlex Deucher } 2436aaa36a97SAlex Deucher 2437d8ee89c6SAlex Deucher static int dce_v11_0_crtc_cursor_move(struct drm_crtc *crtc, 2438d8ee89c6SAlex Deucher int x, int y) 2439d8ee89c6SAlex Deucher { 2440d8ee89c6SAlex Deucher int ret; 2441d8ee89c6SAlex Deucher 2442d8ee89c6SAlex Deucher dce_v11_0_lock_cursor(crtc, true); 2443d8ee89c6SAlex Deucher ret = dce_v11_0_cursor_move_locked(crtc, x, y); 2444d8ee89c6SAlex Deucher dce_v11_0_lock_cursor(crtc, false); 2445d8ee89c6SAlex Deucher 2446d8ee89c6SAlex Deucher return ret; 2447d8ee89c6SAlex Deucher } 2448d8ee89c6SAlex Deucher 2449d8ee89c6SAlex Deucher static int dce_v11_0_crtc_cursor_set2(struct drm_crtc *crtc, 2450aaa36a97SAlex Deucher struct drm_file *file_priv, 2451aaa36a97SAlex Deucher uint32_t handle, 2452aaa36a97SAlex Deucher uint32_t width, 2453d8ee89c6SAlex Deucher uint32_t height, 2454d8ee89c6SAlex Deucher int32_t hot_x, 2455d8ee89c6SAlex Deucher int32_t hot_y) 2456aaa36a97SAlex Deucher { 2457aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 
2458aaa36a97SAlex Deucher struct drm_gem_object *obj; 2459232cc652SAlex Deucher struct amdgpu_bo *aobj; 2460aaa36a97SAlex Deucher int ret; 2461aaa36a97SAlex Deucher 2462aaa36a97SAlex Deucher if (!handle) { 2463aaa36a97SAlex Deucher /* turn off cursor */ 2464aaa36a97SAlex Deucher dce_v11_0_hide_cursor(crtc); 2465aaa36a97SAlex Deucher obj = NULL; 2466aaa36a97SAlex Deucher goto unpin; 2467aaa36a97SAlex Deucher } 2468aaa36a97SAlex Deucher 2469aaa36a97SAlex Deucher if ((width > amdgpu_crtc->max_cursor_width) || 2470aaa36a97SAlex Deucher (height > amdgpu_crtc->max_cursor_height)) { 2471aaa36a97SAlex Deucher DRM_ERROR("bad cursor width or height %d x %d\n", width, height); 2472aaa36a97SAlex Deucher return -EINVAL; 2473aaa36a97SAlex Deucher } 2474aaa36a97SAlex Deucher 2475a8ad0bd8SChris Wilson obj = drm_gem_object_lookup(file_priv, handle); 2476aaa36a97SAlex Deucher if (!obj) { 2477aaa36a97SAlex Deucher DRM_ERROR("Cannot find cursor object %x for crtc %d\n", handle, amdgpu_crtc->crtc_id); 2478aaa36a97SAlex Deucher return -ENOENT; 2479aaa36a97SAlex Deucher } 2480aaa36a97SAlex Deucher 2481232cc652SAlex Deucher aobj = gem_to_amdgpu_bo(obj); 2482232cc652SAlex Deucher ret = amdgpu_bo_reserve(aobj, false); 2483232cc652SAlex Deucher if (ret != 0) { 2484e07ddb0cSEmil Velikov drm_gem_object_put(obj); 2485232cc652SAlex Deucher return ret; 2486232cc652SAlex Deucher } 2487232cc652SAlex Deucher 24887b7c6c81SJunwei Zhang ret = amdgpu_bo_pin(aobj, AMDGPU_GEM_DOMAIN_VRAM); 2489232cc652SAlex Deucher amdgpu_bo_unreserve(aobj); 2490232cc652SAlex Deucher if (ret) { 2491232cc652SAlex Deucher DRM_ERROR("Failed to pin new cursor BO (%d)\n", ret); 2492e07ddb0cSEmil Velikov drm_gem_object_put(obj); 2493232cc652SAlex Deucher return ret; 2494232cc652SAlex Deucher } 24957b7c6c81SJunwei Zhang amdgpu_crtc->cursor_addr = amdgpu_bo_gpu_offset(aobj); 2496aaa36a97SAlex Deucher 2497aaa36a97SAlex Deucher dce_v11_0_lock_cursor(crtc, true); 24981996ea09SAlex Deucher 249969bcc0b7SMichel Dänzer if (width != 
amdgpu_crtc->cursor_width || 250069bcc0b7SMichel Dänzer height != amdgpu_crtc->cursor_height || 250169bcc0b7SMichel Dänzer hot_x != amdgpu_crtc->cursor_hot_x || 25021996ea09SAlex Deucher hot_y != amdgpu_crtc->cursor_hot_y) { 25031996ea09SAlex Deucher int x, y; 25041996ea09SAlex Deucher 25051996ea09SAlex Deucher x = amdgpu_crtc->cursor_x + amdgpu_crtc->cursor_hot_x - hot_x; 25061996ea09SAlex Deucher y = amdgpu_crtc->cursor_y + amdgpu_crtc->cursor_hot_y - hot_y; 25071996ea09SAlex Deucher 25081996ea09SAlex Deucher dce_v11_0_cursor_move_locked(crtc, x, y); 25091996ea09SAlex Deucher 25107c83d7abSMichel Dänzer amdgpu_crtc->cursor_width = width; 25117c83d7abSMichel Dänzer amdgpu_crtc->cursor_height = height; 251269bcc0b7SMichel Dänzer amdgpu_crtc->cursor_hot_x = hot_x; 251369bcc0b7SMichel Dänzer amdgpu_crtc->cursor_hot_y = hot_y; 25147c83d7abSMichel Dänzer } 25157c83d7abSMichel Dänzer 2516aaa36a97SAlex Deucher dce_v11_0_show_cursor(crtc); 2517aaa36a97SAlex Deucher dce_v11_0_lock_cursor(crtc, false); 2518aaa36a97SAlex Deucher 2519aaa36a97SAlex Deucher unpin: 2520aaa36a97SAlex Deucher if (amdgpu_crtc->cursor_bo) { 25218ddef5a5SAlex Deucher struct amdgpu_bo *aobj = gem_to_amdgpu_bo(amdgpu_crtc->cursor_bo); 2522c81a1a74SMichel Dänzer ret = amdgpu_bo_reserve(aobj, true); 2523aaa36a97SAlex Deucher if (likely(ret == 0)) { 25248ddef5a5SAlex Deucher amdgpu_bo_unpin(aobj); 25258ddef5a5SAlex Deucher amdgpu_bo_unreserve(aobj); 2526aaa36a97SAlex Deucher } 2527e07ddb0cSEmil Velikov drm_gem_object_put(amdgpu_crtc->cursor_bo); 2528aaa36a97SAlex Deucher } 2529aaa36a97SAlex Deucher 2530aaa36a97SAlex Deucher amdgpu_crtc->cursor_bo = obj; 2531aaa36a97SAlex Deucher return 0; 25328ddef5a5SAlex Deucher } 2533aaa36a97SAlex Deucher 25348ddef5a5SAlex Deucher static void dce_v11_0_cursor_reset(struct drm_crtc *crtc) 25358ddef5a5SAlex Deucher { 25368ddef5a5SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 25378ddef5a5SAlex Deucher 25388ddef5a5SAlex Deucher if 
(amdgpu_crtc->cursor_bo) { 25398ddef5a5SAlex Deucher dce_v11_0_lock_cursor(crtc, true); 25408ddef5a5SAlex Deucher 25418ddef5a5SAlex Deucher dce_v11_0_cursor_move_locked(crtc, amdgpu_crtc->cursor_x, 25428ddef5a5SAlex Deucher amdgpu_crtc->cursor_y); 25438ddef5a5SAlex Deucher 25448ddef5a5SAlex Deucher dce_v11_0_show_cursor(crtc); 25458ddef5a5SAlex Deucher 25468ddef5a5SAlex Deucher dce_v11_0_lock_cursor(crtc, false); 25478ddef5a5SAlex Deucher } 2548aaa36a97SAlex Deucher } 2549aaa36a97SAlex Deucher 25507ea77283SMaarten Lankhorst static int dce_v11_0_crtc_gamma_set(struct drm_crtc *crtc, u16 *red, u16 *green, 25516d124ff8SDaniel Vetter u16 *blue, uint32_t size, 25526d124ff8SDaniel Vetter struct drm_modeset_acquire_ctx *ctx) 2553aaa36a97SAlex Deucher { 2554aaa36a97SAlex Deucher dce_v11_0_crtc_load_lut(crtc); 25557ea77283SMaarten Lankhorst 25567ea77283SMaarten Lankhorst return 0; 2557aaa36a97SAlex Deucher } 2558aaa36a97SAlex Deucher 2559aaa36a97SAlex Deucher static void dce_v11_0_crtc_destroy(struct drm_crtc *crtc) 2560aaa36a97SAlex Deucher { 2561aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 2562aaa36a97SAlex Deucher 2563aaa36a97SAlex Deucher drm_crtc_cleanup(crtc); 2564aaa36a97SAlex Deucher kfree(amdgpu_crtc); 2565aaa36a97SAlex Deucher } 2566aaa36a97SAlex Deucher 2567aaa36a97SAlex Deucher static const struct drm_crtc_funcs dce_v11_0_crtc_funcs = { 2568d8ee89c6SAlex Deucher .cursor_set2 = dce_v11_0_crtc_cursor_set2, 2569aaa36a97SAlex Deucher .cursor_move = dce_v11_0_crtc_cursor_move, 2570aaa36a97SAlex Deucher .gamma_set = dce_v11_0_crtc_gamma_set, 2571775a8364SSamuel Li .set_config = amdgpu_display_crtc_set_config, 2572aaa36a97SAlex Deucher .destroy = dce_v11_0_crtc_destroy, 25730cd11932SSamuel Li .page_flip_target = amdgpu_display_crtc_page_flip_target, 2574e3eff4b5SThomas Zimmermann .get_vblank_counter = amdgpu_get_vblank_counter_kms, 2575e3eff4b5SThomas Zimmermann .enable_vblank = amdgpu_enable_vblank_kms, 2576e3eff4b5SThomas Zimmermann 
.disable_vblank = amdgpu_disable_vblank_kms, 2577e3eff4b5SThomas Zimmermann .get_vblank_timestamp = drm_crtc_vblank_helper_get_vblank_timestamp, 2578aaa36a97SAlex Deucher }; 2579aaa36a97SAlex Deucher 2580aaa36a97SAlex Deucher static void dce_v11_0_crtc_dpms(struct drm_crtc *crtc, int mode) 2581aaa36a97SAlex Deucher { 2582aaa36a97SAlex Deucher struct drm_device *dev = crtc->dev; 25831348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 2584aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 2585d84b272aSMichel Dänzer unsigned type; 2586aaa36a97SAlex Deucher 2587aaa36a97SAlex Deucher switch (mode) { 2588aaa36a97SAlex Deucher case DRM_MODE_DPMS_ON: 2589aaa36a97SAlex Deucher amdgpu_crtc->enabled = true; 2590aaa36a97SAlex Deucher amdgpu_atombios_crtc_enable(crtc, ATOM_ENABLE); 2591aaa36a97SAlex Deucher dce_v11_0_vga_enable(crtc, true); 2592aaa36a97SAlex Deucher amdgpu_atombios_crtc_blank(crtc, ATOM_DISABLE); 2593aaa36a97SAlex Deucher dce_v11_0_vga_enable(crtc, false); 2594f6c7aba4SMichel Dänzer /* Make sure VBLANK and PFLIP interrupts are still enabled */ 2595734dd01dSSamuel Li type = amdgpu_display_crtc_idx_to_irq_type(adev, 2596734dd01dSSamuel Li amdgpu_crtc->crtc_id); 2597d84b272aSMichel Dänzer amdgpu_irq_update(adev, &adev->crtc_irq, type); 2598f6c7aba4SMichel Dänzer amdgpu_irq_update(adev, &adev->pageflip_irq, type); 25999a7841e9SGustavo Padovan drm_crtc_vblank_on(crtc); 2600aaa36a97SAlex Deucher dce_v11_0_crtc_load_lut(crtc); 2601aaa36a97SAlex Deucher break; 2602aaa36a97SAlex Deucher case DRM_MODE_DPMS_STANDBY: 2603aaa36a97SAlex Deucher case DRM_MODE_DPMS_SUSPEND: 2604aaa36a97SAlex Deucher case DRM_MODE_DPMS_OFF: 26059a7841e9SGustavo Padovan drm_crtc_vblank_off(crtc); 2606aaa36a97SAlex Deucher if (amdgpu_crtc->enabled) { 2607aaa36a97SAlex Deucher dce_v11_0_vga_enable(crtc, true); 2608aaa36a97SAlex Deucher amdgpu_atombios_crtc_blank(crtc, ATOM_ENABLE); 2609aaa36a97SAlex Deucher dce_v11_0_vga_enable(crtc, false); 
2610aaa36a97SAlex Deucher } 2611aaa36a97SAlex Deucher amdgpu_atombios_crtc_enable(crtc, ATOM_DISABLE); 2612aaa36a97SAlex Deucher amdgpu_crtc->enabled = false; 2613aaa36a97SAlex Deucher break; 2614aaa36a97SAlex Deucher } 2615aaa36a97SAlex Deucher /* adjust pm to dpms */ 261684176663SEvan Quan amdgpu_dpm_compute_clocks(adev); 2617aaa36a97SAlex Deucher } 2618aaa36a97SAlex Deucher 2619aaa36a97SAlex Deucher static void dce_v11_0_crtc_prepare(struct drm_crtc *crtc) 2620aaa36a97SAlex Deucher { 2621aaa36a97SAlex Deucher /* disable crtc pair power gating before programming */ 2622aaa36a97SAlex Deucher amdgpu_atombios_crtc_powergate(crtc, ATOM_DISABLE); 2623aaa36a97SAlex Deucher amdgpu_atombios_crtc_lock(crtc, ATOM_ENABLE); 2624aaa36a97SAlex Deucher dce_v11_0_crtc_dpms(crtc, DRM_MODE_DPMS_OFF); 2625aaa36a97SAlex Deucher } 2626aaa36a97SAlex Deucher 2627aaa36a97SAlex Deucher static void dce_v11_0_crtc_commit(struct drm_crtc *crtc) 2628aaa36a97SAlex Deucher { 2629aaa36a97SAlex Deucher dce_v11_0_crtc_dpms(crtc, DRM_MODE_DPMS_ON); 2630aaa36a97SAlex Deucher amdgpu_atombios_crtc_lock(crtc, ATOM_DISABLE); 2631aaa36a97SAlex Deucher } 2632aaa36a97SAlex Deucher 2633aaa36a97SAlex Deucher static void dce_v11_0_crtc_disable(struct drm_crtc *crtc) 2634aaa36a97SAlex Deucher { 2635aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 2636aaa36a97SAlex Deucher struct drm_device *dev = crtc->dev; 26371348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 2638aaa36a97SAlex Deucher struct amdgpu_atom_ss ss; 2639aaa36a97SAlex Deucher int i; 2640aaa36a97SAlex Deucher 2641aaa36a97SAlex Deucher dce_v11_0_crtc_dpms(crtc, DRM_MODE_DPMS_OFF); 2642aaa36a97SAlex Deucher if (crtc->primary->fb) { 2643aaa36a97SAlex Deucher int r; 2644765e7fbfSChristian König struct amdgpu_bo *abo; 2645aaa36a97SAlex Deucher 2646e68d14ddSDaniel Stone abo = gem_to_amdgpu_bo(crtc->primary->fb->obj[0]); 2647c81a1a74SMichel Dänzer r = amdgpu_bo_reserve(abo, true); 2648aaa36a97SAlex Deucher if 
(unlikely(r)) 2649765e7fbfSChristian König DRM_ERROR("failed to reserve abo before unpin\n"); 2650aaa36a97SAlex Deucher else { 2651765e7fbfSChristian König amdgpu_bo_unpin(abo); 2652765e7fbfSChristian König amdgpu_bo_unreserve(abo); 2653aaa36a97SAlex Deucher } 2654aaa36a97SAlex Deucher } 2655aaa36a97SAlex Deucher /* disable the GRPH */ 2656aaa36a97SAlex Deucher dce_v11_0_grph_enable(crtc, false); 2657aaa36a97SAlex Deucher 2658aaa36a97SAlex Deucher amdgpu_atombios_crtc_powergate(crtc, ATOM_ENABLE); 2659aaa36a97SAlex Deucher 2660aaa36a97SAlex Deucher for (i = 0; i < adev->mode_info.num_crtc; i++) { 2661aaa36a97SAlex Deucher if (adev->mode_info.crtcs[i] && 2662aaa36a97SAlex Deucher adev->mode_info.crtcs[i]->enabled && 2663aaa36a97SAlex Deucher i != amdgpu_crtc->crtc_id && 2664aaa36a97SAlex Deucher amdgpu_crtc->pll_id == adev->mode_info.crtcs[i]->pll_id) { 2665aaa36a97SAlex Deucher /* one other crtc is using this pll don't turn 2666aaa36a97SAlex Deucher * off the pll 2667aaa36a97SAlex Deucher */ 2668aaa36a97SAlex Deucher goto done; 2669aaa36a97SAlex Deucher } 2670aaa36a97SAlex Deucher } 2671aaa36a97SAlex Deucher 2672aaa36a97SAlex Deucher switch (amdgpu_crtc->pll_id) { 2673aaa36a97SAlex Deucher case ATOM_PPLL0: 2674aaa36a97SAlex Deucher case ATOM_PPLL1: 2675aaa36a97SAlex Deucher case ATOM_PPLL2: 2676aaa36a97SAlex Deucher /* disable the ppll */ 2677aaa36a97SAlex Deucher amdgpu_atombios_crtc_program_pll(crtc, amdgpu_crtc->crtc_id, amdgpu_crtc->pll_id, 2678aaa36a97SAlex Deucher 0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss); 2679aaa36a97SAlex Deucher break; 268022f0c5bdSAlex Deucher case ATOM_COMBOPHY_PLL0: 268122f0c5bdSAlex Deucher case ATOM_COMBOPHY_PLL1: 268222f0c5bdSAlex Deucher case ATOM_COMBOPHY_PLL2: 268322f0c5bdSAlex Deucher case ATOM_COMBOPHY_PLL3: 268422f0c5bdSAlex Deucher case ATOM_COMBOPHY_PLL4: 268522f0c5bdSAlex Deucher case ATOM_COMBOPHY_PLL5: 268622f0c5bdSAlex Deucher /* disable the ppll */ 268722f0c5bdSAlex Deucher amdgpu_atombios_crtc_program_pll(crtc, 
ATOM_CRTC_INVALID, amdgpu_crtc->pll_id, 268822f0c5bdSAlex Deucher 0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss); 268922f0c5bdSAlex Deucher break; 2690aaa36a97SAlex Deucher default: 2691aaa36a97SAlex Deucher break; 2692aaa36a97SAlex Deucher } 2693aaa36a97SAlex Deucher done: 2694aaa36a97SAlex Deucher amdgpu_crtc->pll_id = ATOM_PPLL_INVALID; 2695aaa36a97SAlex Deucher amdgpu_crtc->adjusted_clock = 0; 2696aaa36a97SAlex Deucher amdgpu_crtc->encoder = NULL; 2697aaa36a97SAlex Deucher amdgpu_crtc->connector = NULL; 2698aaa36a97SAlex Deucher } 2699aaa36a97SAlex Deucher 2700aaa36a97SAlex Deucher static int dce_v11_0_crtc_mode_set(struct drm_crtc *crtc, 2701aaa36a97SAlex Deucher struct drm_display_mode *mode, 2702aaa36a97SAlex Deucher struct drm_display_mode *adjusted_mode, 2703aaa36a97SAlex Deucher int x, int y, struct drm_framebuffer *old_fb) 2704aaa36a97SAlex Deucher { 2705aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 270622f0c5bdSAlex Deucher struct drm_device *dev = crtc->dev; 27071348969aSLuben Tuikov struct amdgpu_device *adev = drm_to_adev(dev); 2708aaa36a97SAlex Deucher 2709aaa36a97SAlex Deucher if (!amdgpu_crtc->adjusted_clock) 2710aaa36a97SAlex Deucher return -EINVAL; 2711aaa36a97SAlex Deucher 27122cc0c0b5SFlora Cui if ((adev->asic_type == CHIP_POLARIS10) || 2713c4642a47SJunwei Zhang (adev->asic_type == CHIP_POLARIS11) || 2714221adb21SAlex Deucher (adev->asic_type == CHIP_POLARIS12) || 2715221adb21SAlex Deucher (adev->asic_type == CHIP_VEGAM)) { 271622f0c5bdSAlex Deucher struct amdgpu_encoder *amdgpu_encoder = 271722f0c5bdSAlex Deucher to_amdgpu_encoder(amdgpu_crtc->encoder); 271822f0c5bdSAlex Deucher int encoder_mode = 271922f0c5bdSAlex Deucher amdgpu_atombios_encoder_get_encoder_mode(amdgpu_crtc->encoder); 272022f0c5bdSAlex Deucher 272122f0c5bdSAlex Deucher /* SetPixelClock calculates the plls and ss values now */ 272222f0c5bdSAlex Deucher amdgpu_atombios_crtc_program_pll(crtc, amdgpu_crtc->crtc_id, 272322f0c5bdSAlex Deucher 
amdgpu_crtc->pll_id, 272422f0c5bdSAlex Deucher encoder_mode, amdgpu_encoder->encoder_id, 272522f0c5bdSAlex Deucher adjusted_mode->clock, 0, 0, 0, 0, 272622f0c5bdSAlex Deucher amdgpu_crtc->bpc, amdgpu_crtc->ss_enabled, &amdgpu_crtc->ss); 272722f0c5bdSAlex Deucher } else { 2728aaa36a97SAlex Deucher amdgpu_atombios_crtc_set_pll(crtc, adjusted_mode); 272922f0c5bdSAlex Deucher } 2730aaa36a97SAlex Deucher amdgpu_atombios_crtc_set_dtd_timing(crtc, adjusted_mode); 2731aaa36a97SAlex Deucher dce_v11_0_crtc_do_set_base(crtc, old_fb, x, y, 0); 2732aaa36a97SAlex Deucher amdgpu_atombios_crtc_overscan_setup(crtc, mode, adjusted_mode); 2733aaa36a97SAlex Deucher amdgpu_atombios_crtc_scaler_setup(crtc); 27348ddef5a5SAlex Deucher dce_v11_0_cursor_reset(crtc); 2735aaa36a97SAlex Deucher /* update the hw version fpr dpm */ 2736aaa36a97SAlex Deucher amdgpu_crtc->hw_mode = *adjusted_mode; 2737aaa36a97SAlex Deucher 2738aaa36a97SAlex Deucher return 0; 2739aaa36a97SAlex Deucher } 2740aaa36a97SAlex Deucher 2741aaa36a97SAlex Deucher static bool dce_v11_0_crtc_mode_fixup(struct drm_crtc *crtc, 2742aaa36a97SAlex Deucher const struct drm_display_mode *mode, 2743aaa36a97SAlex Deucher struct drm_display_mode *adjusted_mode) 2744aaa36a97SAlex Deucher { 2745aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); 2746aaa36a97SAlex Deucher struct drm_device *dev = crtc->dev; 2747aaa36a97SAlex Deucher struct drm_encoder *encoder; 2748aaa36a97SAlex Deucher 2749aaa36a97SAlex Deucher /* assign the encoder to the amdgpu crtc to avoid repeated lookups later */ 2750aaa36a97SAlex Deucher list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 2751aaa36a97SAlex Deucher if (encoder->crtc == crtc) { 2752aaa36a97SAlex Deucher amdgpu_crtc->encoder = encoder; 2753aaa36a97SAlex Deucher amdgpu_crtc->connector = amdgpu_get_connector_for_encoder(encoder); 2754aaa36a97SAlex Deucher break; 2755aaa36a97SAlex Deucher } 2756aaa36a97SAlex Deucher } 2757aaa36a97SAlex Deucher if 
((amdgpu_crtc->encoder == NULL) || (amdgpu_crtc->connector == NULL)) { 2758aaa36a97SAlex Deucher amdgpu_crtc->encoder = NULL; 2759aaa36a97SAlex Deucher amdgpu_crtc->connector = NULL; 2760aaa36a97SAlex Deucher return false; 2761aaa36a97SAlex Deucher } 27620c16443aSSamuel Li if (!amdgpu_display_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode)) 2763aaa36a97SAlex Deucher return false; 2764aaa36a97SAlex Deucher if (amdgpu_atombios_crtc_prepare_pll(crtc, adjusted_mode)) 2765aaa36a97SAlex Deucher return false; 2766aaa36a97SAlex Deucher /* pick pll */ 2767aaa36a97SAlex Deucher amdgpu_crtc->pll_id = dce_v11_0_pick_pll(crtc); 2768aaa36a97SAlex Deucher /* if we can't get a PPLL for a non-DP encoder, fail */ 2769aaa36a97SAlex Deucher if ((amdgpu_crtc->pll_id == ATOM_PPLL_INVALID) && 2770aaa36a97SAlex Deucher !ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(amdgpu_crtc->encoder))) 2771aaa36a97SAlex Deucher return false; 2772aaa36a97SAlex Deucher 2773aaa36a97SAlex Deucher return true; 2774aaa36a97SAlex Deucher } 2775aaa36a97SAlex Deucher 2776aaa36a97SAlex Deucher static int dce_v11_0_crtc_set_base(struct drm_crtc *crtc, int x, int y, 2777aaa36a97SAlex Deucher struct drm_framebuffer *old_fb) 2778aaa36a97SAlex Deucher { 2779aaa36a97SAlex Deucher return dce_v11_0_crtc_do_set_base(crtc, old_fb, x, y, 0); 2780aaa36a97SAlex Deucher } 2781aaa36a97SAlex Deucher 2782aaa36a97SAlex Deucher static int dce_v11_0_crtc_set_base_atomic(struct drm_crtc *crtc, 2783aaa36a97SAlex Deucher struct drm_framebuffer *fb, 2784aaa36a97SAlex Deucher int x, int y, enum mode_set_atomic state) 2785aaa36a97SAlex Deucher { 2786aaa36a97SAlex Deucher return dce_v11_0_crtc_do_set_base(crtc, fb, x, y, 1); 2787aaa36a97SAlex Deucher } 2788aaa36a97SAlex Deucher 2789aaa36a97SAlex Deucher static const struct drm_crtc_helper_funcs dce_v11_0_crtc_helper_funcs = { 2790aaa36a97SAlex Deucher .dpms = dce_v11_0_crtc_dpms, 2791aaa36a97SAlex Deucher .mode_fixup = dce_v11_0_crtc_mode_fixup, 2792aaa36a97SAlex Deucher 
.mode_set = dce_v11_0_crtc_mode_set, 2793aaa36a97SAlex Deucher .mode_set_base = dce_v11_0_crtc_set_base, 2794aaa36a97SAlex Deucher .mode_set_base_atomic = dce_v11_0_crtc_set_base_atomic, 2795aaa36a97SAlex Deucher .prepare = dce_v11_0_crtc_prepare, 2796aaa36a97SAlex Deucher .commit = dce_v11_0_crtc_commit, 2797aaa36a97SAlex Deucher .disable = dce_v11_0_crtc_disable, 2798ea702333SThomas Zimmermann .get_scanout_position = amdgpu_crtc_get_scanout_position, 2799aaa36a97SAlex Deucher }; 2800aaa36a97SAlex Deucher 2801aaa36a97SAlex Deucher static int dce_v11_0_crtc_init(struct amdgpu_device *adev, int index) 2802aaa36a97SAlex Deucher { 2803aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc; 2804aaa36a97SAlex Deucher 2805aaa36a97SAlex Deucher amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) + 2806aaa36a97SAlex Deucher (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL); 2807aaa36a97SAlex Deucher if (amdgpu_crtc == NULL) 2808aaa36a97SAlex Deucher return -ENOMEM; 2809aaa36a97SAlex Deucher 28104a580877SLuben Tuikov drm_crtc_init(adev_to_drm(adev), &amdgpu_crtc->base, &dce_v11_0_crtc_funcs); 2811aaa36a97SAlex Deucher 2812aaa36a97SAlex Deucher drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256); 2813aaa36a97SAlex Deucher amdgpu_crtc->crtc_id = index; 2814aaa36a97SAlex Deucher adev->mode_info.crtcs[index] = amdgpu_crtc; 2815aaa36a97SAlex Deucher 2816aaa36a97SAlex Deucher amdgpu_crtc->max_cursor_width = 128; 2817aaa36a97SAlex Deucher amdgpu_crtc->max_cursor_height = 128; 28184a580877SLuben Tuikov adev_to_drm(adev)->mode_config.cursor_width = amdgpu_crtc->max_cursor_width; 28194a580877SLuben Tuikov adev_to_drm(adev)->mode_config.cursor_height = amdgpu_crtc->max_cursor_height; 2820aaa36a97SAlex Deucher 2821aaa36a97SAlex Deucher switch (amdgpu_crtc->crtc_id) { 2822aaa36a97SAlex Deucher case 0: 2823aaa36a97SAlex Deucher default: 2824aaa36a97SAlex Deucher amdgpu_crtc->crtc_offset = CRTC0_REGISTER_OFFSET; 2825aaa36a97SAlex Deucher break; 2826aaa36a97SAlex Deucher 
case 1: 2827aaa36a97SAlex Deucher amdgpu_crtc->crtc_offset = CRTC1_REGISTER_OFFSET; 2828aaa36a97SAlex Deucher break; 2829aaa36a97SAlex Deucher case 2: 2830aaa36a97SAlex Deucher amdgpu_crtc->crtc_offset = CRTC2_REGISTER_OFFSET; 2831aaa36a97SAlex Deucher break; 2832aaa36a97SAlex Deucher case 3: 2833aaa36a97SAlex Deucher amdgpu_crtc->crtc_offset = CRTC3_REGISTER_OFFSET; 2834aaa36a97SAlex Deucher break; 2835aaa36a97SAlex Deucher case 4: 2836aaa36a97SAlex Deucher amdgpu_crtc->crtc_offset = CRTC4_REGISTER_OFFSET; 2837aaa36a97SAlex Deucher break; 2838aaa36a97SAlex Deucher case 5: 2839aaa36a97SAlex Deucher amdgpu_crtc->crtc_offset = CRTC5_REGISTER_OFFSET; 2840aaa36a97SAlex Deucher break; 2841aaa36a97SAlex Deucher } 2842aaa36a97SAlex Deucher 2843aaa36a97SAlex Deucher amdgpu_crtc->pll_id = ATOM_PPLL_INVALID; 2844aaa36a97SAlex Deucher amdgpu_crtc->adjusted_clock = 0; 2845aaa36a97SAlex Deucher amdgpu_crtc->encoder = NULL; 2846aaa36a97SAlex Deucher amdgpu_crtc->connector = NULL; 2847aaa36a97SAlex Deucher drm_crtc_helper_add(&amdgpu_crtc->base, &dce_v11_0_crtc_helper_funcs); 2848aaa36a97SAlex Deucher 2849aaa36a97SAlex Deucher return 0; 2850aaa36a97SAlex Deucher } 2851aaa36a97SAlex Deucher 28525fc3aeebSyanyang1 static int dce_v11_0_early_init(void *handle) 2853aaa36a97SAlex Deucher { 28545fc3aeebSyanyang1 struct amdgpu_device *adev = (struct amdgpu_device *)handle; 28555fc3aeebSyanyang1 2856aaa36a97SAlex Deucher adev->audio_endpt_rreg = &dce_v11_0_audio_endpt_rreg; 2857aaa36a97SAlex Deucher adev->audio_endpt_wreg = &dce_v11_0_audio_endpt_wreg; 2858aaa36a97SAlex Deucher 2859aaa36a97SAlex Deucher dce_v11_0_set_display_funcs(adev); 2860aaa36a97SAlex Deucher 286183c9b025SEmily Deng adev->mode_info.num_crtc = dce_v11_0_get_num_crtc(adev); 286283c9b025SEmily Deng 2863aaa36a97SAlex Deucher switch (adev->asic_type) { 2864aaa36a97SAlex Deucher case CHIP_CARRIZO: 2865aaa36a97SAlex Deucher adev->mode_info.num_hpd = 6; 2866aaa36a97SAlex Deucher adev->mode_info.num_dig = 9; 2867aaa36a97SAlex 
Deucher break; 2868fa2f9befSSamuel Li case CHIP_STONEY: 2869fa2f9befSSamuel Li adev->mode_info.num_hpd = 6; 2870fa2f9befSSamuel Li adev->mode_info.num_dig = 9; 2871fa2f9befSSamuel Li break; 28722cc0c0b5SFlora Cui case CHIP_POLARIS10: 2873221adb21SAlex Deucher case CHIP_VEGAM: 2874d525eb8dSAlex Deucher adev->mode_info.num_hpd = 6; 2875d525eb8dSAlex Deucher adev->mode_info.num_dig = 6; 2876d525eb8dSAlex Deucher break; 28772cc0c0b5SFlora Cui case CHIP_POLARIS11: 2878c4642a47SJunwei Zhang case CHIP_POLARIS12: 2879d525eb8dSAlex Deucher adev->mode_info.num_hpd = 5; 2880d525eb8dSAlex Deucher adev->mode_info.num_dig = 5; 2881d525eb8dSAlex Deucher break; 2882aaa36a97SAlex Deucher default: 2883aaa36a97SAlex Deucher /* FIXME: not supported yet */ 2884aaa36a97SAlex Deucher return -EINVAL; 2885aaa36a97SAlex Deucher } 2886aaa36a97SAlex Deucher 2887d794b9f8SMichel Dänzer dce_v11_0_set_irq_funcs(adev); 2888d794b9f8SMichel Dänzer 2889aaa36a97SAlex Deucher return 0; 2890aaa36a97SAlex Deucher } 2891aaa36a97SAlex Deucher 28925fc3aeebSyanyang1 static int dce_v11_0_sw_init(void *handle) 2893aaa36a97SAlex Deucher { 2894aaa36a97SAlex Deucher int r, i; 28955fc3aeebSyanyang1 struct amdgpu_device *adev = (struct amdgpu_device *)handle; 2896aaa36a97SAlex Deucher 2897aaa36a97SAlex Deucher for (i = 0; i < adev->mode_info.num_crtc; i++) { 28981ffdeca6SChristian König r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, i + 1, &adev->crtc_irq); 2899aaa36a97SAlex Deucher if (r) 2900aaa36a97SAlex Deucher return r; 2901aaa36a97SAlex Deucher } 2902aaa36a97SAlex Deucher 2903091aec0bSAndrey Grodzovsky for (i = VISLANDS30_IV_SRCID_D1_GRPH_PFLIP; i < 20; i += 2) { 29041ffdeca6SChristian König r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, i, &adev->pageflip_irq); 2905aaa36a97SAlex Deucher if (r) 2906aaa36a97SAlex Deucher return r; 2907aaa36a97SAlex Deucher } 2908aaa36a97SAlex Deucher 2909aaa36a97SAlex Deucher /* HPD hotplug */ 29101ffdeca6SChristian König r = amdgpu_irq_add_id(adev, 
			      AMDGPU_IRQ_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_HOTPLUG_DETECT_A, &adev->hpd_irq);
	if (r)
		return r;

	adev_to_drm(adev)->mode_config.funcs = &amdgpu_mode_funcs;

	adev_to_drm(adev)->mode_config.async_page_flip = true;

	adev_to_drm(adev)->mode_config.max_width = 16384;
	adev_to_drm(adev)->mode_config.max_height = 16384;

	adev_to_drm(adev)->mode_config.preferred_depth = 24;
	adev_to_drm(adev)->mode_config.prefer_shadow = 1;

	/* DCE v11 has no GPU VM / modifier support for scanout surfaces */
	adev_to_drm(adev)->mode_config.fb_modifiers_not_supported = true;

	r = amdgpu_display_modeset_create_props(adev);
	if (r)
		return r;

	adev_to_drm(adev)->mode_config.max_width = 16384;
	adev_to_drm(adev)->mode_config.max_height = 16384;

	/* allocate crtcs */
	for (i = 0; i < adev->mode_info.num_crtc; i++) {
		r = dce_v11_0_crtc_init(adev, i);
		if (r)
			return r;
	}

	if (amdgpu_atombios_get_connector_info_from_object_table(adev))
		amdgpu_display_print_display_setup(adev_to_drm(adev));
	else
		return -EINVAL;

	/* setup afmt */
	r = dce_v11_0_afmt_init(adev);
	if (r)
		return r;

	r = dce_v11_0_audio_init(adev);
	if (r)
		return r;

	/* Disable vblank IRQs aggressively for power-saving */
	/* XXX: can this be enabled for DC? */
	adev_to_drm(adev)->vblank_disable_immediate = true;

	r = drm_vblank_init(adev_to_drm(adev), adev->mode_info.num_crtc);
	if (r)
		return r;

	/* hotplug events are delivered from the HPD irq via this work item */
	INIT_DELAYED_WORK(&adev->hotplug_work,
		  amdgpu_display_hotplug_work_func);

	drm_kms_helper_poll_init(adev_to_drm(adev));

	adev->mode_info.mode_config_initialized = true;
	return 0;
}

/*
 * Software teardown: release the hardcoded BIOS EDID, stop connector
 * polling, and tear down audio/afmt state before cleaning up the DRM
 * mode config.  Mirrors the setup order of dce_v11_0_sw_init().
 */
static int dce_v11_0_sw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	drm_edid_free(adev->mode_info.bios_hardcoded_edid);

	drm_kms_helper_poll_fini(adev_to_drm(adev));

	dce_v11_0_audio_fini(adev);

	dce_v11_0_afmt_fini(adev);

	drm_mode_config_cleanup(adev_to_drm(adev));
	adev->mode_info.mode_config_initialized = false;

	return 0;
}

/*
 * Hardware init: program golden registers, disable the VGA render path,
 * bring up the DIG PHYs and display engine PLL via atombios, then arm
 * HPD and page-flip interrupts.  Audio pins start disabled.
 */
static int dce_v11_0_hw_init(void *handle)
{
	int i;
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	dce_v11_0_init_golden_registers(adev);

	/* disable vga render */
	dce_v11_0_set_vga_render_state(adev, false);
	/* init dig PHYs, disp eng pll */
	amdgpu_atombios_crtc_powergate_init(adev);
	amdgpu_atombios_encoder_init_dig(adev);
	/* Polaris-family parts program DISPCLK/DPREFCLK through the newer
	 * SetDCEClock table; older DCE 11 parts use the disp-eng-pll call. */
	if ((adev->asic_type == CHIP_POLARIS10) ||
	    (adev->asic_type == CHIP_POLARIS11) ||
	    (adev->asic_type == CHIP_POLARIS12) ||
	    (adev->asic_type == CHIP_VEGAM)) {
		amdgpu_atombios_crtc_set_dce_clock(adev, adev->clock.default_dispclk,
						   DCE_CLOCK_TYPE_DISPCLK, ATOM_GCK_DFS);
		amdgpu_atombios_crtc_set_dce_clock(adev, 0,
						   DCE_CLOCK_TYPE_DPREFCLK, ATOM_GCK_DFS);
	} else {
		amdgpu_atombios_crtc_set_disp_eng_pll(adev, adev->clock.default_dispclk);
	}

	/* initialize hpd */
	dce_v11_0_hpd_init(adev);

	for (i = 0; i < adev->mode_info.audio.num_pins; i++) {
		dce_v11_0_audio_enable(adev, &adev->mode_info.audio.pin[i], false);
	}

	dce_v11_0_pageflip_interrupt_init(adev);

	return 0;
}

/*
 * Hardware teardown: quiesce HPD, audio pins and page-flip interrupts,
 * then make sure no hotplug work item is still in flight.
 */
static int dce_v11_0_hw_fini(void *handle)
{
	int i;
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	dce_v11_0_hpd_fini(adev);

	for (i = 0; i < adev->mode_info.audio.num_pins; i++) {
		dce_v11_0_audio_enable(adev, &adev->mode_info.audio.pin[i], false);
	}

	dce_v11_0_pageflip_interrupt_fini(adev);

	/* no hotplug handling may run after hw teardown */
	flush_delayed_work(&adev->hotplug_work);

	return 0;
}

/*
 * Suspend: save the current backlight level so resume can restore it,
 * then reuse the normal hw teardown path.
 */
static int dce_v11_0_suspend(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
	int r;

	r = amdgpu_display_suspend_helper(adev);
	if (r)
		return r;

	adev->mode_info.bl_level =
		amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);

	return dce_v11_0_hw_fini(handle);
}

/*
 * Resume: restore the saved backlight level, re-run hw init, and turn
 * the backlight back on before resuming the display helper state.
 * Note the backlight is programmed even if hw_init failed; the error is
 * only propagated afterwards.
 */
static int dce_v11_0_resume(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
	int ret;

	amdgpu_atombios_encoder_set_backlight_level_to_reg(adev,
							   adev->mode_info.bl_level);

	ret = dce_v11_0_hw_init(handle);

	/* turn on the BL */
	if (adev->mode_info.bl_encoder) {
		u8 bl_level = amdgpu_display_backlight_get_level(adev,
								 adev->mode_info.bl_encoder);
		amdgpu_display_backlight_set_level(adev, adev->mode_info.bl_encoder,
						   bl_level);
	}
	if (ret)
		return ret;

	return amdgpu_display_resume_helper(adev);
}

/* The display block exposes no busy status; always report idle. */
static bool dce_v11_0_is_idle(void *handle)
{
	return true;
}

static int dce_v11_0_wait_for_idle(void *handle)
{
	return 0;
}

/*
 * Soft-reset the display controller through SRBM if it looks hung:
 * assert the DC reset bit, let it latch, then release it and allow the
 * block to settle.  Readbacks of SRBM_SOFT_RESET post the writes.
 */
static int dce_v11_0_soft_reset(void *handle)
{
	u32 srbm_soft_reset = 0, tmp;
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	if (dce_v11_0_is_display_hung(adev))
		srbm_soft_reset |= SRBM_SOFT_RESET__SOFT_RESET_DC_MASK;

	if (srbm_soft_reset) {
		tmp = RREG32(mmSRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(adev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(mmSRBM_SOFT_RESET, tmp);
		tmp = RREG32(mmSRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(mmSRBM_SOFT_RESET, tmp);
		tmp = RREG32(mmSRBM_SOFT_RESET);

		/* Wait a little for things to settle down */
		udelay(50);
	}
3116aaa36a97SAlex Deucher return 0; 3117aaa36a97SAlex Deucher } 3118aaa36a97SAlex Deucher 3119aaa36a97SAlex Deucher static void dce_v11_0_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev, 3120aaa36a97SAlex Deucher int crtc, 3121aaa36a97SAlex Deucher enum amdgpu_interrupt_state state) 3122aaa36a97SAlex Deucher { 3123aaa36a97SAlex Deucher u32 lb_interrupt_mask; 3124aaa36a97SAlex Deucher 3125aaa36a97SAlex Deucher if (crtc >= adev->mode_info.num_crtc) { 3126aaa36a97SAlex Deucher DRM_DEBUG("invalid crtc %d\n", crtc); 3127aaa36a97SAlex Deucher return; 3128aaa36a97SAlex Deucher } 3129aaa36a97SAlex Deucher 3130aaa36a97SAlex Deucher switch (state) { 3131aaa36a97SAlex Deucher case AMDGPU_IRQ_STATE_DISABLE: 3132aaa36a97SAlex Deucher lb_interrupt_mask = RREG32(mmLB_INTERRUPT_MASK + crtc_offsets[crtc]); 3133aaa36a97SAlex Deucher lb_interrupt_mask = REG_SET_FIELD(lb_interrupt_mask, LB_INTERRUPT_MASK, 3134aaa36a97SAlex Deucher VBLANK_INTERRUPT_MASK, 0); 3135aaa36a97SAlex Deucher WREG32(mmLB_INTERRUPT_MASK + crtc_offsets[crtc], lb_interrupt_mask); 3136aaa36a97SAlex Deucher break; 3137aaa36a97SAlex Deucher case AMDGPU_IRQ_STATE_ENABLE: 3138aaa36a97SAlex Deucher lb_interrupt_mask = RREG32(mmLB_INTERRUPT_MASK + crtc_offsets[crtc]); 3139aaa36a97SAlex Deucher lb_interrupt_mask = REG_SET_FIELD(lb_interrupt_mask, LB_INTERRUPT_MASK, 3140aaa36a97SAlex Deucher VBLANK_INTERRUPT_MASK, 1); 3141aaa36a97SAlex Deucher WREG32(mmLB_INTERRUPT_MASK + crtc_offsets[crtc], lb_interrupt_mask); 3142aaa36a97SAlex Deucher break; 3143aaa36a97SAlex Deucher default: 3144aaa36a97SAlex Deucher break; 3145aaa36a97SAlex Deucher } 3146aaa36a97SAlex Deucher } 3147aaa36a97SAlex Deucher 3148aaa36a97SAlex Deucher static void dce_v11_0_set_crtc_vline_interrupt_state(struct amdgpu_device *adev, 3149aaa36a97SAlex Deucher int crtc, 3150aaa36a97SAlex Deucher enum amdgpu_interrupt_state state) 3151aaa36a97SAlex Deucher { 3152aaa36a97SAlex Deucher u32 lb_interrupt_mask; 3153aaa36a97SAlex Deucher 3154aaa36a97SAlex 
Deucher if (crtc >= adev->mode_info.num_crtc) { 3155aaa36a97SAlex Deucher DRM_DEBUG("invalid crtc %d\n", crtc); 3156aaa36a97SAlex Deucher return; 3157aaa36a97SAlex Deucher } 3158aaa36a97SAlex Deucher 3159aaa36a97SAlex Deucher switch (state) { 3160aaa36a97SAlex Deucher case AMDGPU_IRQ_STATE_DISABLE: 3161aaa36a97SAlex Deucher lb_interrupt_mask = RREG32(mmLB_INTERRUPT_MASK + crtc_offsets[crtc]); 3162aaa36a97SAlex Deucher lb_interrupt_mask = REG_SET_FIELD(lb_interrupt_mask, LB_INTERRUPT_MASK, 3163aaa36a97SAlex Deucher VLINE_INTERRUPT_MASK, 0); 3164aaa36a97SAlex Deucher WREG32(mmLB_INTERRUPT_MASK + crtc_offsets[crtc], lb_interrupt_mask); 3165aaa36a97SAlex Deucher break; 3166aaa36a97SAlex Deucher case AMDGPU_IRQ_STATE_ENABLE: 3167aaa36a97SAlex Deucher lb_interrupt_mask = RREG32(mmLB_INTERRUPT_MASK + crtc_offsets[crtc]); 3168aaa36a97SAlex Deucher lb_interrupt_mask = REG_SET_FIELD(lb_interrupt_mask, LB_INTERRUPT_MASK, 3169aaa36a97SAlex Deucher VLINE_INTERRUPT_MASK, 1); 3170aaa36a97SAlex Deucher WREG32(mmLB_INTERRUPT_MASK + crtc_offsets[crtc], lb_interrupt_mask); 3171aaa36a97SAlex Deucher break; 3172aaa36a97SAlex Deucher default: 3173aaa36a97SAlex Deucher break; 3174aaa36a97SAlex Deucher } 3175aaa36a97SAlex Deucher } 3176aaa36a97SAlex Deucher 3177aaa36a97SAlex Deucher static int dce_v11_0_set_hpd_irq_state(struct amdgpu_device *adev, 3178aaa36a97SAlex Deucher struct amdgpu_irq_src *source, 3179aaa36a97SAlex Deucher unsigned hpd, 3180aaa36a97SAlex Deucher enum amdgpu_interrupt_state state) 3181aaa36a97SAlex Deucher { 3182aaa36a97SAlex Deucher u32 tmp; 3183aaa36a97SAlex Deucher 3184aaa36a97SAlex Deucher if (hpd >= adev->mode_info.num_hpd) { 3185aaa36a97SAlex Deucher DRM_DEBUG("invalid hdp %d\n", hpd); 3186aaa36a97SAlex Deucher return 0; 3187aaa36a97SAlex Deucher } 3188aaa36a97SAlex Deucher 3189aaa36a97SAlex Deucher switch (state) { 3190aaa36a97SAlex Deucher case AMDGPU_IRQ_STATE_DISABLE: 3191aaa36a97SAlex Deucher tmp = RREG32(mmDC_HPD_INT_CONTROL + hpd_offsets[hpd]); 
3192aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, DC_HPD_INT_CONTROL, DC_HPD_INT_EN, 0); 3193aaa36a97SAlex Deucher WREG32(mmDC_HPD_INT_CONTROL + hpd_offsets[hpd], tmp); 3194aaa36a97SAlex Deucher break; 3195aaa36a97SAlex Deucher case AMDGPU_IRQ_STATE_ENABLE: 3196aaa36a97SAlex Deucher tmp = RREG32(mmDC_HPD_INT_CONTROL + hpd_offsets[hpd]); 3197aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, DC_HPD_INT_CONTROL, DC_HPD_INT_EN, 1); 3198aaa36a97SAlex Deucher WREG32(mmDC_HPD_INT_CONTROL + hpd_offsets[hpd], tmp); 3199aaa36a97SAlex Deucher break; 3200aaa36a97SAlex Deucher default: 3201aaa36a97SAlex Deucher break; 3202aaa36a97SAlex Deucher } 3203aaa36a97SAlex Deucher 3204aaa36a97SAlex Deucher return 0; 3205aaa36a97SAlex Deucher } 3206aaa36a97SAlex Deucher 3207aaa36a97SAlex Deucher static int dce_v11_0_set_crtc_irq_state(struct amdgpu_device *adev, 3208aaa36a97SAlex Deucher struct amdgpu_irq_src *source, 3209aaa36a97SAlex Deucher unsigned type, 3210aaa36a97SAlex Deucher enum amdgpu_interrupt_state state) 3211aaa36a97SAlex Deucher { 3212aaa36a97SAlex Deucher switch (type) { 3213aaa36a97SAlex Deucher case AMDGPU_CRTC_IRQ_VBLANK1: 3214aaa36a97SAlex Deucher dce_v11_0_set_crtc_vblank_interrupt_state(adev, 0, state); 3215aaa36a97SAlex Deucher break; 3216aaa36a97SAlex Deucher case AMDGPU_CRTC_IRQ_VBLANK2: 3217aaa36a97SAlex Deucher dce_v11_0_set_crtc_vblank_interrupt_state(adev, 1, state); 3218aaa36a97SAlex Deucher break; 3219aaa36a97SAlex Deucher case AMDGPU_CRTC_IRQ_VBLANK3: 3220aaa36a97SAlex Deucher dce_v11_0_set_crtc_vblank_interrupt_state(adev, 2, state); 3221aaa36a97SAlex Deucher break; 3222aaa36a97SAlex Deucher case AMDGPU_CRTC_IRQ_VBLANK4: 3223aaa36a97SAlex Deucher dce_v11_0_set_crtc_vblank_interrupt_state(adev, 3, state); 3224aaa36a97SAlex Deucher break; 3225aaa36a97SAlex Deucher case AMDGPU_CRTC_IRQ_VBLANK5: 3226aaa36a97SAlex Deucher dce_v11_0_set_crtc_vblank_interrupt_state(adev, 4, state); 3227aaa36a97SAlex Deucher break; 3228aaa36a97SAlex Deucher case 
AMDGPU_CRTC_IRQ_VBLANK6: 3229aaa36a97SAlex Deucher dce_v11_0_set_crtc_vblank_interrupt_state(adev, 5, state); 3230aaa36a97SAlex Deucher break; 3231aaa36a97SAlex Deucher case AMDGPU_CRTC_IRQ_VLINE1: 3232aaa36a97SAlex Deucher dce_v11_0_set_crtc_vline_interrupt_state(adev, 0, state); 3233aaa36a97SAlex Deucher break; 3234aaa36a97SAlex Deucher case AMDGPU_CRTC_IRQ_VLINE2: 3235aaa36a97SAlex Deucher dce_v11_0_set_crtc_vline_interrupt_state(adev, 1, state); 3236aaa36a97SAlex Deucher break; 3237aaa36a97SAlex Deucher case AMDGPU_CRTC_IRQ_VLINE3: 3238aaa36a97SAlex Deucher dce_v11_0_set_crtc_vline_interrupt_state(adev, 2, state); 3239aaa36a97SAlex Deucher break; 3240aaa36a97SAlex Deucher case AMDGPU_CRTC_IRQ_VLINE4: 3241aaa36a97SAlex Deucher dce_v11_0_set_crtc_vline_interrupt_state(adev, 3, state); 3242aaa36a97SAlex Deucher break; 3243aaa36a97SAlex Deucher case AMDGPU_CRTC_IRQ_VLINE5: 3244aaa36a97SAlex Deucher dce_v11_0_set_crtc_vline_interrupt_state(adev, 4, state); 3245aaa36a97SAlex Deucher break; 3246aaa36a97SAlex Deucher case AMDGPU_CRTC_IRQ_VLINE6: 3247aaa36a97SAlex Deucher dce_v11_0_set_crtc_vline_interrupt_state(adev, 5, state); 3248aaa36a97SAlex Deucher break; 3249aaa36a97SAlex Deucher default: 3250aaa36a97SAlex Deucher break; 3251aaa36a97SAlex Deucher } 3252aaa36a97SAlex Deucher return 0; 3253aaa36a97SAlex Deucher } 3254aaa36a97SAlex Deucher 3255aaa36a97SAlex Deucher static int dce_v11_0_set_pageflip_irq_state(struct amdgpu_device *adev, 3256aaa36a97SAlex Deucher struct amdgpu_irq_src *src, 3257aaa36a97SAlex Deucher unsigned type, 3258aaa36a97SAlex Deucher enum amdgpu_interrupt_state state) 3259aaa36a97SAlex Deucher { 32607dfac896SAlex Deucher u32 reg; 32617dfac896SAlex Deucher 32627dfac896SAlex Deucher if (type >= adev->mode_info.num_crtc) { 3263aaa36a97SAlex Deucher DRM_ERROR("invalid pageflip crtc %d\n", type); 3264aaa36a97SAlex Deucher return -EINVAL; 3265aaa36a97SAlex Deucher } 3266aaa36a97SAlex Deucher 32677dfac896SAlex Deucher reg = 
RREG32(mmGRPH_INTERRUPT_CONTROL + crtc_offsets[type]); 3268aaa36a97SAlex Deucher if (state == AMDGPU_IRQ_STATE_DISABLE) 32697dfac896SAlex Deucher WREG32(mmGRPH_INTERRUPT_CONTROL + crtc_offsets[type], 32707dfac896SAlex Deucher reg & ~GRPH_INTERRUPT_CONTROL__GRPH_PFLIP_INT_MASK_MASK); 3271aaa36a97SAlex Deucher else 32727dfac896SAlex Deucher WREG32(mmGRPH_INTERRUPT_CONTROL + crtc_offsets[type], 32737dfac896SAlex Deucher reg | GRPH_INTERRUPT_CONTROL__GRPH_PFLIP_INT_MASK_MASK); 3274aaa36a97SAlex Deucher 3275aaa36a97SAlex Deucher return 0; 3276aaa36a97SAlex Deucher } 3277aaa36a97SAlex Deucher 3278aaa36a97SAlex Deucher static int dce_v11_0_pageflip_irq(struct amdgpu_device *adev, 3279aaa36a97SAlex Deucher struct amdgpu_irq_src *source, 3280aaa36a97SAlex Deucher struct amdgpu_iv_entry *entry) 3281aaa36a97SAlex Deucher { 3282aaa36a97SAlex Deucher unsigned long flags; 3283aaa36a97SAlex Deucher unsigned crtc_id; 3284aaa36a97SAlex Deucher struct amdgpu_crtc *amdgpu_crtc; 3285aaa36a97SAlex Deucher struct amdgpu_flip_work *works; 3286aaa36a97SAlex Deucher 3287aaa36a97SAlex Deucher crtc_id = (entry->src_id - 8) >> 1; 3288aaa36a97SAlex Deucher amdgpu_crtc = adev->mode_info.crtcs[crtc_id]; 3289aaa36a97SAlex Deucher 32907dfac896SAlex Deucher if (crtc_id >= adev->mode_info.num_crtc) { 3291aaa36a97SAlex Deucher DRM_ERROR("invalid pageflip crtc %d\n", crtc_id); 3292aaa36a97SAlex Deucher return -EINVAL; 3293aaa36a97SAlex Deucher } 3294aaa36a97SAlex Deucher 32957dfac896SAlex Deucher if (RREG32(mmGRPH_INTERRUPT_STATUS + crtc_offsets[crtc_id]) & 32967dfac896SAlex Deucher GRPH_INTERRUPT_STATUS__GRPH_PFLIP_INT_OCCURRED_MASK) 32977dfac896SAlex Deucher WREG32(mmGRPH_INTERRUPT_STATUS + crtc_offsets[crtc_id], 32987dfac896SAlex Deucher GRPH_INTERRUPT_STATUS__GRPH_PFLIP_INT_CLEAR_MASK); 3299aaa36a97SAlex Deucher 3300aaa36a97SAlex Deucher /* IRQ could occur when in initial stage */ 3301aaa36a97SAlex Deucher if(amdgpu_crtc == NULL) 3302aaa36a97SAlex Deucher return 0; 3303aaa36a97SAlex Deucher 
33044a580877SLuben Tuikov spin_lock_irqsave(&adev_to_drm(adev)->event_lock, flags); 3305aaa36a97SAlex Deucher works = amdgpu_crtc->pflip_works; 3306aaa36a97SAlex Deucher if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED){ 3307aaa36a97SAlex Deucher DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != " 3308aaa36a97SAlex Deucher "AMDGPU_FLIP_SUBMITTED(%d)\n", 3309aaa36a97SAlex Deucher amdgpu_crtc->pflip_status, 3310aaa36a97SAlex Deucher AMDGPU_FLIP_SUBMITTED); 33114a580877SLuben Tuikov spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags); 3312aaa36a97SAlex Deucher return 0; 3313aaa36a97SAlex Deucher } 3314aaa36a97SAlex Deucher 3315aaa36a97SAlex Deucher /* page flip completed. clean up */ 3316aaa36a97SAlex Deucher amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE; 3317aaa36a97SAlex Deucher amdgpu_crtc->pflip_works = NULL; 3318aaa36a97SAlex Deucher 3319aaa36a97SAlex Deucher /* wakeup usersapce */ 3320aaa36a97SAlex Deucher if(works->event) 332156286769SGustavo Padovan drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event); 3322aaa36a97SAlex Deucher 33234a580877SLuben Tuikov spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags); 3324aaa36a97SAlex Deucher 332560629c4dSGustavo Padovan drm_crtc_vblank_put(&amdgpu_crtc->base); 332687d58c11SChristian König schedule_work(&works->unpin_work); 3327aaa36a97SAlex Deucher 3328aaa36a97SAlex Deucher return 0; 3329aaa36a97SAlex Deucher } 3330aaa36a97SAlex Deucher 3331aaa36a97SAlex Deucher static void dce_v11_0_hpd_int_ack(struct amdgpu_device *adev, 3332aaa36a97SAlex Deucher int hpd) 3333aaa36a97SAlex Deucher { 3334aaa36a97SAlex Deucher u32 tmp; 3335aaa36a97SAlex Deucher 3336aaa36a97SAlex Deucher if (hpd >= adev->mode_info.num_hpd) { 3337aaa36a97SAlex Deucher DRM_DEBUG("invalid hdp %d\n", hpd); 3338aaa36a97SAlex Deucher return; 3339aaa36a97SAlex Deucher } 3340aaa36a97SAlex Deucher 3341aaa36a97SAlex Deucher tmp = RREG32(mmDC_HPD_INT_CONTROL + hpd_offsets[hpd]); 3342aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, 
DC_HPD_INT_CONTROL, DC_HPD_INT_ACK, 1); 3343aaa36a97SAlex Deucher WREG32(mmDC_HPD_INT_CONTROL + hpd_offsets[hpd], tmp); 3344aaa36a97SAlex Deucher } 3345aaa36a97SAlex Deucher 3346aaa36a97SAlex Deucher static void dce_v11_0_crtc_vblank_int_ack(struct amdgpu_device *adev, 3347aaa36a97SAlex Deucher int crtc) 3348aaa36a97SAlex Deucher { 3349aaa36a97SAlex Deucher u32 tmp; 3350aaa36a97SAlex Deucher 335115c3277fSTom St Denis if (crtc < 0 || crtc >= adev->mode_info.num_crtc) { 3352aaa36a97SAlex Deucher DRM_DEBUG("invalid crtc %d\n", crtc); 3353aaa36a97SAlex Deucher return; 3354aaa36a97SAlex Deucher } 3355aaa36a97SAlex Deucher 3356aaa36a97SAlex Deucher tmp = RREG32(mmLB_VBLANK_STATUS + crtc_offsets[crtc]); 3357aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, LB_VBLANK_STATUS, VBLANK_ACK, 1); 3358aaa36a97SAlex Deucher WREG32(mmLB_VBLANK_STATUS + crtc_offsets[crtc], tmp); 3359aaa36a97SAlex Deucher } 3360aaa36a97SAlex Deucher 3361aaa36a97SAlex Deucher static void dce_v11_0_crtc_vline_int_ack(struct amdgpu_device *adev, 3362aaa36a97SAlex Deucher int crtc) 3363aaa36a97SAlex Deucher { 3364aaa36a97SAlex Deucher u32 tmp; 3365aaa36a97SAlex Deucher 336615c3277fSTom St Denis if (crtc < 0 || crtc >= adev->mode_info.num_crtc) { 3367aaa36a97SAlex Deucher DRM_DEBUG("invalid crtc %d\n", crtc); 3368aaa36a97SAlex Deucher return; 3369aaa36a97SAlex Deucher } 3370aaa36a97SAlex Deucher 3371aaa36a97SAlex Deucher tmp = RREG32(mmLB_VLINE_STATUS + crtc_offsets[crtc]); 3372aaa36a97SAlex Deucher tmp = REG_SET_FIELD(tmp, LB_VLINE_STATUS, VLINE_ACK, 1); 3373aaa36a97SAlex Deucher WREG32(mmLB_VLINE_STATUS + crtc_offsets[crtc], tmp); 3374aaa36a97SAlex Deucher } 3375aaa36a97SAlex Deucher 3376aaa36a97SAlex Deucher static int dce_v11_0_crtc_irq(struct amdgpu_device *adev, 3377aaa36a97SAlex Deucher struct amdgpu_irq_src *source, 3378aaa36a97SAlex Deucher struct amdgpu_iv_entry *entry) 3379aaa36a97SAlex Deucher { 3380aaa36a97SAlex Deucher unsigned crtc = entry->src_id - 1; 3381aaa36a97SAlex Deucher uint32_t 
disp_int = RREG32(interrupt_status_offsets[crtc].reg); 3382734dd01dSSamuel Li unsigned int irq_type = amdgpu_display_crtc_idx_to_irq_type(adev, 3383734dd01dSSamuel Li crtc); 3384aaa36a97SAlex Deucher 33857ccf5aa8SAlex Deucher switch (entry->src_data[0]) { 3386aaa36a97SAlex Deucher case 0: /* vblank */ 3387bd833144SMario Kleiner if (disp_int & interrupt_status_offsets[crtc].vblank) 3388aaa36a97SAlex Deucher dce_v11_0_crtc_vblank_int_ack(adev, crtc); 3389bd833144SMario Kleiner else 3390bd833144SMario Kleiner DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); 3391bd833144SMario Kleiner 3392aaa36a97SAlex Deucher if (amdgpu_irq_enabled(adev, source, irq_type)) { 33934a580877SLuben Tuikov drm_handle_vblank(adev_to_drm(adev), crtc); 3394aaa36a97SAlex Deucher } 3395aaa36a97SAlex Deucher DRM_DEBUG("IH: D%d vblank\n", crtc + 1); 3396bd833144SMario Kleiner 3397aaa36a97SAlex Deucher break; 3398aaa36a97SAlex Deucher case 1: /* vline */ 3399bd833144SMario Kleiner if (disp_int & interrupt_status_offsets[crtc].vline) 3400aaa36a97SAlex Deucher dce_v11_0_crtc_vline_int_ack(adev, crtc); 3401bd833144SMario Kleiner else 3402bd833144SMario Kleiner DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); 3403bd833144SMario Kleiner 3404aaa36a97SAlex Deucher DRM_DEBUG("IH: D%d vline\n", crtc + 1); 3405bd833144SMario Kleiner 3406aaa36a97SAlex Deucher break; 3407aaa36a97SAlex Deucher default: 34087ccf5aa8SAlex Deucher DRM_DEBUG("Unhandled interrupt: %d %d\n", entry->src_id, entry->src_data[0]); 3409aaa36a97SAlex Deucher break; 3410aaa36a97SAlex Deucher } 3411aaa36a97SAlex Deucher 3412aaa36a97SAlex Deucher return 0; 3413aaa36a97SAlex Deucher } 3414aaa36a97SAlex Deucher 3415aaa36a97SAlex Deucher static int dce_v11_0_hpd_irq(struct amdgpu_device *adev, 3416aaa36a97SAlex Deucher struct amdgpu_irq_src *source, 3417aaa36a97SAlex Deucher struct amdgpu_iv_entry *entry) 3418aaa36a97SAlex Deucher { 3419aaa36a97SAlex Deucher uint32_t disp_int, mask; 3420aaa36a97SAlex Deucher unsigned hpd; 3421aaa36a97SAlex 

	if (entry->src_data[0] >= adev->mode_info.num_hpd) {
		DRM_DEBUG("Unhandled interrupt: %d %d\n", entry->src_id, entry->src_data[0]);
		return 0;
	}

	hpd = entry->src_data[0];
	disp_int = RREG32(interrupt_status_offsets[hpd].reg);
	mask = interrupt_status_offsets[hpd].hpd;

	if (disp_int & mask) {
		dce_v11_0_hpd_int_ack(adev, hpd);
		/* defer connector re-probe to process context */
		schedule_delayed_work(&adev->hotplug_work, 0);
		DRM_DEBUG("IH: HPD%d\n", hpd + 1);
	}

	return 0;
}

/* DCE 11 has no driver-managed clockgating; nothing to do. */
static int dce_v11_0_set_clockgating_state(void *handle,
					   enum amd_clockgating_state state)
{
	return 0;
}

/* DCE 11 has no driver-managed powergating; nothing to do. */
static int dce_v11_0_set_powergating_state(void *handle,
					   enum amd_powergating_state state)
{
	return 0;
}

/* IP-block callbacks wired into the amdgpu device init/fini machinery. */
static const struct amd_ip_funcs dce_v11_0_ip_funcs = {
	.name = "dce_v11_0",
	.early_init = dce_v11_0_early_init,
	.late_init = NULL,
	.sw_init = dce_v11_0_sw_init,
	.sw_fini = dce_v11_0_sw_fini,
	.hw_init = dce_v11_0_hw_init,
	.hw_fini = dce_v11_0_hw_fini,
	.suspend = dce_v11_0_suspend,
	.resume = dce_v11_0_resume,
	.is_idle = dce_v11_0_is_idle,
	.wait_for_idle = dce_v11_0_wait_for_idle,
	.soft_reset = dce_v11_0_soft_reset,
	.set_clockgating_state = dce_v11_0_set_clockgating_state,
	.set_powergating_state = dce_v11_0_set_powergating_state,
	.dump_ip_state = NULL,
	.print_ip_state = NULL,
};

/*
 * Encoder mode_set: cache the pixel clock, force the encoder off so the
 * crtc state is coherent, restore interleave, and program HDMI audio/
 * infoframes when the encoder is in HDMI mode.
 */
static void
dce_v11_0_encoder_mode_set(struct drm_encoder *encoder,
			   struct drm_display_mode *mode,
			   struct drm_display_mode *adjusted_mode)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);

	amdgpu_encoder->pixel_clock = adjusted_mode->clock;

	/* need to call this here rather than in prepare() since we need some crtc info */
	amdgpu_atombios_encoder_dpms(encoder, DRM_MODE_DPMS_OFF);

	/* set scaler clears this on some chips */
	dce_v11_0_set_interleave(encoder->crtc, mode);

	if (amdgpu_atombios_encoder_get_encoder_mode(encoder) == ATOM_ENCODER_MODE_HDMI) {
		dce_v11_0_afmt_enable(encoder, true);
		dce_v11_0_afmt_setmode(encoder, adjusted_mode);
	}
}

/*
 * Encoder prepare: pick a DIG encoder/afmt block for digital outputs,
 * take the atombios scratch-reg lock (released in commit), route the
 * i2c port, power eDP panels, select the crtc source and program the
 * FMT block.  Runs before mode_set.
 */
static void dce_v11_0_encoder_prepare(struct drm_encoder *encoder)
{
	struct amdgpu_device *adev = drm_to_adev(encoder->dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);

	if ((amdgpu_encoder->active_device &
	     (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) !=
	     ENCODER_OBJECT_ID_NONE)) {
		struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
		if (dig) {
			dig->dig_encoder = dce_v11_0_pick_dig_encoder(encoder);
			if (amdgpu_encoder->active_device & ATOM_DEVICE_DFP_SUPPORT)
				dig->afmt = adev->mode_info.afmt[dig->dig_encoder];
		}
	}

	amdgpu_atombios_scratch_regs_lock(adev, true);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);

		/* select the clock/data port if it uses a router */
		if (amdgpu_connector->router.cd_valid)
			amdgpu_i2c_router_select_cd_port(amdgpu_connector);

		/* turn eDP panel on for mode set */
		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP)
			amdgpu_atombios_encoder_set_edp_panel_power(connector,
								    ATOM_TRANSMITTER_ACTION_POWER_ON);
	}

	/* this is needed for the pll/ss setup to work correctly in some cases */
	amdgpu_atombios_encoder_set_crtc_source(encoder);
	/* set up the FMT blocks */
	dce_v11_0_program_fmt(encoder);
}

/*
 * Encoder commit: turn the encoder on now that the crtc is configured
 * and drop the scratch-reg lock taken in prepare().
 */
static void dce_v11_0_encoder_commit(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);

	/* need to call this here as we need the crtc set up */
	amdgpu_atombios_encoder_dpms(encoder, DRM_MODE_DPMS_ON);
	amdgpu_atombios_scratch_regs_lock(adev, false);
}

/*
 * Encoder disable: power the encoder down, stop HDMI audio and release
 * the DIG encoder assignment for digital outputs.
 */
static void dce_v11_0_encoder_disable(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig;

	amdgpu_atombios_encoder_dpms(encoder, DRM_MODE_DPMS_OFF);

	if (amdgpu_atombios_encoder_is_digital(encoder)) {
		if (amdgpu_atombios_encoder_get_encoder_mode(encoder) == ATOM_ENCODER_MODE_HDMI)
			dce_v11_0_afmt_enable(encoder, false);
		dig = amdgpu_encoder->enc_priv;
		dig->dig_encoder = -1;
	}
	amdgpu_encoder->active_device = 0;
}

/* these are handled by the primary encoders */
static void dce_v11_0_ext_prepare(struct drm_encoder *encoder)
{

}

static void dce_v11_0_ext_commit(struct drm_encoder *encoder)
{

}

static void
dce_v11_0_ext_mode_set(struct drm_encoder *encoder,
		       struct drm_display_mode *mode,
		       struct drm_display_mode *adjusted_mode)
{

}

static void dce_v11_0_ext_disable(struct drm_encoder *encoder)
{

}

static void
dce_v11_0_ext_dpms(struct drm_encoder *encoder, int mode)
{

}

/* External (bridge) encoders: all no-ops; the primary encoder drives them. */
static const struct drm_encoder_helper_funcs dce_v11_0_ext_helper_funcs = {
	.dpms = dce_v11_0_ext_dpms,
	.prepare = dce_v11_0_ext_prepare,
	.mode_set = dce_v11_0_ext_mode_set,
	.commit = dce_v11_0_ext_commit,
	.disable = dce_v11_0_ext_disable,
	/* no detect for TMDS/LVDS yet */
};

/* Digital (DIG/UNIPHY) encoder helpers. */
static const struct drm_encoder_helper_funcs dce_v11_0_dig_helper_funcs = {
	.dpms = amdgpu_atombios_encoder_dpms,
	.mode_fixup = amdgpu_atombios_encoder_mode_fixup,
	.prepare = dce_v11_0_encoder_prepare,
	.mode_set = dce_v11_0_encoder_mode_set,
	.commit = dce_v11_0_encoder_commit,
	.disable = dce_v11_0_encoder_disable,
	.detect = amdgpu_atombios_encoder_dig_detect,
};

static const struct
drm_encoder_helper_funcs dce_v11_0_dac_helper_funcs = {
	/* Analog (DAC) encoder helpers. */
	.dpms = amdgpu_atombios_encoder_dpms,
	.mode_fixup = amdgpu_atombios_encoder_mode_fixup,
	.prepare = dce_v11_0_encoder_prepare,
	.mode_set = dce_v11_0_encoder_mode_set,
	.commit = dce_v11_0_encoder_commit,
	.detect = amdgpu_atombios_encoder_dac_detect,
};

/*
 * Encoder destroy: tear down backlight control for LCD encoders and
 * free the encoder-private data along with the encoder itself.
 */
static void dce_v11_0_encoder_destroy(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
		amdgpu_atombios_encoder_fini_backlight(amdgpu_encoder);
	kfree(amdgpu_encoder->enc_priv);
	drm_encoder_cleanup(encoder);
	kfree(amdgpu_encoder);
}

static const struct drm_encoder_funcs dce_v11_0_encoder_funcs = {
	.destroy = dce_v11_0_encoder_destroy,
};

/*
 * Register an encoder parsed from the BIOS object table.  If an
 * encoder with the same enum already exists, merge the supported
 * devices into it; otherwise allocate a new amdgpu_encoder and wire up
 * the DRM encoder type and helper funcs based on the atombios
 * encoder object id.
 */
static void dce_v11_0_encoder_add(struct amdgpu_device *adev,
				  uint32_t encoder_enum,
				  uint32_t supported_device,
				  u16 caps)
{
	struct drm_device *dev = adev_to_drm(adev);
	struct drm_encoder *encoder;
	struct amdgpu_encoder *amdgpu_encoder;

	/* see if we already added it */
	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		amdgpu_encoder = to_amdgpu_encoder(encoder);
		if (amdgpu_encoder->encoder_enum == encoder_enum) {
			amdgpu_encoder->devices |= supported_device;
			return;
		}

	}

	/* add a new one */
	amdgpu_encoder = kzalloc(sizeof(struct amdgpu_encoder), GFP_KERNEL);
	if (!amdgpu_encoder)
		return;

	encoder = &amdgpu_encoder->base;
	/* bitmask of crtcs this encoder can be routed to */
	switch (adev->mode_info.num_crtc) {
	case 1:
		encoder->possible_crtcs = 0x1;
		break;
	case 2:
	default:
		encoder->possible_crtcs = 0x3;
		break;
	case 3:
		encoder->possible_crtcs = 0x7;
		break;
	case 4:
		encoder->possible_crtcs = 0xf;
		break;
	case 5:
		encoder->possible_crtcs = 0x1f;
		break;
	case 6:
		encoder->possible_crtcs = 0x3f;
		break;
	}

	amdgpu_encoder->enc_priv = NULL;

	amdgpu_encoder->encoder_enum = encoder_enum;
	amdgpu_encoder->encoder_id = (encoder_enum & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	amdgpu_encoder->devices = supported_device;
	amdgpu_encoder->rmx_type = RMX_OFF;
	amdgpu_encoder->underscan_type = UNDERSCAN_OFF;
	amdgpu_encoder->is_ext_encoder = false;
	amdgpu_encoder->caps = caps;

	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
		drm_encoder_init(dev, encoder, &dce_v11_0_encoder_funcs,
				 DRM_MODE_ENCODER_DAC, NULL);
		drm_encoder_helper_add(encoder, &dce_v11_0_dac_helper_funcs);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
			amdgpu_encoder->rmx_type = RMX_FULL;
			drm_encoder_init(dev, encoder, &dce_v11_0_encoder_funcs,
					 DRM_MODE_ENCODER_LVDS, NULL);
			amdgpu_encoder->enc_priv = amdgpu_atombios_encoder_get_lcd_info(amdgpu_encoder);
		} else if (amdgpu_encoder->devices & (ATOM_DEVICE_CRT_SUPPORT)) {
			drm_encoder_init(dev, encoder, &dce_v11_0_encoder_funcs,
					 DRM_MODE_ENCODER_DAC, NULL);
			amdgpu_encoder->enc_priv = amdgpu_atombios_encoder_get_dig_info(amdgpu_encoder);
		} else {
			drm_encoder_init(dev, encoder, &dce_v11_0_encoder_funcs,
					 DRM_MODE_ENCODER_TMDS, NULL);
			amdgpu_encoder->enc_priv = amdgpu_atombios_encoder_get_dig_info(amdgpu_encoder);
		}
3712aaa36a97SAlex Deucher drm_encoder_helper_add(encoder, &dce_v11_0_dig_helper_funcs); 3713aaa36a97SAlex Deucher break; 3714aaa36a97SAlex Deucher case ENCODER_OBJECT_ID_SI170B: 3715aaa36a97SAlex Deucher case ENCODER_OBJECT_ID_CH7303: 3716aaa36a97SAlex Deucher case ENCODER_OBJECT_ID_EXTERNAL_SDVOA: 3717aaa36a97SAlex Deucher case ENCODER_OBJECT_ID_EXTERNAL_SDVOB: 3718aaa36a97SAlex Deucher case ENCODER_OBJECT_ID_TITFP513: 3719aaa36a97SAlex Deucher case ENCODER_OBJECT_ID_VT1623: 3720aaa36a97SAlex Deucher case ENCODER_OBJECT_ID_HDMI_SI1930: 3721aaa36a97SAlex Deucher case ENCODER_OBJECT_ID_TRAVIS: 3722aaa36a97SAlex Deucher case ENCODER_OBJECT_ID_NUTMEG: 3723aaa36a97SAlex Deucher /* these are handled by the primary encoders */ 3724aaa36a97SAlex Deucher amdgpu_encoder->is_ext_encoder = true; 3725aaa36a97SAlex Deucher if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) 3726aaa36a97SAlex Deucher drm_encoder_init(dev, encoder, &dce_v11_0_encoder_funcs, 372713a3d91fSVille Syrjälä DRM_MODE_ENCODER_LVDS, NULL); 3728aaa36a97SAlex Deucher else if (amdgpu_encoder->devices & (ATOM_DEVICE_CRT_SUPPORT)) 3729aaa36a97SAlex Deucher drm_encoder_init(dev, encoder, &dce_v11_0_encoder_funcs, 373013a3d91fSVille Syrjälä DRM_MODE_ENCODER_DAC, NULL); 3731aaa36a97SAlex Deucher else 3732aaa36a97SAlex Deucher drm_encoder_init(dev, encoder, &dce_v11_0_encoder_funcs, 373313a3d91fSVille Syrjälä DRM_MODE_ENCODER_TMDS, NULL); 3734aaa36a97SAlex Deucher drm_encoder_helper_add(encoder, &dce_v11_0_ext_helper_funcs); 3735aaa36a97SAlex Deucher break; 3736aaa36a97SAlex Deucher } 3737aaa36a97SAlex Deucher } 3738aaa36a97SAlex Deucher 3739aaa36a97SAlex Deucher static const struct amdgpu_display_funcs dce_v11_0_display_funcs = { 3740aaa36a97SAlex Deucher .bandwidth_update = &dce_v11_0_bandwidth_update, 3741aaa36a97SAlex Deucher .vblank_get_counter = &dce_v11_0_vblank_get_counter, 3742aaa36a97SAlex Deucher .backlight_set_level = &amdgpu_atombios_encoder_set_backlight_level, 3743aaa36a97SAlex Deucher 
.backlight_get_level = &amdgpu_atombios_encoder_get_backlight_level, 3744aaa36a97SAlex Deucher .hpd_sense = &dce_v11_0_hpd_sense, 3745aaa36a97SAlex Deucher .hpd_set_polarity = &dce_v11_0_hpd_set_polarity, 3746aaa36a97SAlex Deucher .hpd_get_gpio_reg = &dce_v11_0_hpd_get_gpio_reg, 3747aaa36a97SAlex Deucher .page_flip = &dce_v11_0_page_flip, 3748aaa36a97SAlex Deucher .page_flip_get_scanoutpos = &dce_v11_0_crtc_get_scanoutpos, 3749aaa36a97SAlex Deucher .add_encoder = &dce_v11_0_encoder_add, 3750aaa36a97SAlex Deucher .add_connector = &amdgpu_connector_add, 3751aaa36a97SAlex Deucher }; 3752aaa36a97SAlex Deucher 3753aaa36a97SAlex Deucher static void dce_v11_0_set_display_funcs(struct amdgpu_device *adev) 3754aaa36a97SAlex Deucher { 3755aaa36a97SAlex Deucher adev->mode_info.funcs = &dce_v11_0_display_funcs; 3756aaa36a97SAlex Deucher } 3757aaa36a97SAlex Deucher 3758aaa36a97SAlex Deucher static const struct amdgpu_irq_src_funcs dce_v11_0_crtc_irq_funcs = { 3759aaa36a97SAlex Deucher .set = dce_v11_0_set_crtc_irq_state, 3760aaa36a97SAlex Deucher .process = dce_v11_0_crtc_irq, 3761aaa36a97SAlex Deucher }; 3762aaa36a97SAlex Deucher 3763aaa36a97SAlex Deucher static const struct amdgpu_irq_src_funcs dce_v11_0_pageflip_irq_funcs = { 3764aaa36a97SAlex Deucher .set = dce_v11_0_set_pageflip_irq_state, 3765aaa36a97SAlex Deucher .process = dce_v11_0_pageflip_irq, 3766aaa36a97SAlex Deucher }; 3767aaa36a97SAlex Deucher 3768aaa36a97SAlex Deucher static const struct amdgpu_irq_src_funcs dce_v11_0_hpd_irq_funcs = { 3769aaa36a97SAlex Deucher .set = dce_v11_0_set_hpd_irq_state, 3770aaa36a97SAlex Deucher .process = dce_v11_0_hpd_irq, 3771aaa36a97SAlex Deucher }; 3772aaa36a97SAlex Deucher 3773aaa36a97SAlex Deucher static void dce_v11_0_set_irq_funcs(struct amdgpu_device *adev) 3774aaa36a97SAlex Deucher { 3775d794b9f8SMichel Dänzer if (adev->mode_info.num_crtc > 0) 3776d794b9f8SMichel Dänzer adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_VLINE1 + adev->mode_info.num_crtc; 3777d794b9f8SMichel Dänzer 
else 3778d794b9f8SMichel Dänzer adev->crtc_irq.num_types = 0; 3779aaa36a97SAlex Deucher adev->crtc_irq.funcs = &dce_v11_0_crtc_irq_funcs; 3780aaa36a97SAlex Deucher 3781d794b9f8SMichel Dänzer adev->pageflip_irq.num_types = adev->mode_info.num_crtc; 3782aaa36a97SAlex Deucher adev->pageflip_irq.funcs = &dce_v11_0_pageflip_irq_funcs; 3783aaa36a97SAlex Deucher 3784d794b9f8SMichel Dänzer adev->hpd_irq.num_types = adev->mode_info.num_hpd; 3785aaa36a97SAlex Deucher adev->hpd_irq.funcs = &dce_v11_0_hpd_irq_funcs; 3786aaa36a97SAlex Deucher } 3787a1255107SAlex Deucher 3788a1255107SAlex Deucher const struct amdgpu_ip_block_version dce_v11_0_ip_block = 3789a1255107SAlex Deucher { 3790a1255107SAlex Deucher .type = AMD_IP_BLOCK_TYPE_DCE, 3791a1255107SAlex Deucher .major = 11, 3792a1255107SAlex Deucher .minor = 0, 3793a1255107SAlex Deucher .rev = 0, 3794a1255107SAlex Deucher .funcs = &dce_v11_0_ip_funcs, 3795a1255107SAlex Deucher }; 3796a1255107SAlex Deucher 3797a1255107SAlex Deucher const struct amdgpu_ip_block_version dce_v11_2_ip_block = 3798a1255107SAlex Deucher { 3799a1255107SAlex Deucher .type = AMD_IP_BLOCK_TYPE_DCE, 3800a1255107SAlex Deucher .major = 11, 3801a1255107SAlex Deucher .minor = 2, 3802a1255107SAlex Deucher .rev = 0, 3803a1255107SAlex Deucher .funcs = &dce_v11_0_ip_funcs, 3804a1255107SAlex Deucher }; 3805