/*
 * Copyright 2015 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <linux/firmware.h>
#include <linux/slab.h>
#include <linux/module.h>
#include <linux/pci.h>

#include <drm/amdgpu_drm.h>

#include "amdgpu.h"
#include "amdgpu_atombios.h"
#include "amdgpu_ih.h"
#include "amdgpu_uvd.h"
#include "amdgpu_vce.h"
#include "atom.h"
#include "amd_pcie.h"

#include "si_dpm.h"
#include "sid.h"
#include "si_ih.h"
#include "gfx_v6_0.h"
#include "gmc_v6_0.h"
#include "si_dma.h"
#include "dce_v6_0.h"
#include "si.h"
#include "uvd_v3_1.h"

#include "uvd/uvd_4_0_d.h"

#include "smu/smu_6_0_d.h"
#include "smu/smu_6_0_sh_mask.h"

#include "gca/gfx_6_0_d.h"
#include "gca/gfx_6_0_sh_mask.h"

#include "oss/oss_1_0_d.h"
#include "oss/oss_1_0_sh_mask.h"

#include "gmc/gmc_6_0_d.h"
#include "gmc/gmc_6_0_sh_mask.h"

#include "dce/dce_6_0_d.h"
#include "dce/dce_6_0_sh_mask.h"

#include "bif/bif_3_0_d.h"
#include "bif/bif_3_0_sh_mask.h"
#include "si_enums.h"

#include "amdgpu_dm.h"
#include "amdgpu_vkms.h"

static const u32 tahiti_golden_registers[] =
{
	mmAZALIA_SCLK_CONTROL, 0x00000030, 0x00000011,
	mmCB_HW_CONTROL, 0x00010000, 0x00018208,
	mmDB_DEBUG, 0xffffffff, 0x00000000,
	mmDB_DEBUG2, 0xf00fffff, 0x00000400,
	mmDB_DEBUG3, 0x0002021c, 0x00020200,
	mmDCI_CLK_CNTL, 0x00000080, 0x00000000,
	0x340c, 0x000000c0, 0x00800040,
	0x360c, 0x000000c0, 0x00800040,
	mmFBC_DEBUG_COMP, 0x000000f0, 0x00000070,
	mmFBC_MISC, 0x00200000, 0x50100000,
	mmDIG0_HDMI_CONTROL, 0x31000311, 0x00000011,
	mmMC_ARB_WTM_CNTL_RD, 0x00000003, 0x000007ff,
	mmMC_XPB_P2P_BAR_CFG, 0x000007ff, 0x00000000,
	mmPA_CL_ENHANCE, 0xf000001f, 0x00000007,
	mmPA_SC_FORCE_EOV_MAX_CNTS, 0xffffffff, 0x00ffffff,
	mmPA_SC_LINE_STIPPLE_STATE, 0x0000ff0f, 0x00000000,
	mmPA_SC_MODE_CNTL_1, 0x07ffffff, 0x4e000000,
	mmPA_SC_RASTER_CONFIG, 0x3f3f3fff, 0x2a00126a,
	0x000c, 0xffffffff, 0x0040,
	0x000d, 0x00000040, 0x00004040,
	mmSPI_CONFIG_CNTL, 0x07ffffff, 0x03000000,
	mmSQ_DED_CNT, 0x01ff1f3f, 0x00000000,
	mmSQ_SEC_CNT, 0x01ff1f3f, 0x00000000,
	mmSX_DEBUG_1, 0x0000007f, 0x00000020,
	mmTA_CNTL_AUX, 0x00010000, 0x00010000,
	mmTCP_ADDR_CONFIG, 0x00000200, 0x000002fb,
	mmTCP_CHAN_STEER_HI, 0xffffffff, 0x0000543b,
	mmTCP_CHAN_STEER_LO, 0xffffffff, 0xa9210876,
	mmVGT_FIFO_DEPTHS,
0xffffffff, 0x000fff40, 104 mmVGT_GS_VERTEX_REUSE, 0x0000001f, 0x00000010, 105 mmVM_CONTEXT0_CNTL, 0x20000000, 0x20fffed8, 106 mmVM_L2_CG, 0x000c0fc0, 0x000c0400, 107 mmVM_PRT_APERTURE0_LOW_ADDR, 0x0fffffff, 0xffffffff, 108 mmVM_PRT_APERTURE1_LOW_ADDR, 0x0fffffff, 0x0fffffff, 109 mmVM_PRT_APERTURE2_LOW_ADDR, 0x0fffffff, 0x0fffffff, 110 mmVM_PRT_APERTURE3_LOW_ADDR, 0x0fffffff, 0x0fffffff, 111 }; 112 113 static const u32 tahiti_golden_registers2[] = 114 { 115 mmMCIF_MEM_CONTROL, 0x00000001, 0x00000001, 116 }; 117 118 static const u32 tahiti_golden_rlc_registers[] = 119 { 120 mmGB_ADDR_CONFIG, 0xffffffff, 0x12011003, 121 mmRLC_LB_PARAMS, 0xffffffff, 0x00601005, 122 0x311f, 0xffffffff, 0x10104040, 123 0x3122, 0xffffffff, 0x0100000a, 124 mmRLC_LB_CNTR_MAX, 0xffffffff, 0x00000800, 125 mmRLC_LB_CNTL, 0xffffffff, 0x800000f4, 126 mmUVD_CGC_GATE, 0x00000008, 0x00000000, 127 }; 128 129 static const u32 pitcairn_golden_registers[] = 130 { 131 mmAZALIA_SCLK_CONTROL, 0x00000030, 0x00000011, 132 mmCB_HW_CONTROL, 0x00010000, 0x00018208, 133 mmDB_DEBUG, 0xffffffff, 0x00000000, 134 mmDB_DEBUG2, 0xf00fffff, 0x00000400, 135 mmDB_DEBUG3, 0x0002021c, 0x00020200, 136 mmDCI_CLK_CNTL, 0x00000080, 0x00000000, 137 0x340c, 0x000300c0, 0x00800040, 138 0x360c, 0x000300c0, 0x00800040, 139 mmFBC_DEBUG_COMP, 0x000000f0, 0x00000070, 140 mmFBC_MISC, 0x00200000, 0x50100000, 141 mmDIG0_HDMI_CONTROL, 0x31000311, 0x00000011, 142 mmMC_SEQ_PMG_PG_HWCNTL, 0x00073ffe, 0x000022a2, 143 mmMC_XPB_P2P_BAR_CFG, 0x000007ff, 0x00000000, 144 mmPA_CL_ENHANCE, 0xf000001f, 0x00000007, 145 mmPA_SC_FORCE_EOV_MAX_CNTS, 0xffffffff, 0x00ffffff, 146 mmPA_SC_LINE_STIPPLE_STATE, 0x0000ff0f, 0x00000000, 147 mmPA_SC_MODE_CNTL_1, 0x07ffffff, 0x4e000000, 148 mmPA_SC_RASTER_CONFIG, 0x3f3f3fff, 0x2a00126a, 149 0x000c, 0xffffffff, 0x0040, 150 0x000d, 0x00000040, 0x00004040, 151 mmSPI_CONFIG_CNTL, 0x07ffffff, 0x03000000, 152 mmSX_DEBUG_1, 0x0000007f, 0x00000020, 153 mmTA_CNTL_AUX, 0x00010000, 0x00010000, 154 mmTCP_ADDR_CONFIG, 0x000003ff, 0x000000f7, 155 mmTCP_CHAN_STEER_HI, 0xffffffff, 0x00000000, 156 mmTCP_CHAN_STEER_LO, 0xffffffff, 0x32761054, 157 mmVGT_GS_VERTEX_REUSE, 0x0000001f, 0x00000010, 158 mmVM_L2_CG, 0x000c0fc0, 0x000c0400, 159 mmVM_PRT_APERTURE0_LOW_ADDR, 0x0fffffff, 0xffffffff, 160 mmVM_PRT_APERTURE1_LOW_ADDR, 0x0fffffff, 0x0fffffff, 161 mmVM_PRT_APERTURE2_LOW_ADDR, 0x0fffffff, 0x0fffffff, 162 mmVM_PRT_APERTURE3_LOW_ADDR, 0x0fffffff, 0x0fffffff, 163 }; 164 165 static const u32 pitcairn_golden_rlc_registers[] = 166 { 167 mmGB_ADDR_CONFIG, 0xffffffff, 0x12011003, 168 mmRLC_LB_PARAMS, 0xffffffff, 0x00601004, 169 0x311f, 0xffffffff, 0x10102020, 170 0x3122, 0xffffffff, 0x01000020, 171 mmRLC_LB_CNTR_MAX, 0xffffffff, 0x00000800, 172 mmRLC_LB_CNTL, 0xffffffff, 0x800000a4, 173 }; 174 175 static const u32 verde_pg_init[] = 176 { 177 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x40000, 178 mmGMCON_PGFSM_CONFIG, 0xffffffff, 0x200010ff, 179 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 180 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 181 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 182 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 183 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 184 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x7007, 185 mmGMCON_PGFSM_CONFIG, 0xffffffff, 0x300010ff, 186 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 187 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 188 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 189 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 190 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 191 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x400000, 192 mmGMCON_PGFSM_CONFIG, 0xffffffff, 0x100010ff, 193 mmGMCON_PGFSM_WRITE, 
0xffffffff, 0x0, 194 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 195 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 196 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 197 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 198 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x120200, 199 mmGMCON_PGFSM_CONFIG, 0xffffffff, 0x500010ff, 200 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 201 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 202 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 203 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 204 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 205 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x1e1e16, 206 mmGMCON_PGFSM_CONFIG, 0xffffffff, 0x600010ff, 207 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 208 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 209 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 210 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 211 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 212 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x171f1e, 213 mmGMCON_PGFSM_CONFIG, 0xffffffff, 0x700010ff, 214 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 215 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 216 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 217 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 218 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 219 mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0, 220 mmGMCON_PGFSM_CONFIG, 0xffffffff, 0x9ff, 221 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x0, 222 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x10000800, 223 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xf, 224 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xf, 225 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x4, 226 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x1000051e, 227 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xffff, 228 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xffff, 229 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x8, 230 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x80500, 231 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x12, 232 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x9050c, 233 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x1d, 234 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xb052c, 235 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x2a, 236 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x1053e, 237 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x2d, 238 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x10546, 239 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x30, 240 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xa054e, 241 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x3c, 242 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x1055f, 243 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x3f, 244 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x10567, 245 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x42, 246 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x1056f, 247 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x45, 248 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x10572, 249 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x48, 250 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x20575, 251 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x4c, 252 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x190801, 253 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x67, 254 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x1082a, 255 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x6a, 256 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x1b082d, 257 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x87, 258 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x310851, 259 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xba, 260 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x891, 261 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xbc, 262 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x893, 263 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xbe, 264 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x20895, 265 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xc2, 266 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x20899, 267 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xc6, 268 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x2089d, 269 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xca, 270 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x8a1, 271 
mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xcc, 272 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x8a3, 273 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xce, 274 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x308a5, 275 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xd3, 276 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x6d08cd, 277 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x142, 278 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x2000095a, 279 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x1, 280 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x144, 281 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x301f095b, 282 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x165, 283 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xc094d, 284 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x173, 285 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xf096d, 286 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x184, 287 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x15097f, 288 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x19b, 289 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xc0998, 290 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x1a9, 291 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x409a7, 292 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x1af, 293 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xcdc, 294 mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x1b1, 295 mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x800, 296 mmGMCON_RENG_EXECUTE, 0xffffffff, 0x6c9b2000, 297 mmGMCON_MISC2, 0xfc00, 0x2000, 298 mmGMCON_MISC3, 0xffffffff, 0xfc0, 299 mmMC_PMG_AUTO_CFG, 0x00000100, 0x100, 300 }; 301 302 static const u32 verde_golden_rlc_registers[] = 303 { 304 mmGB_ADDR_CONFIG, 0xffffffff, 0x02010002, 305 mmRLC_LB_PARAMS, 0xffffffff, 0x033f1005, 306 0x311f, 0xffffffff, 0x10808020, 307 0x3122, 0xffffffff, 0x00800008, 308 mmRLC_LB_CNTR_MAX, 0xffffffff, 0x00001000, 309 mmRLC_LB_CNTL, 0xffffffff, 0x80010014, 310 }; 311 312 static const u32 verde_golden_registers[] = 313 { 314 mmAZALIA_SCLK_CONTROL, 0x00000030, 0x00000011, 315 mmCB_HW_CONTROL, 0x00010000, 0x00018208, 316 mmDB_DEBUG, 0xffffffff, 0x00000000, 317 mmDB_DEBUG2, 0xf00fffff, 0x00000400, 318 mmDB_DEBUG3, 0x0002021c, 0x00020200, 319 mmDCI_CLK_CNTL, 0x00000080, 0x00000000, 320 0x340c, 0x000300c0, 0x00800040, 321 0x360c, 0x000300c0, 0x00800040, 322 mmFBC_DEBUG_COMP, 0x000000f0, 0x00000070, 323 mmFBC_MISC, 0x00200000, 0x50100000, 324 mmDIG0_HDMI_CONTROL, 0x31000311, 0x00000011, 325 mmMC_SEQ_PMG_PG_HWCNTL, 0x00073ffe, 0x000022a2, 326 mmMC_XPB_P2P_BAR_CFG, 0x000007ff, 0x00000000, 327 mmPA_CL_ENHANCE, 0xf000001f, 0x00000007, 328 mmPA_SC_FORCE_EOV_MAX_CNTS, 0xffffffff, 0x00ffffff, 329 mmPA_SC_LINE_STIPPLE_STATE, 0x0000ff0f, 0x00000000, 330 mmPA_SC_MODE_CNTL_1, 0x07ffffff, 0x4e000000, 331 mmPA_SC_RASTER_CONFIG, 0x3f3f3fff, 0x0000124a, 332 0x000c, 0xffffffff, 0x0040, 333 0x000d, 0x00000040, 0x00004040, 334 mmSPI_CONFIG_CNTL, 0x07ffffff, 0x03000000, 335 mmSQ_DED_CNT, 0x01ff1f3f, 0x00000000, 336 mmSQ_SEC_CNT, 0x01ff1f3f, 0x00000000, 337 mmSX_DEBUG_1, 0x0000007f, 0x00000020, 338 mmTA_CNTL_AUX, 0x00010000, 0x00010000, 339 mmTCP_ADDR_CONFIG, 0x000003ff, 0x00000003, 340 mmTCP_CHAN_STEER_HI, 0xffffffff, 0x00000000, 341 mmTCP_CHAN_STEER_LO, 0xffffffff, 0x00001032, 342 mmVGT_GS_VERTEX_REUSE, 0x0000001f, 0x00000010, 343 mmVM_L2_CG, 0x000c0fc0, 0x000c0400, 344 mmVM_PRT_APERTURE0_LOW_ADDR, 0x0fffffff, 0xffffffff, 345 mmVM_PRT_APERTURE1_LOW_ADDR, 0x0fffffff, 0x0fffffff, 346 mmVM_PRT_APERTURE2_LOW_ADDR, 0x0fffffff, 0x0fffffff, 347 mmVM_PRT_APERTURE3_LOW_ADDR, 0x0fffffff, 0x0fffffff, 348 }; 349 350 static const u32 oland_golden_registers[] = 351 { 352 mmAZALIA_SCLK_CONTROL, 0x00000030, 0x00000011, 353 mmCB_HW_CONTROL, 0x00010000, 0x00018208, 354 mmDB_DEBUG, 0xffffffff, 0x00000000, 355 mmDB_DEBUG2, 0xf00fffff, 0x00000400, 356 
mmDB_DEBUG3, 0x0002021c, 0x00020200, 357 mmDCI_CLK_CNTL, 0x00000080, 0x00000000, 358 0x340c, 0x000300c0, 0x00800040, 359 0x360c, 0x000300c0, 0x00800040, 360 mmFBC_DEBUG_COMP, 0x000000f0, 0x00000070, 361 mmFBC_MISC, 0x00200000, 0x50100000, 362 mmDIG0_HDMI_CONTROL, 0x31000311, 0x00000011, 363 mmMC_SEQ_PMG_PG_HWCNTL, 0x00073ffe, 0x000022a2, 364 mmMC_XPB_P2P_BAR_CFG, 0x000007ff, 0x00000000, 365 mmPA_CL_ENHANCE, 0xf000001f, 0x00000007, 366 mmPA_SC_FORCE_EOV_MAX_CNTS, 0xffffffff, 0x00ffffff, 367 mmPA_SC_LINE_STIPPLE_STATE, 0x0000ff0f, 0x00000000, 368 mmPA_SC_MODE_CNTL_1, 0x07ffffff, 0x4e000000, 369 mmPA_SC_RASTER_CONFIG, 0x3f3f3fff, 0x00000082, 370 0x000c, 0xffffffff, 0x0040, 371 0x000d, 0x00000040, 0x00004040, 372 mmSPI_CONFIG_CNTL, 0x07ffffff, 0x03000000, 373 mmSX_DEBUG_1, 0x0000007f, 0x00000020, 374 mmTA_CNTL_AUX, 0x00010000, 0x00010000, 375 mmTCP_ADDR_CONFIG, 0x000003ff, 0x000000f3, 376 mmTCP_CHAN_STEER_HI, 0xffffffff, 0x00000000, 377 mmTCP_CHAN_STEER_LO, 0xffffffff, 0x00003210, 378 mmVGT_GS_VERTEX_REUSE, 0x0000001f, 0x00000010, 379 mmVM_L2_CG, 0x000c0fc0, 0x000c0400, 380 mmVM_PRT_APERTURE0_LOW_ADDR, 0x0fffffff, 0xffffffff, 381 mmVM_PRT_APERTURE1_LOW_ADDR, 0x0fffffff, 0x0fffffff, 382 mmVM_PRT_APERTURE2_LOW_ADDR, 0x0fffffff, 0x0fffffff, 383 mmVM_PRT_APERTURE3_LOW_ADDR, 0x0fffffff, 0x0fffffff, 384 385 }; 386 387 static const u32 oland_golden_rlc_registers[] = 388 { 389 mmGB_ADDR_CONFIG, 0xffffffff, 0x02010002, 390 mmRLC_LB_PARAMS, 0xffffffff, 0x00601005, 391 0x311f, 0xffffffff, 0x10104040, 392 0x3122, 0xffffffff, 0x0100000a, 393 mmRLC_LB_CNTR_MAX, 0xffffffff, 0x00000800, 394 mmRLC_LB_CNTL, 0xffffffff, 0x800000f4, 395 }; 396 397 static const u32 hainan_golden_registers[] = 398 { 399 0x17bc, 0x00000030, 0x00000011, 400 mmCB_HW_CONTROL, 0x00010000, 0x00018208, 401 mmDB_DEBUG, 0xffffffff, 0x00000000, 402 mmDB_DEBUG2, 0xf00fffff, 0x00000400, 403 mmDB_DEBUG3, 0x0002021c, 0x00020200, 404 0x031e, 0x00000080, 0x00000000, 405 0x3430, 0xff000fff, 0x00000100, 406 0x340c, 0x000300c0, 0x00800040, 407 0x3630, 0xff000fff, 0x00000100, 408 0x360c, 0x000300c0, 0x00800040, 409 0x16ec, 0x000000f0, 0x00000070, 410 0x16f0, 0x00200000, 0x50100000, 411 0x1c0c, 0x31000311, 0x00000011, 412 mmMC_SEQ_PMG_PG_HWCNTL, 0x00073ffe, 0x000022a2, 413 mmMC_XPB_P2P_BAR_CFG, 0x000007ff, 0x00000000, 414 mmPA_CL_ENHANCE, 0xf000001f, 0x00000007, 415 mmPA_SC_FORCE_EOV_MAX_CNTS, 0xffffffff, 0x00ffffff, 416 mmPA_SC_LINE_STIPPLE_STATE, 0x0000ff0f, 0x00000000, 417 mmPA_SC_MODE_CNTL_1, 0x07ffffff, 0x4e000000, 418 mmPA_SC_RASTER_CONFIG, 0x3f3f3fff, 0x00000000, 419 0x000c, 0xffffffff, 0x0040, 420 0x000d, 0x00000040, 0x00004040, 421 mmSPI_CONFIG_CNTL, 0x03e00000, 0x03600000, 422 mmSX_DEBUG_1, 0x0000007f, 0x00000020, 423 mmTA_CNTL_AUX, 0x00010000, 0x00010000, 424 mmTCP_ADDR_CONFIG, 0x000003ff, 0x000000f1, 425 mmTCP_CHAN_STEER_HI, 0xffffffff, 0x00000000, 426 mmTCP_CHAN_STEER_LO, 0xffffffff, 0x00003210, 427 mmVGT_GS_VERTEX_REUSE, 0x0000001f, 0x00000010, 428 mmVM_L2_CG, 0x000c0fc0, 0x000c0400, 429 mmVM_PRT_APERTURE0_LOW_ADDR, 0x0fffffff, 0xffffffff, 430 mmVM_PRT_APERTURE1_LOW_ADDR, 0x0fffffff, 0x0fffffff, 431 mmVM_PRT_APERTURE2_LOW_ADDR, 0x0fffffff, 0x0fffffff, 432 mmVM_PRT_APERTURE3_LOW_ADDR, 0x0fffffff, 0x0fffffff, 433 }; 434 435 static const u32 hainan_golden_registers2[] = 436 { 437 mmGB_ADDR_CONFIG, 0xffffffff, 0x2011003, 438 }; 439 440 static const u32 tahiti_mgcg_cgcg_init[] = 441 { 442 mmRLC_CGTT_MGCG_OVERRIDE, 0xffffffff, 0xfffffffc, 443 mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000, 444 mmCB_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100, 445 
mmCGTT_BCI_CLK_CTRL, 0xffffffff, 0x00000100, 446 mmCGTT_CP_CLK_CTRL, 0xffffffff, 0x00000100, 447 mmCGTT_GDS_CLK_CTRL, 0xffffffff, 0x00000100, 448 mmCGTT_IA_CLK_CTRL, 0xffffffff, 0x06000100, 449 mmCGTT_PA_CLK_CTRL, 0xffffffff, 0x00000100, 450 mmCGTT_PC_CLK_CTRL, 0xffffffff, 0x00000100, 451 mmCGTT_RLC_CLK_CTRL, 0xffffffff, 0x00000100, 452 mmCGTT_SC_CLK_CTRL, 0xffffffff, 0x00000100, 453 mmCGTT_SPI_CLK_CTRL, 0xffffffff, 0x00000100, 454 mmCGTT_SQ_CLK_CTRL, 0xffffffff, 0x00000100, 455 mmCGTT_SQG_CLK_CTRL, 0xffffffff, 0x00000100, 456 mmCGTT_SX_CLK_CTRL0, 0xffffffff, 0x00000100, 457 mmCGTT_SX_CLK_CTRL1, 0xffffffff, 0x00000100, 458 mmCGTT_SX_CLK_CTRL2, 0xffffffff, 0x00000100, 459 mmCGTT_SX_CLK_CTRL3, 0xffffffff, 0x00000100, 460 mmCGTT_TCI_CLK_CTRL, 0xffffffff, 0x00000100, 461 mmCGTT_TCP_CLK_CTRL, 0xffffffff, 0x00000100, 462 mmCGTT_VGT_CLK_CTRL, 0xffffffff, 0x06000100, 463 mmDB_CGTT_CLK_CTRL_0, 0xffffffff, 0x00000100, 464 mmTA_CGTT_CTRL, 0xffffffff, 0x00000100, 465 mmTCA_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100, 466 mmTCC_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100, 467 mmTD_CGTT_CTRL, 0xffffffff, 0x00000100, 468 mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000, 469 0x2458, 0xffffffff, 0x00010000, 470 0x2459, 0xffffffff, 0x00030002, 471 0x245a, 0xffffffff, 0x00040007, 472 0x245b, 0xffffffff, 0x00060005, 473 0x245c, 0xffffffff, 0x00090008, 474 0x245d, 0xffffffff, 0x00020001, 475 0x245e, 0xffffffff, 0x00040003, 476 0x245f, 0xffffffff, 0x00000007, 477 0x2460, 0xffffffff, 0x00060005, 478 0x2461, 0xffffffff, 0x00090008, 479 0x2462, 0xffffffff, 0x00030002, 480 0x2463, 0xffffffff, 0x00050004, 481 0x2464, 0xffffffff, 0x00000008, 482 0x2465, 0xffffffff, 0x00070006, 483 0x2466, 0xffffffff, 0x000a0009, 484 0x2467, 0xffffffff, 0x00040003, 485 0x2468, 0xffffffff, 0x00060005, 486 0x2469, 0xffffffff, 0x00000009, 487 0x246a, 0xffffffff, 0x00080007, 488 0x246b, 0xffffffff, 0x000b000a, 489 0x246c, 0xffffffff, 0x00050004, 490 0x246d, 0xffffffff, 0x00070006, 491 0x246e, 0xffffffff, 0x0008000b, 492 0x246f, 0xffffffff, 0x000a0009, 493 0x2470, 0xffffffff, 0x000d000c, 494 0x2471, 0xffffffff, 0x00060005, 495 0x2472, 0xffffffff, 0x00080007, 496 0x2473, 0xffffffff, 0x0000000b, 497 0x2474, 0xffffffff, 0x000a0009, 498 0x2475, 0xffffffff, 0x000d000c, 499 0x2476, 0xffffffff, 0x00070006, 500 0x2477, 0xffffffff, 0x00090008, 501 0x2478, 0xffffffff, 0x0000000c, 502 0x2479, 0xffffffff, 0x000b000a, 503 0x247a, 0xffffffff, 0x000e000d, 504 0x247b, 0xffffffff, 0x00080007, 505 0x247c, 0xffffffff, 0x000a0009, 506 0x247d, 0xffffffff, 0x0000000d, 507 0x247e, 0xffffffff, 0x000c000b, 508 0x247f, 0xffffffff, 0x000f000e, 509 0x2480, 0xffffffff, 0x00090008, 510 0x2481, 0xffffffff, 0x000b000a, 511 0x2482, 0xffffffff, 0x000c000f, 512 0x2483, 0xffffffff, 0x000e000d, 513 0x2484, 0xffffffff, 0x00110010, 514 0x2485, 0xffffffff, 0x000a0009, 515 0x2486, 0xffffffff, 0x000c000b, 516 0x2487, 0xffffffff, 0x0000000f, 517 0x2488, 0xffffffff, 0x000e000d, 518 0x2489, 0xffffffff, 0x00110010, 519 0x248a, 0xffffffff, 0x000b000a, 520 0x248b, 0xffffffff, 0x000d000c, 521 0x248c, 0xffffffff, 0x00000010, 522 0x248d, 0xffffffff, 0x000f000e, 523 0x248e, 0xffffffff, 0x00120011, 524 0x248f, 0xffffffff, 0x000c000b, 525 0x2490, 0xffffffff, 0x000e000d, 526 0x2491, 0xffffffff, 0x00000011, 527 0x2492, 0xffffffff, 0x0010000f, 528 0x2493, 0xffffffff, 0x00130012, 529 0x2494, 0xffffffff, 0x000d000c, 530 0x2495, 0xffffffff, 0x000f000e, 531 0x2496, 0xffffffff, 0x00100013, 532 0x2497, 0xffffffff, 0x00120011, 533 0x2498, 0xffffffff, 0x00150014, 534 0x2499, 0xffffffff, 0x000e000d, 535 0x249a, 
0xffffffff, 0x0010000f, 536 0x249b, 0xffffffff, 0x00000013, 537 0x249c, 0xffffffff, 0x00120011, 538 0x249d, 0xffffffff, 0x00150014, 539 0x249e, 0xffffffff, 0x000f000e, 540 0x249f, 0xffffffff, 0x00110010, 541 0x24a0, 0xffffffff, 0x00000014, 542 0x24a1, 0xffffffff, 0x00130012, 543 0x24a2, 0xffffffff, 0x00160015, 544 0x24a3, 0xffffffff, 0x0010000f, 545 0x24a4, 0xffffffff, 0x00120011, 546 0x24a5, 0xffffffff, 0x00000015, 547 0x24a6, 0xffffffff, 0x00140013, 548 0x24a7, 0xffffffff, 0x00170016, 549 mmCGTS_SM_CTRL_REG, 0xffffffff, 0x96940200, 550 mmCP_RB_WPTR_POLL_CNTL, 0xffffffff, 0x00900100, 551 mmRLC_GCPM_GENERAL_3, 0xffffffff, 0x00000080, 552 mmRLC_CGCG_CGLS_CTRL, 0xffffffff, 0x0020003f, 553 0x000c, 0xffffffff, 0x0000001c, 554 0x000d, 0x000f0000, 0x000f0000, 555 0x0583, 0xffffffff, 0x00000100, 556 mmXDMA_CLOCK_GATING_CNTL, 0xffffffff, 0x00000100, 557 mmXDMA_MEM_POWER_CNTL, 0x00000101, 0x00000000, 558 mmMC_MEM_POWER_LS, 0xffffffff, 0x00000104, 559 mmMC_CITF_MISC_WR_CG, 0x000c0000, 0x000c0000, 560 mmMC_CITF_MISC_RD_CG, 0x000c0000, 0x000c0000, 561 mmCGTT_DRM_CLK_CTRL0, 0xff000fff, 0x00000100, 562 0x157a, 0x00000001, 0x00000001, 563 mmHDP_MEM_POWER_LS, 0x00000001, 0x00000001, 564 mmHDP_XDP_CGTT_BLK_CTRL, 0xc0000fff, 0x00000104, 565 mmCP_MEM_SLP_CNTL, 0x00000001, 0x00000001, 566 0x3430, 0xfffffff0, 0x00000100, 567 0x3630, 0xfffffff0, 0x00000100, 568 }; 569 static const u32 pitcairn_mgcg_cgcg_init[] = 570 { 571 mmRLC_CGTT_MGCG_OVERRIDE, 0xffffffff, 0xfffffffc, 572 mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000, 573 mmCB_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100, 574 mmCGTT_BCI_CLK_CTRL, 0xffffffff, 0x00000100, 575 mmCGTT_CP_CLK_CTRL, 0xffffffff, 0x00000100, 576 mmCGTT_GDS_CLK_CTRL, 0xffffffff, 0x00000100, 577 mmCGTT_IA_CLK_CTRL, 0xffffffff, 0x06000100, 578 mmCGTT_PA_CLK_CTRL, 0xffffffff, 0x00000100, 579 mmCGTT_PC_CLK_CTRL, 0xffffffff, 0x00000100, 580 mmCGTT_RLC_CLK_CTRL, 0xffffffff, 0x00000100, 581 mmCGTT_SC_CLK_CTRL, 0xffffffff, 0x00000100, 582 mmCGTT_SPI_CLK_CTRL, 0xffffffff, 0x00000100, 583 mmCGTT_SQ_CLK_CTRL, 0xffffffff, 0x00000100, 584 mmCGTT_SQG_CLK_CTRL, 0xffffffff, 0x00000100, 585 mmCGTT_SX_CLK_CTRL0, 0xffffffff, 0x00000100, 586 mmCGTT_SX_CLK_CTRL1, 0xffffffff, 0x00000100, 587 mmCGTT_SX_CLK_CTRL2, 0xffffffff, 0x00000100, 588 mmCGTT_SX_CLK_CTRL3, 0xffffffff, 0x00000100, 589 mmCGTT_TCI_CLK_CTRL, 0xffffffff, 0x00000100, 590 mmCGTT_TCP_CLK_CTRL, 0xffffffff, 0x00000100, 591 mmCGTT_VGT_CLK_CTRL, 0xffffffff, 0x06000100, 592 mmDB_CGTT_CLK_CTRL_0, 0xffffffff, 0x00000100, 593 mmTA_CGTT_CTRL, 0xffffffff, 0x00000100, 594 mmTCA_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100, 595 mmTCC_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100, 596 mmTD_CGTT_CTRL, 0xffffffff, 0x00000100, 597 mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000, 598 0x2458, 0xffffffff, 0x00010000, 599 0x2459, 0xffffffff, 0x00030002, 600 0x245a, 0xffffffff, 0x00040007, 601 0x245b, 0xffffffff, 0x00060005, 602 0x245c, 0xffffffff, 0x00090008, 603 0x245d, 0xffffffff, 0x00020001, 604 0x245e, 0xffffffff, 0x00040003, 605 0x245f, 0xffffffff, 0x00000007, 606 0x2460, 0xffffffff, 0x00060005, 607 0x2461, 0xffffffff, 0x00090008, 608 0x2462, 0xffffffff, 0x00030002, 609 0x2463, 0xffffffff, 0x00050004, 610 0x2464, 0xffffffff, 0x00000008, 611 0x2465, 0xffffffff, 0x00070006, 612 0x2466, 0xffffffff, 0x000a0009, 613 0x2467, 0xffffffff, 0x00040003, 614 0x2468, 0xffffffff, 0x00060005, 615 0x2469, 0xffffffff, 0x00000009, 616 0x246a, 0xffffffff, 0x00080007, 617 0x246b, 0xffffffff, 0x000b000a, 618 0x246c, 0xffffffff, 0x00050004, 619 0x246d, 0xffffffff, 0x00070006, 620 0x246e, 0xffffffff, 
0x0008000b, 621 0x246f, 0xffffffff, 0x000a0009, 622 0x2470, 0xffffffff, 0x000d000c, 623 0x2480, 0xffffffff, 0x00090008, 624 0x2481, 0xffffffff, 0x000b000a, 625 0x2482, 0xffffffff, 0x000c000f, 626 0x2483, 0xffffffff, 0x000e000d, 627 0x2484, 0xffffffff, 0x00110010, 628 0x2485, 0xffffffff, 0x000a0009, 629 0x2486, 0xffffffff, 0x000c000b, 630 0x2487, 0xffffffff, 0x0000000f, 631 0x2488, 0xffffffff, 0x000e000d, 632 0x2489, 0xffffffff, 0x00110010, 633 0x248a, 0xffffffff, 0x000b000a, 634 0x248b, 0xffffffff, 0x000d000c, 635 0x248c, 0xffffffff, 0x00000010, 636 0x248d, 0xffffffff, 0x000f000e, 637 0x248e, 0xffffffff, 0x00120011, 638 0x248f, 0xffffffff, 0x000c000b, 639 0x2490, 0xffffffff, 0x000e000d, 640 0x2491, 0xffffffff, 0x00000011, 641 0x2492, 0xffffffff, 0x0010000f, 642 0x2493, 0xffffffff, 0x00130012, 643 0x2494, 0xffffffff, 0x000d000c, 644 0x2495, 0xffffffff, 0x000f000e, 645 0x2496, 0xffffffff, 0x00100013, 646 0x2497, 0xffffffff, 0x00120011, 647 0x2498, 0xffffffff, 0x00150014, 648 mmCGTS_SM_CTRL_REG, 0xffffffff, 0x96940200, 649 mmCP_RB_WPTR_POLL_CNTL, 0xffffffff, 0x00900100, 650 mmRLC_GCPM_GENERAL_3, 0xffffffff, 0x00000080, 651 mmRLC_CGCG_CGLS_CTRL, 0xffffffff, 0x0020003f, 652 0x000c, 0xffffffff, 0x0000001c, 653 0x000d, 0x000f0000, 0x000f0000, 654 0x0583, 0xffffffff, 0x00000100, 655 mmXDMA_CLOCK_GATING_CNTL, 0xffffffff, 0x00000100, 656 mmXDMA_MEM_POWER_CNTL, 0x00000101, 0x00000000, 657 mmMC_MEM_POWER_LS, 0xffffffff, 0x00000104, 658 mmCGTT_DRM_CLK_CTRL0, 0xff000fff, 0x00000100, 659 0x157a, 0x00000001, 0x00000001, 660 mmHDP_MEM_POWER_LS, 0x00000001, 0x00000001, 661 mmHDP_XDP_CGTT_BLK_CTRL, 0xc0000fff, 0x00000104, 662 mmCP_MEM_SLP_CNTL, 0x00000001, 0x00000001, 663 0x3430, 0xfffffff0, 0x00000100, 664 0x3630, 0xfffffff0, 0x00000100, 665 }; 666 667 static const u32 verde_mgcg_cgcg_init[] = 668 { 669 mmRLC_CGTT_MGCG_OVERRIDE, 0xffffffff, 0xfffffffc, 670 mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000, 671 mmCB_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100, 672 mmCGTT_BCI_CLK_CTRL, 0xffffffff, 0x00000100, 673 mmCGTT_CP_CLK_CTRL, 0xffffffff, 0x00000100, 674 mmCGTT_GDS_CLK_CTRL, 0xffffffff, 0x00000100, 675 mmCGTT_IA_CLK_CTRL, 0xffffffff, 0x06000100, 676 mmCGTT_PA_CLK_CTRL, 0xffffffff, 0x00000100, 677 mmCGTT_PC_CLK_CTRL, 0xffffffff, 0x00000100, 678 mmCGTT_RLC_CLK_CTRL, 0xffffffff, 0x00000100, 679 mmCGTT_SC_CLK_CTRL, 0xffffffff, 0x00000100, 680 mmCGTT_SPI_CLK_CTRL, 0xffffffff, 0x00000100, 681 mmCGTT_SQ_CLK_CTRL, 0xffffffff, 0x00000100, 682 mmCGTT_SQG_CLK_CTRL, 0xffffffff, 0x00000100, 683 mmCGTT_SX_CLK_CTRL0, 0xffffffff, 0x00000100, 684 mmCGTT_SX_CLK_CTRL1, 0xffffffff, 0x00000100, 685 mmCGTT_SX_CLK_CTRL2, 0xffffffff, 0x00000100, 686 mmCGTT_SX_CLK_CTRL3, 0xffffffff, 0x00000100, 687 mmCGTT_TCI_CLK_CTRL, 0xffffffff, 0x00000100, 688 mmCGTT_TCP_CLK_CTRL, 0xffffffff, 0x00000100, 689 mmCGTT_VGT_CLK_CTRL, 0xffffffff, 0x06000100, 690 mmDB_CGTT_CLK_CTRL_0, 0xffffffff, 0x00000100, 691 mmTA_CGTT_CTRL, 0xffffffff, 0x00000100, 692 mmTCA_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100, 693 mmTCC_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100, 694 mmTD_CGTT_CTRL, 0xffffffff, 0x00000100, 695 mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000, 696 0x2458, 0xffffffff, 0x00010000, 697 0x2459, 0xffffffff, 0x00030002, 698 0x245a, 0xffffffff, 0x00040007, 699 0x245b, 0xffffffff, 0x00060005, 700 0x245c, 0xffffffff, 0x00090008, 701 0x245d, 0xffffffff, 0x00020001, 702 0x245e, 0xffffffff, 0x00040003, 703 0x245f, 0xffffffff, 0x00000007, 704 0x2460, 0xffffffff, 0x00060005, 705 0x2461, 0xffffffff, 0x00090008, 706 0x2462, 0xffffffff, 0x00030002, 707 0x2463, 0xffffffff, 0x00050004, 
708 0x2464, 0xffffffff, 0x00000008, 709 0x2465, 0xffffffff, 0x00070006, 710 0x2466, 0xffffffff, 0x000a0009, 711 0x2467, 0xffffffff, 0x00040003, 712 0x2468, 0xffffffff, 0x00060005, 713 0x2469, 0xffffffff, 0x00000009, 714 0x246a, 0xffffffff, 0x00080007, 715 0x246b, 0xffffffff, 0x000b000a, 716 0x246c, 0xffffffff, 0x00050004, 717 0x246d, 0xffffffff, 0x00070006, 718 0x246e, 0xffffffff, 0x0008000b, 719 0x246f, 0xffffffff, 0x000a0009, 720 0x2470, 0xffffffff, 0x000d000c, 721 0x2480, 0xffffffff, 0x00090008, 722 0x2481, 0xffffffff, 0x000b000a, 723 0x2482, 0xffffffff, 0x000c000f, 724 0x2483, 0xffffffff, 0x000e000d, 725 0x2484, 0xffffffff, 0x00110010, 726 0x2485, 0xffffffff, 0x000a0009, 727 0x2486, 0xffffffff, 0x000c000b, 728 0x2487, 0xffffffff, 0x0000000f, 729 0x2488, 0xffffffff, 0x000e000d, 730 0x2489, 0xffffffff, 0x00110010, 731 0x248a, 0xffffffff, 0x000b000a, 732 0x248b, 0xffffffff, 0x000d000c, 733 0x248c, 0xffffffff, 0x00000010, 734 0x248d, 0xffffffff, 0x000f000e, 735 0x248e, 0xffffffff, 0x00120011, 736 0x248f, 0xffffffff, 0x000c000b, 737 0x2490, 0xffffffff, 0x000e000d, 738 0x2491, 0xffffffff, 0x00000011, 739 0x2492, 0xffffffff, 0x0010000f, 740 0x2493, 0xffffffff, 0x00130012, 741 0x2494, 0xffffffff, 0x000d000c, 742 0x2495, 0xffffffff, 0x000f000e, 743 0x2496, 0xffffffff, 0x00100013, 744 0x2497, 0xffffffff, 0x00120011, 745 0x2498, 0xffffffff, 0x00150014, 746 mmCGTS_SM_CTRL_REG, 0xffffffff, 0x96940200, 747 mmCP_RB_WPTR_POLL_CNTL, 0xffffffff, 0x00900100, 748 mmRLC_GCPM_GENERAL_3, 0xffffffff, 0x00000080, 749 mmRLC_CGCG_CGLS_CTRL, 0xffffffff, 0x0020003f, 750 0x000c, 0xffffffff, 0x0000001c, 751 0x000d, 0x000f0000, 0x000f0000, 752 0x0583, 0xffffffff, 0x00000100, 753 mmXDMA_CLOCK_GATING_CNTL, 0xffffffff, 0x00000100, 754 mmXDMA_MEM_POWER_CNTL, 0x00000101, 0x00000000, 755 mmMC_MEM_POWER_LS, 0xffffffff, 0x00000104, 756 mmMC_CITF_MISC_WR_CG, 0x000c0000, 0x000c0000, 757 mmMC_CITF_MISC_RD_CG, 0x000c0000, 0x000c0000, 758 mmCGTT_DRM_CLK_CTRL0, 0xff000fff, 0x00000100, 759 0x157a, 0x00000001, 0x00000001, 760 mmHDP_MEM_POWER_LS, 0x00000001, 0x00000001, 761 mmHDP_XDP_CGTT_BLK_CTRL, 0xc0000fff, 0x00000104, 762 mmCP_MEM_SLP_CNTL, 0x00000001, 0x00000001, 763 0x3430, 0xfffffff0, 0x00000100, 764 0x3630, 0xfffffff0, 0x00000100, 765 }; 766 767 static const u32 oland_mgcg_cgcg_init[] = 768 { 769 mmRLC_CGTT_MGCG_OVERRIDE, 0xffffffff, 0xfffffffc, 770 mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000, 771 mmCB_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100, 772 mmCGTT_BCI_CLK_CTRL, 0xffffffff, 0x00000100, 773 mmCGTT_CP_CLK_CTRL, 0xffffffff, 0x00000100, 774 mmCGTT_GDS_CLK_CTRL, 0xffffffff, 0x00000100, 775 mmCGTT_IA_CLK_CTRL, 0xffffffff, 0x06000100, 776 mmCGTT_PA_CLK_CTRL, 0xffffffff, 0x00000100, 777 mmCGTT_PC_CLK_CTRL, 0xffffffff, 0x00000100, 778 mmCGTT_RLC_CLK_CTRL, 0xffffffff, 0x00000100, 779 mmCGTT_SC_CLK_CTRL, 0xffffffff, 0x00000100, 780 mmCGTT_SPI_CLK_CTRL, 0xffffffff, 0x00000100, 781 mmCGTT_SQ_CLK_CTRL, 0xffffffff, 0x00000100, 782 mmCGTT_SQG_CLK_CTRL, 0xffffffff, 0x00000100, 783 mmCGTT_SX_CLK_CTRL0, 0xffffffff, 0x00000100, 784 mmCGTT_SX_CLK_CTRL1, 0xffffffff, 0x00000100, 785 mmCGTT_SX_CLK_CTRL2, 0xffffffff, 0x00000100, 786 mmCGTT_SX_CLK_CTRL3, 0xffffffff, 0x00000100, 787 mmCGTT_TCI_CLK_CTRL, 0xffffffff, 0x00000100, 788 mmCGTT_TCP_CLK_CTRL, 0xffffffff, 0x00000100, 789 mmCGTT_VGT_CLK_CTRL, 0xffffffff, 0x06000100, 790 mmDB_CGTT_CLK_CTRL_0, 0xffffffff, 0x00000100, 791 mmTA_CGTT_CTRL, 0xffffffff, 0x00000100, 792 mmTCA_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100, 793 mmTCC_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100, 794 mmTD_CGTT_CTRL, 0xffffffff, 
0x00000100, 795 mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000, 796 0x2458, 0xffffffff, 0x00010000, 797 0x2459, 0xffffffff, 0x00030002, 798 0x245a, 0xffffffff, 0x00040007, 799 0x245b, 0xffffffff, 0x00060005, 800 0x245c, 0xffffffff, 0x00090008, 801 0x245d, 0xffffffff, 0x00020001, 802 0x245e, 0xffffffff, 0x00040003, 803 0x245f, 0xffffffff, 0x00000007, 804 0x2460, 0xffffffff, 0x00060005, 805 0x2461, 0xffffffff, 0x00090008, 806 0x2462, 0xffffffff, 0x00030002, 807 0x2463, 0xffffffff, 0x00050004, 808 0x2464, 0xffffffff, 0x00000008, 809 0x2465, 0xffffffff, 0x00070006, 810 0x2466, 0xffffffff, 0x000a0009, 811 0x2467, 0xffffffff, 0x00040003, 812 0x2468, 0xffffffff, 0x00060005, 813 0x2469, 0xffffffff, 0x00000009, 814 0x246a, 0xffffffff, 0x00080007, 815 0x246b, 0xffffffff, 0x000b000a, 816 0x246c, 0xffffffff, 0x00050004, 817 0x246d, 0xffffffff, 0x00070006, 818 0x246e, 0xffffffff, 0x0008000b, 819 0x246f, 0xffffffff, 0x000a0009, 820 0x2470, 0xffffffff, 0x000d000c, 821 0x2471, 0xffffffff, 0x00060005, 822 0x2472, 0xffffffff, 0x00080007, 823 0x2473, 0xffffffff, 0x0000000b, 824 0x2474, 0xffffffff, 0x000a0009, 825 0x2475, 0xffffffff, 0x000d000c, 826 mmCGTS_SM_CTRL_REG, 0xffffffff, 0x96940200, 827 mmCP_RB_WPTR_POLL_CNTL, 0xffffffff, 0x00900100, 828 mmRLC_GCPM_GENERAL_3, 0xffffffff, 0x00000080, 829 mmRLC_CGCG_CGLS_CTRL, 0xffffffff, 0x0020003f, 830 0x000c, 0xffffffff, 0x0000001c, 831 0x000d, 0x000f0000, 0x000f0000, 832 0x0583, 0xffffffff, 0x00000100, 833 mmXDMA_CLOCK_GATING_CNTL, 0xffffffff, 0x00000100, 834 mmXDMA_MEM_POWER_CNTL, 0x00000101, 0x00000000, 835 mmMC_MEM_POWER_LS, 0xffffffff, 0x00000104, 836 mmMC_CITF_MISC_WR_CG, 0x000c0000, 0x000c0000, 837 mmMC_CITF_MISC_RD_CG, 0x000c0000, 0x000c0000, 838 mmCGTT_DRM_CLK_CTRL0, 0xff000fff, 0x00000100, 839 0x157a, 0x00000001, 0x00000001, 840 mmHDP_MEM_POWER_LS, 0x00000001, 0x00000001, 841 mmHDP_XDP_CGTT_BLK_CTRL, 0xc0000fff, 0x00000104, 842 mmCP_MEM_SLP_CNTL, 0x00000001, 0x00000001, 843 0x3430, 0xfffffff0, 0x00000100, 844 0x3630, 0xfffffff0, 0x00000100, 845 }; 846 847 static const u32 hainan_mgcg_cgcg_init[] = 848 { 849 mmRLC_CGTT_MGCG_OVERRIDE, 0xffffffff, 0xfffffffc, 850 mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000, 851 mmCB_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100, 852 mmCGTT_BCI_CLK_CTRL, 0xffffffff, 0x00000100, 853 mmCGTT_CP_CLK_CTRL, 0xffffffff, 0x00000100, 854 mmCGTT_GDS_CLK_CTRL, 0xffffffff, 0x00000100, 855 mmCGTT_IA_CLK_CTRL, 0xffffffff, 0x06000100, 856 mmCGTT_PA_CLK_CTRL, 0xffffffff, 0x00000100, 857 mmCGTT_PC_CLK_CTRL, 0xffffffff, 0x00000100, 858 mmCGTT_RLC_CLK_CTRL, 0xffffffff, 0x00000100, 859 mmCGTT_SC_CLK_CTRL, 0xffffffff, 0x00000100, 860 mmCGTT_SPI_CLK_CTRL, 0xffffffff, 0x00000100, 861 mmCGTT_SQ_CLK_CTRL, 0xffffffff, 0x00000100, 862 mmCGTT_SQG_CLK_CTRL, 0xffffffff, 0x00000100, 863 mmCGTT_SX_CLK_CTRL0, 0xffffffff, 0x00000100, 864 mmCGTT_SX_CLK_CTRL1, 0xffffffff, 0x00000100, 865 mmCGTT_SX_CLK_CTRL2, 0xffffffff, 0x00000100, 866 mmCGTT_SX_CLK_CTRL3, 0xffffffff, 0x00000100, 867 mmCGTT_TCI_CLK_CTRL, 0xffffffff, 0x00000100, 868 mmCGTT_TCP_CLK_CTRL, 0xffffffff, 0x00000100, 869 mmCGTT_VGT_CLK_CTRL, 0xffffffff, 0x06000100, 870 mmDB_CGTT_CLK_CTRL_0, 0xffffffff, 0x00000100, 871 mmTA_CGTT_CTRL, 0xffffffff, 0x00000100, 872 mmTCA_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100, 873 mmTCC_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100, 874 mmTD_CGTT_CTRL, 0xffffffff, 0x00000100, 875 mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000, 876 0x2458, 0xffffffff, 0x00010000, 877 0x2459, 0xffffffff, 0x00030002, 878 0x245a, 0xffffffff, 0x00040007, 879 0x245b, 0xffffffff, 0x00060005, 880 0x245c, 0xffffffff, 
	0x00090008,
	0x245d, 0xffffffff, 0x00020001,
	0x245e, 0xffffffff, 0x00040003,
	0x245f, 0xffffffff, 0x00000007,
	0x2460, 0xffffffff, 0x00060005,
	0x2461, 0xffffffff, 0x00090008,
	0x2462, 0xffffffff, 0x00030002,
	0x2463, 0xffffffff, 0x00050004,
	0x2464, 0xffffffff, 0x00000008,
	0x2465, 0xffffffff, 0x00070006,
	0x2466, 0xffffffff, 0x000a0009,
	0x2467, 0xffffffff, 0x00040003,
	0x2468, 0xffffffff, 0x00060005,
	0x2469, 0xffffffff, 0x00000009,
	0x246a, 0xffffffff, 0x00080007,
	0x246b, 0xffffffff, 0x000b000a,
	0x246c, 0xffffffff, 0x00050004,
	0x246d, 0xffffffff, 0x00070006,
	0x246e, 0xffffffff, 0x0008000b,
	0x246f, 0xffffffff, 0x000a0009,
	0x2470, 0xffffffff, 0x000d000c,
	0x2471, 0xffffffff, 0x00060005,
	0x2472, 0xffffffff, 0x00080007,
	0x2473, 0xffffffff, 0x0000000b,
	0x2474, 0xffffffff, 0x000a0009,
	0x2475, 0xffffffff, 0x000d000c,
	mmCGTS_SM_CTRL_REG, 0xffffffff, 0x96940200,
	mmCP_RB_WPTR_POLL_CNTL, 0xffffffff, 0x00900100,
	mmRLC_GCPM_GENERAL_3, 0xffffffff, 0x00000080,
	mmRLC_CGCG_CGLS_CTRL, 0xffffffff, 0x0020003f,
	0x000c, 0xffffffff, 0x0000001c,
	0x000d, 0x000f0000, 0x000f0000,
	0x0583, 0xffffffff, 0x00000100,
	0x0409, 0xffffffff, 0x00000100,
	mmMC_MEM_POWER_LS, 0xffffffff, 0x00000104,
	mmMC_CITF_MISC_WR_CG, 0x000c0000, 0x000c0000,
	mmMC_CITF_MISC_RD_CG, 0x000c0000, 0x000c0000,
	mmHDP_MEM_POWER_LS, 0x00000001, 0x00000001,
	mmHDP_XDP_CGTT_BLK_CTRL, 0xc0000fff, 0x00000104,
	mmCP_MEM_SLP_CNTL, 0x00000001, 0x00000001,
	0x3430, 0xfffffff0, 0x00000100,
	0x3630, 0xfffffff0, 0x00000100,
};

/* XXX: update when we support VCE */
#if 0
/* tahiti, pitcairn, verde */
static const struct amdgpu_video_codec_info tahiti_video_codecs_encode_array[] =
{
	{
		.codec_type = AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG4_AVC,
		.max_width = 2048,
		.max_height = 1152,
		.max_pixels_per_frame = 2048 * 1152,
		.max_level = 0,
	},
};

static const struct amdgpu_video_codecs tahiti_video_codecs_encode =
{
	.codec_count = ARRAY_SIZE(tahiti_video_codecs_encode_array),
	.codec_array = tahiti_video_codecs_encode_array,
};
#else
static const struct amdgpu_video_codecs tahiti_video_codecs_encode =
{
	.codec_count = 0,
	.codec_array = NULL,
};
#endif
/* oland and hainan don't support encode */
static const struct amdgpu_video_codecs hainan_video_codecs_encode =
{
	.codec_count = 0,
	.codec_array = NULL,
};

/* tahiti, pitcairn, verde, oland */
static const struct amdgpu_video_codec_info tahiti_video_codecs_decode_array[] =
{
	{
		.codec_type = AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG2,
		.max_width = 2048,
		.max_height = 1152,
		.max_pixels_per_frame = 2048 * 1152,
		.max_level = 3,
	},
	{
		.codec_type = AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG4,
		.max_width = 2048,
		.max_height = 1152,
		.max_pixels_per_frame = 2048 * 1152,
		.max_level = 5,
	},
	{
		.codec_type = AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG4_AVC,
		.max_width = 2048,
		.max_height = 1152,
		.max_pixels_per_frame = 2048 * 1152,
		.max_level = 41,
	},
	{
		.codec_type = AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_VC1,
		.max_width = 2048,
		.max_height = 1152,
		.max_pixels_per_frame = 2048 * 1152,
		.max_level = 4,
	},
};

static const struct amdgpu_video_codecs tahiti_video_codecs_decode =
{
	.codec_count = ARRAY_SIZE(tahiti_video_codecs_decode_array),
	.codec_array = tahiti_video_codecs_decode_array,
};

/* hainan doesn't support decode */
static const struct amdgpu_video_codecs hainan_video_codecs_decode =
{
	.codec_count = 0,
	.codec_array = NULL,
};

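/*
 * Map each SI variant to the codec capability tables above: Oland reuses
 * the Tahiti decode table but has no encode support, and Hainan reports
 * neither encode nor decode.
 */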
static int si_query_video_codecs(struct amdgpu_device *adev, bool encode,
				 const struct amdgpu_video_codecs **codecs)
{
	switch (adev->asic_type) {
	case CHIP_VERDE:
	case CHIP_TAHITI:
	case CHIP_PITCAIRN:
		if (encode)
			*codecs = &tahiti_video_codecs_encode;
		else
			*codecs = &tahiti_video_codecs_decode;
		return 0;
	case CHIP_OLAND:
		if (encode)
			*codecs = &hainan_video_codecs_encode;
		else
			*codecs = &tahiti_video_codecs_decode;
		return 0;
	case CHIP_HAINAN:
		if (encode)
			*codecs = &hainan_video_codecs_encode;
		else
			*codecs = &hainan_video_codecs_decode;
		return 0;
	default:
		return -EINVAL;
	}
}

static u32 si_pcie_rreg(struct amdgpu_device *adev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&adev->pcie_idx_lock, flags);
	WREG32(AMDGPU_PCIE_INDEX, reg);
	(void)RREG32(AMDGPU_PCIE_INDEX);
	r = RREG32(AMDGPU_PCIE_DATA);
	spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
	return r;
}

static void si_pcie_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&adev->pcie_idx_lock, flags);
	WREG32(AMDGPU_PCIE_INDEX, reg);
	(void)RREG32(AMDGPU_PCIE_INDEX);
	WREG32(AMDGPU_PCIE_DATA, v);
	(void)RREG32(AMDGPU_PCIE_DATA);
	spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
}

static u32 si_pciep_rreg(struct amdgpu_device *adev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&adev->pcie_idx_lock, flags);
	WREG32(PCIE_PORT_INDEX, ((reg) & 0xff));
	(void)RREG32(PCIE_PORT_INDEX);
	r = RREG32(PCIE_PORT_DATA);
	spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
	return r;
}

static void si_pciep_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&adev->pcie_idx_lock, flags);
	WREG32(PCIE_PORT_INDEX, ((reg) & 0xff));
	(void)RREG32(PCIE_PORT_INDEX);
	WREG32(PCIE_PORT_DATA, (v));
	(void)RREG32(PCIE_PORT_DATA);
	spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
}

static u32 si_smc_rreg(struct amdgpu_device *adev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&adev->smc_idx_lock, flags);
	WREG32(mmSMC_IND_INDEX_0, (reg));
	r = RREG32(mmSMC_IND_DATA_0);
	spin_unlock_irqrestore(&adev->smc_idx_lock, flags);
	return r;
}

static void si_smc_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&adev->smc_idx_lock, flags);
	WREG32(mmSMC_IND_INDEX_0, (reg));
	WREG32(mmSMC_IND_DATA_0, (v));
	spin_unlock_irqrestore(&adev->smc_idx_lock, flags);
}

static u32 si_uvd_ctx_rreg(struct amdgpu_device *adev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags);
	WREG32(mmUVD_CTX_INDEX, ((reg) & 0x1ff));
	r = RREG32(mmUVD_CTX_DATA);
	spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags);
	return r;
}

static void si_uvd_ctx_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags);
	WREG32(mmUVD_CTX_INDEX, ((reg) & 0x1ff));
	WREG32(mmUVD_CTX_DATA, (v));
	spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags);
}

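/*
 * Registers that may be read back through the generic register query
 * path; entries flagged true are GRBM-banked and are resolved per SE/SH
 * (or from the cached gfx config) in si_get_register_value() below.
 */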
static struct amdgpu_allowed_register_entry si_allowed_read_registers[] = {
	{mmGRBM_STATUS},
	{mmGRBM_STATUS2},
	{mmGRBM_STATUS_SE0},
	{mmGRBM_STATUS_SE1},
	{mmSRBM_STATUS},
	{mmSRBM_STATUS2},
	{mmDMA_STATUS_REG + DMA0_REGISTER_OFFSET},
	{mmDMA_STATUS_REG + DMA1_REGISTER_OFFSET},
	{mmCP_STAT},
	{mmCP_STALLED_STAT1},
	{mmCP_STALLED_STAT2},
	{mmCP_STALLED_STAT3},
	{mmGB_ADDR_CONFIG},
	{mmMC_ARB_RAMCFG},
	{mmGB_TILE_MODE0},
	{mmGB_TILE_MODE1},
	{mmGB_TILE_MODE2},
	{mmGB_TILE_MODE3},
	{mmGB_TILE_MODE4},
	{mmGB_TILE_MODE5},
	{mmGB_TILE_MODE6},
	{mmGB_TILE_MODE7},
	{mmGB_TILE_MODE8},
	{mmGB_TILE_MODE9},
	{mmGB_TILE_MODE10},
	{mmGB_TILE_MODE11},
	{mmGB_TILE_MODE12},
	{mmGB_TILE_MODE13},
	{mmGB_TILE_MODE14},
	{mmGB_TILE_MODE15},
	{mmGB_TILE_MODE16},
	{mmGB_TILE_MODE17},
	{mmGB_TILE_MODE18},
	{mmGB_TILE_MODE19},
	{mmGB_TILE_MODE20},
	{mmGB_TILE_MODE21},
	{mmGB_TILE_MODE22},
	{mmGB_TILE_MODE23},
	{mmGB_TILE_MODE24},
	{mmGB_TILE_MODE25},
	{mmGB_TILE_MODE26},
	{mmGB_TILE_MODE27},
	{mmGB_TILE_MODE28},
	{mmGB_TILE_MODE29},
	{mmGB_TILE_MODE30},
	{mmGB_TILE_MODE31},
	{mmCC_RB_BACKEND_DISABLE, true},
	{mmGC_USER_RB_BACKEND_DISABLE, true},
	{mmPA_SC_RASTER_CONFIG, true},
};

static uint32_t si_get_register_value(struct amdgpu_device *adev,
				      bool indexed, u32 se_num,
				      u32 sh_num, u32 reg_offset)
{
	if (indexed) {
		uint32_t val;
		unsigned se_idx = (se_num == 0xffffffff) ? 0 : se_num;
		unsigned sh_idx = (sh_num == 0xffffffff) ? 0 : sh_num;

		switch (reg_offset) {
		case mmCC_RB_BACKEND_DISABLE:
			return adev->gfx.config.rb_config[se_idx][sh_idx].rb_backend_disable;
		case mmGC_USER_RB_BACKEND_DISABLE:
			return adev->gfx.config.rb_config[se_idx][sh_idx].user_rb_backend_disable;
		case mmPA_SC_RASTER_CONFIG:
			return adev->gfx.config.rb_config[se_idx][sh_idx].raster_config;
		}

		mutex_lock(&adev->grbm_idx_mutex);
		if (se_num != 0xffffffff || sh_num != 0xffffffff)
			amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff, 0);

		val = RREG32(reg_offset);

		if (se_num != 0xffffffff || sh_num != 0xffffffff)
			amdgpu_gfx_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff, 0);
		mutex_unlock(&adev->grbm_idx_mutex);
		return val;
	} else {
		unsigned idx;

		switch (reg_offset) {
		case mmGB_ADDR_CONFIG:
			return adev->gfx.config.gb_addr_config;
		case mmMC_ARB_RAMCFG:
			return adev->gfx.config.mc_arb_ramcfg;
		case mmGB_TILE_MODE0:
		case mmGB_TILE_MODE1:
		case mmGB_TILE_MODE2:
		case mmGB_TILE_MODE3:
		case mmGB_TILE_MODE4:
		case mmGB_TILE_MODE5:
		case mmGB_TILE_MODE6:
		case mmGB_TILE_MODE7:
		case mmGB_TILE_MODE8:
		case mmGB_TILE_MODE9:
		case mmGB_TILE_MODE10:
		case mmGB_TILE_MODE11:
		case mmGB_TILE_MODE12:
		case mmGB_TILE_MODE13:
		case mmGB_TILE_MODE14:
		case mmGB_TILE_MODE15:
		case mmGB_TILE_MODE16:
		case mmGB_TILE_MODE17:
		case mmGB_TILE_MODE18:
		case mmGB_TILE_MODE19:
		case mmGB_TILE_MODE20:
		case mmGB_TILE_MODE21:
		case mmGB_TILE_MODE22:
		case mmGB_TILE_MODE23:
		case mmGB_TILE_MODE24:
		case mmGB_TILE_MODE25:
		case mmGB_TILE_MODE26:
		case mmGB_TILE_MODE27:
		case mmGB_TILE_MODE28:
		case mmGB_TILE_MODE29:
		case mmGB_TILE_MODE30:
		case mmGB_TILE_MODE31:
			idx = (reg_offset - mmGB_TILE_MODE0);
			return adev->gfx.config.tile_mode_array[idx];
		default:
			return RREG32(reg_offset);
		}
	}
}
static int si_read_register(struct amdgpu_device *adev, u32 se_num,
			    u32 sh_num, u32 reg_offset, u32 *value)
{
	uint32_t i;

	*value = 0;
	for (i = 0; i < ARRAY_SIZE(si_allowed_read_registers); i++) {
		bool indexed = si_allowed_read_registers[i].grbm_indexed;

		if (reg_offset != si_allowed_read_registers[i].reg_offset)
			continue;

		*value = si_get_register_value(adev, indexed, se_num, sh_num,
					       reg_offset);
		return 0;
	}
	return -EINVAL;
}

static bool si_read_disabled_bios(struct amdgpu_device *adev)
{
	u32 bus_cntl;
	u32 d1vga_control = 0;
	u32 d2vga_control = 0;
	u32 vga_render_control = 0;
	u32 rom_cntl;
	bool r;

	bus_cntl = RREG32(mmBUS_CNTL);
	if (adev->mode_info.num_crtc) {
		d1vga_control = RREG32(mmD1VGA_CONTROL);
		d2vga_control = RREG32(mmD2VGA_CONTROL);
		vga_render_control = RREG32(mmVGA_RENDER_CONTROL);
	}
	rom_cntl = RREG32(R600_ROM_CNTL);

	/* enable the rom */
	WREG32(mmBUS_CNTL, (bus_cntl & ~BUS_CNTL__BIOS_ROM_DIS_MASK));
	if (adev->mode_info.num_crtc) {
		/* Disable VGA mode */
		WREG32(mmD1VGA_CONTROL,
		       (d1vga_control & ~(D1VGA_CONTROL__D1VGA_MODE_ENABLE_MASK |
			D1VGA_CONTROL__D1VGA_TIMING_SELECT_MASK)));
		WREG32(mmD2VGA_CONTROL,
		       (d2vga_control & ~(D1VGA_CONTROL__D1VGA_MODE_ENABLE_MASK |
			D1VGA_CONTROL__D1VGA_TIMING_SELECT_MASK)));
		WREG32(mmVGA_RENDER_CONTROL,
		       (vga_render_control & ~VGA_RENDER_CONTROL__VGA_VSTATUS_CNTL_MASK));
	}
	WREG32(R600_ROM_CNTL, rom_cntl | R600_SCK_OVERWRITE);

	r = amdgpu_read_bios(adev);

	/* restore regs */
	WREG32(mmBUS_CNTL, bus_cntl);
	if (adev->mode_info.num_crtc) {
		WREG32(mmD1VGA_CONTROL, d1vga_control);
		WREG32(mmD2VGA_CONTROL, d2vga_control);
		WREG32(mmVGA_RENDER_CONTROL, vga_render_control);
	}
	WREG32(R600_ROM_CNTL, rom_cntl);
	return r;
}

#define mmROM_INDEX 0x2A
#define mmROM_DATA 0x2B

static bool si_read_bios_from_rom(struct amdgpu_device *adev,
				  u8 *bios, u32 length_bytes)
{
	u32 *dw_ptr;
	u32 i, length_dw;

	if (bios == NULL)
		return false;
	if (length_bytes == 0)
		return false;
	/* APU vbios image is part of sbios image */
	if (adev->flags & AMD_IS_APU)
		return false;

	dw_ptr = (u32 *)bios;
	length_dw = ALIGN(length_bytes, 4) / 4;
	/* set rom index to 0 */
	WREG32(mmROM_INDEX, 0);
	for (i = 0; i < length_dw; i++)
		dw_ptr[i] = RREG32(mmROM_DATA);

	return true;
}

static void si_set_clk_bypass_mode(struct amdgpu_device *adev)
{
	u32 tmp, i;

	tmp = RREG32(mmCG_SPLL_FUNC_CNTL);
	tmp |= CG_SPLL_FUNC_CNTL__SPLL_BYPASS_EN_MASK;
	WREG32(mmCG_SPLL_FUNC_CNTL, tmp);

	tmp = RREG32(mmCG_SPLL_FUNC_CNTL_2);
	tmp |= CG_SPLL_FUNC_CNTL_2__SPLL_CTLREQ_CHG_MASK;
	WREG32(mmCG_SPLL_FUNC_CNTL_2, tmp);

	for (i = 0; i < adev->usec_timeout; i++) {
		if (RREG32(mmCG_SPLL_STATUS) & CG_SPLL_STATUS__SPLL_CHG_STATUS_MASK)
			break;
		udelay(1);
	}

	tmp = RREG32(mmCG_SPLL_FUNC_CNTL_2);
	tmp &= ~(CG_SPLL_FUNC_CNTL_2__SPLL_CTLREQ_CHG_MASK |
		 CG_SPLL_FUNC_CNTL_2__SCLK_MUX_UPDATE_MASK);
	WREG32(mmCG_SPLL_FUNC_CNTL_2, tmp);

	tmp = RREG32(MPLL_CNTL_MODE);
	tmp &= ~MPLL_MCLK_SEL;
	WREG32(MPLL_CNTL_MODE, tmp);
}

static void si_spll_powerdown(struct amdgpu_device *adev)
{
	u32 tmp;

	tmp = RREG32(mmSPLL_CNTL_MODE);
	tmp |= SPLL_CNTL_MODE__SPLL_SW_DIR_CONTROL_MASK;
	WREG32(mmSPLL_CNTL_MODE, tmp);

	tmp = RREG32(mmCG_SPLL_FUNC_CNTL);
	tmp |= CG_SPLL_FUNC_CNTL__SPLL_RESET_MASK;
	WREG32(mmCG_SPLL_FUNC_CNTL, tmp);

	tmp = RREG32(mmCG_SPLL_FUNC_CNTL);
	tmp |= CG_SPLL_FUNC_CNTL__SPLL_SLEEP_MASK;
	WREG32(mmCG_SPLL_FUNC_CNTL, tmp);

	tmp = RREG32(mmSPLL_CNTL_MODE);
	tmp &= ~SPLL_CNTL_MODE__SPLL_SW_DIR_CONTROL_MASK;
	WREG32(mmSPLL_CNTL_MODE, tmp);
}

static int si_gpu_pci_config_reset(struct amdgpu_device *adev)
{
	u32 i;
	int r = -EINVAL;

	amdgpu_atombios_scratch_regs_engine_hung(adev, true);

	/* set mclk/sclk to bypass */
	si_set_clk_bypass_mode(adev);
	/* powerdown spll */
	si_spll_powerdown(adev);
	/* disable BM */
	pci_clear_master(adev->pdev);
	/* reset */
	amdgpu_device_pci_config_reset(adev);

	udelay(100);

	/* wait for asic to come out of reset */
	for (i = 0; i < adev->usec_timeout; i++) {
		if (RREG32(mmCONFIG_MEMSIZE) != 0xffffffff) {
			/* enable BM */
			pci_set_master(adev->pdev);
			adev->has_hw_reset = true;
			r = 0;
			break;
		}
		udelay(1);
	}
	amdgpu_atombios_scratch_regs_engine_hung(adev, false);

	return r;
}

static int si_asic_supports_baco(struct amdgpu_device *adev)
{
	return 0;
}

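/*
 * SI exposes no BACO support (see si_asic_supports_baco() above), so the
 * only reset paths offered here are the legacy PCI CONFIG reset and, when
 * explicitly requested, a full PCI reset.
 */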
static enum amd_reset_method
si_asic_reset_method(struct amdgpu_device *adev)
{
	if (amdgpu_reset_method == AMD_RESET_METHOD_PCI)
		return amdgpu_reset_method;
	else if (amdgpu_reset_method != AMD_RESET_METHOD_LEGACY &&
		 amdgpu_reset_method != -1)
		dev_warn(adev->dev, "Specified reset method:%d isn't supported, using AUTO instead.\n",
			 amdgpu_reset_method);

	return AMD_RESET_METHOD_LEGACY;
}

static int si_asic_reset(struct amdgpu_device *adev)
{
	int r;

	switch (si_asic_reset_method(adev)) {
	case AMD_RESET_METHOD_PCI:
		dev_info(adev->dev, "PCI reset\n");
		r = amdgpu_device_pci_reset(adev);
		break;
	default:
		dev_info(adev->dev, "PCI CONFIG reset\n");
		r = si_gpu_pci_config_reset(adev);
		break;
	}

	return r;
}

static u32 si_get_config_memsize(struct amdgpu_device *adev)
{
	return RREG32(mmCONFIG_MEMSIZE);
}

static void si_vga_set_state(struct amdgpu_device *adev, bool state)
{
	uint32_t temp;

	temp = RREG32(mmCONFIG_CNTL);
	if (!state) {
		temp &= ~(1<<0);
		temp |= (1<<1);
	} else {
		temp &= ~(1<<1);
	}
	WREG32(mmCONFIG_CNTL, temp);
}

static u32 si_get_xclk(struct amdgpu_device *adev)
{
	u32 reference_clock = adev->clock.spll.reference_freq;
	u32 tmp;

	tmp = RREG32(mmCG_CLKPIN_CNTL_2);
	if (tmp & CG_CLKPIN_CNTL_2__MUX_TCLK_TO_XCLK_MASK)
		return TCLK;

	tmp = RREG32(mmCG_CLKPIN_CNTL);
	if (tmp & CG_CLKPIN_CNTL__XTALIN_DIVIDE_MASK)
		return reference_clock / 4;

	return reference_clock;
}

static void si_flush_hdp(struct amdgpu_device *adev, struct amdgpu_ring *ring)
{
	if (!ring || !ring->funcs->emit_wreg) {
		WREG32(mmHDP_MEM_COHERENCY_FLUSH_CNTL, 1);
		RREG32(mmHDP_MEM_COHERENCY_FLUSH_CNTL);
	} else {
		amdgpu_ring_emit_wreg(ring, mmHDP_MEM_COHERENCY_FLUSH_CNTL, 1);
	}
}

static void si_invalidate_hdp(struct amdgpu_device *adev,
			      struct amdgpu_ring *ring)
{
	if (!ring || !ring->funcs->emit_wreg) {
		WREG32(mmHDP_DEBUG0, 1);
		RREG32(mmHDP_DEBUG0);
	} else {
		amdgpu_ring_emit_wreg(ring, mmHDP_DEBUG0, 1);
	}
}

static bool si_need_full_reset(struct amdgpu_device *adev)
{
	/* change this when we support soft reset */
	return true;
}

static bool si_need_reset_on_init(struct amdgpu_device *adev)
{
	return false;
}

static int si_get_pcie_lanes(struct amdgpu_device *adev)
{
	u32 link_width_cntl;

	if (adev->flags & AMD_IS_APU)
		return 0;

	link_width_cntl = RREG32_PCIE_PORT(ixPCIE_LC_LINK_WIDTH_CNTL);

	switch ((link_width_cntl & PCIE_LC_LINK_WIDTH_CNTL__LC_LINK_WIDTH_RD_MASK) >> PCIE_LC_LINK_WIDTH_CNTL__LC_LINK_WIDTH_RD__SHIFT) {
	case LC_LINK_WIDTH_X1:
		return 1;
	case LC_LINK_WIDTH_X2:
		return 2;
	case LC_LINK_WIDTH_X4:
		return 4;
	case LC_LINK_WIDTH_X8:
		return 8;
	case LC_LINK_WIDTH_X0:
	case LC_LINK_WIDTH_X16:
	default:
		return 16;
	}
}

static void si_set_pcie_lanes(struct amdgpu_device *adev, int lanes)
{
	u32 link_width_cntl, mask;

	if (adev->flags & AMD_IS_APU)
		return;

	switch (lanes) {
	case 0:
		mask = LC_LINK_WIDTH_X0;
		break;
	case 1:
		mask = LC_LINK_WIDTH_X1;
		break;
	case 2:
		mask = LC_LINK_WIDTH_X2;
		break;
	case 4:
		mask = LC_LINK_WIDTH_X4;
		break;
	case 8:
		mask = LC_LINK_WIDTH_X8;
		break;
	case 16:
		mask = LC_LINK_WIDTH_X16;
		break;
	default:
		DRM_ERROR("invalid pcie lane request: %d\n", lanes);
		return;
	}

	link_width_cntl = RREG32_PCIE_PORT(ixPCIE_LC_LINK_WIDTH_CNTL);
	link_width_cntl &= ~PCIE_LC_LINK_WIDTH_CNTL__LC_LINK_WIDTH_MASK;
	link_width_cntl |= mask << PCIE_LC_LINK_WIDTH_CNTL__LC_LINK_WIDTH__SHIFT;
	link_width_cntl |= (PCIE_LC_LINK_WIDTH_CNTL__LC_RECONFIG_NOW_MASK |
			    PCIE_LC_LINK_WIDTH_CNTL__LC_RECONFIG_ARC_MISSING_ESCAPE_MASK);

	WREG32_PCIE_PORT(ixPCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
}

static void si_get_pcie_usage(struct amdgpu_device *adev, uint64_t *count0,
			      uint64_t *count1)
{
	uint32_t perfctr = 0;
	uint64_t cnt0_of, cnt1_of;
	int tmp;

	/* This reports 0 on APUs, so return to avoid writing/reading registers
	 * that may or may not be different from their GPU counterparts
	 */
	if (adev->flags & AMD_IS_APU)
		return;

	/* Set the 2 events that we wish to watch, defined above */
	/* Reg 40 is # received msgs, Reg 104 is # of posted requests sent */
	perfctr = REG_SET_FIELD(perfctr, PCIE_PERF_CNTL_TXCLK, EVENT0_SEL, 40);
	perfctr = REG_SET_FIELD(perfctr, PCIE_PERF_CNTL_TXCLK, EVENT1_SEL, 104);

	/* Write to enable desired perf counters */
	WREG32_PCIE(ixPCIE_PERF_CNTL_TXCLK, perfctr);
	/* Zero out and enable the perf counters
	 * Write 0x5:
	 * Bit 0 = Start all counters(1)
	 * Bit 2 = Global counter reset enable(1)
	 */
	WREG32_PCIE(ixPCIE_PERF_COUNT_CNTL, 0x00000005);

	msleep(1000);

	/* Load the shadow and disable the perf counters
	 * Write 0x2:
	 * Bit 0 = Stop counters(0)
	 * Bit 1 = Load the shadow counters(1)
	 */
	WREG32_PCIE(ixPCIE_PERF_COUNT_CNTL, 0x00000002);

	/* Read register values to get any >32bit overflow */
	tmp = RREG32_PCIE(ixPCIE_PERF_CNTL_TXCLK);
	cnt0_of = REG_GET_FIELD(tmp, PCIE_PERF_CNTL_TXCLK, COUNTER0_UPPER);
	cnt1_of = REG_GET_FIELD(tmp, PCIE_PERF_CNTL_TXCLK, COUNTER1_UPPER);

	/* Get the values and add the overflow */
	*count0 = RREG32_PCIE(ixPCIE_PERF_COUNT0_TXCLK) | (cnt0_of << 32);
	*count1 = RREG32_PCIE(ixPCIE_PERF_COUNT1_TXCLK) | (cnt1_of << 32);
}

static uint64_t si_get_pcie_replay_count(struct amdgpu_device *adev)
{
	uint64_t nak_r, nak_g;

	/* Get the number of NAKs received and generated */
	nak_r = RREG32_PCIE(ixPCIE_RX_NUM_NAK);
	nak_g = RREG32_PCIE(ixPCIE_RX_NUM_NAK_GENERATED);

	/* Add the total number of NAKs, i.e the number of replays */
	return (nak_r + nak_g);
}

static int si_uvd_send_upll_ctlreq(struct amdgpu_device *adev,
				   unsigned cg_upll_func_cntl)
{
	unsigned i;

	/* Make sure UPLL_CTLREQ is deasserted */
	WREG32_P(cg_upll_func_cntl, 0, ~UPLL_CTLREQ_MASK);

	mdelay(10);

	/* Assert UPLL_CTLREQ */
	WREG32_P(cg_upll_func_cntl, UPLL_CTLREQ_MASK, ~UPLL_CTLREQ_MASK);

	/* Wait for CTLACK and CTLACK2 to get asserted */
	for (i = 0; i < SI_MAX_CTLACKS_ASSERTION_WAIT; ++i) {
		uint32_t mask = UPLL_CTLACK_MASK | UPLL_CTLACK2_MASK;

		if ((RREG32(cg_upll_func_cntl) & mask) == mask)
			break;
		mdelay(10);
	}

	/* Deassert UPLL_CTLREQ */
	WREG32_P(cg_upll_func_cntl, 0, ~UPLL_CTLREQ_MASK);

	if (i == SI_MAX_CTLACKS_ASSERTION_WAIT) {
		DRM_ERROR("Timeout setting UVD clocks!\n");
		return -ETIMEDOUT;
	}

	return 0;
}

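/*
 * UPLL divider selection: si_calc_upll_dividers() sweeps candidate VCO
 * frequencies and scores each one by how far the resulting VCLK/DCLK
 * outputs fall below the requested clocks; the helper below picks the
 * smallest post divider that still keeps the output at or under the
 * target, forcing it to an even value above pd_even.
 */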
		DRM_ERROR("Timeout setting UVD clocks!\n");
		return -ETIMEDOUT;
	}

	return 0;
}

static unsigned si_uvd_calc_upll_post_div(unsigned vco_freq,
					  unsigned target_freq,
					  unsigned pd_min,
					  unsigned pd_even)
{
	unsigned post_div = vco_freq / target_freq;

	/* Adjust to post divider minimum value */
	if (post_div < pd_min)
		post_div = pd_min;

	/* We always need a frequency less than or equal to the target */
	if ((vco_freq / post_div) > target_freq)
		post_div += 1;

	/* Post dividers above a certain value must be even */
	if (post_div > pd_even && post_div % 2)
		post_div += 1;

	return post_div;
}

/**
 * si_calc_upll_dividers - calc UPLL clock dividers
 *
 * @adev: amdgpu_device pointer
 * @vclk: wanted VCLK
 * @dclk: wanted DCLK
 * @vco_min: minimum VCO frequency
 * @vco_max: maximum VCO frequency
 * @fb_factor: factor to multiply vco freq with
 * @fb_mask: limit and bitmask for feedback divider
 * @pd_min: post divider minimum
 * @pd_max: post divider maximum
 * @pd_even: post divider must be even above this value
 * @optimal_fb_div: resulting feedback divider
 * @optimal_vclk_div: resulting vclk post divider
 * @optimal_dclk_div: resulting dclk post divider
 *
 * Calculate dividers for UVD's UPLL (except APUs).
 * Returns zero on success; -EINVAL on error.
 */
static int si_calc_upll_dividers(struct amdgpu_device *adev,
				 unsigned vclk, unsigned dclk,
				 unsigned vco_min, unsigned vco_max,
				 unsigned fb_factor, unsigned fb_mask,
				 unsigned pd_min, unsigned pd_max,
				 unsigned pd_even,
				 unsigned *optimal_fb_div,
				 unsigned *optimal_vclk_div,
				 unsigned *optimal_dclk_div)
{
	unsigned vco_freq, ref_freq = adev->clock.spll.reference_freq;

	/* Start off with something large */
	unsigned optimal_score = ~0;

	/* Loop through vco from low to high */
	vco_min = max(max(vco_min, vclk), dclk);
	for (vco_freq = vco_min; vco_freq <= vco_max; vco_freq += 100) {
		uint64_t fb_div = (uint64_t)vco_freq * fb_factor;
		unsigned vclk_div, dclk_div, score;

		do_div(fb_div, ref_freq);

		/* fb div out of range? */
		if (fb_div > fb_mask)
			break; /* It can only get worse */

		fb_div &= fb_mask;

		/* Calc vclk divider with current vco freq */
		vclk_div = si_uvd_calc_upll_post_div(vco_freq, vclk,
						     pd_min, pd_even);
		if (vclk_div > pd_max)
			break; /* vco is too big, it has to stop */

		/* Calc dclk divider with current vco freq */
		dclk_div = si_uvd_calc_upll_post_div(vco_freq, dclk,
						     pd_min, pd_even);
		if (dclk_div > pd_max)
			break; /* vco is too big, it has to stop */

		/* Calc score with current vco freq */
		score = vclk - (vco_freq / vclk_div) + dclk - (vco_freq / dclk_div);

		/* Determine if this vco setting is better than current optimal settings */
		if (score < optimal_score) {
			*optimal_fb_div = fb_div;
			*optimal_vclk_div = vclk_div;
			*optimal_dclk_div = dclk_div;
			optimal_score = score;
			if (optimal_score == 0)
				break; /* It can't get better than this */
		}
	}

	/* Did we find a valid setup?
*/ 1784 if (optimal_score == ~0) 1785 return -EINVAL; 1786 1787 return 0; 1788 } 1789 1790 static int si_set_uvd_clocks(struct amdgpu_device *adev, u32 vclk, u32 dclk) 1791 { 1792 unsigned fb_div = 0, vclk_div = 0, dclk_div = 0; 1793 int r; 1794 1795 /* Bypass vclk and dclk with bclk */ 1796 WREG32_P(CG_UPLL_FUNC_CNTL_2, 1797 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1), 1798 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK)); 1799 1800 /* Put PLL in bypass mode */ 1801 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK); 1802 1803 if (!vclk || !dclk) { 1804 /* Keep the Bypass mode */ 1805 return 0; 1806 } 1807 1808 r = si_calc_upll_dividers(adev, vclk, dclk, 125000, 250000, 1809 16384, 0x03FFFFFF, 0, 128, 5, 1810 &fb_div, &vclk_div, &dclk_div); 1811 if (r) 1812 return r; 1813 1814 /* Set RESET_ANTI_MUX to 0 */ 1815 WREG32_P(CG_UPLL_FUNC_CNTL_5, 0, ~RESET_ANTI_MUX_MASK); 1816 1817 /* Set VCO_MODE to 1 */ 1818 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK); 1819 1820 /* Disable sleep mode */ 1821 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK); 1822 1823 /* Deassert UPLL_RESET */ 1824 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK); 1825 1826 mdelay(1); 1827 1828 r = si_uvd_send_upll_ctlreq(adev, CG_UPLL_FUNC_CNTL); 1829 if (r) 1830 return r; 1831 1832 /* Assert UPLL_RESET again */ 1833 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK); 1834 1835 /* Disable spread spectrum. */ 1836 WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK); 1837 1838 /* Set feedback divider */ 1839 WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK); 1840 1841 /* Set ref divider to 0 */ 1842 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK); 1843 1844 if (fb_div < 307200) 1845 WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9); 1846 else 1847 WREG32_P(CG_UPLL_FUNC_CNTL_4, 1848 UPLL_SPARE_ISPARE9, 1849 ~UPLL_SPARE_ISPARE9); 1850 1851 /* Set PDIV_A and PDIV_B */ 1852 WREG32_P(CG_UPLL_FUNC_CNTL_2, 1853 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div), 1854 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK)); 1855 1856 /* Give the PLL some time to settle */ 1857 mdelay(15); 1858 1859 /* Deassert PLL_RESET */ 1860 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK); 1861 1862 mdelay(15); 1863 1864 /* Switch from bypass mode to normal mode */ 1865 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK); 1866 1867 r = si_uvd_send_upll_ctlreq(adev, CG_UPLL_FUNC_CNTL); 1868 if (r) 1869 return r; 1870 1871 /* Switch VCLK and DCLK selection */ 1872 WREG32_P(CG_UPLL_FUNC_CNTL_2, 1873 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2), 1874 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK)); 1875 1876 mdelay(100); 1877 1878 return 0; 1879 } 1880 1881 static int si_vce_send_vcepll_ctlreq(struct amdgpu_device *adev) 1882 { 1883 unsigned i; 1884 1885 /* Make sure VCEPLL_CTLREQ is deasserted */ 1886 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, 0, ~UPLL_CTLREQ_MASK); 1887 1888 mdelay(10); 1889 1890 /* Assert UPLL_CTLREQ */ 1891 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, UPLL_CTLREQ_MASK, ~UPLL_CTLREQ_MASK); 1892 1893 /* Wait for CTLACK and CTLACK2 to get asserted */ 1894 for (i = 0; i < SI_MAX_CTLACKS_ASSERTION_WAIT; ++i) { 1895 uint32_t mask = UPLL_CTLACK_MASK | UPLL_CTLACK2_MASK; 1896 1897 if ((RREG32_SMC(CG_VCEPLL_FUNC_CNTL) & mask) == mask) 1898 break; 1899 mdelay(10); 1900 } 1901 1902 /* Deassert UPLL_CTLREQ */ 1903 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, 0, ~UPLL_CTLREQ_MASK); 1904 1905 if (i == SI_MAX_CTLACKS_ASSERTION_WAIT) { 1906 DRM_ERROR("Timeout setting VCE clocks!\n"); 1907 return -ETIMEDOUT; 1908 } 1909 1910 return 0; 1911 } 1912 1913 
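/**
 * si_set_vce_clocks - set the VCE clocks (EVCLK/ECCLK)
 *
 * @adev: amdgpu_device pointer
 * @evclk: wanted EVCLK
 * @ecclk: wanted ECCLK
 *
 * Same flow as si_set_uvd_clocks() above: bypass the VCE PLL, compute the
 * feedback and post dividers with si_calc_upll_dividers(), program them and
 * re-lock the PLL via si_vce_send_vcepll_ctlreq().  Requesting 0 for either
 * clock keeps the PLL in bypass and puts it to sleep.
 * Returns zero on success; a negative error code otherwise.
 */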
static int si_set_vce_clocks(struct amdgpu_device *adev, u32 evclk, u32 ecclk) 1914 { 1915 unsigned fb_div = 0, evclk_div = 0, ecclk_div = 0; 1916 int r; 1917 1918 /* Bypass evclk and ecclk with bclk */ 1919 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL_2, 1920 EVCLK_SRC_SEL(1) | ECCLK_SRC_SEL(1), 1921 ~(EVCLK_SRC_SEL_MASK | ECCLK_SRC_SEL_MASK)); 1922 1923 /* Put PLL in bypass mode */ 1924 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, VCEPLL_BYPASS_EN_MASK, 1925 ~VCEPLL_BYPASS_EN_MASK); 1926 1927 if (!evclk || !ecclk) { 1928 /* Keep the Bypass mode, put PLL to sleep */ 1929 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, VCEPLL_SLEEP_MASK, 1930 ~VCEPLL_SLEEP_MASK); 1931 return 0; 1932 } 1933 1934 r = si_calc_upll_dividers(adev, evclk, ecclk, 125000, 250000, 1935 16384, 0x03FFFFFF, 0, 128, 5, 1936 &fb_div, &evclk_div, &ecclk_div); 1937 if (r) 1938 return r; 1939 1940 /* Set RESET_ANTI_MUX to 0 */ 1941 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL_5, 0, ~RESET_ANTI_MUX_MASK); 1942 1943 /* Set VCO_MODE to 1 */ 1944 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, VCEPLL_VCO_MODE_MASK, 1945 ~VCEPLL_VCO_MODE_MASK); 1946 1947 /* Toggle VCEPLL_SLEEP to 1 then back to 0 */ 1948 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, VCEPLL_SLEEP_MASK, 1949 ~VCEPLL_SLEEP_MASK); 1950 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, 0, ~VCEPLL_SLEEP_MASK); 1951 1952 /* Deassert VCEPLL_RESET */ 1953 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, 0, ~VCEPLL_RESET_MASK); 1954 1955 mdelay(1); 1956 1957 r = si_vce_send_vcepll_ctlreq(adev); 1958 if (r) 1959 return r; 1960 1961 /* Assert VCEPLL_RESET again */ 1962 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, VCEPLL_RESET_MASK, ~VCEPLL_RESET_MASK); 1963 1964 /* Disable spread spectrum. */ 1965 WREG32_SMC_P(CG_VCEPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK); 1966 1967 /* Set feedback divider */ 1968 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL_3, 1969 VCEPLL_FB_DIV(fb_div), 1970 ~VCEPLL_FB_DIV_MASK); 1971 1972 /* Set ref divider to 0 */ 1973 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, 0, ~VCEPLL_REF_DIV_MASK); 1974 1975 /* Set PDIV_A and PDIV_B */ 1976 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL_2, 1977 VCEPLL_PDIV_A(evclk_div) | VCEPLL_PDIV_B(ecclk_div), 1978 ~(VCEPLL_PDIV_A_MASK | VCEPLL_PDIV_B_MASK)); 1979 1980 /* Give the PLL some time to settle */ 1981 mdelay(15); 1982 1983 /* Deassert PLL_RESET */ 1984 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, 0, ~VCEPLL_RESET_MASK); 1985 1986 mdelay(15); 1987 1988 /* Switch from bypass mode to normal mode */ 1989 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, 0, ~VCEPLL_BYPASS_EN_MASK); 1990 1991 r = si_vce_send_vcepll_ctlreq(adev); 1992 if (r) 1993 return r; 1994 1995 /* Switch VCLK and DCLK selection */ 1996 WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL_2, 1997 EVCLK_SRC_SEL(16) | ECCLK_SRC_SEL(16), 1998 ~(EVCLK_SRC_SEL_MASK | ECCLK_SRC_SEL_MASK)); 1999 2000 mdelay(100); 2001 2002 return 0; 2003 } 2004 2005 static void si_pre_asic_init(struct amdgpu_device *adev) 2006 { 2007 } 2008 2009 static const struct amdgpu_asic_funcs si_asic_funcs = 2010 { 2011 .read_disabled_bios = &si_read_disabled_bios, 2012 .read_bios_from_rom = &si_read_bios_from_rom, 2013 .read_register = &si_read_register, 2014 .reset = &si_asic_reset, 2015 .reset_method = &si_asic_reset_method, 2016 .set_vga_state = &si_vga_set_state, 2017 .get_xclk = &si_get_xclk, 2018 .set_uvd_clocks = &si_set_uvd_clocks, 2019 .set_vce_clocks = &si_set_vce_clocks, 2020 .get_pcie_lanes = &si_get_pcie_lanes, 2021 .set_pcie_lanes = &si_set_pcie_lanes, 2022 .get_config_memsize = &si_get_config_memsize, 2023 .flush_hdp = &si_flush_hdp, 2024 .invalidate_hdp = &si_invalidate_hdp, 2025 .need_full_reset = &si_need_full_reset, 2026 .get_pcie_usage = &si_get_pcie_usage, 2027 
.need_reset_on_init = &si_need_reset_on_init, 2028 .get_pcie_replay_count = &si_get_pcie_replay_count, 2029 .supports_baco = &si_asic_supports_baco, 2030 .pre_asic_init = &si_pre_asic_init, 2031 .query_video_codecs = &si_query_video_codecs, 2032 }; 2033 2034 static uint32_t si_get_rev_id(struct amdgpu_device *adev) 2035 { 2036 return (RREG32(mmCC_DRM_ID_STRAPS) & CC_DRM_ID_STRAPS__ATI_REV_ID_MASK) 2037 >> CC_DRM_ID_STRAPS__ATI_REV_ID__SHIFT; 2038 } 2039 2040 static int si_common_early_init(struct amdgpu_ip_block *ip_block) 2041 { 2042 struct amdgpu_device *adev = ip_block->adev; 2043 2044 adev->smc_rreg = &si_smc_rreg; 2045 adev->smc_wreg = &si_smc_wreg; 2046 adev->pcie_rreg = &si_pcie_rreg; 2047 adev->pcie_wreg = &si_pcie_wreg; 2048 adev->pciep_rreg = &si_pciep_rreg; 2049 adev->pciep_wreg = &si_pciep_wreg; 2050 adev->uvd_ctx_rreg = si_uvd_ctx_rreg; 2051 adev->uvd_ctx_wreg = si_uvd_ctx_wreg; 2052 adev->didt_rreg = NULL; 2053 adev->didt_wreg = NULL; 2054 2055 adev->asic_funcs = &si_asic_funcs; 2056 2057 adev->rev_id = si_get_rev_id(adev); 2058 adev->external_rev_id = 0xFF; 2059 switch (adev->asic_type) { 2060 case CHIP_TAHITI: 2061 adev->cg_flags = 2062 AMD_CG_SUPPORT_GFX_MGCG | 2063 AMD_CG_SUPPORT_GFX_MGLS | 2064 /*AMD_CG_SUPPORT_GFX_CGCG |*/ 2065 AMD_CG_SUPPORT_GFX_CGLS | 2066 AMD_CG_SUPPORT_GFX_CGTS | 2067 AMD_CG_SUPPORT_GFX_CP_LS | 2068 AMD_CG_SUPPORT_MC_MGCG | 2069 AMD_CG_SUPPORT_SDMA_MGCG | 2070 AMD_CG_SUPPORT_BIF_LS | 2071 AMD_CG_SUPPORT_VCE_MGCG | 2072 AMD_CG_SUPPORT_UVD_MGCG | 2073 AMD_CG_SUPPORT_HDP_LS | 2074 AMD_CG_SUPPORT_HDP_MGCG; 2075 adev->pg_flags = 0; 2076 adev->external_rev_id = (adev->rev_id == 0) ? 1 : 2077 (adev->rev_id == 1) ? 5 : 6; 2078 break; 2079 case CHIP_PITCAIRN: 2080 adev->cg_flags = 2081 AMD_CG_SUPPORT_GFX_MGCG | 2082 AMD_CG_SUPPORT_GFX_MGLS | 2083 /*AMD_CG_SUPPORT_GFX_CGCG |*/ 2084 AMD_CG_SUPPORT_GFX_CGLS | 2085 AMD_CG_SUPPORT_GFX_CGTS | 2086 AMD_CG_SUPPORT_GFX_CP_LS | 2087 AMD_CG_SUPPORT_GFX_RLC_LS | 2088 AMD_CG_SUPPORT_MC_LS | 2089 AMD_CG_SUPPORT_MC_MGCG | 2090 AMD_CG_SUPPORT_SDMA_MGCG | 2091 AMD_CG_SUPPORT_BIF_LS | 2092 AMD_CG_SUPPORT_VCE_MGCG | 2093 AMD_CG_SUPPORT_UVD_MGCG | 2094 AMD_CG_SUPPORT_HDP_LS | 2095 AMD_CG_SUPPORT_HDP_MGCG; 2096 adev->pg_flags = 0; 2097 adev->external_rev_id = adev->rev_id + 20; 2098 break; 2099 2100 case CHIP_VERDE: 2101 adev->cg_flags = 2102 AMD_CG_SUPPORT_GFX_MGCG | 2103 AMD_CG_SUPPORT_GFX_MGLS | 2104 AMD_CG_SUPPORT_GFX_CGLS | 2105 AMD_CG_SUPPORT_GFX_CGTS | 2106 AMD_CG_SUPPORT_GFX_CGTS_LS | 2107 AMD_CG_SUPPORT_GFX_CP_LS | 2108 AMD_CG_SUPPORT_MC_LS | 2109 AMD_CG_SUPPORT_MC_MGCG | 2110 AMD_CG_SUPPORT_SDMA_MGCG | 2111 AMD_CG_SUPPORT_SDMA_LS | 2112 AMD_CG_SUPPORT_BIF_LS | 2113 AMD_CG_SUPPORT_VCE_MGCG | 2114 AMD_CG_SUPPORT_UVD_MGCG | 2115 AMD_CG_SUPPORT_HDP_LS | 2116 AMD_CG_SUPPORT_HDP_MGCG; 2117 adev->pg_flags = 0; 2118 //??? 
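		/* external_rev_id is the revision reported to userspace; Verde
		 * parts start at 40, in line with the fixed offsets used for the
		 * other SI variants in this switch.
		 */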
2119 adev->external_rev_id = adev->rev_id + 40; 2120 break; 2121 case CHIP_OLAND: 2122 adev->cg_flags = 2123 AMD_CG_SUPPORT_GFX_MGCG | 2124 AMD_CG_SUPPORT_GFX_MGLS | 2125 /*AMD_CG_SUPPORT_GFX_CGCG |*/ 2126 AMD_CG_SUPPORT_GFX_CGLS | 2127 AMD_CG_SUPPORT_GFX_CGTS | 2128 AMD_CG_SUPPORT_GFX_CP_LS | 2129 AMD_CG_SUPPORT_GFX_RLC_LS | 2130 AMD_CG_SUPPORT_MC_LS | 2131 AMD_CG_SUPPORT_MC_MGCG | 2132 AMD_CG_SUPPORT_SDMA_MGCG | 2133 AMD_CG_SUPPORT_BIF_LS | 2134 AMD_CG_SUPPORT_UVD_MGCG | 2135 AMD_CG_SUPPORT_HDP_LS | 2136 AMD_CG_SUPPORT_HDP_MGCG; 2137 adev->pg_flags = 0; 2138 adev->external_rev_id = 60; 2139 break; 2140 case CHIP_HAINAN: 2141 adev->cg_flags = 2142 AMD_CG_SUPPORT_GFX_MGCG | 2143 AMD_CG_SUPPORT_GFX_MGLS | 2144 /*AMD_CG_SUPPORT_GFX_CGCG |*/ 2145 AMD_CG_SUPPORT_GFX_CGLS | 2146 AMD_CG_SUPPORT_GFX_CGTS | 2147 AMD_CG_SUPPORT_GFX_CP_LS | 2148 AMD_CG_SUPPORT_GFX_RLC_LS | 2149 AMD_CG_SUPPORT_MC_LS | 2150 AMD_CG_SUPPORT_MC_MGCG | 2151 AMD_CG_SUPPORT_SDMA_MGCG | 2152 AMD_CG_SUPPORT_BIF_LS | 2153 AMD_CG_SUPPORT_HDP_LS | 2154 AMD_CG_SUPPORT_HDP_MGCG; 2155 adev->pg_flags = 0; 2156 adev->external_rev_id = 70; 2157 break; 2158 2159 default: 2160 return -EINVAL; 2161 } 2162 2163 return 0; 2164 } 2165 2166 static void si_init_golden_registers(struct amdgpu_device *adev) 2167 { 2168 switch (adev->asic_type) { 2169 case CHIP_TAHITI: 2170 amdgpu_device_program_register_sequence(adev, 2171 tahiti_golden_registers, 2172 ARRAY_SIZE(tahiti_golden_registers)); 2173 amdgpu_device_program_register_sequence(adev, 2174 tahiti_golden_rlc_registers, 2175 ARRAY_SIZE(tahiti_golden_rlc_registers)); 2176 amdgpu_device_program_register_sequence(adev, 2177 tahiti_mgcg_cgcg_init, 2178 ARRAY_SIZE(tahiti_mgcg_cgcg_init)); 2179 amdgpu_device_program_register_sequence(adev, 2180 tahiti_golden_registers2, 2181 ARRAY_SIZE(tahiti_golden_registers2)); 2182 break; 2183 case CHIP_PITCAIRN: 2184 amdgpu_device_program_register_sequence(adev, 2185 pitcairn_golden_registers, 2186 ARRAY_SIZE(pitcairn_golden_registers)); 2187 amdgpu_device_program_register_sequence(adev, 2188 pitcairn_golden_rlc_registers, 2189 ARRAY_SIZE(pitcairn_golden_rlc_registers)); 2190 amdgpu_device_program_register_sequence(adev, 2191 pitcairn_mgcg_cgcg_init, 2192 ARRAY_SIZE(pitcairn_mgcg_cgcg_init)); 2193 break; 2194 case CHIP_VERDE: 2195 amdgpu_device_program_register_sequence(adev, 2196 verde_golden_registers, 2197 ARRAY_SIZE(verde_golden_registers)); 2198 amdgpu_device_program_register_sequence(adev, 2199 verde_golden_rlc_registers, 2200 ARRAY_SIZE(verde_golden_rlc_registers)); 2201 amdgpu_device_program_register_sequence(adev, 2202 verde_mgcg_cgcg_init, 2203 ARRAY_SIZE(verde_mgcg_cgcg_init)); 2204 amdgpu_device_program_register_sequence(adev, 2205 verde_pg_init, 2206 ARRAY_SIZE(verde_pg_init)); 2207 break; 2208 case CHIP_OLAND: 2209 amdgpu_device_program_register_sequence(adev, 2210 oland_golden_registers, 2211 ARRAY_SIZE(oland_golden_registers)); 2212 amdgpu_device_program_register_sequence(adev, 2213 oland_golden_rlc_registers, 2214 ARRAY_SIZE(oland_golden_rlc_registers)); 2215 amdgpu_device_program_register_sequence(adev, 2216 oland_mgcg_cgcg_init, 2217 ARRAY_SIZE(oland_mgcg_cgcg_init)); 2218 break; 2219 case CHIP_HAINAN: 2220 amdgpu_device_program_register_sequence(adev, 2221 hainan_golden_registers, 2222 ARRAY_SIZE(hainan_golden_registers)); 2223 amdgpu_device_program_register_sequence(adev, 2224 hainan_golden_registers2, 2225 ARRAY_SIZE(hainan_golden_registers2)); 2226 amdgpu_device_program_register_sequence(adev, 2227 hainan_mgcg_cgcg_init, 2228 
ARRAY_SIZE(hainan_mgcg_cgcg_init)); 2229 break; 2230 2231 2232 default: 2233 BUG(); 2234 } 2235 } 2236 2237 static void si_pcie_gen3_enable(struct amdgpu_device *adev) 2238 { 2239 struct pci_dev *root = adev->pdev->bus->self; 2240 u32 speed_cntl, current_data_rate; 2241 int i; 2242 u16 tmp16; 2243 2244 if (pci_is_root_bus(adev->pdev->bus)) 2245 return; 2246 2247 if (amdgpu_pcie_gen2 == 0) 2248 return; 2249 2250 if (adev->flags & AMD_IS_APU) 2251 return; 2252 2253 if (!(adev->pm.pcie_gen_mask & (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2 | 2254 CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3))) 2255 return; 2256 2257 speed_cntl = RREG32_PCIE_PORT(ixPCIE_LC_SPEED_CNTL); 2258 current_data_rate = (speed_cntl & PCIE_LC_SPEED_CNTL__LC_CURRENT_DATA_RATE_MASK) >> 2259 PCIE_LC_SPEED_CNTL__LC_CURRENT_DATA_RATE__SHIFT; 2260 if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3) { 2261 if (current_data_rate == 2) { 2262 DRM_INFO("PCIE gen 3 link speeds already enabled\n"); 2263 return; 2264 } 2265 DRM_INFO("enabling PCIE gen 3 link speeds, disable with amdgpu.pcie_gen2=0\n"); 2266 } else if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2) { 2267 if (current_data_rate == 1) { 2268 DRM_INFO("PCIE gen 2 link speeds already enabled\n"); 2269 return; 2270 } 2271 DRM_INFO("enabling PCIE gen 2 link speeds, disable with amdgpu.pcie_gen2=0\n"); 2272 } 2273 2274 if (!pci_is_pcie(root) || !pci_is_pcie(adev->pdev)) 2275 return; 2276 2277 if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3) { 2278 if (current_data_rate != 2) { 2279 u16 bridge_cfg, gpu_cfg; 2280 u16 bridge_cfg2, gpu_cfg2; 2281 u32 max_lw, current_lw, tmp; 2282 2283 pcie_capability_set_word(root, PCI_EXP_LNKCTL, PCI_EXP_LNKCTL_HAWD); 2284 pcie_capability_set_word(adev->pdev, PCI_EXP_LNKCTL, PCI_EXP_LNKCTL_HAWD); 2285 2286 tmp = RREG32_PCIE(ixPCIE_LC_STATUS1); 2287 max_lw = (tmp & PCIE_LC_STATUS1__LC_DETECTED_LINK_WIDTH_MASK) >> PCIE_LC_STATUS1__LC_DETECTED_LINK_WIDTH__SHIFT; 2288 current_lw = (tmp & PCIE_LC_STATUS1__LC_OPERATING_LINK_WIDTH_MASK) >> PCIE_LC_STATUS1__LC_OPERATING_LINK_WIDTH__SHIFT; 2289 2290 if (current_lw < max_lw) { 2291 tmp = RREG32_PCIE_PORT(ixPCIE_LC_LINK_WIDTH_CNTL); 2292 if (tmp & PCIE_LC_LINK_WIDTH_CNTL__LC_RENEGOTIATION_SUPPORT_MASK) { 2293 tmp &= ~(PCIE_LC_LINK_WIDTH_CNTL__LC_LINK_WIDTH_MASK | PCIE_LC_LINK_WIDTH_CNTL__LC_UPCONFIGURE_DIS_MASK); 2294 tmp |= (max_lw << PCIE_LC_LINK_WIDTH_CNTL__LC_LINK_WIDTH__SHIFT); 2295 tmp |= PCIE_LC_LINK_WIDTH_CNTL__LC_UPCONFIGURE_SUPPORT_MASK | PCIE_LC_LINK_WIDTH_CNTL__LC_RENEGOTIATE_EN_MASK | PCIE_LC_LINK_WIDTH_CNTL__LC_RECONFIG_NOW_MASK; 2296 WREG32_PCIE_PORT(ixPCIE_LC_LINK_WIDTH_CNTL, tmp); 2297 } 2298 } 2299 2300 for (i = 0; i < 10; i++) { 2301 pcie_capability_read_word(adev->pdev, 2302 PCI_EXP_DEVSTA, 2303 &tmp16); 2304 if (tmp16 & PCI_EXP_DEVSTA_TRPND) 2305 break; 2306 2307 pcie_capability_read_word(root, PCI_EXP_LNKCTL, 2308 &bridge_cfg); 2309 pcie_capability_read_word(adev->pdev, 2310 PCI_EXP_LNKCTL, 2311 &gpu_cfg); 2312 2313 pcie_capability_read_word(root, PCI_EXP_LNKCTL2, 2314 &bridge_cfg2); 2315 pcie_capability_read_word(adev->pdev, 2316 PCI_EXP_LNKCTL2, 2317 &gpu_cfg2); 2318 2319 tmp = RREG32_PCIE_PORT(ixPCIE_LC_CNTL4); 2320 tmp |= PCIE_LC_CNTL4__LC_SET_QUIESCE_MASK; 2321 WREG32_PCIE_PORT(ixPCIE_LC_CNTL4, tmp); 2322 2323 tmp = RREG32_PCIE_PORT(ixPCIE_LC_CNTL4); 2324 tmp |= PCIE_LC_CNTL4__LC_REDO_EQ_MASK; 2325 WREG32_PCIE_PORT(ixPCIE_LC_CNTL4, tmp); 2326 2327 mdelay(100); 2328 2329 pcie_capability_clear_and_set_word(root, PCI_EXP_LNKCTL, 2330 PCI_EXP_LNKCTL_HAWD, 2331 
bridge_cfg & 2332 PCI_EXP_LNKCTL_HAWD); 2333 pcie_capability_clear_and_set_word(adev->pdev, PCI_EXP_LNKCTL, 2334 PCI_EXP_LNKCTL_HAWD, 2335 gpu_cfg & 2336 PCI_EXP_LNKCTL_HAWD); 2337 2338 pcie_capability_clear_and_set_word(root, PCI_EXP_LNKCTL2, 2339 PCI_EXP_LNKCTL2_ENTER_COMP | 2340 PCI_EXP_LNKCTL2_TX_MARGIN, 2341 bridge_cfg2 & 2342 (PCI_EXP_LNKCTL2_ENTER_COMP | 2343 PCI_EXP_LNKCTL2_TX_MARGIN)); 2344 pcie_capability_clear_and_set_word(adev->pdev, PCI_EXP_LNKCTL2, 2345 PCI_EXP_LNKCTL2_ENTER_COMP | 2346 PCI_EXP_LNKCTL2_TX_MARGIN, 2347 gpu_cfg2 & 2348 (PCI_EXP_LNKCTL2_ENTER_COMP | 2349 PCI_EXP_LNKCTL2_TX_MARGIN)); 2350 2351 tmp = RREG32_PCIE_PORT(ixPCIE_LC_CNTL4); 2352 tmp &= ~PCIE_LC_CNTL4__LC_SET_QUIESCE_MASK; 2353 WREG32_PCIE_PORT(ixPCIE_LC_CNTL4, tmp); 2354 } 2355 } 2356 } 2357 2358 speed_cntl |= PCIE_LC_SPEED_CNTL__LC_FORCE_EN_SW_SPEED_CHANGE_MASK | PCIE_LC_SPEED_CNTL__LC_FORCE_DIS_HW_SPEED_CHANGE_MASK; 2359 speed_cntl &= ~PCIE_LC_SPEED_CNTL__LC_FORCE_DIS_SW_SPEED_CHANGE_MASK; 2360 WREG32_PCIE_PORT(ixPCIE_LC_SPEED_CNTL, speed_cntl); 2361 2362 tmp16 = 0; 2363 if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3) 2364 tmp16 |= PCI_EXP_LNKCTL2_TLS_8_0GT; /* gen3 */ 2365 else if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2) 2366 tmp16 |= PCI_EXP_LNKCTL2_TLS_5_0GT; /* gen2 */ 2367 else 2368 tmp16 |= PCI_EXP_LNKCTL2_TLS_2_5GT; /* gen1 */ 2369 pcie_capability_clear_and_set_word(adev->pdev, PCI_EXP_LNKCTL2, 2370 PCI_EXP_LNKCTL2_TLS, tmp16); 2371 2372 speed_cntl = RREG32_PCIE_PORT(ixPCIE_LC_SPEED_CNTL); 2373 speed_cntl |= PCIE_LC_SPEED_CNTL__LC_INITIATE_LINK_SPEED_CHANGE_MASK; 2374 WREG32_PCIE_PORT(ixPCIE_LC_SPEED_CNTL, speed_cntl); 2375 2376 for (i = 0; i < adev->usec_timeout; i++) { 2377 speed_cntl = RREG32_PCIE_PORT(ixPCIE_LC_SPEED_CNTL); 2378 if ((speed_cntl & PCIE_LC_SPEED_CNTL__LC_INITIATE_LINK_SPEED_CHANGE_MASK) == 0) 2379 break; 2380 udelay(1); 2381 } 2382 } 2383 2384 static inline u32 si_pif_phy0_rreg(struct amdgpu_device *adev, u32 reg) 2385 { 2386 unsigned long flags; 2387 u32 r; 2388 2389 spin_lock_irqsave(&adev->pcie_idx_lock, flags); 2390 WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff)); 2391 r = RREG32(EVERGREEN_PIF_PHY0_DATA); 2392 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); 2393 return r; 2394 } 2395 2396 static inline void si_pif_phy0_wreg(struct amdgpu_device *adev, u32 reg, u32 v) 2397 { 2398 unsigned long flags; 2399 2400 spin_lock_irqsave(&adev->pcie_idx_lock, flags); 2401 WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff)); 2402 WREG32(EVERGREEN_PIF_PHY0_DATA, (v)); 2403 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); 2404 } 2405 2406 static inline u32 si_pif_phy1_rreg(struct amdgpu_device *adev, u32 reg) 2407 { 2408 unsigned long flags; 2409 u32 r; 2410 2411 spin_lock_irqsave(&adev->pcie_idx_lock, flags); 2412 WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff)); 2413 r = RREG32(EVERGREEN_PIF_PHY1_DATA); 2414 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); 2415 return r; 2416 } 2417 2418 static inline void si_pif_phy1_wreg(struct amdgpu_device *adev, u32 reg, u32 v) 2419 { 2420 unsigned long flags; 2421 2422 spin_lock_irqsave(&adev->pcie_idx_lock, flags); 2423 WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff)); 2424 WREG32(EVERGREEN_PIF_PHY1_DATA, (v)); 2425 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); 2426 } 2427 static void si_program_aspm(struct amdgpu_device *adev) 2428 { 2429 u32 data, orig; 2430 bool disable_l0s = false, disable_l1 = false, disable_plloff_in_l1 = false; 2431 bool disable_clkreq = false; 2432 2433 if 
(!amdgpu_device_should_use_aspm(adev)) 2434 return; 2435 2436 orig = data = RREG32_PCIE_PORT(ixPCIE_LC_N_FTS_CNTL); 2437 data &= ~PCIE_LC_N_FTS_CNTL__LC_XMIT_N_FTS_MASK; 2438 data |= (0x24 << PCIE_LC_N_FTS_CNTL__LC_XMIT_N_FTS__SHIFT) | PCIE_LC_N_FTS_CNTL__LC_XMIT_N_FTS_OVERRIDE_EN_MASK; 2439 if (orig != data) 2440 WREG32_PCIE_PORT(ixPCIE_LC_N_FTS_CNTL, data); 2441 2442 orig = data = RREG32_PCIE_PORT(ixPCIE_LC_CNTL3); 2443 data |= PCIE_LC_CNTL3__LC_GO_TO_RECOVERY_MASK; 2444 if (orig != data) 2445 WREG32_PCIE_PORT(ixPCIE_LC_CNTL3, data); 2446 2447 orig = data = RREG32_PCIE(ixPCIE_P_CNTL); 2448 data |= PCIE_P_CNTL__P_IGNORE_EDB_ERR_MASK; 2449 if (orig != data) 2450 WREG32_PCIE(ixPCIE_P_CNTL, data); 2451 2452 orig = data = RREG32_PCIE_PORT(ixPCIE_LC_CNTL); 2453 data &= ~(PCIE_LC_CNTL__LC_L0S_INACTIVITY_MASK | PCIE_LC_CNTL__LC_L1_INACTIVITY_MASK); 2454 data |= PCIE_LC_CNTL__LC_PMI_TO_L1_DIS_MASK; 2455 if (!disable_l0s) 2456 data |= (7 << PCIE_LC_CNTL__LC_L0S_INACTIVITY__SHIFT); 2457 2458 if (!disable_l1) { 2459 data |= (7 << PCIE_LC_CNTL__LC_L1_INACTIVITY__SHIFT); 2460 data &= ~PCIE_LC_CNTL__LC_PMI_TO_L1_DIS_MASK; 2461 if (orig != data) 2462 WREG32_PCIE_PORT(ixPCIE_LC_CNTL, data); 2463 2464 if (!disable_plloff_in_l1) { 2465 bool clk_req_support; 2466 2467 orig = data = si_pif_phy0_rreg(adev,ixPB0_PIF_PWRDOWN_0); 2468 data &= ~(PB0_PIF_PWRDOWN_0__PLL_POWER_STATE_IN_OFF_0_MASK | PB0_PIF_PWRDOWN_0__PLL_POWER_STATE_IN_TXS2_0_MASK); 2469 data |= (7 << PB0_PIF_PWRDOWN_0__PLL_POWER_STATE_IN_OFF_0__SHIFT) | (7 << PB0_PIF_PWRDOWN_0__PLL_POWER_STATE_IN_TXS2_0__SHIFT); 2470 if (orig != data) 2471 si_pif_phy0_wreg(adev,ixPB0_PIF_PWRDOWN_0, data); 2472 2473 orig = data = si_pif_phy0_rreg(adev,ixPB0_PIF_PWRDOWN_1); 2474 data &= ~(PB0_PIF_PWRDOWN_1__PLL_POWER_STATE_IN_OFF_1_MASK | PB0_PIF_PWRDOWN_1__PLL_POWER_STATE_IN_TXS2_1_MASK); 2475 data |= (7 << PB0_PIF_PWRDOWN_1__PLL_POWER_STATE_IN_OFF_1__SHIFT) | (7 << PB0_PIF_PWRDOWN_1__PLL_POWER_STATE_IN_TXS2_1__SHIFT); 2476 if (orig != data) 2477 si_pif_phy0_wreg(adev,ixPB0_PIF_PWRDOWN_1, data); 2478 2479 orig = data = si_pif_phy1_rreg(adev,ixPB1_PIF_PWRDOWN_0); 2480 data &= ~(PB1_PIF_PWRDOWN_0__PLL_POWER_STATE_IN_OFF_0_MASK | PB1_PIF_PWRDOWN_0__PLL_POWER_STATE_IN_TXS2_0_MASK); 2481 data |= (7 << PB1_PIF_PWRDOWN_0__PLL_POWER_STATE_IN_OFF_0__SHIFT) | (7 << PB1_PIF_PWRDOWN_0__PLL_POWER_STATE_IN_TXS2_0__SHIFT); 2482 if (orig != data) 2483 si_pif_phy1_wreg(adev,ixPB1_PIF_PWRDOWN_0, data); 2484 2485 orig = data = si_pif_phy1_rreg(adev,ixPB1_PIF_PWRDOWN_1); 2486 data &= ~(PB1_PIF_PWRDOWN_1__PLL_POWER_STATE_IN_OFF_1_MASK | PB1_PIF_PWRDOWN_1__PLL_POWER_STATE_IN_TXS2_1_MASK); 2487 data |= (7 << PB1_PIF_PWRDOWN_1__PLL_POWER_STATE_IN_OFF_1__SHIFT) | (7 << PB1_PIF_PWRDOWN_1__PLL_POWER_STATE_IN_TXS2_1__SHIFT); 2488 if (orig != data) 2489 si_pif_phy1_wreg(adev,ixPB1_PIF_PWRDOWN_1, data); 2490 2491 if ((adev->asic_type != CHIP_OLAND) && (adev->asic_type != CHIP_HAINAN)) { 2492 orig = data = si_pif_phy0_rreg(adev,ixPB0_PIF_PWRDOWN_0); 2493 data &= ~PB0_PIF_PWRDOWN_0__PLL_RAMP_UP_TIME_0_MASK; 2494 if (orig != data) 2495 si_pif_phy0_wreg(adev,ixPB0_PIF_PWRDOWN_0, data); 2496 2497 orig = data = si_pif_phy0_rreg(adev,ixPB0_PIF_PWRDOWN_1); 2498 data &= ~PB0_PIF_PWRDOWN_1__PLL_RAMP_UP_TIME_1_MASK; 2499 if (orig != data) 2500 si_pif_phy0_wreg(adev,ixPB0_PIF_PWRDOWN_1, data); 2501 2502 orig = data = si_pif_phy0_rreg(adev,ixPB0_PIF_PWRDOWN_2); 2503 data &= ~PB0_PIF_PWRDOWN_2__PLL_RAMP_UP_TIME_2_MASK; 2504 if (orig != data) 2505 si_pif_phy0_wreg(adev,ixPB0_PIF_PWRDOWN_2, data); 2506 2507 
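				/* Same treatment for the remaining pads: clear the
				 * PLL ramp-up time fields on PB0 pad 3 and on all
				 * four PB1 pads.
				 */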
orig = data = si_pif_phy0_rreg(adev,ixPB0_PIF_PWRDOWN_3); 2508 data &= ~PB0_PIF_PWRDOWN_3__PLL_RAMP_UP_TIME_3_MASK; 2509 if (orig != data) 2510 si_pif_phy0_wreg(adev,ixPB0_PIF_PWRDOWN_3, data); 2511 2512 orig = data = si_pif_phy1_rreg(adev,ixPB1_PIF_PWRDOWN_0); 2513 data &= ~PB1_PIF_PWRDOWN_0__PLL_RAMP_UP_TIME_0_MASK; 2514 if (orig != data) 2515 si_pif_phy1_wreg(adev,ixPB1_PIF_PWRDOWN_0, data); 2516 2517 orig = data = si_pif_phy1_rreg(adev,ixPB1_PIF_PWRDOWN_1); 2518 data &= ~PB1_PIF_PWRDOWN_1__PLL_RAMP_UP_TIME_1_MASK; 2519 if (orig != data) 2520 si_pif_phy1_wreg(adev,ixPB1_PIF_PWRDOWN_1, data); 2521 2522 orig = data = si_pif_phy1_rreg(adev,ixPB1_PIF_PWRDOWN_2); 2523 data &= ~PB1_PIF_PWRDOWN_2__PLL_RAMP_UP_TIME_2_MASK; 2524 if (orig != data) 2525 si_pif_phy1_wreg(adev,ixPB1_PIF_PWRDOWN_2, data); 2526 2527 orig = data = si_pif_phy1_rreg(adev,ixPB1_PIF_PWRDOWN_3); 2528 data &= ~PB1_PIF_PWRDOWN_3__PLL_RAMP_UP_TIME_3_MASK; 2529 if (orig != data) 2530 si_pif_phy1_wreg(adev,ixPB1_PIF_PWRDOWN_3, data); 2531 } 2532 orig = data = RREG32_PCIE_PORT(ixPCIE_LC_LINK_WIDTH_CNTL); 2533 data &= ~PCIE_LC_LINK_WIDTH_CNTL__LC_DYN_LANES_PWR_STATE_MASK; 2534 data |= (3 << PCIE_LC_LINK_WIDTH_CNTL__LC_DYN_LANES_PWR_STATE__SHIFT); 2535 if (orig != data) 2536 WREG32_PCIE_PORT(ixPCIE_LC_LINK_WIDTH_CNTL, data); 2537 2538 orig = data = si_pif_phy0_rreg(adev,ixPB0_PIF_CNTL); 2539 data &= ~PB0_PIF_CNTL__LS2_EXIT_TIME_MASK; 2540 if ((adev->asic_type == CHIP_OLAND) || (adev->asic_type == CHIP_HAINAN)) 2541 data |= (5 << PB0_PIF_CNTL__LS2_EXIT_TIME__SHIFT); 2542 if (orig != data) 2543 si_pif_phy0_wreg(adev,ixPB0_PIF_CNTL, data); 2544 2545 orig = data = si_pif_phy1_rreg(adev,ixPB1_PIF_CNTL); 2546 data &= ~PB1_PIF_CNTL__LS2_EXIT_TIME_MASK; 2547 if ((adev->asic_type == CHIP_OLAND) || (adev->asic_type == CHIP_HAINAN)) 2548 data |= (5 << PB1_PIF_CNTL__LS2_EXIT_TIME__SHIFT); 2549 if (orig != data) 2550 si_pif_phy1_wreg(adev,ixPB1_PIF_CNTL, data); 2551 2552 if (!disable_clkreq && 2553 !pci_is_root_bus(adev->pdev->bus)) { 2554 struct pci_dev *root = adev->pdev->bus->self; 2555 u32 lnkcap; 2556 2557 clk_req_support = false; 2558 pcie_capability_read_dword(root, PCI_EXP_LNKCAP, &lnkcap); 2559 if (lnkcap & PCI_EXP_LNKCAP_CLKPM) 2560 clk_req_support = true; 2561 } else { 2562 clk_req_support = false; 2563 } 2564 2565 if (clk_req_support) { 2566 orig = data = RREG32_PCIE_PORT(ixPCIE_LC_CNTL2); 2567 data |= PCIE_LC_CNTL2__LC_ALLOW_PDWN_IN_L1_MASK | PCIE_LC_CNTL2__LC_ALLOW_PDWN_IN_L23_MASK; 2568 if (orig != data) 2569 WREG32_PCIE_PORT(ixPCIE_LC_CNTL2, data); 2570 2571 orig = data = RREG32(mmTHM_CLK_CNTL); 2572 data &= ~(THM_CLK_CNTL__CMON_CLK_SEL_MASK | THM_CLK_CNTL__TMON_CLK_SEL_MASK); 2573 data |= (1 << THM_CLK_CNTL__CMON_CLK_SEL__SHIFT) | (1 << THM_CLK_CNTL__TMON_CLK_SEL__SHIFT); 2574 if (orig != data) 2575 WREG32(mmTHM_CLK_CNTL, data); 2576 2577 orig = data = RREG32(mmMISC_CLK_CNTL); 2578 data &= ~(MISC_CLK_CNTL__DEEP_SLEEP_CLK_SEL_MASK | MISC_CLK_CNTL__ZCLK_SEL_MASK); 2579 data |= (1 << MISC_CLK_CNTL__DEEP_SLEEP_CLK_SEL__SHIFT) | (1 << MISC_CLK_CNTL__ZCLK_SEL__SHIFT); 2580 if (orig != data) 2581 WREG32(mmMISC_CLK_CNTL, data); 2582 2583 orig = data = RREG32(mmCG_CLKPIN_CNTL); 2584 data &= ~CG_CLKPIN_CNTL__BCLK_AS_XCLK_MASK; 2585 if (orig != data) 2586 WREG32(mmCG_CLKPIN_CNTL, data); 2587 2588 orig = data = RREG32(mmCG_CLKPIN_CNTL_2); 2589 data &= ~CG_CLKPIN_CNTL_2__FORCE_BIF_REFCLK_EN_MASK; 2590 if (orig != data) 2591 WREG32(mmCG_CLKPIN_CNTL_2, data); 2592 2593 orig = data = RREG32(mmMPLL_BYPASSCLK_SEL); 2594 data &= 
~MPLL_BYPASSCLK_SEL__MPLL_CLKOUT_SEL_MASK; 2595 data |= 4 << MPLL_BYPASSCLK_SEL__MPLL_CLKOUT_SEL__SHIFT; 2596 if (orig != data) 2597 WREG32(mmMPLL_BYPASSCLK_SEL, data); 2598 2599 orig = data = RREG32(mmSPLL_CNTL_MODE); 2600 data &= ~SPLL_CNTL_MODE__SPLL_REFCLK_SEL_MASK; 2601 if (orig != data) 2602 WREG32(mmSPLL_CNTL_MODE, data); 2603 } 2604 } 2605 } else { 2606 if (orig != data) 2607 WREG32_PCIE_PORT(ixPCIE_LC_CNTL, data); 2608 } 2609 2610 orig = data = RREG32_PCIE(ixPCIE_CNTL2); 2611 data |= PCIE_CNTL2__SLV_MEM_LS_EN_MASK | PCIE_CNTL2__MST_MEM_LS_EN_MASK | PCIE_CNTL2__REPLAY_MEM_LS_EN_MASK; 2612 if (orig != data) 2613 WREG32_PCIE(ixPCIE_CNTL2, data); 2614 2615 if (!disable_l0s) { 2616 data = RREG32_PCIE_PORT(ixPCIE_LC_N_FTS_CNTL); 2617 if((data & PCIE_LC_N_FTS_CNTL__LC_N_FTS_MASK) == PCIE_LC_N_FTS_CNTL__LC_N_FTS_MASK) { 2618 data = RREG32_PCIE(ixPCIE_LC_STATUS1); 2619 if ((data & PCIE_LC_STATUS1__LC_REVERSE_XMIT_MASK) && (data & PCIE_LC_STATUS1__LC_REVERSE_RCVR_MASK)) { 2620 orig = data = RREG32_PCIE_PORT(ixPCIE_LC_CNTL); 2621 data &= ~PCIE_LC_CNTL__LC_L0S_INACTIVITY_MASK; 2622 if (orig != data) 2623 WREG32_PCIE_PORT(ixPCIE_LC_CNTL, data); 2624 } 2625 } 2626 } 2627 } 2628 2629 static void si_fix_pci_max_read_req_size(struct amdgpu_device *adev) 2630 { 2631 int readrq; 2632 u16 v; 2633 2634 readrq = pcie_get_readrq(adev->pdev); 2635 v = ffs(readrq) - 8; 2636 if ((v == 0) || (v == 6) || (v == 7)) 2637 pcie_set_readrq(adev->pdev, 512); 2638 } 2639 2640 static int si_common_hw_init(struct amdgpu_ip_block *ip_block) 2641 { 2642 struct amdgpu_device *adev = ip_block->adev; 2643 2644 si_fix_pci_max_read_req_size(adev); 2645 si_init_golden_registers(adev); 2646 si_pcie_gen3_enable(adev); 2647 si_program_aspm(adev); 2648 2649 return 0; 2650 } 2651 2652 static int si_common_hw_fini(struct amdgpu_ip_block *ip_block) 2653 { 2654 return 0; 2655 } 2656 2657 static int si_common_resume(struct amdgpu_ip_block *ip_block) 2658 { 2659 return si_common_hw_init(ip_block); 2660 } 2661 2662 static bool si_common_is_idle(struct amdgpu_ip_block *ip_block) 2663 { 2664 return true; 2665 } 2666 2667 static int si_common_set_clockgating_state(struct amdgpu_ip_block *ip_block, 2668 enum amd_clockgating_state state) 2669 { 2670 return 0; 2671 } 2672 2673 static int si_common_set_powergating_state(struct amdgpu_ip_block *ip_block, 2674 enum amd_powergating_state state) 2675 { 2676 return 0; 2677 } 2678 2679 static const struct amd_ip_funcs si_common_ip_funcs = { 2680 .name = "si_common", 2681 .early_init = si_common_early_init, 2682 .hw_init = si_common_hw_init, 2683 .hw_fini = si_common_hw_fini, 2684 .resume = si_common_resume, 2685 .is_idle = si_common_is_idle, 2686 .set_clockgating_state = si_common_set_clockgating_state, 2687 .set_powergating_state = si_common_set_powergating_state, 2688 }; 2689 2690 static const struct amdgpu_ip_block_version si_common_ip_block = 2691 { 2692 .type = AMD_IP_BLOCK_TYPE_COMMON, 2693 .major = 1, 2694 .minor = 0, 2695 .rev = 0, 2696 .funcs = &si_common_ip_funcs, 2697 }; 2698 2699 int si_set_ip_blocks(struct amdgpu_device *adev) 2700 { 2701 switch (adev->asic_type) { 2702 case CHIP_VERDE: 2703 case CHIP_TAHITI: 2704 case CHIP_PITCAIRN: 2705 amdgpu_device_ip_block_add(adev, &si_common_ip_block); 2706 amdgpu_device_ip_block_add(adev, &gmc_v6_0_ip_block); 2707 amdgpu_device_ip_block_add(adev, &si_ih_ip_block); 2708 amdgpu_device_ip_block_add(adev, &gfx_v6_0_ip_block); 2709 amdgpu_device_ip_block_add(adev, &si_dma_ip_block); 2710 amdgpu_device_ip_block_add(adev, &si_smu_ip_block); 2711 if 
(adev->enable_virtual_display) 2712 amdgpu_device_ip_block_add(adev, &amdgpu_vkms_ip_block); 2713 #if defined(CONFIG_DRM_AMD_DC) && defined(CONFIG_DRM_AMD_DC_SI) 2714 else if (amdgpu_device_has_dc_support(adev)) 2715 amdgpu_device_ip_block_add(adev, &dm_ip_block); 2716 #endif 2717 else 2718 amdgpu_device_ip_block_add(adev, &dce_v6_0_ip_block); 2719 amdgpu_device_ip_block_add(adev, &uvd_v3_1_ip_block); 2720 /* amdgpu_device_ip_block_add(adev, &vce_v1_0_ip_block); */ 2721 break; 2722 case CHIP_OLAND: 2723 amdgpu_device_ip_block_add(adev, &si_common_ip_block); 2724 amdgpu_device_ip_block_add(adev, &gmc_v6_0_ip_block); 2725 amdgpu_device_ip_block_add(adev, &si_ih_ip_block); 2726 amdgpu_device_ip_block_add(adev, &gfx_v6_0_ip_block); 2727 amdgpu_device_ip_block_add(adev, &si_dma_ip_block); 2728 amdgpu_device_ip_block_add(adev, &si_smu_ip_block); 2729 if (adev->enable_virtual_display) 2730 amdgpu_device_ip_block_add(adev, &amdgpu_vkms_ip_block); 2731 #if defined(CONFIG_DRM_AMD_DC) && defined(CONFIG_DRM_AMD_DC_SI) 2732 else if (amdgpu_device_has_dc_support(adev)) 2733 amdgpu_device_ip_block_add(adev, &dm_ip_block); 2734 #endif 2735 else 2736 amdgpu_device_ip_block_add(adev, &dce_v6_4_ip_block); 2737 amdgpu_device_ip_block_add(adev, &uvd_v3_1_ip_block); 2738 /* amdgpu_device_ip_block_add(adev, &vce_v1_0_ip_block); */ 2739 break; 2740 case CHIP_HAINAN: 2741 amdgpu_device_ip_block_add(adev, &si_common_ip_block); 2742 amdgpu_device_ip_block_add(adev, &gmc_v6_0_ip_block); 2743 amdgpu_device_ip_block_add(adev, &si_ih_ip_block); 2744 amdgpu_device_ip_block_add(adev, &gfx_v6_0_ip_block); 2745 amdgpu_device_ip_block_add(adev, &si_dma_ip_block); 2746 amdgpu_device_ip_block_add(adev, &si_smu_ip_block); 2747 if (adev->enable_virtual_display) 2748 amdgpu_device_ip_block_add(adev, &amdgpu_vkms_ip_block); 2749 break; 2750 default: 2751 BUG(); 2752 } 2753 return 0; 2754 } 2755 2756
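/*
 * Note on ordering: the IP blocks above are added in the order amdgpu will
 * initialize them - common, gmc, ih, gfx, dma and smu/dpm, followed by a
 * display block (virtual display, DC where CONFIG_DRM_AMD_DC_SI is enabled,
 * or DCE 6.x) and finally UVD.  Hainan is a headless part without a display
 * engine or UVD, so those blocks are not added for it.
 */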