/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include "drmP.h"
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_drm.h"
#include "evergreend.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"

#define EVERGREEN_PFP_UCODE_SIZE 1120
#define EVERGREEN_PM4_UCODE_SIZE 1376

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);

void evergreen_pm_misc(struct radeon_device *rdev)
{

}

void evergreen_pm_prepare(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* disable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}

void evergreen_pm_finish(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* enable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}

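/*
 * Hot-plug detect (HPD) handling.  Each of the six HPD pins has its own
 * sense, polarity and interrupt-control registers, so the helpers below
 * simply index the register block that matches the requested pin.
 */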
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;

	switch (hpd) {
	case RADEON_HPD_1:
		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_2:
		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_3:
		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_4:
		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_5:
		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_6:
		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	default:
		break;
	}

	return connected;
}

void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd)
{
	u32 tmp;
	bool connected = evergreen_hpd_sense(rdev, hpd);

	switch (hpd) {
	case RADEON_HPD_1:
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_2:
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_3:
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_4:
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_5:
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_6:
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
		break;
	default:
		break;
	}
}

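/*
 * Enable HPD on every connector that has a pin wired up.  The connection
 * and RX-interrupt timer values written here (0x9c4 and 0xfa) appear to
 * debounce the line before an interrupt is raised.
 */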
void evergreen_hpd_init(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, tmp);
			rdev->irq.hpd[0] = true;
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, tmp);
			rdev->irq.hpd[1] = true;
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, tmp);
			rdev->irq.hpd[2] = true;
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, tmp);
			rdev->irq.hpd[3] = true;
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, tmp);
			rdev->irq.hpd[4] = true;
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, tmp);
			rdev->irq.hpd[5] = true;
			break;
		default:
			break;
		}
	}
	if (rdev->irq.installed)
		evergreen_irq_set(rdev);
}

void evergreen_hpd_fini(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, 0);
			rdev->irq.hpd[0] = false;
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, 0);
			rdev->irq.hpd[1] = false;
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, 0);
			rdev->irq.hpd[2] = false;
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, 0);
			rdev->irq.hpd[3] = false;
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, 0);
			rdev->irq.hpd[4] = false;
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, 0);
			rdev->irq.hpd[5] = false;
			break;
		default:
			break;
		}
	}
}

void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	/* XXX */
}

static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read the MC busy bits from SRBM_STATUS */
		tmp = RREG32(SRBM_STATUS) & 0x1F00;
		if (!tmp)
			return 0;
		udelay(1);
	}
	return -1;
}

/*
 * GART
 */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read the flush response */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			printk(KERN_WARNING "[drm] evergreen flush TLB failed\n");
			return;
		}
		if (tmp) {
			return;
		}
		udelay(1);
	}
}

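/*
 * Bring up the page-table-backed GART aperture: enable the VM L1 TLBs and
 * the L2 cache, point VM context 0 at the GART page table in VRAM, and
 * route faulting accesses to the dummy page.
 */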
int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.table.vram.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
	       ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
	      SYSTEM_ACCESS_MODE_NOT_IN_SYS |
	      SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
	      EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
	       RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
	       (u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	rdev->gart.ready = true;
	return 0;
}

void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	if (rdev->gart.table.vram.robj) {
		r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
		if (likely(r == 0)) {
			radeon_bo_kunmap(rdev->gart.table.vram.robj);
			radeon_bo_unpin(rdev->gart.table.vram.robj);
			radeon_bo_unreserve(rdev->gart.table.vram.robj);
		}
	}
}

void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}

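/*
 * AGP path: the same TLB/L2 setup as the GART path, but with both VM
 * contexts disabled so accesses go straight to the physical AGP aperture.
 */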
void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
	       ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
	      SYSTEM_ACCESS_MODE_NOT_IN_SYS |
	      SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
	      EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}

static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	save->vga_control[0] = RREG32(D1VGA_CONTROL);
	save->vga_control[1] = RREG32(D2VGA_CONTROL);
	save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
	save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
	save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
	save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
	save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
	save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
	save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
	save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
	save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
	save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
	save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
	save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);

	/* Stop all video */
	WREG32(VGA_RENDER_CONTROL, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

	WREG32(D1VGA_CONTROL, 0);
	WREG32(D2VGA_CONTROL, 0);
	WREG32(EVERGREEN_D3VGA_CONTROL, 0);
	WREG32(EVERGREEN_D4VGA_CONTROL, 0);
	WREG32(EVERGREEN_D5VGA_CONTROL, 0);
	WREG32(EVERGREEN_D6VGA_CONTROL, 0);
}

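/*
 * Undo evergreen_mc_stop(): point scanout at the (possibly relocated) VRAM
 * base and restore the saved VGA and CRTC state.
 */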
static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	/* Unlock host access */
	WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
	mdelay(1);
	/* Restore video state */
	WREG32(D1VGA_CONTROL, save->vga_control[0]);
	WREG32(D2VGA_CONTROL, save->vga_control[1]);
	WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
	WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
	WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
	WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
}

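/*
 * Program the memory controller's view of VRAM and the system aperture.
 * The display controllers are stopped around the update so nothing scans
 * out of a framebuffer while its base address is moving.
 */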
static void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			       rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			       rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			       rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			       rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
		       rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
		       rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
	WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}

/*
 * CP.
 */

static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL, RB_NO_UPDATE | (15 << 8) | (3 << 0));

	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}

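/*
 * Start the CP ring buffer: the engine blocks are soft-reset first, the
 * ring size and write-pointer handling are programmed through CP_RB_CNTL,
 * and the ring is validated with a ring test before it is marked ready.
 */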
int evergreen_cp_resume(struct radeon_device *rdev)
{
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size */
	rb_bufsz = drm_order(rdev->cp.ring_size / 8);
	tmp = RB_NO_UPDATE | (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x4);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	WREG32(CP_RB_WPTR, 0);
	WREG32(CP_RB_RPTR_ADDR, rdev->cp.gpu_addr & 0xFFFFFFFF);
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->cp.gpu_addr));
	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, rdev->cp.gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	rdev->cp.rptr = RREG32(CP_RB_RPTR);
	rdev->cp.wptr = RREG32(CP_RB_WPTR);

	r600_cp_start(rdev);
	rdev->cp.ready = true;
	r = radeon_ring_test(rdev);
	if (r) {
		rdev->cp.ready = false;
		return r;
	}
	return 0;
}

/*
 * Core functions
 */
static u32 evergreen_get_tile_pipe_to_backend_map(struct radeon_device *rdev,
						  u32 num_tile_pipes,
						  u32 num_backends,
						  u32 backend_disable_mask)
{
	u32 backend_map = 0;
	u32 enabled_backends_mask = 0;
	u32 enabled_backends_count = 0;
	u32 cur_pipe;
	u32 swizzle_pipe[EVERGREEN_MAX_PIPES];
	u32 cur_backend = 0;
	u32 i;
	bool force_no_swizzle;

	if (num_tile_pipes > EVERGREEN_MAX_PIPES)
		num_tile_pipes = EVERGREEN_MAX_PIPES;
	if (num_tile_pipes < 1)
		num_tile_pipes = 1;
	if (num_backends > EVERGREEN_MAX_BACKENDS)
		num_backends = EVERGREEN_MAX_BACKENDS;
	if (num_backends < 1)
		num_backends = 1;

	for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
		if (((backend_disable_mask >> i) & 1) == 0) {
			enabled_backends_mask |= (1 << i);
			++enabled_backends_count;
		}
		if (enabled_backends_count == num_backends)
			break;
	}

	if (enabled_backends_count == 0) {
		enabled_backends_mask = 1;
		enabled_backends_count = 1;
	}

	if (enabled_backends_count != num_backends)
		num_backends = enabled_backends_count;

	memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * EVERGREEN_MAX_PIPES);
	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_REDWOOD:
		force_no_swizzle = false;
		break;
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	default:
		force_no_swizzle = true;
		break;
	}
	if (force_no_swizzle) {
		bool last_backend_enabled = false;

		force_no_swizzle = false;
		for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
			if (((enabled_backends_mask >> i) & 1) == 1) {
				if (last_backend_enabled)
					force_no_swizzle = true;
				last_backend_enabled = true;
			} else
				last_backend_enabled = false;
		}
	}

	switch (num_tile_pipes) {
	case 1:
	case 3:
	case 5:
	case 7:
		DRM_ERROR("odd number of pipes!\n");
		break;
	case 2:
		swizzle_pipe[0] = 0;
		swizzle_pipe[1] = 1;
		break;
	case 4:
		if (force_no_swizzle) {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 1;
			swizzle_pipe[2] = 2;
			swizzle_pipe[3] = 3;
		} else {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 2;
			swizzle_pipe[2] = 1;
			swizzle_pipe[3] = 3;
		}
		break;
	case 6:
		if (force_no_swizzle) {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 1;
			swizzle_pipe[2] = 2;
			swizzle_pipe[3] = 3;
			swizzle_pipe[4] = 4;
			swizzle_pipe[5] = 5;
		} else {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 2;
			swizzle_pipe[2] = 4;
			swizzle_pipe[3] = 1;
			swizzle_pipe[4] = 3;
			swizzle_pipe[5] = 5;
		}
		break;
	case 8:
		if (force_no_swizzle) {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 1;
			swizzle_pipe[2] = 2;
			swizzle_pipe[3] = 3;
			swizzle_pipe[4] = 4;
			swizzle_pipe[5] = 5;
			swizzle_pipe[6] = 6;
			swizzle_pipe[7] = 7;
		} else {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 2;
			swizzle_pipe[2] = 4;
			swizzle_pipe[3] = 6;
			swizzle_pipe[4] = 1;
			swizzle_pipe[5] = 3;
			swizzle_pipe[6] = 5;
			swizzle_pipe[7] = 7;
		}
		break;
	}

	for (cur_pipe = 0; cur_pipe < num_tile_pipes; ++cur_pipe) {
		while (((1 << cur_backend) & enabled_backends_mask) == 0)
			cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;

		backend_map |= (((cur_backend & 0xf) << (swizzle_pipe[cur_pipe] * 4)));

		cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
	}

	return backend_map;
}

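/*
 * Per-family configuration.  The limits set below (pipes, SIMDs, render
 * backends, GPRs, threads, FIFO sizes) mirror the shipping configuration
 * of each Evergreen variant.
 */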
static void evergreen_gpu_init(struct radeon_device *rdev)
{
	u32 cc_rb_backend_disable = 0;
	u32 cc_gc_shader_pipe_config;
	u32 gb_addr_config = 0;
	u32 mc_shared_chmap, mc_arb_ramcfg;
	u32 gb_backend_map;
	u32 grbm_gfx_index;
	u32 sx_debug_1;
	u32 smx_dc_ctl0;
	u32 sq_config;
	u32 sq_lds_resource_mgmt;
	u32 sq_gpr_resource_mgmt_1;
	u32 sq_gpr_resource_mgmt_2;
	u32 sq_gpr_resource_mgmt_3;
	u32 sq_thread_resource_mgmt;
	u32 sq_thread_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_1;
	u32 sq_stack_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_3;
	u32 vgt_cache_invalidation;
	u32 hdp_host_path_cntl;
	int i, j, num_shader_engines, ps_thread_count;

	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		rdev->config.evergreen.num_ses = 2;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 8;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		break;
	case CHIP_JUNIPER:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		break;
	case CHIP_REDWOOD:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		break;
	case CHIP_CEDAR:
	default:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		break;
	}

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}

	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));

	cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & ~2;

	cc_gc_shader_pipe_config |=
		INACTIVE_QD_PIPES((EVERGREEN_MAX_PIPES_MASK << rdev->config.evergreen.max_pipes)
				  & EVERGREEN_MAX_PIPES_MASK);
	cc_gc_shader_pipe_config |=
		INACTIVE_SIMDS((EVERGREEN_MAX_SIMDS_MASK << rdev->config.evergreen.max_simds)
			       & EVERGREEN_MAX_SIMDS_MASK);

	cc_rb_backend_disable =
		BACKEND_DISABLE((EVERGREEN_MAX_BACKENDS_MASK << rdev->config.evergreen.max_backends)
				& EVERGREEN_MAX_BACKENDS_MASK);

	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
	mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);

	switch (rdev->config.evergreen.max_tile_pipes) {
	case 1:
	default:
		gb_addr_config |= NUM_PIPES(0);
		break;
	case 2:
		gb_addr_config |= NUM_PIPES(1);
		break;
	case 4:
		gb_addr_config |= NUM_PIPES(2);
		break;
	case 8:
		gb_addr_config |= NUM_PIPES(3);
		break;
	}

	gb_addr_config |= PIPE_INTERLEAVE_SIZE((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
	gb_addr_config |= BANK_INTERLEAVE_SIZE(0);
	gb_addr_config |= NUM_SHADER_ENGINES(rdev->config.evergreen.num_ses - 1);
	gb_addr_config |= SHADER_ENGINE_TILE_SIZE(1);
	gb_addr_config |= NUM_GPUS(0); /* Hemlock? */
	gb_addr_config |= MULTI_GPU_TILE_SIZE(2);

	if (((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT) > 2)
		gb_addr_config |= ROW_SIZE(2);
	else
		gb_addr_config |= ROW_SIZE((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT);

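	/*
	 * On some boards the render-backend map is factory-fused; read the
	 * strap bits through the RCU indirect interface and translate them,
	 * falling back to the computed map if the fuse value is unknown.
	 */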
	if (rdev->ddev->pdev->device == 0x689e) {
		u32 efuse_straps_4;
		u32 efuse_straps_3;
		u8 efuse_box_bit_131_124;

		WREG32(RCU_IND_INDEX, 0x204);
		efuse_straps_4 = RREG32(RCU_IND_DATA);
		WREG32(RCU_IND_INDEX, 0x203);
		efuse_straps_3 = RREG32(RCU_IND_DATA);
		efuse_box_bit_131_124 = (u8)(((efuse_straps_4 & 0xf) << 4) |
					     ((efuse_straps_3 & 0xf0000000) >> 28));

		switch (efuse_box_bit_131_124) {
		case 0x00:
			gb_backend_map = 0x76543210;
			break;
		case 0x55:
			gb_backend_map = 0x77553311;
			break;
		case 0x56:
			gb_backend_map = 0x77553300;
			break;
		case 0x59:
			gb_backend_map = 0x77552211;
			break;
		case 0x66:
			gb_backend_map = 0x77443300;
			break;
		case 0x99:
			gb_backend_map = 0x66552211;
			break;
		case 0x5a:
			gb_backend_map = 0x77552200;
			break;
		case 0xaa:
			gb_backend_map = 0x66442200;
			break;
		case 0x95:
			gb_backend_map = 0x66553311;
			break;
		default:
			DRM_ERROR("bad backend map, using default\n");
			gb_backend_map =
				evergreen_get_tile_pipe_to_backend_map(rdev,
								       rdev->config.evergreen.max_tile_pipes,
								       rdev->config.evergreen.max_backends,
								       ((EVERGREEN_MAX_BACKENDS_MASK <<
									 rdev->config.evergreen.max_backends) &
									EVERGREEN_MAX_BACKENDS_MASK));
			break;
		}
	} else if (rdev->ddev->pdev->device == 0x68b9) {
		u32 efuse_straps_3;
		u8 efuse_box_bit_127_124;

		WREG32(RCU_IND_INDEX, 0x203);
		efuse_straps_3 = RREG32(RCU_IND_DATA);
		efuse_box_bit_127_124 = (u8)((efuse_straps_3 & 0xF0000000) >> 28);

		switch (efuse_box_bit_127_124) {
		case 0x0:
			gb_backend_map = 0x00003210;
			break;
		case 0x5:
		case 0x6:
		case 0x9:
		case 0xa:
			gb_backend_map = 0x00003311;
			break;
		default:
			DRM_ERROR("bad backend map, using default\n");
			gb_backend_map =
				evergreen_get_tile_pipe_to_backend_map(rdev,
								       rdev->config.evergreen.max_tile_pipes,
								       rdev->config.evergreen.max_backends,
								       ((EVERGREEN_MAX_BACKENDS_MASK <<
									 rdev->config.evergreen.max_backends) &
									EVERGREEN_MAX_BACKENDS_MASK));
			break;
		}
	} else
		gb_backend_map =
			evergreen_get_tile_pipe_to_backend_map(rdev,
							       rdev->config.evergreen.max_tile_pipes,
							       rdev->config.evergreen.max_backends,
							       ((EVERGREEN_MAX_BACKENDS_MASK <<
								 rdev->config.evergreen.max_backends) &
								EVERGREEN_MAX_BACKENDS_MASK));

	WREG32(GB_BACKEND_MAP, gb_backend_map);
	WREG32(GB_ADDR_CONFIG, gb_addr_config);
	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
	WREG32(HDP_ADDR_CONFIG, gb_addr_config);

	num_shader_engines = ((RREG32(GB_ADDR_CONFIG) & NUM_SHADER_ENGINES(3)) >> 12) + 1;
	grbm_gfx_index = INSTANCE_BROADCAST_WRITES;

	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
		u32 rb = cc_rb_backend_disable | (0xf0 << 16);
		u32 sp = cc_gc_shader_pipe_config;
		u32 gfx = grbm_gfx_index | SE_INDEX(i);

		if (i == num_shader_engines) {
			rb |= BACKEND_DISABLE(EVERGREEN_MAX_BACKENDS_MASK);
			sp |= INACTIVE_SIMDS(EVERGREEN_MAX_SIMDS_MASK);
		}

		WREG32(GRBM_GFX_INDEX, gfx);
		WREG32(RLC_GFX_INDEX, gfx);

		WREG32(CC_RB_BACKEND_DISABLE, rb);
		WREG32(CC_SYS_RB_BACKEND_DISABLE, rb);
		WREG32(GC_USER_RB_BACKEND_DISABLE, rb);
		WREG32(CC_GC_SHADER_PIPE_CONFIG, sp);
	}

	grbm_gfx_index |= SE_BROADCAST_WRITES;
	WREG32(GRBM_GFX_INDEX, grbm_gfx_index);
	WREG32(RLC_GFX_INDEX, grbm_gfx_index);

	WREG32(CGTS_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_TCC_DISABLE, 0);

	/* set HW defaults for 3D engine */
	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
				     ROQ_IB2_START(0x2b)));

	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));

	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
			     SYNC_GRADIENT |
			     SYNC_WALKER |
			     SYNC_ALIGNER));

	sx_debug_1 = RREG32(SX_DEBUG_1);
	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
	WREG32(SX_DEBUG_1, sx_debug_1);

	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
	WREG32(SMX_DC_CTL0, smx_dc_ctl0);

	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));

	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));

	WREG32(VGT_NUM_INSTANCES, 1);
	WREG32(SPI_CONFIG_CNTL, 0);
	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
	WREG32(CP_PERFMON_CNTL, 0);

	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
				  FETCH_FIFO_HIWATER(0x4) |
				  DONE_FIFO_HIWATER(0xe0) |
				  ALU_UPDATE_FIFO_HIWATER(0x8)));

	sq_config = RREG32(SQ_CONFIG);
	sq_config &= ~(PS_PRIO(3) |
		       VS_PRIO(3) |
		       GS_PRIO(3) |
		       ES_PRIO(3));
	sq_config |= (VC_ENABLE |
		      EXPORT_SRC_C |
		      PS_PRIO(0) |
		      VS_PRIO(1) |
		      GS_PRIO(2) |
		      ES_PRIO(3));

	if (rdev->family == CHIP_CEDAR)
		/* no vertex cache */
		sq_config &= ~VC_ENABLE;

	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);

	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 12 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);

	if (rdev->family == CHIP_CEDAR)
		ps_thread_count = 96;
	else
		ps_thread_count = 128;

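	/*
	 * Split the hardware threads left over after the PS allocation
	 * evenly across the five remaining shader stages, rounded down
	 * to a multiple of eight.
	 */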
	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);

	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);

	WREG32(SQ_CONFIG, sq_config);
	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);

	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
					  FORCE_EOV_MAX_REZ_CNT(255)));

	if (rdev->family == CHIP_CEDAR)
		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
	else
		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);

	WREG32(VGT_GS_VERTEX_REUSE, 16);
	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);

	WREG32(CB_PERF_CTR0_SEL_0, 0);
	WREG32(CB_PERF_CTR0_SEL_1, 0);
	WREG32(CB_PERF_CTR1_SEL_0, 0);
	WREG32(CB_PERF_CTR1_SEL_1, 0);
	WREG32(CB_PERF_CTR2_SEL_0, 0);
	WREG32(CB_PERF_CTR2_SEL_1, 0);
	WREG32(CB_PERF_CTR3_SEL_0, 0);
	WREG32(CB_PERF_CTR3_SEL_1, 0);

	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);

	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));

	udelay(50);
}

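/*
 * Read back the VRAM configuration (channel size and count give the bus
 * width; CONFIG_MEMSIZE is in MB) and place VRAM and GTT in the GPU's
 * address space.
 */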
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM information */
	rdev->mc.vram_is_ddr = true;
	tmp = RREG32(MC_ARB_RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
	rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
	/* Setup GPU memory space */
	/* size in MB on evergreen */
	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r600_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}

bool evergreen_gpu_is_lockup(struct radeon_device *rdev)
{
	/* FIXME: implement for evergreen */
	return false;
}

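/*
 * Soft-reset the GFX (GRBM) and system (SRBM) blocks.  Display state is
 * saved and restored around the reset, and the ASIC is reinitialized via
 * the AtomBIOS tables afterwards.
 */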
static int evergreen_gpu_soft_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 srbm_reset = 0;
	u32 grbm_reset = 0;

	dev_info(rdev->dev, "GPU softreset\n");
	dev_info(rdev->dev, "  GRBM_STATUS=0x%08X\n",
		 RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0=0x%08X\n",
		 RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
		 RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
		 RREG32(SRBM_STATUS));
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	/* reset all the gfx blocks */
	grbm_reset = (SOFT_RESET_CP |
		      SOFT_RESET_CB |
		      SOFT_RESET_DB |
		      SOFT_RESET_PA |
		      SOFT_RESET_SC |
		      SOFT_RESET_SPI |
		      SOFT_RESET_SH |
		      SOFT_RESET_SX |
		      SOFT_RESET_TC |
		      SOFT_RESET_TA |
		      SOFT_RESET_VC |
		      SOFT_RESET_VGT);

	dev_info(rdev->dev, "  GRBM_SOFT_RESET=0x%08X\n", grbm_reset);
	WREG32(GRBM_SOFT_RESET, grbm_reset);
	(void)RREG32(GRBM_SOFT_RESET);
	udelay(50);
	WREG32(GRBM_SOFT_RESET, 0);
	(void)RREG32(GRBM_SOFT_RESET);

	/* reset all the system blocks */
	srbm_reset = SRBM_SOFT_RESET_ALL_MASK;

	dev_info(rdev->dev, "  SRBM_SOFT_RESET=0x%08X\n", srbm_reset);
	WREG32(SRBM_SOFT_RESET, srbm_reset);
	(void)RREG32(SRBM_SOFT_RESET);
	udelay(50);
	WREG32(SRBM_SOFT_RESET, 0);
	(void)RREG32(SRBM_SOFT_RESET);
	/* Wait a little for things to settle down */
	udelay(50);
	dev_info(rdev->dev, "  GRBM_STATUS=0x%08X\n",
		 RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0=0x%08X\n",
		 RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
		 RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
		 RREG32(SRBM_STATUS));
	/* After reset we need to reinit the asic as GPU often ends up in an
	 * incoherent state.
	 */
	atom_asic_init(rdev->mode_info.atom_context);
	evergreen_mc_resume(rdev, &save);
	return 0;
}

int evergreen_asic_reset(struct radeon_device *rdev)
{
	return evergreen_gpu_soft_reset(rdev);
}

/* Interrupts */

u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
{
	switch (crtc) {
	case 0:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC0_REGISTER_OFFSET);
	case 1:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC1_REGISTER_OFFSET);
	case 2:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC2_REGISTER_OFFSET);
	case 3:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC3_REGISTER_OFFSET);
	case 4:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC4_REGISTER_OFFSET);
	case 5:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC5_REGISTER_OFFSET);
	default:
		return 0;
	}
}

void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	WREG32(CP_INT_CNTL, 0);
	WREG32(GRBM_INT_CNTL, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

	WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);
}

int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed.\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;

	if (rdev->irq.sw_int) {
		DRM_DEBUG("evergreen_irq_set: sw int\n");
		cp_int_cntl |= RB_INT_ENABLE;
	}
	if (rdev->irq.crtc_vblank_int[0]) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1]) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2]) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3]) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4]) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5]) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.gui_idle) {
		DRM_DEBUG("gui idle\n");
		grbm_int_cntl |= GUI_IDLE_INT_ENABLE;
	}

	WREG32(CP_INT_CNTL, cp_int_cntl);
	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
	WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
	WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);

	return 0;
}

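/*
 * Acknowledge pending display interrupts: snapshot the six
 * DISP_INTERRUPT_STATUS registers for the caller and write back the
 * individual ack bits for anything that fired.
 */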
static inline void evergreen_irq_ack(struct radeon_device *rdev,
				     u32 *disp_int,
				     u32 *disp_int_cont,
				     u32 *disp_int_cont2,
				     u32 *disp_int_cont3,
				     u32 *disp_int_cont4,
				     u32 *disp_int_cont5)
{
	u32 tmp;

	*disp_int = RREG32(DISP_INTERRUPT_STATUS);
	*disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
	*disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
	*disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
	*disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
	*disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);

	if (*disp_int & LB_D1_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
	if (*disp_int & LB_D1_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);

	if (*disp_int_cont & LB_D2_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
	if (*disp_int_cont & LB_D2_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);

	if (*disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
	if (*disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);

	if (*disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
	if (*disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);

	if (*disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
	if (*disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);

	if (*disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
	if (*disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);

	if (*disp_int & DC_HPD1_INTERRUPT) {
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
	}
	if (*disp_int_cont & DC_HPD2_INTERRUPT) {
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
	}
	if (*disp_int_cont2 & DC_HPD3_INTERRUPT) {
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
	}
	if (*disp_int_cont3 & DC_HPD4_INTERRUPT) {
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
	}
	if (*disp_int_cont4 & DC_HPD5_INTERRUPT) {
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
	}
	if (*disp_int_cont5 & DC_HPD6_INTERRUPT) {
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
	}
}

void evergreen_irq_disable(struct radeon_device *rdev)
{
	u32 disp_int, disp_int_cont, disp_int_cont2;
	u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;

	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
			  &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
	evergreen_disable_interrupt_state(rdev);
}

static void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}

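/*
 * The IH ring is written by the GPU; the write pointer tells us how far
 * it has advanced.  On overflow, resume from just past the last vector
 * that was not overwritten.
 */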

static inline u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
{
	u32 wptr, tmp;

	/* XXX use writeback */
	wptr = RREG32(IH_RB_WPTR);

	if (wptr & RB_OVERFLOW) {
		/* When a ring buffer overflow happens, start parsing the
		 * interrupts from the last vector that has not been
		 * overwritten (wptr + 16). Hopefully this should allow us
		 * to catch up.
		 */
		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
		tmp = RREG32(IH_RB_CNTL);
		tmp |= IH_WPTR_OVERFLOW_CLEAR;
		WREG32(IH_RB_CNTL, tmp);
	}
	return (wptr & rdev->ih.ptr_mask);
}
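
/* Worked example for the overflow handling above (illustrative,
 * assuming a 64KB IH ring, i.e. rdev->ih.ptr_mask == 0xffff): if
 * RB_OVERFLOW is set and the masked write offset is 0x4010, the oldest
 * vector assumed not yet overwritten is (0x4010 + 16) & 0xffff ==
 * 0x4020, which becomes the new rptr. The wrap case works the same way:
 * an offset of 0xfff0 yields (0xfff0 + 16) & 0xffff == 0x0000.
 */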

int evergreen_irq_process(struct radeon_device *rdev)
{
	u32 wptr = evergreen_get_ih_wptr(rdev);
	u32 rptr = rdev->ih.rptr;
	u32 src_id, src_data;
	u32 ring_index;
	u32 disp_int, disp_int_cont, disp_int_cont2;
	u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
	unsigned long flags;
	bool queue_hotplug = false;

	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
	if (!rdev->ih.enabled)
		return IRQ_NONE;

	spin_lock_irqsave(&rdev->ih.lock, flags);

	if (rptr == wptr) {
		spin_unlock_irqrestore(&rdev->ih.lock, flags);
		return IRQ_NONE;
	}
	if (rdev->shutdown) {
		spin_unlock_irqrestore(&rdev->ih.lock, flags);
		return IRQ_NONE;
	}

restart_ih:
	/* display interrupts */
	evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
			  &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);

	rdev->ih.wptr = wptr;
	while (rptr != wptr) {
		/* wptr/rptr are in bytes! */
		ring_index = rptr / 4;
		src_id = rdev->ih.ring[ring_index] & 0xff;
		src_data = rdev->ih.ring[ring_index + 1] & 0xfffffff;

		switch (src_id) {
		case 1: /* D1 vblank/vline */
			switch (src_data) {
			case 0: /* D1 vblank */
				if (disp_int & LB_D1_VBLANK_INTERRUPT) {
					drm_handle_vblank(rdev->ddev, 0);
					wake_up(&rdev->irq.vblank_queue);
					disp_int &= ~LB_D1_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D1 vblank\n");
				}
				break;
			case 1: /* D1 vline */
				if (disp_int & LB_D1_VLINE_INTERRUPT) {
					disp_int &= ~LB_D1_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D1 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 2: /* D2 vblank/vline */
			switch (src_data) {
			case 0: /* D2 vblank */
				if (disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
					drm_handle_vblank(rdev->ddev, 1);
					wake_up(&rdev->irq.vblank_queue);
					disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D2 vblank\n");
				}
				break;
			case 1: /* D2 vline */
				if (disp_int_cont & LB_D2_VLINE_INTERRUPT) {
					disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D2 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 3: /* D3 vblank/vline */
			switch (src_data) {
			case 0: /* D3 vblank */
				if (disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
					drm_handle_vblank(rdev->ddev, 2);
					wake_up(&rdev->irq.vblank_queue);
					disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D3 vblank\n");
				}
				break;
			case 1: /* D3 vline */
				if (disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
					disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D3 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 4: /* D4 vblank/vline */
			switch (src_data) {
			case 0: /* D4 vblank */
				if (disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
					drm_handle_vblank(rdev->ddev, 3);
					wake_up(&rdev->irq.vblank_queue);
					disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D4 vblank\n");
				}
				break;
			case 1: /* D4 vline */
				if (disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
					disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D4 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 5: /* D5 vblank/vline */
			switch (src_data) {
			case 0: /* D5 vblank */
				if (disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
					drm_handle_vblank(rdev->ddev, 4);
					wake_up(&rdev->irq.vblank_queue);
					disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D5 vblank\n");
				}
				break;
			case 1: /* D5 vline */
				if (disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
					disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D5 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 6: /* D6 vblank/vline */
			switch (src_data) {
			case 0: /* D6 vblank */
				if (disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
					drm_handle_vblank(rdev->ddev, 5);
					wake_up(&rdev->irq.vblank_queue);
					disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D6 vblank\n");
				}
				break;
			case 1: /* D6 vline */
				if (disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
					disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D6 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 42: /* HPD hotplug */
			switch (src_data) {
			case 0:
				if (disp_int & DC_HPD1_INTERRUPT) {
					disp_int &= ~DC_HPD1_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD1\n");
				}
				break;
			case 1:
				if (disp_int_cont & DC_HPD2_INTERRUPT) {
					disp_int_cont &= ~DC_HPD2_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD2\n");
				}
				break;
			case 2:
				if (disp_int_cont2 & DC_HPD3_INTERRUPT) {
					disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD3\n");
				}
				break;
			case 3:
				if (disp_int_cont3 & DC_HPD4_INTERRUPT) {
					disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD4\n");
				}
				break;
			case 4:
				if (disp_int_cont4 & DC_HPD5_INTERRUPT) {
					disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD5\n");
				}
				break;
			case 5:
				if (disp_int_cont5 & DC_HPD6_INTERRUPT) {
					disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD6\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 176: /* CP_INT in ring buffer */
		case 177: /* CP_INT in IB1 */
		case 178: /* CP_INT in IB2 */
			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
			radeon_fence_process(rdev);
			break;
		case 181: /* CP EOP event */
			DRM_DEBUG("IH: CP EOP\n");
			break;
		case 233: /* GUI IDLE */
			DRM_DEBUG("IH: GUI idle\n");
			rdev->pm.gui_idle = true;
			wake_up(&rdev->irq.idle_queue);
			break;
		default:
			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
			break;
		}

		/* wptr/rptr are in bytes! */
		rptr += 16;
		rptr &= rdev->ih.ptr_mask;
	}
	/* make sure wptr hasn't changed while processing */
	wptr = evergreen_get_ih_wptr(rdev);
	if (wptr != rdev->ih.wptr)
		goto restart_ih;
	if (queue_hotplug)
		queue_work(rdev->wq, &rdev->hotplug_work);
	rdev->ih.rptr = rptr;
	WREG32(IH_RB_RPTR, rdev->ih.rptr);
	spin_unlock_irqrestore(&rdev->ih.lock, flags);
	return IRQ_HANDLED;
}
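
/* Layout of one IH ring entry, as decoded by evergreen_irq_process():
 * each vector is 16 bytes, i.e. four 32-bit dwords. Dword 0 carries the
 * source id in its low 8 bits and dword 1 carries the source data in
 * its low 28 bits; dwords 2 and 3 are not consumed by this handler.
 * Hence ring_index = rptr / 4 and rptr advances by 16 per vector.
 */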

static int evergreen_startup(struct radeon_device *rdev)
{
	int r;

	if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
		r = r600_init_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load firmware!\n");
			return r;
		}
	}

	evergreen_mc_program(rdev);
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);
#if 0
	if (!rdev->r600_blit.shader_obj) {
		r = r600_blit_init(rdev);
		if (r) {
			DRM_ERROR("radeon: failed blitter (%d).\n", r);
			return r;
		}
	}

	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
			  &rdev->r600_blit.shader_gpu_addr);
	radeon_bo_unreserve(rdev->r600_blit.shader_obj);
	if (r) {
		DRM_ERROR("failed to pin blit object %d\n", r);
		return r;
	}
#endif

	/* Enable IRQ */
	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	r = radeon_ring_init(rdev, rdev->cp.ring_size);
	if (r)
		return r;
	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	/* the write back buffer is not vital, so don't worry about failure */
	r600_wb_enable(rdev);

	return 0;
}
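
/* The bring-up order in evergreen_startup() matters: the memory
 * controller is programmed first, then the GART (or AGP) aperture, then
 * the core GPU state; only after that are interrupts brought up and the
 * CP ring started, since the CP fetches its microcode and ring contents
 * through the freshly configured memory paths. This reading is inferred
 * from the call sequence itself, not from hardware documentation.
 */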

int evergreen_resume(struct radeon_device *rdev)
{
	int r;

	/* Do not reset the GPU before posting; on rv770 hw, unlike r500 hw,
	 * posting will perform the necessary tasks to bring the GPU back
	 * into a good shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);
	/* Initialize clocks */
	r = radeon_clocks_init(rdev);
	if (r)
		return r;

	r = evergreen_startup(rdev);
	if (r) {
		DRM_ERROR("evergreen startup failed on resume\n");
		return r;
	}

	r = r600_ib_test(rdev);
	if (r) {
		DRM_ERROR("radeon: failed testing IB (%d).\n", r);
		return r;
	}

	return r;
}

int evergreen_suspend(struct radeon_device *rdev)
{
#if 0
	int r;
#endif
	/* FIXME: we should wait for the ring to be empty */
	r700_cp_stop(rdev);
	rdev->cp.ready = false;
	evergreen_irq_suspend(rdev);
	r600_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);
#if 0
	/* unpin shaders bo */
	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
	if (likely(r == 0)) {
		radeon_bo_unpin(rdev->r600_blit.shader_obj);
		radeon_bo_unreserve(rdev->r600_blit.shader_obj);
	}
#endif
	return 0;
}
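
/* Suspend and resume are intentionally symmetric: evergreen_suspend()
 * stops the CP, quiesces and disables interrupts, turns off writeback
 * and tears down the GART, while evergreen_resume() re-posts the card
 * through the AtomBIOS init table (atom_asic_init) and then replays the
 * full startup path before re-testing IB submission.
 */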

static bool evergreen_card_posted(struct radeon_device *rdev)
{
	u32 reg;

	/* first check CRTCs */
	reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
	if (reg & EVERGREEN_CRTC_MASTER_EN)
		return true;

	/* then check MEM_SIZE, in case the crtcs are off */
	if (RREG32(CONFIG_MEMSIZE))
		return true;

	return false;
}

/* The plan is to move initialization into this function and to use helper
 * functions so that radeon_device_init does pretty much nothing more than
 * call ASIC-specific functions. This should also allow us to remove a
 * bunch of callback functions like vram_info.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	r = radeon_dummy_page_init(rdev);
	if (r)
		return r;
	/* This doesn't do much */
	r = radeon_gem_init(rdev);
	if (r)
		return r;
	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* Post card if necessary */
	if (!evergreen_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	r = radeon_clocks_init(rdev);
	if (r)
		return r;
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	r = radeon_irq_kms_init(rdev);
	if (r)
		return r;

	rdev->cp.ring_obj = NULL;
	r600_ring_init(rdev, 1024 * 1024);

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = false;
	r = evergreen_startup(rdev);
	if (r) {
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_wb_fini(rdev);
		r600_irq_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}
	if (rdev->accel_working) {
		r = radeon_ib_pool_init(rdev);
		if (r) {
			DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
			rdev->accel_working = false;
		}
		r = r600_ib_test(rdev);
		if (r) {
			DRM_ERROR("radeon: failed testing IB (%d).\n", r);
			rdev->accel_working = false;
		}
	}
	return 0;
}

void evergreen_fini(struct radeon_device *rdev)
{
	/*r600_blit_fini(rdev);*/
	r700_cp_fini(rdev);
	r600_wb_fini(rdev);
	r600_irq_fini(rdev);
	radeon_irq_kms_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_clocks_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
	radeon_dummy_page_fini(rdev);
}